Compare commits: charlie/wa...v0.0.84
240 commits
(Commit list: 240 commits, spanning a9bcc15797 through 732f208e47; the author, date, and message columns were not captured in this export.)
.editorconfig (new file, +14)
@@ -0,0 +1,14 @@
# Check http://editorconfig.org for more information
# This is the main config file for this project:
root = true

[*]
charset = utf-8
trim_trailing_whitespace = true
end_of_line = lf
indent_style = space
insert_final_newline = true
indent_size = 2

[*.{rs,py}]
indent_size = 4
@@ -3,3 +3,8 @@ repos:
  rev: v0.0.40
  hooks:
  - id: lint

- repo: https://github.com/abravalheri/validate-pyproject
  rev: v0.10.1
  hooks:
  - id: validate-pyproject
CODE_OF_CONDUCT.md (new file, +128)
@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
charlie.r.marsh@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
CONTRIBUTING.md (new file, +111)
@@ -0,0 +1,111 @@
# Contributing to ruff

Welcome! We're happy to have you here. Thank you in advance for your contribution to ruff.

## The basics

ruff welcomes contributions in the form of Pull Requests. For small changes (e.g., bug fixes), feel
free to submit a PR. For larger changes (e.g., new lint rules, new functionality, new configuration
options), consider submitting an [Issue](https://github.com/charliermarsh/ruff/issues) outlining
your proposed change.

### Prerequisites

ruff is written in Rust (1.63.0). You'll need to install the
[Rust toolchain](https://www.rust-lang.org/tools/install) for development.

You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:

```shell
cargo install cargo-insta
```

### Development

After cloning the repository, run ruff locally with:

```shell
cargo run resources/test/fixtures --no-cache
```

Prior to opening a pull request, ensure that your code has been auto-formatted, and that it passes
both the lint and test validation checks:

```shell
cargo fmt  # Auto-formatting...
cargo clippy  # Linting...
cargo test  # Testing...
```

These checks will run on GitHub Actions when you open your Pull Request, but running them locally
will save you time and expedite the merge process.

Your Pull Request will be reviewed by a maintainer, which may involve a few rounds of iteration
prior to merging.

### Example: Adding a new lint rule

There are three phases to adding a new lint rule:

1. Define the rule in `src/checks.rs`.
2. Define the _logic_ for triggering the rule in `src/check_ast.rs` (for AST-based checks)
   or `src/check_lines.rs` (for text-based checks).
3. Add a test fixture.

To define the rule, open up `src/checks.rs`. You'll need to define both a `CheckCode` and
`CheckKind`. As an example, you can grep for `E402` and `ModuleImportNotAtTopOfFile`, and follow the
pattern implemented therein.

To trigger the rule, you'll likely want to augment the logic in `src/check_ast.rs`, which defines
the Python AST visitor, responsible for iterating over the abstract syntax tree and collecting
lint-rule violations as it goes. Grep for the `Check::new` invocations to understand how other,
similar rules are implemented.

To add a test fixture, create a file under `resources/test/fixtures`, named to match the `CheckCode`
you defined earlier (e.g., `E402.py`). This file should contain a variety of violations and
non-violations designed to evaluate and demonstrate the behavior of your lint rule. Run ruff locally
with (e.g.) `cargo run resources/test/fixtures/E402.py`. Once you're satisfied with the output,
codify the behavior as a snapshot test by adding a new function to the `mod tests` section of
`src/linter.rs`, like so:

```rust
#[test]
fn e402() -> Result<()> {
    let mut checks = check_path(
        Path::new("./resources/test/fixtures/E402.py"),
        &settings::Settings::for_rule(CheckCode::E402),
        &fixer::Mode::Generate,
    )?;
    checks.sort_by_key(|check| check.location);
    insta::assert_yaml_snapshot!(checks);
    Ok(())
}
```

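For reference, a fixture file for a rule like E402 might mix violations and non-violations along these lines (a hypothetical sketch, not the actual `E402.py` in the repository):

```python
"""Module docstring: allowed before imports."""
import os  # OK: import at the top of the file.

x = os.getcwd()  # A statement before the next import...

import sys  # E402: module level import not at top of file.

_ = sys.path
```
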
Then, run `cargo test`. Your test will fail, but you'll be prompted to follow up with
`cargo insta review`. Accept the generated snapshot, then commit the snapshot file alongside the
rest of your changes.

### Example: Adding a new configuration option

ruff's user-facing settings live in two places: first, the command-line options defined with
[clap](https://docs.rs/clap/latest/clap/) via the `Cli` struct in `src/main.rs`; and second, the
`Config` struct defined in `src/pyproject.rs`, which is responsible for extracting user-defined
settings from a `pyproject.toml` file.

Ultimately, these two sources of configuration are merged into the `Settings` struct defined
in `src/settings.rs`, which is then threaded through the codebase.

To add a new configuration option, you'll likely want to _both_ add a CLI option to `src/main.rs`
_and_ a `pyproject.toml` parameter to `src/pyproject.rs`. If you want to pattern-match against an
existing example, grep for `dummy_variable_rgx`, which defines a regular expression to match against
acceptable unused variables (e.g., `_`).

## Release process

As of now, ruff has an ad hoc release process: releases are cut with high frequency via GitHub
Actions, which automatically generates the appropriate wheels across architectures and publishes
them to [PyPI](https://pypi.org/project/ruff/).

ruff follows the [semver](https://semver.org/) versioning standard. However, as pre-1.0 software,
even patch releases may contain [non-backwards-compatible changes](https://semver.org/#spec-item-4).
Cargo.lock (generated, 765 changed lines): file diff suppressed because it is too large.
Cargo.toml (31 changed lines)
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.0.44"
version = "0.0.84"
edition = "2021"

[lib]
@@ -9,32 +9,49 @@ name = "ruff"
[dependencies]
anyhow = { version = "1.0.60" }
bincode = { version = "1.3.3" }
cacache = { version = "10.0.1" }
chrono = { version = "0.4.21" }
clap = { version = "3.2.16", features = ["derive"] }
clearscreen = { version = "1.0.10" }
clap = { version = "4.0.1", features = ["derive"] }
colored = { version = "2.0.0" }
common-path = { version = "1.0.0" }
dirs = { version = "4.0.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.17" }
glob = { version = "0.3.0" }
itertools = { version = "0.10.3" }
itertools = { version = "0.10.5" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "32a044c127668df44582f85699358e67803b0d73" }
log = { version = "0.4.17" }
notify = { version = "4.0.17" }
num-bigint = { version = "0.4.3" }
once_cell = { version = "1.13.1" }
path-absolutize = { version = "3.0.13", features = ["once_cell_cache"] }
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }
rayon = { version = "1.5.3" }
regex = { version = "1.6.0" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/charliermarsh/RustPython.git", rev = "7d21c6923a506e79cc041708d83cef925efd33f4" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/charliermarsh/RustPython.git", rev = "1b253a12705f84972cd76e8dc1cdaaccb233e5a5" }
rustpython-common = { git = "https://github.com/charliermarsh/RustPython.git", rev = "1b253a12705f84972cd76e8dc1cdaaccb233e5a5" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/charliermarsh/RustPython.git", rev = "1b253a12705f84972cd76e8dc1cdaaccb233e5a5" }
serde = { version = "1.0.143", features = ["derive"] }
serde_json = { version = "1.0.83" }
strum = { version = "0.24.1", features = ["strum_macros"] }
strum_macros = { version = "0.24.3" }
textwrap = { version = "0.15.1" }
titlecase = { version = "2.2.1" }
toml = { version = "0.5.9" }
update-informer = { version = "0.5.0", default_features = false, features = ["pypi"], optional = true }
walkdir = { version = "2.3.2" }

[target.'cfg(not(target_family = "wasm"))'.dependencies]
cacache = { version = "10.0.1" } # uses async-std
clearscreen = { version = "1.0.10" } # uses which

# https://docs.rs/getrandom/0.2.7/getrandom/#webassembly-support
# For (future) wasm-pack support
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies]
getrandom = { version = "0.2.7", features = ["js"] }

[dev-dependencies]
assert_cmd = { version = "2.0.4" }
insta = { version = "1.19.1", features = ["yaml"] }
test-case = { version = "2.2.2" }

[features]
default = ["update-informer"]

README.md (637 changed lines)
@@ -1,4 +1,4 @@
# ruff
# Ruff

[![image](https://img.shields.io/pypi/v/ruff.svg)](https://pypi.python.org/pypi/ruff)
[![image](https://img.shields.io/pypi/l/ruff.svg)](https://pypi.python.org/pypi/ruff)
@@ -20,17 +20,37 @@ An extremely fast Python linter, written in Rust.
- 🤝 Python 3.10 compatibility
- 🛠️ `pyproject.toml` support
- 📦 [ESLint](https://eslint.org/docs/latest/user-guide/command-line-interface#caching)-inspired cache support
- 🔧 [ESLint](https://eslint.org/docs/latest/user-guide/command-line-interface#caching)-inspired `--fix` support
- 👀 [TypeScript](https://www.typescriptlang.org/docs/handbook/configuring-watch.html)-inspired `--watch` support
- ⚖️ [Near-complete parity](#Parity-with-Flake8) with the built-in Flake8 rule set
- 🔧 [ESLint](https://eslint.org/docs/latest/user-guide/command-line-interface#--fix)-inspired autofix support (e.g., automatically remove unused imports)
- 👀 [TypeScript](https://www.typescriptlang.org/docs/handbook/configuring-watch.html)-inspired `--watch` support, for continuous file monitoring
- ⚖️ [Near-parity](#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
- 🔌 Native re-implementations of popular Flake8 plugins, like [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/) ([`pydocstyle`](https://pypi.org/project/pydocstyle/))

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface. Ruff can be used to replace Flake8 (plus a variety
of plugins), [`pydocstyle`](https://pypi.org/project/pydocstyle/), [`yesqa`](https://github.com/asottile/yesqa),
and even a subset of [`pyupgrade`](https://pypi.org/project/pyupgrade/) and [`autoflake`](https://pypi.org/project/autoflake/)
all while executing tens or hundreds of times faster than any individual tool.

Read the [launch blog post](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).

## Installation and usage
## Table of Contents

1. [Installation and Usage](#installation-and-usage)
2. [Configuration](#configuration)
3. [Supported Rules](#supported-rules)
4. [Editor Integrations](#editor-integrations)
5. [FAQ](#faq)
6. [Development](#development)
7. [Releases](#releases)
8. [Benchmarks](#benchmarks)
9. [License](#license)
10. [Contributing](#contributing)

## Installation and Usage

### Installation

Available as [ruff](https://pypi.org/project/ruff/) on PyPI:
Available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
pip install ruff
@@ -38,7 +58,7 @@ pip install ruff

### Usage

To run ruff, try any of the following:
To run Ruff, try any of the following:

```shell
ruff path/to/code/to/check.py
@@ -46,27 +66,27 @@ ruff path/to/code/
ruff path/to/code/*.py
```

You can run ruff in `--watch` mode to automatically re-run on-change:
You can run Ruff in `--watch` mode to automatically re-run on-change:

```shell
ruff path/to/code/ --watch
```

ruff also works with [pre-commit](https://pre-commit.com):
Ruff also works with [pre-commit](https://pre-commit.com):

```yaml
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
  rev: v0.0.40
  rev: v0.0.84
  hooks:
  - id: lint
```

## Configuration

ruff is configurable both via `pyproject.toml` and the command line.
Ruff is configurable both via `pyproject.toml` and the command line.

For example, you could configure ruff to only enforce a subset of rules with:
For example, you could configure Ruff to only enforce a subset of rules with:

```toml
[tool.ruff]
@@ -80,55 +100,70 @@ select = [
Alternatively, on the command-line:

```shell
ruff path/to/code/ --select F401 F403
ruff path/to/code/ --select F401 --select F403
```

See `ruff --help` for more:

```shell
ruff (v0.0.44) 0.0.44
An extremely fast Python linter.
ruff: An extremely fast Python linter.

USAGE:
ruff [OPTIONS] <FILES>...
Usage: ruff [OPTIONS] <FILES>...

ARGS:
<FILES>...
Arguments:
<FILES>...

OPTIONS:
-e, --exit-zero
Exit with status code "0", even upon detecting errors
--exclude <EXCLUDE>...
List of paths, used to exclude files and/or directories from checks
--extend-exclude <EXTEND_EXCLUDE>...
Like --exclude, but adds additional files and directories on top of the excluded ones
-f, --fix
Attempt to automatically fix lint errors
--format <FORMAT>
Output serialization format for error messages [default: text] [possible values: text,
json]
-h, --help
Print help information
--ignore <IGNORE>...
List of error codes to ignore
-n, --no-cache
Disable cache reads
-q, --quiet
Disable all logging (but still exit with status code "1" upon detecting errors)
--select <SELECT>...
List of error codes to enable
--show-files
See the files ruff will be run against with the current configuration options
--show-settings
See ruff's settings
-v, --verbose
Enable verbose logging
-V, --version
Print version information
-w, --watch
Run in watch mode by re-running whenever files change
Options:
--config <CONFIG>
Path to the `pyproject.toml` file to use for configuration
-v, --verbose
Enable verbose logging
-q, --quiet
Disable all logging (but still exit with status code "1" upon detecting errors)
-e, --exit-zero
Exit with status code "0", even upon detecting errors
-w, --watch
Run in watch mode by re-running whenever files change
-f, --fix
Attempt to automatically fix lint errors
-n, --no-cache
Disable cache reads
--select <SELECT>
List of error codes to enable
--extend-select <EXTEND_SELECT>
Like --select, but adds additional error codes on top of the selected ones
--ignore <IGNORE>
List of error codes to ignore
--extend-ignore <EXTEND_IGNORE>
Like --ignore, but adds additional error codes on top of the ignored ones
--exclude <EXCLUDE>
List of paths, used to exclude files and/or directories from checks
--extend-exclude <EXTEND_EXCLUDE>
Like --exclude, but adds additional files and directories on top of the excluded ones
--per-file-ignores <PER_FILE_IGNORES>
List of mappings from file pattern to code to exclude
--format <FORMAT>
Output serialization format for error messages [default: text] [possible values: text, json]
--show-files
See the files ruff will be run against with the current settings
--show-settings
See ruff's settings
--add-noqa
Enable automatic additions of noqa directives to failing lines
--dummy-variable-rgx <DUMMY_VARIABLE_RGX>
Regular expression matching the name of dummy variables
--target-version <TARGET_VERSION>
The minimum Python version that should be supported
--stdin-filename <STDIN_FILENAME>
The name of the file when passing it through stdin
-h, --help
Print help information
-V, --version
Print version information
```

### Excluding files

Exclusions are based on globs, and can be either:

- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the
@@ -138,81 +173,444 @@ Exclusions are based on globs, and can be either:
(to exclude any Python files in `directory`). Note that these paths are relative to the
project root (e.g., the directory containing your `pyproject.toml`).

### Compatibility with Black
### Ignoring errors

ruff is compatible with [Black](https://github.com/psf/black) out-of-the-box, as long as
To omit a lint check entirely, add it to the "ignore" list via `--ignore` or `--extend-ignore`,
either on the command-line or in your `pyproject.toml` file.

To ignore an error in-line, Ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
To ignore an individual error, add `# noqa: {code}` to the end of the line, like so:

```python
# Ignore F841.
x = 1  # noqa: F841

# Ignore E741 and F841.
i = 1  # noqa: E741, F841

# Ignore _all_ errors.
x = 1  # noqa
```

Note that, for multi-line strings, the `noqa` directive should come at the end of the string, and
will apply to the entire body, like so:

```python
"""Lorem ipsum dolor sit amet.

Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
"""  # noqa: E501
```

Ruff supports several workflows to aid in `noqa` management.

First, Ruff provides a special error code, `M001`, to enforce that your `noqa` directives are
"valid", in that the errors they _say_ they ignore are actually being triggered on that line (and
thus suppressed). **You can run `ruff /path/to/file.py --extend-select M001` to flag unused `noqa`
directives.**

Second, Ruff can _automatically remove_ unused `noqa` directives via its autofix functionality.
**You can run `ruff /path/to/file.py --extend-select M001 --fix` to automatically remove unused
`noqa` directives.**

Third, Ruff can _automatically add_ `noqa` directives to all failing lines. This is useful when
migrating a new codebase to Ruff. **You can run `ruff /path/to/file.py --add-noqa` to automatically
add `noqa` directives to all failing lines, with the appropriate error codes.**

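For example (a hypothetical snippet, not from the Ruff documentation), the directive below is unnecessary because `os` is actually used, so F401 never fires on that line; `--extend-select M001` would report the directive as unused, and adding `--fix` would remove it:

```python
import os  # noqa: F401

# `os` is used here, so F401 never triggers above and M001 flags the stale `noqa`.
path = os.getcwd()
```
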
## Supported Rules

By default, Ruff enables all `E`, `W`, and `F` error codes, which correspond to those built-in to
Flake8.

The 🛠 emoji indicates that a rule is automatically fixable by the `--fix` command-line option.

### Pyflakes

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| F401 | UnusedImport | `...` imported but unused | 🛠 |
| F402 | ImportShadowedByLoopVar | Import `...` from line 1 shadowed by loop variable | |
| F403 | ImportStarUsed | `from ... import *` used; unable to detect undefined names | |
| F404 | LateFutureImport | `from __future__` imports must occur at the beginning of the file | |
| F405 | ImportStarUsage | `...` may be undefined, or defined from star imports: `...` | |
| F406 | ImportStarNotPermitted | `from ... import *` only allowed at module level | |
| F407 | FutureFeatureNotDefined | Future feature `...` is not defined | |
| F541 | FStringMissingPlaceholders | f-string without any placeholders | |
| F601 | MultiValueRepeatedKeyLiteral | Dictionary key literal repeated | |
| F602 | MultiValueRepeatedKeyVariable | Dictionary key `...` repeated | |
| F621 | ExpressionsInStarAssignment | Too many expressions in star-unpacking assignment | |
| F622 | TwoStarredExpressions | Two starred expressions in assignment | |
| F631 | AssertTuple | Assert test is a non-empty tuple, which is always `True` | |
| F632 | IsLiteral | Use `==` and `!=` to compare constant literals | |
| F633 | InvalidPrintSyntax | Use of `>>` is invalid with `print` function | |
| F634 | IfTuple | If test is a tuple, which is always `True` | |
| F701 | BreakOutsideLoop | `break` outside loop | |
| F702 | ContinueOutsideLoop | `continue` not properly in loop | |
| F704 | YieldOutsideFunction | `yield` or `yield from` statement outside of a function/method | |
| F706 | ReturnOutsideFunction | `return` statement outside of a function/method | |
| F707 | DefaultExceptNotLast | An `except:` block as not the last exception handler | |
| F722 | ForwardAnnotationSyntaxError | Syntax error in forward annotation: `...` | |
| F821 | UndefinedName | Undefined name `...` | |
| F822 | UndefinedExport | Undefined name `...` in `__all__` | |
| F823 | UndefinedLocal | Local variable `...` referenced before assignment | |
| F831 | DuplicateArgumentName | Duplicate argument name in function definition | |
| F841 | UnusedVariable | Local variable `...` is assigned to but never used | |
| F901 | RaiseNotImplemented | `raise NotImplemented` should be `raise NotImplementedError` | |

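As a quick illustration (a hypothetical snippet, not taken from the Ruff documentation), the following file would trigger a few of the Pyflakes checks above:

```python
import os  # F401: `os` imported but unused.


def compute():
    result = 1  # F841: local variable `result` is assigned to but never used.
    assert (True, "non-empty tuple")  # F631: an assert on a tuple is always `True`.
```
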
### pycodestyle

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| E402 | ModuleImportNotAtTopOfFile | Module level import not at top of file | |
| E501 | LineTooLong | Line too long (89 > 88 characters) | |
| E711 | NoneComparison | Comparison to `None` should be `cond is None` | |
| E712 | TrueFalseComparison | Comparison to `True` should be `cond is True` | |
| E713 | NotInTest | Test for membership should be `not in` | |
| E714 | NotIsTest | Test for object identity should be `is not` | |
| E721 | TypeComparison | Do not compare types, use `isinstance()` | |
| E722 | DoNotUseBareExcept | Do not use bare `except` | |
| E731 | DoNotAssignLambda | Do not assign a lambda expression, use a def | |
| E741 | AmbiguousVariableName | Ambiguous variable name: `...` | |
| E742 | AmbiguousClassName | Ambiguous class name: `...` | |
| E743 | AmbiguousFunctionName | Ambiguous function name: `...` | |
| E902 | IOError | IOError: `...` | |
| E999 | SyntaxError | SyntaxError: `...` | |
| W292 | NoNewLineAtEndOfFile | No newline at end of file | |

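For illustration (hypothetical snippet), a few of the pycodestyle checks above in action:

```python
value = None

if value == None:  # E711: use `value is None` instead.
    pass

try:
    pass
except:  # E722: do not use a bare `except`.
    pass

double = lambda x: x * 2  # E731: use a `def` instead of assigning a lambda.
```
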
### pydocstyle

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| D100 | PublicModule | Missing docstring in public module | |
| D101 | PublicClass | Missing docstring in public class | |
| D102 | PublicMethod | Missing docstring in public method | |
| D103 | PublicFunction | Missing docstring in public function | |
| D104 | PublicPackage | Missing docstring in public package | |
| D105 | MagicMethod | Missing docstring in magic method | |
| D106 | PublicNestedClass | Missing docstring in public nested class | |
| D107 | PublicInit | Missing docstring in `__init__` | |
| D200 | FitsOnOneLine | One-line docstring should fit on one line | |
| D201 | NoBlankLineBeforeFunction | No blank lines allowed before function docstring (found 1) | 🛠 |
| D202 | NoBlankLineAfterFunction | No blank lines allowed after function docstring (found 1) | 🛠 |
| D203 | OneBlankLineBeforeClass | 1 blank line required before class docstring | 🛠 |
| D204 | OneBlankLineAfterClass | 1 blank line required after class docstring | 🛠 |
| D205 | BlankLineAfterSummary | 1 blank line required between summary line and description | 🛠 |
| D206 | IndentWithSpaces | Docstring should be indented with spaces, not tabs | |
| D207 | NoUnderIndentation | Docstring is under-indented | 🛠 |
| D208 | NoOverIndentation | Docstring is over-indented | 🛠 |
| D209 | NewLineAfterLastParagraph | Multi-line docstring closing quotes should be on a separate line | 🛠 |
| D210 | NoSurroundingWhitespace | No whitespaces allowed surrounding docstring text | 🛠 |
| D211 | NoBlankLineBeforeClass | No blank lines allowed before class docstring | 🛠 |
| D212 | MultiLineSummaryFirstLine | Multi-line docstring summary should start at the first line | |
| D213 | MultiLineSummarySecondLine | Multi-line docstring summary should start at the second line | |
| D214 | SectionNotOverIndented | Section is over-indented ("Returns") | 🛠 |
| D215 | SectionUnderlineNotOverIndented | Section underline is over-indented ("Returns") | 🛠 |
| D300 | UsesTripleQuotes | Use """triple double quotes""" | |
| D400 | EndsInPeriod | First line should end with a period | |
| D402 | NoSignature | First line should not be the function's signature | |
| D403 | FirstLineCapitalized | First word of the first line should be properly capitalized | |
| D404 | NoThisPrefix | First word of the docstring should not be `This` | |
| D405 | CapitalizeSectionName | Section name should be properly capitalized ("returns") | 🛠 |
| D406 | NewLineAfterSectionName | Section name should end with a newline ("Returns") | 🛠 |
| D407 | DashedUnderlineAfterSection | Missing dashed underline after section ("Returns") | 🛠 |
| D408 | SectionUnderlineAfterName | Section underline should be in the line following the section's name ("Returns") | 🛠 |
| D409 | SectionUnderlineMatchesSectionLength | Section underline should match the length of its name ("Returns") | 🛠 |
| D410 | BlankLineAfterSection | Missing blank line after section ("Returns") | 🛠 |
| D411 | BlankLineBeforeSection | Missing blank line before section ("Returns") | 🛠 |
| D412 | NoBlankLinesBetweenHeaderAndContent | No blank lines allowed between a section header and its content ("Returns") | 🛠 |
| D413 | BlankLineAfterLastSection | Missing blank line after last section ("Returns") | 🛠 |
| D414 | NonEmptySection | Section has no content ("Returns") | |
| D415 | EndsInPunctuation | First line should end with a period, question mark, or exclamation point | |
| D416 | SectionNameEndsInColon | Section name should end with a colon ("Returns") | 🛠 |
| D417 | DocumentAllArguments | Missing argument descriptions in the docstring: `x`, `y` | |
| D418 | SkipDocstring | Function decorated with @overload shouldn't contain a docstring | |
| D419 | NonEmpty | Docstring is empty | |

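For illustration (hypothetical snippet), a docstring that would trip a couple of the pydocstyle checks above:

```python
def add(a, b):
    """add two numbers"""  # D403: first word not capitalized; D400: no trailing period.
    return a + b
```
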
### pyupgrade

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| U001 | UselessMetaclassType | `__metaclass__ = type` is implied | 🛠 |
| U002 | UnnecessaryAbspath | `abspath(__file__)` is unnecessary in Python 3.9 and later | 🛠 |
| U003 | TypeOfPrimitive | Use `str` instead of `type(...)` | 🛠 |
| U004 | UselessObjectInheritance | Class `...` inherits from object | 🛠 |
| U005 | DeprecatedUnittestAlias | `assertEquals` is deprecated, use `assertEqual` instead | 🛠 |
| U006 | UsePEP585Annotation | Use `list` instead of `List` for type annotations | 🛠 |
| U007 | UsePEP604Annotation | Use `X \| Y` for type annotations | 🛠 |
| U008 | SuperCallWithParameters | Use `super()` instead of `super(__class__, self)` | 🛠 |

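For illustration (hypothetical snippet), code that the pyupgrade checks above would flag, and that `--fix` could rewrite:

```python
class Widget(Base):
    pass


class Gadget(object):  # U004: inheriting from `object` is redundant on Python 3.
    def __init__(self):
        super(Gadget, self).__init__()  # U008: a bare `super()` call suffices.
```
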
### pep8-naming

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| N801 | InvalidClassName | Class name `...` should use CapWords convention | |
| N802 | InvalidFunctionName | Function name `...` should be lowercase | |
| N803 | InvalidArgumentName | Argument name `...` should be lowercase | |
| N804 | InvalidFirstArgumentNameForClassMethod | First argument of a class method should be named `cls` | |
| N805 | InvalidFirstArgumentNameForMethod | First argument of a method should be named `self` | |
| N807 | DunderFunctionName | Function name should not start and end with `__` | |
| N811 | ConstantImportedAsNonConstant | Constant `...` imported as non-constant `...` | |
| N812 | LowercaseImportedAsNonLowercase | Lowercase `...` imported as non-lowercase `...` | |
| N813 | CamelcaseImportedAsLowercase | Camelcase `...` imported as lowercase `...` | |
| N814 | CamelcaseImportedAsConstant | Camelcase `...` imported as constant `...` | |
| N817 | CamelcaseImportedAsAcronym | Camelcase `...` imported as acronym `...` | |

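For illustration (hypothetical snippet), names that the pep8-naming checks above would report:

```python
class HTTPClient:
    def FetchData(self, URL):  # N802: function name should be lowercase.
        # N803 flags the argument name `URL` as well.
        return URL
```
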
### flake8-comprehensions

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| C400 | UnnecessaryGeneratorList | Unnecessary generator (rewrite as a `list` comprehension) | |
| C401 | UnnecessaryGeneratorSet | Unnecessary generator (rewrite as a `set` comprehension) | |
| C402 | UnnecessaryGeneratorDict | Unnecessary generator (rewrite as a `dict` comprehension) | |
| C403 | UnnecessaryListComprehensionSet | Unnecessary `list` comprehension (rewrite as a `set` comprehension) | |
| C404 | UnnecessaryListComprehensionDict | Unnecessary `list` comprehension (rewrite as a `dict` comprehension) | |
| C405 | UnnecessaryLiteralSet | Unnecessary `(list\|tuple)` literal (rewrite as a `set` literal) | |
| C406 | UnnecessaryLiteralDict | Unnecessary `(list\|tuple)` literal (rewrite as a `dict` literal) | |
| C408 | UnnecessaryCollectionCall | Unnecessary `(dict\|list\|tuple)` call (rewrite as a literal) | |
| C409 | UnnecessaryLiteralWithinTupleCall | Unnecessary `(list\|tuple)` literal passed to `tuple()` (remove the outer call to `tuple()`) | |
| C410 | UnnecessaryLiteralWithinListCall | Unnecessary `(list\|tuple)` literal passed to `list()` (rewrite as a `list` literal) | |
| C411 | UnnecessaryListCall | Unnecessary `list` call (remove the outer call to `list()`) | |
| C413 | UnnecessaryCallAroundSorted | Unnecessary `(list\|reversed)` call around `sorted()` | |
| C414 | UnnecessaryDoubleCastOrProcess | Unnecessary `(list\|reversed\|set\|sorted\|tuple)` call within `(list\|set\|sorted\|tuple)()` | |
| C415 | UnnecessarySubscriptReversal | Unnecessary subscript reversal of iterable within `(reversed\|set\|sorted)()` | |
| C416 | UnnecessaryComprehension | Unnecessary `(list\|set)` comprehension (rewrite using `(list\|set)()`) | |
| C417 | UnnecessaryMap | Unnecessary `map` usage (rewrite using a `(list\|set\|dict)` comprehension) | |

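For illustration (hypothetical snippet), patterns that the flake8-comprehensions checks above target:

```python
items = [1, 2, 3]

as_list = list(x for x in items)  # C400: rewrite as a list comprehension.
copied = [x for x in items]  # C416: unnecessary comprehension; `list(items)` suffices.
```
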
### flake8-bugbear

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| B002 | UnaryPrefixIncrement | Python does not support the unary prefix increment. | |
| B007 | UnusedLoopControlVariable | Loop control variable `i` not used within the loop body. | 🛠 |
| B011 | DoNotAssertFalse | Do not `assert False` (`python -O` removes these calls), raise `AssertionError()` | 🛠 |
| B014 | DuplicateHandlerException | Exception handler with duplicate exception: `ValueError` | 🛠 |
| B017 | NoAssertRaisesException | `assertRaises(Exception):` should be considered evil. | |
| B025 | DuplicateTryBlockException | try-except block with duplicate exception `Exception` | |

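For illustration (hypothetical snippet), two of the flake8-bugbear checks above:

```python
def count(records):
    total = 0
    for i in records:  # B007: loop control variable `i` not used within the loop body.
        total += 1
    if total < 0:
        assert False, "impossible"  # B011: raise AssertionError(...) instead; `python -O` strips asserts.
    return total
```
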
### flake8-builtins

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| A001 | BuiltinVariableShadowing | Variable `...` is shadowing a python builtin | |
| A002 | BuiltinArgumentShadowing | Argument `...` is shadowing a python builtin | |
| A003 | BuiltinAttributeShadowing | Class attribute `...` is shadowing a python builtin | |

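For illustration (hypothetical snippet), shadowed builtins that the flake8-builtins checks above report:

```python
id = 42  # A001: variable `id` shadows the `id` builtin.


def first(list):  # A002: argument `list` shadows the `list` builtin.
    return list[0]
```
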
### flake8-print

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| T201 | PrintFound | `print` found | 🛠 |
| T203 | PPrintFound | `pprint` found | 🛠 |

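For illustration (hypothetical snippet), the flake8-print checks simply flag any use of `print` or `pprint`:

```python
from pprint import pprint

print("debug output")  # T201: `print` found.
pprint({"debug": True})  # T203: `pprint` found.
```
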
### Meta rules

| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| M001 | UnusedNOQA | Unused `noqa` directive | 🛠 |

## Editor Integrations

### PyCharm

Ruff can be installed as an [External Tool](https://www.jetbrains.com/help/pycharm/configuring-third-party-tools.html)
in PyCharm. Open the Preferences pane, then navigate to "Tools", then "External Tools". From there,
add a new tool with the following configuration:

![Install ruff as an External Tool](https://user-images.githubusercontent.com/1309177/193155720-336e43f0-1a8d-46b4-bc12-e60f9ae01f7e.png)

Ruff should then appear as a runnable action:

![ruff as a runnable action](https://user-images.githubusercontent.com/1309177/193156026-732b0aaf-3dd9-4549-9b4d-2de6d2168a33.png)

### GitHub Actions

GitHub Actions has everything you need to run Ruff out-of-the-box:

```yaml
name: CI
on: push
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install ruff
      - name: Run Ruff
        run: ruff .
```

## FAQ

### Is Ruff compatible with Black?

Yes. Ruff is compatible with [Black](https://github.com/psf/black) out-of-the-box, as long as
the `line-length` setting is consistent between the two.

As a project, ruff is designed to be used alongside Black and, as such, will defer implementing
As a project, Ruff is designed to be used alongside Black and, as such, will defer implementing
stylistic lint rules that are obviated by autoformatting.

### Parity with Flake8
### How does Ruff compare to Flake8?

ruff's goal is to achieve feature-parity with Flake8 when used (1) without plugins, (2) alongside
Black, and (3) on Python 3 code.
Ruff can be used as a (near) drop-in replacement for Flake8 when used (1) without or with a small
number of plugins, (2) alongside Black, and (3) on Python 3 code.

**Under those conditions, ruff implements 44 out of 60 rules.** (ruff is missing: 14 rules related
to string `.format` calls, 1 rule related to docstring parsing, and 1 rule related to redefined
variables.)
Under those conditions Ruff is missing 14 rules related to string `.format` calls, 1 rule related
to docstring parsing, and 1 rule related to redefined variables.

Beyond rule-set parity, ruff suffers from the following limitations vis-à-vis Flake8:
Ruff re-implements some of the most popular Flake8 plugins and related code quality tools natively,
including:

1. Flake8 has a plugin architecture and supports writing custom lint rules.
2. Flake8 supports a wider range of `noqa` patterns, such as per-file ignores defined in `.flake8`.
3. ruff does not yet support parenthesized context managers.
- [`pydocstyle`](https://pypi.org/project/pydocstyle/)
- [`yesqa`](https://github.com/asottile/yesqa)
- [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
- [`flake8-builtins`](https://pypi.org/project/flake8-builtins/)
- [`flake8-super`](https://pypi.org/project/flake8-super/)
- [`flake8-print`](https://pypi.org/project/flake8-print/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (9/32)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (8/34)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)

## Rules
Beyond rule-set parity, Ruff suffers from the following limitations vis-à-vis Flake8:

| Code | Name | Message |
| ---- | ----- | ------- |
| E402 | ModuleImportNotAtTopOfFile | Module level import not at top of file |
| E501 | LineTooLong | Line too long (89 > 88 characters) |
| E711 | NoneComparison | Comparison to `None` should be `cond is None` |
| E712 | TrueFalseComparison | Comparison to `True` should be `cond is True` |
| E713 | NotInTest | Test for membership should be `not in` |
| E714 | NotIsTest | Test for object identity should be `is not` |
| E721 | TypeComparison | do not compare types, use `isinstance()` |
| E722 | DoNotUseBareExcept | Do not use bare `except` |
| E731 | DoNotAssignLambda | Do not assign a lambda expression, use a def |
| E741 | AmbiguousVariableName | ambiguous variable name '...' |
| E742 | AmbiguousClassName | ambiguous class name '...' |
| E743 | AmbiguousFunctionName | ambiguous function name '...' |
| E902 | IOError | ... |
| E999 | SyntaxError | SyntaxError: ... |
| F401 | UnusedImport | `...` imported but unused |
| F402 | ImportShadowedByLoopVar | import '...' from line 1 shadowed by loop variable |
| F403 | ImportStarUsed | `from ... import *` used; unable to detect undefined names |
| F404 | LateFutureImport | from __future__ imports must occur at the beginning of the file |
| F405 | ImportStarUsage | '...' may be undefined, or defined from star imports: ... |
| F406 | ImportStarNotPermitted | `from ... import *` only allowed at module level |
| F407 | FutureFeatureNotDefined | future feature '...' is not defined |
| F541 | FStringMissingPlaceholders | f-string without any placeholders |
| F601 | MultiValueRepeatedKeyLiteral | Dictionary key literal repeated |
| F602 | MultiValueRepeatedKeyVariable | Dictionary key `...` repeated |
| F621 | TooManyExpressionsInStarredAssignment | too many expressions in star-unpacking assignment |
| F622 | TwoStarredExpressions | two starred expressions in assignment |
| F631 | AssertTuple | Assert test is a non-empty tuple, which is always `True` |
| F632 | IsLiteral | use ==/!= to compare constant literals |
| F633 | InvalidPrintSyntax | use of >> is invalid with print function |
| F634 | IfTuple | If test is a tuple, which is always `True` |
| F701 | BreakOutsideLoop | `break` outside loop |
| F702 | ContinueOutsideLoop | `continue` not properly in loop |
| F704 | YieldOutsideFunction | a `yield` or `yield from` statement outside of a function/method |
| F706 | ReturnOutsideFunction | a `return` statement outside of a function/method |
| F707 | DefaultExceptNotLast | an `except:` block as not the last exception handler |
| F722 | ForwardAnnotationSyntaxError | syntax error in forward annotation '...' |
| F821 | UndefinedName | Undefined name `...` |
| F822 | UndefinedExport | Undefined name `...` in `__all__` |
| F823 | UndefinedLocal | Local variable `...` referenced before assignment |
| F831 | DuplicateArgumentName | Duplicate argument name in function definition |
| F841 | UnusedVariable | Local variable `...` is assigned to but never used |
| F901 | RaiseNotImplemented | `raise NotImplemented` should be `raise NotImplementedError` |
| R001 | UselessObjectInheritance | Class `...` inherits from object |
| R002 | NoAssertEquals | `assertEquals` is deprecated, use `assertEqual` instead |
1. Ruff does not yet support a few Python 3.9 and 3.10 language features, including structural
   pattern matching and parenthesized context managers.
2. Flake8 has a plugin architecture and supports writing custom lint rules. (To date, popular Flake8
   plugins have been re-implemented within Ruff directly.)

### Which tools does Ruff replace?

Today, Ruff can be used to replace Flake8 when used with any of the following plugins:

- [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
- [`flake8-builtins`](https://pypi.org/project/flake8-builtins/)
- [`flake8-super`](https://pypi.org/project/flake8-super/)
- [`flake8-print`](https://pypi.org/project/flake8-print/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (9/32)

Ruff also implements the functionality that you get from [`yesqa`](https://github.com/asottile/yesqa),
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (8/34).

If you're looking to use Ruff, but rely on an unsupported Flake8 plugin, feel free to file an Issue.

### Do I need to install Rust to use Ruff?

Nope! Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
pip install ruff
```

Ruff ships with wheels for all major platforms, which enables `pip` to install Ruff without relying
on Rust at all.

### Can I write my own plugins for Ruff?

Ruff does not yet support third-party plugins, though a plugin system is within-scope for the
project. See [#283](https://github.com/charliermarsh/ruff/issues/283) for more.

### Does Ruff support NumPy- or Google-style docstrings?

Yes! To enable a specific docstring convention, start by enabling all `pydocstyle` error codes, and
then selectively disabling based on your [preferred convention](https://www.pydocstyle.org/en/latest/error_codes.html#default-conventions).

For example, if you're coming from `flake8-docstrings`, the following configuration is equivalent to
`--docstring-convention numpy`:

```toml
[tool.ruff]
extend-select = [
    "D100",
    "D101",
    "D102",
    "D103",
    "D104",
    "D105",
    "D106",
    "D200",
    "D201",
    "D202",
    "D204",
    "D205",
    "D206",
    "D207",
    "D208",
    "D209",
    "D210",
    "D211",
    "D214",
    "D215",
    "D300",
    "D400",
    "D402",
    "D403",
    "D404",
    "D405",
    "D406",
    "D407",
    "D408",
    "D409",
    "D410",
    "D411",
    "D412",
    "D413",
    "D418",
    "D419",
]
```

Similarly, the following is equivalent to `--docstring-convention google`:

```toml
[tool.ruff]
extend-select = [
    "D100",
    "D101",
    "D102",
    "D103",
    "D104",
    "D105",
    "D106",
    "D107",
    "D200",
    "D201",
    "D202",
    "D205",
    "D206",
    "D207",
    "D208",
    "D209",
    "D210",
    "D211",
    "D212",
    "D214",
    "D300",
    "D402",
    "D403",
    "D405",
    "D410",
    "D411",
    "D412",
    "D414",
    "D415",
    "D416",
    "D417",
    "D418",
    "D419",
]
```

## Development

ruff is written in Rust (1.63.0). You'll need to install the [Rust toolchain](https://www.rust-lang.org/tools/install)
Ruff is written in Rust (1.63.0). You'll need to install the [Rust toolchain](https://www.rust-lang.org/tools/install)
for development.

Assuming you have `cargo` installed, you can run:
@@ -224,13 +622,13 @@ cargo clippy
cargo test
```

## Deployment
## Releases

ruff is distributed on [PyPI](https://pypi.org/project/ruff/), and published via [`maturin`](https://github.com/PyO3/maturin).
Ruff is distributed on [PyPI](https://pypi.org/project/ruff/), and published via [`maturin`](https://github.com/PyO3/maturin).

See: `.github/workflows/release.yaml`.

## Benchmarking
## Benchmarks

First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
which makes it a good target for benchmarking.
@@ -307,9 +705,9 @@ hyperfine --ignore-failure --warmup 5 \

In order, these evaluate:

- ruff
- Ruff
- Pylint
- PyFlakes
- Pyflakes
- autoflake
- pycodestyle
- Flake8
@@ -369,3 +767,8 @@ Summary
## License

MIT

## Contributing

Contributions are welcome and hugely appreciated. To get started, check out the
[contributing guidelines](https://github.com/charliermarsh/ruff/blob/main/CONTRIBUTING.md).

@@ -1,19 +1,29 @@
/// Generate a Markdown-compatible table of supported lint rules.
use ruff::checks::{CheckCode, ALL_CHECK_CODES};
//! Generate a Markdown-compatible table of supported lint rules.

use strum::IntoEnumIterator;

use ruff::checks::{CheckCategory, CheckCode};

fn main() {
    let mut check_codes: Vec<CheckCode> = ALL_CHECK_CODES.to_vec();
    check_codes.sort();
    for check_category in CheckCategory::iter() {
        println!("### {}", check_category.title());
        println!();

    println!("| Code | Name | Message |");
    println!("| ---- | ----- | ------- |");
    for check_code in check_codes {
        let check_kind = check_code.kind();
        println!(
            "| {} | {} | {} |",
            check_kind.code().as_str(),
            check_kind.name(),
            check_kind.body()
        );
        println!("| Code | Name | Message | Fix |");
        println!("| ---- | ---- | ------- | --- |");
        for check_code in CheckCode::iter() {
            if check_code.category() == check_category {
                let check_kind = check_code.kind();
                let fix_token = if check_kind.fixable() { "🛠" } else { "" };
                println!(
                    "| {} | {} | {} | {} |",
                    check_kind.code().as_ref(),
                    check_kind.as_ref(),
                    check_kind.summary().replace("|", r"\|"),
                    fix_token
                );
            }
        }
        println!();
    }
}

26 examples/generate_source_code.rs Normal file
@@ -0,0 +1,26 @@
use std::path::PathBuf;

use anyhow::Result;
use clap::Parser;
use rustpython_parser::parser;

use ruff::code_gen::SourceGenerator;
use ruff::fs;

#[derive(Debug, Parser)]
struct Cli {
    #[arg(required = true)]
    file: PathBuf,
}

fn main() -> Result<()> {
    let cli = Cli::parse();

    let contents = fs::read_file(&cli.file)?;
    let python_ast = parser::parse_program(&contents, &cli.file.to_string_lossy())?;
    let mut generator = SourceGenerator::new();
    generator.unparse_suite(&python_ast)?;
    println!("{}", generator.generate()?);

    Ok(())
}
@@ -1,15 +1,16 @@
/// Print the AST for a given Python file.
//! Print the AST for a given Python file.

use std::path::PathBuf;

use anyhow::Result;
use clap::{Parser, ValueHint};
use clap::Parser;
use rustpython_parser::parser;

use ruff::fs;

#[derive(Debug, Parser)]
struct Cli {
    #[clap(parse(from_os_str), value_hint = ValueHint::FilePath, required = true)]
    #[arg(required = true)]
    file: PathBuf,
}

@@ -1,15 +1,16 @@
/// Print the token stream for a given Python file.
//! Print the token stream for a given Python file.

use std::path::PathBuf;

use anyhow::Result;
use clap::{Parser, ValueHint};
use clap::Parser;
use rustpython_parser::lexer;

use ruff::fs;

#[derive(Debug, Parser)]
struct Cli {
    #[clap(parse(from_os_str), value_hint = ValueHint::FilePath, required = true)]
    #[arg(required = true)]
    file: PathBuf,
}

@@ -8,7 +8,6 @@ classifiers = [
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",

27
resources/test/fixtures/A001.py
vendored
Normal file
27
resources/test/fixtures/A001.py
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
import some as sum
|
||||
from some import other as int
|
||||
|
||||
print = 1
|
||||
copyright: 'annotation' = 2
|
||||
(complex := 3)
|
||||
float = object = 4
|
||||
min, max = 5, 6
|
||||
|
||||
def bytes():
|
||||
pass
|
||||
|
||||
class slice:
|
||||
pass
|
||||
|
||||
try:
|
||||
...
|
||||
except ImportError as ValueError:
|
||||
...
|
||||
|
||||
for memoryview, *bytearray in []:
|
||||
pass
|
||||
|
||||
with open('file') as str, open('file2') as (all, any):
|
||||
pass
|
||||
|
||||
[0 for sum in ()]
|
||||
9
resources/test/fixtures/A002.py
vendored
Normal file
9
resources/test/fixtures/A002.py
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
def func1(str, /, type, *complex, Exception, **getattr):
|
||||
pass
|
||||
|
||||
|
||||
async def func2(bytes):
|
||||
pass
|
||||
|
||||
|
||||
map([], lambda float: ...)
|
||||
8
resources/test/fixtures/A003.py
vendored
Normal file
8
resources/test/fixtures/A003.py
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
class MyClass:
|
||||
ImportError = 4
|
||||
|
||||
def __init__(self):
|
||||
self.float = 5 # is fine
|
||||
|
||||
def str(self):
|
||||
pass
|
||||
20
resources/test/fixtures/B002.py
vendored
Normal file
20
resources/test/fixtures/B002.py
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
"""
|
||||
Should emit:
|
||||
B002 - on lines 15 and 20
|
||||
"""
|
||||
|
||||
|
||||
def this_is_all_fine(n):
|
||||
x = n + 1
|
||||
y = 1 + n
|
||||
z = +x + y
|
||||
return +z
|
||||
|
||||
|
||||
def this_is_buggy(n):
|
||||
x = ++n
|
||||
return x
|
||||
|
||||
|
||||
def this_is_buggy_too(n):
|
||||
return ++n
|
||||
31
resources/test/fixtures/B007.py
vendored
Normal file
31
resources/test/fixtures/B007.py
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
for i in range(10):
|
||||
print(i)
|
||||
|
||||
print(i) # name no longer defined on Python 3; no warning yet
|
||||
|
||||
for i in range(10): # name not used within the loop; B007
|
||||
print(10)
|
||||
|
||||
print(i) # name no longer defined on Python 3; no warning yet
|
||||
|
||||
|
||||
for _ in range(10): # _ is okay for a throw-away variable
|
||||
print(10)
|
||||
|
||||
|
||||
for i in range(10):
|
||||
for j in range(10):
|
||||
for k in range(10): # k not used, i and j used transitively
|
||||
print(i + j)
|
||||
|
||||
|
||||
def strange_generator():
|
||||
for i in range(10):
|
||||
for j in range(10):
|
||||
for k in range(10):
|
||||
for l in range(10):
|
||||
yield i, (j, (k, l))
|
||||
|
||||
|
||||
for i, (j, (k, l)) in strange_generator(): # i, k not used
|
||||
print(j, l)
|
||||
10
resources/test/fixtures/B011.py
vendored
Normal file
10
resources/test/fixtures/B011.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"""
|
||||
Should emit:
|
||||
B011 - on line 8
|
||||
B011 - on line 10
|
||||
"""
|
||||
|
||||
assert 1 != 2
|
||||
assert False
|
||||
assert 1 != 2, "message"
|
||||
assert False, "message"
|
||||
76
resources/test/fixtures/B014.py
vendored
Normal file
76
resources/test/fixtures/B014.py
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
"""
|
||||
Should emit:
|
||||
B014 - on lines 11, 17, 28, 42, 49, 56, and 74.
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import re
|
||||
|
||||
try:
|
||||
pass
|
||||
except (Exception, TypeError):
|
||||
# TypeError is a subclass of Exception, so it doesn't add anything
|
||||
pass
|
||||
|
||||
try:
|
||||
pass
|
||||
except (OSError, OSError) as err:
|
||||
# Duplicate exception types are useless
|
||||
pass
|
||||
|
||||
|
||||
class MyError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (MyError, MyError):
|
||||
# Detect duplicate non-builtin errors
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (MyError, Exception) as e:
|
||||
# Don't assume that we're all subclasses of Exception
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (MyError, BaseException) as e:
|
||||
# But we *can* assume that everything is a subclass of BaseException
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (re.error, re.error):
|
||||
# Duplicate exception types as attributes
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (IOError, EnvironmentError, OSError):
|
||||
# Detect if a primary exception and any its aliases are present.
|
||||
#
|
||||
# Since Python 3.3, IOError, EnvironmentError, WindowsError, mmap.error,
|
||||
# socket.error and select.error are aliases of OSError. See PEP 3151 for
|
||||
# more info.
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (MyException, NotImplemented):
|
||||
# NotImplemented is not an exception, let's not crash on it.
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except (ValueError, binascii.Error):
|
||||
# binascii.Error is a subclass of ValueError.
|
||||
pass
|
||||
36
resources/test/fixtures/B017.py
vendored
Normal file
36
resources/test/fixtures/B017.py
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"""
|
||||
Should emit:
|
||||
B017 - on lines 20
|
||||
"""
|
||||
import asyncio
|
||||
import unittest
|
||||
|
||||
CONSTANT = True
|
||||
|
||||
|
||||
def something_else() -> None:
|
||||
for i in (1, 2, 3):
|
||||
print(i)
|
||||
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
|
||||
class Foobar(unittest.TestCase):
|
||||
def evil_raises(self) -> None:
|
||||
with self.assertRaises(Exception):
|
||||
raise Exception("Evil I say!")
|
||||
|
||||
def context_manager_raises(self) -> None:
|
||||
with self.assertRaises(Exception) as ex:
|
||||
raise Exception("Context manager is good")
|
||||
self.assertEqual("Context manager is good", str(ex.exception))
|
||||
|
||||
def regex_raises(self) -> None:
|
||||
with self.assertRaisesRegex(Exception, "Regex is good"):
|
||||
raise Exception("Regex is good")
|
||||
|
||||
def raises_with_absolute_reference(self):
|
||||
with self.assertRaises(asyncio.CancelledError):
|
||||
Foo()
|
||||
38
resources/test/fixtures/B025.py
vendored
Normal file
38
resources/test/fixtures/B025.py
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
"""
|
||||
Should emit:
|
||||
B025 - on lines 15, 22, 31
|
||||
"""
|
||||
|
||||
import pickle
|
||||
|
||||
try:
|
||||
a = 1
|
||||
except ValueError:
|
||||
a = 2
|
||||
finally:
|
||||
a = 3
|
||||
|
||||
try:
|
||||
a = 1
|
||||
except ValueError:
|
||||
a = 2
|
||||
except ValueError:
|
||||
a = 2
|
||||
|
||||
try:
|
||||
a = 1
|
||||
except pickle.PickleError:
|
||||
a = 2
|
||||
except ValueError:
|
||||
a = 2
|
||||
except pickle.PickleError:
|
||||
a = 2
|
||||
|
||||
try:
|
||||
a = 1
|
||||
except (ValueError, TypeError):
|
||||
a = 2
|
||||
except ValueError:
|
||||
a = 2
|
||||
except (OSError, TypeError):
|
||||
a = 2
|
||||
1
resources/test/fixtures/C400.py
vendored
Normal file
1
resources/test/fixtures/C400.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
x = list(x for x in range(3))
|
||||
1
resources/test/fixtures/C401.py
vendored
Normal file
1
resources/test/fixtures/C401.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
x = set(x for x in range(3))
|
||||
1
resources/test/fixtures/C402.py
vendored
Normal file
1
resources/test/fixtures/C402.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
d = dict((x, x) for x in range(3))
|
||||
1
resources/test/fixtures/C403.py
vendored
Normal file
1
resources/test/fixtures/C403.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
s = set([x for x in range(3)])
|
||||
1
resources/test/fixtures/C404.py
vendored
Normal file
1
resources/test/fixtures/C404.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
d = dict([(i, i) for i in range(3)])
|
||||
5
resources/test/fixtures/C405.py
vendored
Normal file
5
resources/test/fixtures/C405.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
s1 = set([1, 2])
|
||||
s2 = set((1, 2))
|
||||
s3 = set([])
|
||||
s4 = set(())
|
||||
s5 = set()
|
||||
5
resources/test/fixtures/C406.py
vendored
Normal file
5
resources/test/fixtures/C406.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
d1 = dict([(1, 2)])
|
||||
d2 = dict(((1, 2),))
|
||||
d3 = dict([])
|
||||
d4 = dict(())
|
||||
d5 = dict()
|
||||
5
resources/test/fixtures/C408.py
vendored
Normal file
5
resources/test/fixtures/C408.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
t = tuple()
|
||||
l = list()
|
||||
d1 = dict()
|
||||
d2 = dict(a=1)
|
||||
d3 = dict(**d2)
|
||||
3
resources/test/fixtures/C409.py
vendored
Normal file
3
resources/test/fixtures/C409.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
t1 = tuple([1, 2])
|
||||
t2 = tuple((1, 2))
|
||||
t3 = tuple([])
|
||||
4
resources/test/fixtures/C410.py
vendored
Normal file
4
resources/test/fixtures/C410.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
l1 = list([1, 2])
|
||||
l2 = list((1, 2))
|
||||
l3 = list([])
|
||||
l4 = list(())
|
||||
2
resources/test/fixtures/C411.py
vendored
Normal file
2
resources/test/fixtures/C411.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
x = [1, 2, 3]
|
||||
list([i for i in x])
|
||||
4
resources/test/fixtures/C413.py
vendored
Normal file
4
resources/test/fixtures/C413.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
x = [2, 3, 1]
|
||||
list(sorted(x))
|
||||
reversed(sorted(x))
|
||||
reversed(sorted(x, reverse=True))
|
||||
14
resources/test/fixtures/C414.py
vendored
Normal file
14
resources/test/fixtures/C414.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
x = [1, 2, 3]
|
||||
list(list(x))
|
||||
list(tuple(x))
|
||||
tuple(list(x))
|
||||
tuple(tuple(x))
|
||||
set(set(x))
|
||||
set(list(x))
|
||||
set(tuple(x))
|
||||
set(sorted(x))
|
||||
set(reversed(x))
|
||||
sorted(list(x))
|
||||
sorted(tuple(x))
|
||||
sorted(sorted(x))
|
||||
sorted(reversed(x))
|
||||
9
resources/test/fixtures/C415.py
vendored
Normal file
9
resources/test/fixtures/C415.py
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
lst = [2, 1, 3]
|
||||
a = set(lst[::-1])
|
||||
b = reversed(lst[::-1])
|
||||
c = sorted(lst[::-1])
|
||||
d = sorted(lst[::-1], reverse=True)
|
||||
e = set(lst[2:-1])
|
||||
f = set(lst[:1:-1])
|
||||
g = set(lst[::1])
|
||||
h = set(lst[::-2])
|
||||
6
resources/test/fixtures/C416.py
vendored
Normal file
6
resources/test/fixtures/C416.py
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
x = [1, 2, 3]
|
||||
[i for i in x]
|
||||
{i for i in x}
|
||||
|
||||
[i for i in x if i > 1]
|
||||
[i for i in x for j in x]
|
||||
6
resources/test/fixtures/C417.py
vendored
Normal file
6
resources/test/fixtures/C417.py
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
nums = [1, 2, 3]
|
||||
map(lambda x: x + 1, nums)
|
||||
map(lambda x: str(x), nums)
|
||||
list(map(lambda x: x * 2, nums))
|
||||
set(map(lambda x: x % 2 == 0, nums))
|
||||
dict(map(lambda v: (v, v**2), nums))
|
||||
534
resources/test/fixtures/D.py
vendored
Normal file
534
resources/test/fixtures/D.py
vendored
Normal file
@@ -0,0 +1,534 @@
|
||||
# No docstring, so we can test D100
|
||||
from functools import wraps
|
||||
import os
|
||||
from .expected import Expectation
|
||||
from typing import overload
|
||||
|
||||
|
||||
expectation = Expectation()
|
||||
expect = expectation.expect
|
||||
|
||||
expect('class_', 'D101: Missing docstring in public class')
|
||||
|
||||
|
||||
class class_:
|
||||
|
||||
expect('meta', 'D419: Docstring is empty')
|
||||
|
||||
class meta:
|
||||
""""""
|
||||
|
||||
@expect('D102: Missing docstring in public method')
|
||||
def method(self=None):
|
||||
pass
|
||||
|
||||
def _ok_since_private(self=None):
|
||||
pass
|
||||
|
||||
@overload
|
||||
def overloaded_method(self, a: int) -> str:
|
||||
...
|
||||
|
||||
@overload
|
||||
def overloaded_method(self, a: str) -> str:
|
||||
"""Foo bar documentation."""
|
||||
...
|
||||
|
||||
def overloaded_method(a):
|
||||
"""Foo bar documentation."""
|
||||
return str(a)
|
||||
|
||||
expect('overloaded_method',
|
||||
"D418: Function/ Method decorated with @overload"
|
||||
" shouldn't contain a docstring")
|
||||
|
||||
@property
|
||||
def foo(self):
|
||||
"""The foo of the thing, which isn't in imperitive mood."""
|
||||
return "hello"
|
||||
|
||||
@expect('D102: Missing docstring in public method')
|
||||
def __new__(self=None):
|
||||
pass
|
||||
|
||||
@expect('D107: Missing docstring in __init__')
|
||||
def __init__(self=None):
|
||||
pass
|
||||
|
||||
@expect('D105: Missing docstring in magic method')
|
||||
def __str__(self=None):
|
||||
pass
|
||||
|
||||
@expect('D102: Missing docstring in public method')
|
||||
def __call__(self=None, x=None, y=None, z=None):
|
||||
pass
|
||||
|
||||
|
||||
@expect('D419: Docstring is empty')
|
||||
def function():
|
||||
""" """
|
||||
def ok_since_nested():
|
||||
pass
|
||||
|
||||
@expect('D419: Docstring is empty')
|
||||
def nested():
|
||||
''
|
||||
|
||||
|
||||
def function_with_nesting():
|
||||
"""Foo bar documentation."""
|
||||
@overload
|
||||
def nested_overloaded_func(a: int) -> str:
|
||||
...
|
||||
|
||||
@overload
|
||||
def nested_overloaded_func(a: str) -> str:
|
||||
"""Foo bar documentation."""
|
||||
...
|
||||
|
||||
def nested_overloaded_func(a):
|
||||
"""Foo bar documentation."""
|
||||
return str(a)
|
||||
|
||||
|
||||
expect('nested_overloaded_func',
|
||||
"D418: Function/ Method decorated with @overload"
|
||||
" shouldn't contain a docstring")
|
||||
|
||||
|
||||
@overload
|
||||
def overloaded_func(a: int) -> str:
|
||||
...
|
||||
|
||||
|
||||
@overload
|
||||
def overloaded_func(a: str) -> str:
|
||||
"""Foo bar documentation."""
|
||||
...
|
||||
|
||||
|
||||
def overloaded_func(a):
|
||||
"""Foo bar documentation."""
|
||||
return str(a)
|
||||
|
||||
|
||||
expect('overloaded_func',
|
||||
"D418: Function/ Method decorated with @overload"
|
||||
" shouldn't contain a docstring")
|
||||
|
||||
|
||||
@expect('D200: One-line docstring should fit on one line with quotes '
|
||||
'(found 3)')
|
||||
@expect('D212: Multi-line docstring summary should start at the first line')
|
||||
def asdlkfasd():
|
||||
"""
|
||||
Wrong.
|
||||
"""
|
||||
|
||||
|
||||
@expect('D201: No blank lines allowed before function docstring (found 1)')
|
||||
def leading_space():
|
||||
|
||||
"""Leading space."""
|
||||
|
||||
|
||||
@expect('D202: No blank lines allowed after function docstring (found 1)')
|
||||
def trailing_space():
|
||||
"""Leading space."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@expect('D201: No blank lines allowed before function docstring (found 1)')
|
||||
@expect('D202: No blank lines allowed after function docstring (found 1)')
|
||||
def trailing_and_leading_space():
|
||||
|
||||
"""Trailing and leading space."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
expect('LeadingSpaceMissing',
|
||||
'D203: 1 blank line required before class docstring (found 0)')
|
||||
|
||||
|
||||
class LeadingSpaceMissing:
|
||||
"""Leading space missing."""
|
||||
|
||||
|
||||
expect('WithLeadingSpace',
|
||||
'D211: No blank lines allowed before class docstring (found 1)')
|
||||
|
||||
|
||||
class WithLeadingSpace:
|
||||
|
||||
"""With leading space."""
|
||||
|
||||
|
||||
expect('TrailingSpace',
|
||||
'D204: 1 blank line required after class docstring (found 0)')
|
||||
expect('TrailingSpace',
|
||||
'D211: No blank lines allowed before class docstring (found 1)')
|
||||
|
||||
|
||||
class TrailingSpace:
|
||||
|
||||
"""TrailingSpace."""
|
||||
pass
|
||||
|
||||
|
||||
expect('LeadingAndTrailingSpaceMissing',
|
||||
'D203: 1 blank line required before class docstring (found 0)')
|
||||
expect('LeadingAndTrailingSpaceMissing',
|
||||
'D204: 1 blank line required after class docstring (found 0)')
|
||||
|
||||
|
||||
class LeadingAndTrailingSpaceMissing:
|
||||
"""Leading and trailing space missing."""
|
||||
pass
|
||||
|
||||
|
||||
@expect('D205: 1 blank line required between summary line and description '
|
||||
'(found 0)')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def multi_line_zero_separating_blanks():
|
||||
"""Summary.
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D205: 1 blank line required between summary line and description '
|
||||
'(found 2)')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def multi_line_two_separating_blanks():
|
||||
"""Summary.
|
||||
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def multi_line_one_separating_blanks():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D207: Docstring is under-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfsdf():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D207: Docstring is under-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdsdfsdffsdf():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D208: Docstring is over-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfsdsdf24():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D208: Docstring is over-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfsdsdfsdf24():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D208: Docstring is over-indented')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfsdfsdsdsdfsdf24():
|
||||
"""Summary.
|
||||
|
||||
Description.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect('D209: Multi-line docstring closing quotes should be on a separate '
|
||||
'line')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def asdfljdf24():
|
||||
"""Summary.
|
||||
|
||||
Description."""
|
||||
|
||||
|
||||
@expect('D210: No whitespaces allowed surrounding docstring text')
|
||||
def endswith():
|
||||
"""Whitespace at the end. """
|
||||
|
||||
|
||||
@expect('D210: No whitespaces allowed surrounding docstring text')
|
||||
def around():
|
||||
""" Whitespace at everywhere. """
|
||||
|
||||
|
||||
@expect('D210: No whitespaces allowed surrounding docstring text')
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def multiline():
|
||||
""" Whitespace at the beginning.
|
||||
|
||||
This is the end.
|
||||
"""
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'\'\'-quotes)')
|
||||
def triple_single_quotes_raw():
|
||||
r'''Summary.'''
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'\'\'-quotes)')
|
||||
def triple_single_quotes_raw_uppercase():
|
||||
R'''Summary.'''
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
|
||||
def single_quotes_raw():
|
||||
r'Summary.'
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
|
||||
def single_quotes_raw_uppercase():
|
||||
R'Summary.'
|
||||
|
||||
|
||||
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
|
||||
@expect('D301: Use r""" if any backslashes in a docstring')
|
||||
def single_quotes_raw_uppercase_backslash():
|
||||
R'Sum\mary.'
|
||||
|
||||
|
||||
@expect('D301: Use r""" if any backslashes in a docstring')
|
||||
def double_quotes_backslash():
|
||||
"""Sum\\mary."""
|
||||
|
||||
|
||||
@expect('D301: Use r""" if any backslashes in a docstring')
|
||||
def double_quotes_backslash_uppercase():
|
||||
R"""Sum\\mary."""
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def exceptions_of_D301():
|
||||
"""Exclude some backslashes from D301.
|
||||
|
||||
In particular, line continuations \
|
||||
and unicode literals \u0394 and \N{GREEK CAPITAL LETTER DELTA}.
|
||||
They are considered to be intentionally unescaped.
|
||||
"""
|
||||
|
||||
|
||||
@expect("D400: First line should end with a period (not 'y')")
|
||||
@expect("D415: First line should end with a period, question mark, "
|
||||
"or exclamation point (not 'y')")
|
||||
def lwnlkjl():
|
||||
"""Summary"""
|
||||
|
||||
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Return', not 'Returns')")
|
||||
def liouiwnlkjl():
|
||||
"""Returns foo."""
|
||||
|
||||
|
||||
@expect("D401: First line should be in imperative mood; try rephrasing "
|
||||
"(found 'Constructor')")
|
||||
def sdgfsdg23245():
|
||||
"""Constructor for a foo."""
|
||||
|
||||
|
||||
@expect("D401: First line should be in imperative mood; try rephrasing "
|
||||
"(found 'Constructor')")
|
||||
def sdgfsdg23245777():
|
||||
"""Constructor."""
|
||||
|
||||
|
||||
@expect('D402: First line should not be the function\'s "signature"')
|
||||
def foobar():
|
||||
"""Signature: foobar()."""
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def new_209():
|
||||
"""First line.
|
||||
|
||||
More lines.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def old_209():
|
||||
"""One liner.
|
||||
|
||||
Multi-line comments. OK to have extra blank line
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect("D103: Missing docstring in public function")
|
||||
def oneliner_d102(): return
|
||||
|
||||
|
||||
@expect("D400: First line should end with a period (not 'r')")
|
||||
@expect("D415: First line should end with a period, question mark,"
|
||||
" or exclamation point (not 'r')")
|
||||
def oneliner_withdoc(): """One liner"""
|
||||
|
||||
|
||||
def ignored_decorator(func): # noqa: D400,D401,D415
|
||||
"""Runs something"""
|
||||
func()
|
||||
pass
|
||||
|
||||
|
||||
def decorator_for_test(func): # noqa: D400,D401,D415
|
||||
"""Runs something"""
|
||||
func()
|
||||
pass
|
||||
|
||||
|
||||
@ignored_decorator
|
||||
def oneliner_ignored_decorator(): """One liner"""
|
||||
|
||||
|
||||
@decorator_for_test
|
||||
@expect("D400: First line should end with a period (not 'r')")
|
||||
@expect("D415: First line should end with a period, question mark,"
|
||||
" or exclamation point (not 'r')")
|
||||
def oneliner_with_decorator_expecting_errors(): """One liner"""
|
||||
|
||||
|
||||
@decorator_for_test
|
||||
def valid_oneliner_with_decorator(): """One liner."""
|
||||
|
||||
|
||||
@expect("D207: Docstring is under-indented")
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def docstring_start_in_same_line(): """First Line.
|
||||
|
||||
Second Line
|
||||
"""
|
||||
|
||||
|
||||
def function_with_lambda_arg(x=lambda y: y):
|
||||
"""Wrap the given lambda."""
|
||||
|
||||
|
||||
@expect('D213: Multi-line docstring summary should start at the second line')
|
||||
def a_following_valid_function(x=None):
|
||||
"""Check for a bug where the previous function caused an assertion.
|
||||
|
||||
The assertion was caused in the next function, so this one is necessary.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def outer_function():
|
||||
"""Do something."""
|
||||
def inner_function():
|
||||
"""Do inner something."""
|
||||
return 0
|
||||
|
||||
|
||||
@expect("D400: First line should end with a period (not 'g')")
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Run', not 'Runs')")
|
||||
@expect("D415: First line should end with a period, question mark, "
|
||||
"or exclamation point (not 'g')")
|
||||
def docstring_bad():
|
||||
"""Runs something"""
|
||||
pass
|
||||
|
||||
|
||||
def docstring_bad_ignore_all(): # noqa
|
||||
"""Runs something"""
|
||||
pass
|
||||
|
||||
|
||||
def docstring_bad_ignore_one(): # noqa: D400,D401,D415
|
||||
"""Runs something"""
|
||||
pass
|
||||
|
||||
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Run', not 'Runs')")
|
||||
def docstring_ignore_some_violations_but_catch_D401(): # noqa: E501,D400,D415
|
||||
"""Runs something"""
|
||||
pass
|
||||
|
||||
|
||||
@expect(
|
||||
"D401: First line should be in imperative mood "
|
||||
"(perhaps 'Initiate', not 'Initiates')"
|
||||
)
|
||||
def docstring_initiates():
|
||||
"""Initiates the process."""
|
||||
|
||||
|
||||
@expect(
|
||||
"D401: First line should be in imperative mood "
|
||||
"(perhaps 'Initialize', not 'Initializes')"
|
||||
)
|
||||
def docstring_initializes():
|
||||
"""Initializes the process."""
|
||||
|
||||
|
||||
@wraps(docstring_bad_ignore_one)
|
||||
def bad_decorated_function():
|
||||
"""Bad (E501) but decorated"""
|
||||
pass
|
||||
|
||||
|
||||
def valid_google_string(): # noqa: D400
|
||||
"""Test a valid something!"""
|
||||
|
||||
|
||||
@expect("D415: First line should end with a period, question mark, "
|
||||
"or exclamation point (not 'g')")
|
||||
def bad_google_string(): # noqa: D400
|
||||
"""Test a valid something"""
|
||||
|
||||
|
||||
# This is reproducing a bug where AttributeError is raised when parsing class
|
||||
# parameters as functions for Google / Numpy conventions.
|
||||
class Blah: # noqa: D203,D213
|
||||
"""A Blah.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x : int
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, x):
|
||||
pass
|
||||
|
||||
|
||||
expect(os.path.normcase(__file__ if __file__[-1] != 'c' else __file__[:-1]),
|
||||
'D100: Missing docstring in public module')
|
||||
4
resources/test/fixtures/E402.py
vendored
4
resources/test/fixtures/E402.py
vendored
@@ -1,4 +1,8 @@
|
||||
"""Top-level docstring."""
|
||||
|
||||
__all__ = ["y"]
|
||||
__version__: str = "0.1.0"
|
||||
|
||||
import a
|
||||
|
||||
try:
|
||||
|
||||
7
resources/test/fixtures/E501.py
vendored
7
resources/test/fixtures/E501.py
vendored
@@ -5,5 +5,12 @@
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||
"""
|
||||
|
||||
_ = """Lorem ipsum dolor sit amet.
|
||||
|
||||
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||
""" # noqa: E501
|
||||
|
||||
_ = "---------------------------------------------------------------------------AAAAAAA"
|
||||
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜亜"
|
||||
|
||||
41
resources/test/fixtures/F401.py
vendored
41
resources/test/fixtures/F401.py
vendored
@@ -1,41 +0,0 @@
|
||||
from __future__ import all_feature_names
|
||||
import os
|
||||
import functools
|
||||
from datetime import datetime
|
||||
from collections import (
|
||||
Counter,
|
||||
OrderedDict,
|
||||
namedtuple,
|
||||
)
|
||||
import multiprocessing.pool
|
||||
import multiprocessing.process
|
||||
import logging.config
|
||||
import logging.handlers
|
||||
from typing import TYPING_CHECK, NamedTuple, Dict, Type, TypeVar, List, Set, Union, cast
|
||||
|
||||
from blah import ClassA, ClassB, ClassC
|
||||
|
||||
if TYPING_CHECK:
|
||||
from models import Fruit, Nut, Vegetable
|
||||
|
||||
|
||||
class X:
|
||||
datetime: datetime
|
||||
foo: Type["NamedTuple"]
|
||||
|
||||
def a(self) -> "namedtuple":
|
||||
x = os.environ["1"]
|
||||
y = Counter()
|
||||
z = multiprocessing.pool.ThreadPool()
|
||||
|
||||
|
||||
__all__ = ["ClassA"] + ["ClassB"]
|
||||
__all__ += ["ClassC"]
|
||||
|
||||
X = TypeVar("X")
|
||||
Y = TypeVar("Y", bound="Dict")
|
||||
Z = TypeVar("Z", "List", "Set")
|
||||
|
||||
a = list["Fruit"]
|
||||
b = Union["Nut", None]
|
||||
c = cast("Vegetable", b)
|
||||
88
resources/test/fixtures/F401_0.py
vendored
Normal file
88
resources/test/fixtures/F401_0.py
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
from __future__ import all_feature_names
|
||||
import functools, os
|
||||
from datetime import datetime
|
||||
from collections import (
|
||||
Counter,
|
||||
OrderedDict,
|
||||
namedtuple,
|
||||
)
|
||||
import multiprocessing.pool
|
||||
import multiprocessing.process
|
||||
import logging.config
|
||||
import logging.handlers
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
NamedTuple,
|
||||
Dict,
|
||||
Type,
|
||||
TypeVar,
|
||||
List,
|
||||
Set,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from dataclasses import MISSING, field
|
||||
|
||||
from blah import ClassA, ClassB, ClassC
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Fruit, Nut, Vegetable
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import shelve
|
||||
import importlib
|
||||
|
||||
if TYPE_CHECKING:
|
||||
"""Hello, world!"""
|
||||
import pathlib
|
||||
|
||||
z = 1
|
||||
|
||||
|
||||
class X:
|
||||
datetime: datetime
|
||||
foo: Type["NamedTuple"]
|
||||
|
||||
def a(self) -> "namedtuple":
|
||||
x = os.environ["1"]
|
||||
y = Counter()
|
||||
z = multiprocessing.pool.ThreadPool()
|
||||
|
||||
def b(self) -> None:
|
||||
import pickle
|
||||
|
||||
|
||||
__all__ = ["ClassA"] + ["ClassB"]
|
||||
__all__ += ["ClassC"]
|
||||
|
||||
X = TypeVar("X")
|
||||
Y = TypeVar("Y", bound="Dict")
|
||||
Z = TypeVar("Z", "List", "Set")
|
||||
|
||||
a = list["Fruit"]
|
||||
b = Union["""Nut""", None]
|
||||
c = cast("Vegetable", b)
|
||||
|
||||
Field = lambda default=MISSING: field(default=default)
|
||||
|
||||
|
||||
# Test: access a sub-importation via an alias.
|
||||
import pyarrow as pa
|
||||
import pyarrow.csv
|
||||
|
||||
print(pa.csv.read_csv("test.csv"))
|
||||
|
||||
|
||||
# Test: referencing an import via TypeAlias.
|
||||
import sys
|
||||
|
||||
import numpy as np
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import TypeAlias
|
||||
else:
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
|
||||
CustomInt: TypeAlias = "np.int8 | np.int16"
|
||||
5
resources/test/fixtures/F401_1.py
vendored
Normal file
5
resources/test/fixtures/F401_1.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Access a sub-importation via an alias."""
|
||||
import pyarrow as pa
|
||||
import pyarrow.csv
|
||||
|
||||
print(pa.csv.read_csv("test.csv"))
|
||||
12
resources/test/fixtures/F401_2.py
vendored
Normal file
12
resources/test/fixtures/F401_2.py
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
"""Test: referencing an import via TypeAlias."""
|
||||
import sys
|
||||
|
||||
import numpy as np
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import TypeAlias
|
||||
else:
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
|
||||
CustomInt: TypeAlias = "np.int8 | np.int16"
|
||||
14
resources/test/fixtures/F401_3.py
vendored
Normal file
14
resources/test/fixtures/F401_3.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
"""Test: referencing an import via TypeAlias (with future annotations)."""
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
|
||||
import numpy as np
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import TypeAlias
|
||||
else:
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
|
||||
CustomInt: TypeAlias = np.int8 | np.int16
|
||||
14
resources/test/fixtures/F401_4.py
vendored
Normal file
14
resources/test/fixtures/F401_4.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
"""Test: referencing an import via TypeAlias (with future annotations and quotes)."""
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
|
||||
import numpy as np
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import TypeAlias
|
||||
else:
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
|
||||
CustomInt: TypeAlias = "np.int8 | np.int16"
|
||||
1
resources/test/fixtures/F404.py
vendored
1
resources/test/fixtures/F404.py
vendored
@@ -1,4 +1,3 @@
|
||||
from __future__ import print_function
|
||||
"""Docstring"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
|
||||
42
resources/test/fixtures/F821.py
vendored
42
resources/test/fixtures/F821.py
vendored
@@ -89,3 +89,45 @@ A = (
|
||||
f'B'
|
||||
f'{B}'
|
||||
)
|
||||
|
||||
|
||||
from typing import Annotated, Literal
|
||||
|
||||
|
||||
def arbitrary_callable() -> None:
|
||||
...
|
||||
|
||||
|
||||
class PEP593Test:
|
||||
field: Annotated[
|
||||
int,
|
||||
"base64",
|
||||
arbitrary_callable(),
|
||||
123,
|
||||
(1, 2, 3),
|
||||
]
|
||||
field_with_stringified_type: Annotated[
|
||||
"PEP593Test",
|
||||
123,
|
||||
]
|
||||
field_with_undefined_stringified_type: Annotated[
|
||||
"PEP593Test123",
|
||||
123,
|
||||
]
|
||||
field_with_nested_subscript: Annotated[
|
||||
dict[Literal["foo"], str],
|
||||
123,
|
||||
]
|
||||
field_with_undefined_nested_subscript: Annotated[
|
||||
dict["foo", "bar"], # Expected to fail as undefined.
|
||||
123,
|
||||
]
|
||||
|
||||
|
||||
def in_ipython_notebook() -> bool:
|
||||
try:
|
||||
# autoimported by notebooks
|
||||
get_ipython() # type: ignore[name-defined]
|
||||
except NameError:
|
||||
return False # not in notebook
|
||||
return True
|
||||
|
||||
12
resources/test/fixtures/F841.py
vendored
12
resources/test/fixtures/F841.py
vendored
@@ -10,13 +10,13 @@ except ValueError as e:
|
||||
print(e)
|
||||
|
||||
|
||||
def f():
|
||||
def f1():
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + y
|
||||
|
||||
|
||||
def g():
|
||||
def f2():
|
||||
foo = (1, 2)
|
||||
(a, b) = (1, 2)
|
||||
|
||||
@@ -26,6 +26,12 @@ def g():
|
||||
(x, y) = baz = bar
|
||||
|
||||
|
||||
def h():
|
||||
def f3():
|
||||
locals()
|
||||
x = 1
|
||||
|
||||
|
||||
def f4():
|
||||
_ = 1
|
||||
__ = 1
|
||||
_discarded = 1
|
||||
|
||||
60
resources/test/fixtures/M001.py
vendored
Normal file
60
resources/test/fixtures/M001.py
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
def f() -> None:
|
||||
# Valid
|
||||
a = 1 # noqa
|
||||
|
||||
# Valid
|
||||
b = 2 # noqa: F841
|
||||
|
||||
# Invalid
|
||||
c = 1 # noqa
|
||||
print(c)
|
||||
|
||||
# Invalid
|
||||
d = 1 # noqa: E501
|
||||
|
||||
# Invalid
|
||||
d = 1 # noqa: F841, E501
|
||||
|
||||
# Invalid (and unimplemented)
|
||||
d = 1 # noqa: F841, W191
|
||||
|
||||
|
||||
# Valid
|
||||
_ = """Lorem ipsum dolor sit amet.
|
||||
|
||||
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||
""" # noqa: E501
|
||||
|
||||
# Valid
|
||||
_ = """Lorem ipsum dolor sit amet.
|
||||
|
||||
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||
""" # noqa
|
||||
|
||||
# Invalid
|
||||
_ = """Lorem ipsum dolor sit amet.
|
||||
|
||||
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||
""" # noqa: E501, F841
|
||||
|
||||
# Invalid
|
||||
_ = """Lorem ipsum dolor sit amet.
|
||||
|
||||
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor.
|
||||
""" # noqa: E501
|
||||
|
||||
# Invalid
|
||||
_ = """Lorem ipsum dolor sit amet.
|
||||
|
||||
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor.
|
||||
""" # noqa
|
||||
34
resources/test/fixtures/N801.py
vendored
Normal file
34
resources/test/fixtures/N801.py
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
class bad:
|
||||
pass
|
||||
|
||||
|
||||
class _bad:
|
||||
pass
|
||||
|
||||
|
||||
class bad_class:
|
||||
pass
|
||||
|
||||
|
||||
class Bad_Class:
|
||||
pass
|
||||
|
||||
|
||||
class BAD_CLASS:
|
||||
pass
|
||||
|
||||
|
||||
class Good:
|
||||
pass
|
||||
|
||||
|
||||
class _Good:
|
||||
pass
|
||||
|
||||
|
||||
class GoodClass:
|
||||
pass
|
||||
|
||||
|
||||
class GOOD:
|
||||
pass
|
||||
26
resources/test/fixtures/N802.py
vendored
Normal file
26
resources/test/fixtures/N802.py
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
def Bad():
|
||||
pass
|
||||
|
||||
|
||||
def _Bad():
|
||||
pass
|
||||
|
||||
|
||||
def BAD():
|
||||
pass
|
||||
|
||||
|
||||
def BAD_FUNC():
|
||||
pass
|
||||
|
||||
|
||||
def good():
|
||||
pass
|
||||
|
||||
|
||||
def _good():
|
||||
pass
|
||||
|
||||
|
||||
def good_func():
|
||||
pass
|
||||
7
resources/test/fixtures/N803.py
vendored
Normal file
7
resources/test/fixtures/N803.py
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
def func(a, A):
|
||||
return a, A
|
||||
|
||||
|
||||
class Class:
|
||||
def method(self, a, A):
|
||||
return a, A
|
||||
19
resources/test/fixtures/N804.py
vendored
Normal file
19
resources/test/fixtures/N804.py
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
class Class:
|
||||
@classmethod
|
||||
def bad_class_method(this):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def good_class_method(cls):
|
||||
pass
|
||||
|
||||
def method(self):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def static_method(x):
|
||||
return x
|
||||
|
||||
|
||||
def func(x):
|
||||
return x
|
||||
26
resources/test/fixtures/N805.py
vendored
Normal file
26
resources/test/fixtures/N805.py
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
import random
|
||||
|
||||
|
||||
class Class:
|
||||
def bad_method(this):
|
||||
pass
|
||||
|
||||
if random.random(0, 2) == 0:
|
||||
|
||||
def extra_bad_method(this):
|
||||
pass
|
||||
|
||||
def good_method(self):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def class_method(cls):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def static_method(x):
|
||||
return x
|
||||
|
||||
|
||||
def func(x):
|
||||
return x
|
||||
15
resources/test/fixtures/N807.py
vendored
Normal file
15
resources/test/fixtures/N807.py
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
def __bad__():
|
||||
pass
|
||||
|
||||
|
||||
def __good():
|
||||
pass
|
||||
|
||||
|
||||
def good__():
|
||||
pass
|
||||
|
||||
|
||||
class Class:
|
||||
def __good__(self):
|
||||
pass
|
||||
3
resources/test/fixtures/N811.py
vendored
Normal file
3
resources/test/fixtures/N811.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import mod.CONST as const
|
||||
from mod import CONSTANT as constant
|
||||
from mod import ANOTHER_CONSTANT as another_constant
|
||||
3
resources/test/fixtures/N812.py
vendored
Normal file
3
resources/test/fixtures/N812.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import modl.lowercase as Lower
|
||||
from mod import lowercase as Lowercase
|
||||
from mod import another_lowercase as AnotherLowercase
|
||||
3
resources/test/fixtures/N813.py
vendored
Normal file
3
resources/test/fixtures/N813.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import mod.Camel as camel
|
||||
from mod import CamelCase as camelcase
|
||||
from mod import AnotherCamelCase as another_camelcase
|
||||
3
resources/test/fixtures/N814.py
vendored
Normal file
3
resources/test/fixtures/N814.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import mod.Camel as CAMEL
|
||||
from mod import CamelCase as CAMELCASE
|
||||
from mod import AnotherCamelCase as ANOTHER_CAMELCASE
|
||||
2
resources/test/fixtures/N817.py
vendored
Normal file
2
resources/test/fixtures/N817.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import mod.CaMel as CM
|
||||
from mod import CamelCase as CC
|
||||
3
resources/test/fixtures/R002.py
vendored
3
resources/test/fixtures/R002.py
vendored
@@ -1,3 +0,0 @@
|
||||
self.assertEquals (1, 2)
|
||||
self.assertEquals(1, 2)
|
||||
self.assertEqual(3, 4)
|
||||
1
resources/test/fixtures/T201.py
vendored
Normal file
1
resources/test/fixtures/T201.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
print("Hello, world!") # T201
|
||||
10
resources/test/fixtures/T203.py
vendored
Normal file
10
resources/test/fixtures/T203.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
from pprint import pprint
|
||||
|
||||
pprint("Hello, world!") # T203
|
||||
|
||||
|
||||
import pprint
|
||||
|
||||
pprint.pprint("Hello, world!") # T203
|
||||
|
||||
pprint.pformat("Hello, world!")
|
||||
13
resources/test/fixtures/U001.py
vendored
Normal file
13
resources/test/fixtures/U001.py
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
class A:
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
class B:
|
||||
__metaclass__ = type
|
||||
|
||||
def __init__(self) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class C(metaclass=type):
|
||||
pass
|
||||
15
resources/test/fixtures/U002.py
vendored
Normal file
15
resources/test/fixtures/U002.py
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
from os.path import abspath
|
||||
|
||||
x = abspath(__file__)
|
||||
|
||||
|
||||
import os
|
||||
|
||||
|
||||
y = os.path.abspath(__file__)
|
||||
|
||||
|
||||
from os import path
|
||||
|
||||
|
||||
z = path.abspath(__file__)
|
||||
5
resources/test/fixtures/U003.py
vendored
Normal file
5
resources/test/fixtures/U003.py
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
type('')
|
||||
type(b'')
|
||||
type(0)
|
||||
type(0.)
|
||||
type(0j)
|
||||
10
resources/test/fixtures/U005.py
vendored
Normal file
10
resources/test/fixtures/U005.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import unittest
|
||||
|
||||
|
||||
class Suite(unittest.TestCase):
|
||||
def test(self) -> None:
|
||||
self.assertEquals (1, 2)
|
||||
self.assertEquals(1, 2)
|
||||
self.assertEqual(3, 4)
|
||||
self.failUnlessAlmostEqual(1, 1.1)
|
||||
self.assertNotRegexpMatches("a", "b")
|
||||
12
resources/test/fixtures/U006.py
vendored
Normal file
12
resources/test/fixtures/U006.py
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
from typing import List
|
||||
|
||||
|
||||
def f(x: List[str]) -> None:
|
||||
...
|
||||
|
||||
|
||||
import typing
|
||||
|
||||
|
||||
def f(x: typing.List[str]) -> None:
|
||||
...
|
||||
40
resources/test/fixtures/U007.py
vendored
Normal file
40
resources/test/fixtures/U007.py
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def f(x: Optional[str]) -> None:
|
||||
...
|
||||
|
||||
|
||||
import typing
|
||||
|
||||
|
||||
def f(x: typing.Optional[str]) -> None:
|
||||
...
|
||||
|
||||
|
||||
from typing import Union
|
||||
|
||||
|
||||
def f(x: Union[str, int, Union[float, bytes]]) -> None:
|
||||
...
|
||||
|
||||
|
||||
import typing
|
||||
|
||||
|
||||
def f(x: typing.Union[str, int]) -> None:
|
||||
...
|
||||
|
||||
|
||||
from typing import Union
|
||||
|
||||
|
||||
def f(x: "Union[str, int, Union[float, bytes]]") -> None:
|
||||
...
|
||||
|
||||
|
||||
import typing
|
||||
|
||||
|
||||
def f(x: "typing.Union[str, int]") -> None:
|
||||
...
|
||||
65
resources/test/fixtures/U008.py
vendored
Normal file
65
resources/test/fixtures/U008.py
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
class Parent:
|
||||
def method(self):
|
||||
pass
|
||||
|
||||
def wrong(self):
|
||||
pass
|
||||
|
||||
|
||||
class Child(Parent):
|
||||
def method(self):
|
||||
parent = super() # ok
|
||||
super().method() # ok
|
||||
Parent.method(self) # ok
|
||||
Parent.super(1, 2) # ok
|
||||
|
||||
def wrong(self):
|
||||
parent = super(Child, self) # wrong
|
||||
super(Child, self).method # wrong
|
||||
super(
|
||||
Child,
|
||||
self,
|
||||
).method() # wrong
|
||||
|
||||
|
||||
class BaseClass:
|
||||
def f(self):
|
||||
print("f")
|
||||
|
||||
|
||||
def defined_outside(self):
|
||||
super(MyClass, self).f() # CANNOT use super()
|
||||
|
||||
|
||||
class MyClass(BaseClass):
|
||||
def normal(self):
|
||||
super(MyClass, self).f() # can use super()
|
||||
super().f()
|
||||
|
||||
def different_argument(self, other):
|
||||
super(MyClass, other).f() # CANNOT use super()
|
||||
|
||||
def comprehension_scope(self):
|
||||
[super(MyClass, self).f() for x in [1]] # CANNOT use super()
|
||||
|
||||
def inner_functions(self):
|
||||
def outer_argument():
|
||||
super(MyClass, self).f() # CANNOT use super()
|
||||
|
||||
def inner_argument(self):
|
||||
super(MyClass, self).f() # can use super()
|
||||
super().f()
|
||||
|
||||
outer_argument()
|
||||
inner_argument(self)
|
||||
|
||||
def inner_class(self):
|
||||
class InnerClass:
|
||||
super(MyClass, self).f() # CANNOT use super()
|
||||
|
||||
def method(inner_self):
|
||||
super(MyClass, self).f() # CANNOT use super()
|
||||
|
||||
InnerClass().method()
|
||||
|
||||
defined_outside = defined_outside
|
||||
2
resources/test/fixtures/W292_0.py
vendored
Normal file
2
resources/test/fixtures/W292_0.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
def fn() -> None:
|
||||
pass
|
||||
2
resources/test/fixtures/W292_1.py
vendored
Normal file
2
resources/test/fixtures/W292_1.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
def fn() -> None:
|
||||
pass # noqa: W292
|
||||
2
resources/test/fixtures/W292_2.py
vendored
Normal file
2
resources/test/fixtures/W292_2.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
def fn() -> None:
|
||||
pass
|
||||
108
resources/test/fixtures/canonical_google_examples.py
vendored
Normal file
108
resources/test/fixtures/canonical_google_examples.py
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
"""A one line summary of the module or program, terminated by a period.
|
||||
|
||||
Leave one blank line. The rest of this docstring should contain an
|
||||
overall description of the module or program. Optionally, it may also
|
||||
contain a brief description of exported classes and functions and/or usage
|
||||
examples.
|
||||
|
||||
Typical usage example:
|
||||
|
||||
foo = ClassFoo()
|
||||
bar = foo.FunctionBar()
|
||||
"""
|
||||
# above: "2.8.2 Modules" section example
|
||||
# https://google.github.io/styleguide/pyguide.html#382-modules
|
||||
|
||||
# Examples from the official "Google Python Style Guide" documentation:
|
||||
# * As HTML: https://google.github.io/styleguide/pyguide.html
|
||||
# * Source Markdown:
|
||||
# https://github.com/google/styleguide/blob/gh-pages/pyguide.md
|
||||
|
||||
import os
|
||||
from .expected import Expectation
|
||||
|
||||
expectation = Expectation()
|
||||
expect = expectation.expect
|
||||
|
||||
# module docstring expected violations:
|
||||
expectation.expected.add((
|
||||
os.path.normcase(__file__),
|
||||
"D213: Multi-line docstring summary should start at the second line"))
|
||||
|
||||
|
||||
# "3.8.3 Functions and Methods" section example
|
||||
# https://google.github.io/styleguide/pyguide.html#383-functions-and-methods
|
||||
@expect("D213: Multi-line docstring summary should start at the second line",
|
||||
arg_count=3)
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Fetch', not 'Fetches')", arg_count=3)
|
||||
@expect("D406: Section name should end with a newline "
|
||||
"('Raises', not 'Raises:')", arg_count=3)
|
||||
@expect("D406: Section name should end with a newline "
|
||||
"('Returns', not 'Returns:')", arg_count=3)
|
||||
@expect("D407: Missing dashed underline after section ('Raises')", arg_count=3)
|
||||
@expect("D407: Missing dashed underline after section ('Returns')",
|
||||
arg_count=3)
|
||||
@expect("D413: Missing blank line after last section ('Raises')", arg_count=3)
|
||||
def fetch_bigtable_rows(big_table, keys, other_silly_variable=None):
|
||||
"""Fetches rows from a Bigtable.
|
||||
|
||||
Retrieves rows pertaining to the given keys from the Table instance
|
||||
represented by big_table. Silly things may happen if
|
||||
other_silly_variable is not None.
|
||||
|
||||
Args:
|
||||
big_table: An open Bigtable Table instance.
|
||||
keys: A sequence of strings representing the key of each table row
|
||||
to fetch.
|
||||
other_silly_variable: Another optional variable, that has a much
|
||||
longer name than the other args, and which does nothing.
|
||||
|
||||
Returns:
|
||||
A dict mapping keys to the corresponding table row data
|
||||
fetched. Each row is represented as a tuple of strings. For
|
||||
example:
|
||||
|
||||
{'Serak': ('Rigel VII', 'Preparer'),
|
||||
'Zim': ('Irk', 'Invader'),
|
||||
'Lrrr': ('Omicron Persei 8', 'Emperor')}
|
||||
|
||||
If a key from the keys argument is missing from the dictionary,
|
||||
then that row was not found in the table.
|
||||
|
||||
Raises:
|
||||
IOError: An error occurred accessing the bigtable.Table object.
|
||||
"""
|
||||
|
||||
|
||||
# "3.8.4 Classes" section example
|
||||
# https://google.github.io/styleguide/pyguide.html#384-classes
|
||||
@expect("D203: 1 blank line required before class docstring (found 0)")
|
||||
@expect("D213: Multi-line docstring summary should start at the second line")
|
||||
@expect("D406: Section name should end with a newline "
|
||||
"('Attributes', not 'Attributes:')")
|
||||
@expect("D407: Missing dashed underline after section ('Attributes')")
|
||||
@expect("D413: Missing blank line after last section ('Attributes')")
|
||||
class SampleClass:
|
||||
"""Summary of class here.
|
||||
|
||||
Longer class information....
|
||||
Longer class information....
|
||||
|
||||
Attributes:
|
||||
likes_spam: A boolean indicating if we like SPAM or not.
|
||||
eggs: An integer count of the eggs we have laid.
|
||||
"""
|
||||
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Init', not 'Inits')", arg_count=2)
|
||||
def __init__(self, likes_spam=False):
|
||||
"""Inits SampleClass with blah."""
|
||||
if self: # added to avoid NameError when run via @expect decorator
|
||||
self.likes_spam = likes_spam
|
||||
self.eggs = 0
|
||||
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Perform', not 'Performs')", arg_count=1)
|
||||
def public_method(self):
|
||||
"""Performs operation blah."""
|
||||
163
resources/test/fixtures/canonical_numpy_examples.py
vendored
Normal file
163
resources/test/fixtures/canonical_numpy_examples.py
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
"""This is the docstring for the example.py module. Modules names should
|
||||
have short, all-lowercase names. The module name may have underscores if
|
||||
this improves readability.
|
||||
|
||||
Every module should have a docstring at the very top of the file. The
|
||||
module's docstring may extend over multiple lines. If your docstring does
|
||||
extend over multiple lines, the closing three quotation marks must be on
|
||||
a line by itself, preferably preceded by a blank line.
|
||||
|
||||
"""
|
||||
|
||||
# Example source file from the official "numpydoc docstring guide"
|
||||
# documentation (with the modification of commenting out all the original
|
||||
# ``import`` lines, plus adding this note and ``Expectation`` code):
|
||||
# * As HTML: https://numpydoc.readthedocs.io/en/latest/example.html
|
||||
# * Source Python:
|
||||
# https://github.com/numpy/numpydoc/blob/master/doc/example.py
|
||||
|
||||
# from __future__ import division, absolute_import, print_function
|
||||
#
|
||||
# import os # standard library imports first
|
||||
#
|
||||
# Do NOT import using *, e.g. from numpy import *
|
||||
#
|
||||
# Import the module using
|
||||
#
|
||||
# import numpy
|
||||
#
|
||||
# instead or import individual functions as needed, e.g
|
||||
#
|
||||
# from numpy import array, zeros
|
||||
#
|
||||
# If you prefer the use of abbreviated module names, we suggest the
|
||||
# convention used by NumPy itself::
|
||||
#
|
||||
# import numpy as np
|
||||
# import matplotlib as mpl
|
||||
# import matplotlib.pyplot as plt
|
||||
#
|
||||
# These abbreviated names are not to be used in docstrings; users must
|
||||
# be able to paste and execute docstrings after importing only the
|
||||
# numpy module itself, unabbreviated.
|
||||
|
||||
import os
|
||||
from .expected import Expectation
|
||||
|
||||
expectation = Expectation()
|
||||
expect = expectation.expect
|
||||
|
||||
# module docstring expected violations:
|
||||
expectation.expected.add((
|
||||
os.path.normcase(__file__),
|
||||
"D205: 1 blank line required between summary line and description "
|
||||
"(found 0)"))
|
||||
expectation.expected.add((
|
||||
os.path.normcase(__file__),
|
||||
"D213: Multi-line docstring summary should start at the second line"))
|
||||
expectation.expected.add((
|
||||
os.path.normcase(__file__),
|
||||
"D400: First line should end with a period (not 'd')"))
|
||||
expectation.expected.add((
|
||||
os.path.normcase(__file__),
|
||||
"D404: First word of the docstring should not be `This`"))
|
||||
expectation.expected.add((
|
||||
os.path.normcase(__file__),
|
||||
"D415: First line should end with a period, question mark, or exclamation "
|
||||
"point (not 'd')"))
|
||||
|
||||
|
||||
@expect("D213: Multi-line docstring summary should start at the second line",
|
||||
arg_count=3)
|
||||
@expect("D401: First line should be in imperative mood; try rephrasing "
|
||||
"(found 'A')", arg_count=3)
|
||||
@expect("D413: Missing blank line after last section ('Examples')",
|
||||
arg_count=3)
|
||||
def foo(var1, var2, long_var_name='hi'):
|
||||
r"""A one-line summary that does not use variable names.
|
||||
|
||||
Several sentences providing an extended description. Refer to
|
||||
variables using back-ticks, e.g. `var`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
var1 : array_like
|
||||
Array_like means all those objects -- lists, nested lists, etc. --
|
||||
that can be converted to an array. We can also refer to
|
||||
variables like `var1`.
|
||||
var2 : int
|
||||
The type above can either refer to an actual Python type
|
||||
(e.g. ``int``), or describe the type of the variable in more
|
||||
detail, e.g. ``(N,) ndarray`` or ``array_like``.
|
||||
long_var_name : {'hi', 'ho'}, optional
|
||||
Choices in brackets, default first when optional.
|
||||
|
||||
Returns
|
||||
-------
|
||||
type
|
||||
Explanation of anonymous return value of type ``type``.
|
||||
describe : type
|
||||
Explanation of return value named `describe`.
|
||||
out : type
|
||||
Explanation of `out`.
|
||||
type_without_description
|
||||
|
||||
Other Parameters
|
||||
----------------
|
||||
only_seldom_used_keywords : type
|
||||
Explanation
|
||||
common_parameters_listed_above : type
|
||||
Explanation
|
||||
|
||||
Raises
|
||||
------
|
||||
BadException
|
||||
Because you shouldn't have done that.
|
||||
|
||||
See Also
|
||||
--------
|
||||
numpy.array : Relationship (optional).
|
||||
numpy.ndarray : Relationship (optional), which could be fairly long, in
|
||||
which case the line wraps here.
|
||||
numpy.dot, numpy.linalg.norm, numpy.eye
|
||||
|
||||
Notes
|
||||
-----
|
||||
Notes about the implementation algorithm (if needed).
|
||||
|
||||
This can have multiple paragraphs.
|
||||
|
||||
You may include some math:
|
||||
|
||||
.. math:: X(e^{j\omega } ) = x(n)e^{ - j\omega n}
|
||||
|
||||
And even use a Greek symbol like :math:`\omega` inline.
|
||||
|
||||
References
|
||||
----------
|
||||
Cite the relevant literature, e.g. [1]_. You may also cite these
|
||||
references in the notes section above.
|
||||
|
||||
.. [1] O. McNoleg, "The integration of GIS, remote sensing,
|
||||
expert systems and adaptive co-kriging for environmental habitat
|
||||
modelling of the Highland Haggis using object-oriented, fuzzy-logic
|
||||
and neural-network techniques," Computers & Geosciences, vol. 22,
|
||||
pp. 585-588, 1996.
|
||||
|
||||
Examples
|
||||
--------
|
||||
These are written in doctest format, and should illustrate how to
|
||||
use the function.
|
||||
|
||||
>>> a = [1, 2, 3]
|
||||
>>> print([x + 3 for x in a])
|
||||
[4, 5, 6]
|
||||
>>> print("a\nb")
|
||||
a
|
||||
b
|
||||
"""
|
||||
# After closing class docstring, there should be one blank line to
|
||||
# separate following codes (according to PEP257).
|
||||
# But for function, method and module, there should be no blank lines
|
||||
# after closing the docstring.
|
||||
pass
|
||||
@@ -2,7 +2,10 @@ from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from models import Fruit, Nut
|
||||
from models import (
|
||||
Fruit,
|
||||
Nut,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
497 resources/test/fixtures/sections.py (vendored, new file)
@@ -0,0 +1,497 @@
|
||||
"""A valid module docstring."""
|
||||
|
||||
from .expected import Expectation
|
||||
|
||||
expectation = Expectation()
|
||||
expect = expectation.expect
|
||||
|
||||
|
||||
_D213 = 'D213: Multi-line docstring summary should start at the second line'
|
||||
_D400 = "D400: First line should end with a period (not '!')"
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D405: Section name should be properly capitalized "
|
||||
"('Returns', not 'returns')")
|
||||
def not_capitalized(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
returns
|
||||
-------
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D406: Section name should end with a newline "
|
||||
"('Returns', not 'Returns:')")
|
||||
def superfluous_suffix(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns:
|
||||
-------
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D407: Missing dashed underline after section ('Returns')")
|
||||
def no_underline(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D407: Missing dashed underline after section ('Returns')")
|
||||
@expect("D414: Section has no content ('Returns')")
|
||||
def no_underline_and_no_description(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D410: Missing blank line after section ('Returns')")
|
||||
@expect("D414: Section has no content ('Returns')")
|
||||
@expect("D411: Missing blank line before section ('Yields')")
|
||||
@expect("D414: Section has no content ('Yields')")
|
||||
def consecutive_sections(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Yields
|
||||
------
|
||||
|
||||
Raises
|
||||
------
|
||||
Questions.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D408: Section underline should be in the line following the "
|
||||
"section's name ('Returns')")
|
||||
def blank_line_before_underline(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
|
||||
-------
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D409: Section underline should match the length of its name "
|
||||
"(Expected 7 dashes in section 'Returns', got 2)")
|
||||
def bad_underline_length(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
--
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D413: Missing blank line after last section ('Returns')")
|
||||
def no_blank_line_after_last_section(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
-------
|
||||
A value of some sort.
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D411: Missing blank line before section ('Returns')")
|
||||
def no_blank_line_before_section(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
The function's description.
|
||||
Returns
|
||||
-------
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D214: Section is over-indented ('Returns')")
|
||||
def section_overindented(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
-------
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D215: Section underline is over-indented (in section 'Returns')")
|
||||
def section_underline_overindented(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
-------
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D215: Section underline is over-indented (in section 'Returns')")
|
||||
@expect("D413: Missing blank line after last section ('Returns')")
|
||||
@expect("D414: Section has no content ('Returns')")
|
||||
def section_underline_overindented_and_contentless(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Returns
|
||||
-------
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
def ignore_non_actual_section(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
This is the function's description, which will also specify what it
|
||||
returns
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D401: First line should be in imperative mood "
|
||||
"(perhaps 'Return', not 'Returns')")
|
||||
@expect("D400: First line should end with a period (not 's')")
|
||||
@expect("D415: First line should end with a period, question "
|
||||
"mark, or exclamation point (not 's')")
|
||||
@expect("D205: 1 blank line required between summary line and description "
|
||||
"(found 0)")
|
||||
def section_name_in_first_line(): # noqa: D416
|
||||
"""Returns
|
||||
-------
|
||||
A value of some sort.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D405: Section name should be properly capitalized "
|
||||
"('Short Summary', not 'Short summary')")
|
||||
@expect("D412: No blank lines allowed between a section header and its "
|
||||
"content ('Short Summary')")
|
||||
@expect("D409: Section underline should match the length of its name "
|
||||
"(Expected 7 dashes in section 'Returns', got 6)")
|
||||
@expect("D410: Missing blank line after section ('Returns')")
|
||||
@expect("D411: Missing blank line before section ('Raises')")
|
||||
@expect("D406: Section name should end with a newline "
|
||||
"('Raises', not 'Raises:')")
|
||||
@expect("D407: Missing dashed underline after section ('Raises')")
|
||||
def multiple_sections(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Short summary
|
||||
-------------
|
||||
|
||||
This is the function's description, which will also specify what it
|
||||
returns.
|
||||
|
||||
Returns
|
||||
------
|
||||
Many many wonderful things.
|
||||
Raises:
|
||||
My attention.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
def false_positive_section_prefix(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
attributes_are_fun: attributes for the function.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
def section_names_as_parameter_names(): # noqa: D416
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
notes : list
|
||||
A list of wonderful notes.
|
||||
examples: list
|
||||
A list of horrible examples.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D414: Section has no content ('Returns')")
|
||||
def valid_google_style_section(): # noqa: D406, D407
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Args:
|
||||
note: A random string.
|
||||
|
||||
Returns:
|
||||
|
||||
Raises:
|
||||
RandomError: A random error that occurs randomly.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D416: Section name should end with a colon "
|
||||
"('Args:', not 'Args')")
|
||||
def missing_colon_google_style_section(): # noqa: D406, D407
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Args
|
||||
note: A random string.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) y are missing descriptions in "
|
||||
"'bar' docstring)", func_name="bar")
|
||||
def _test_nested_functions():
|
||||
x = 1
|
||||
|
||||
def bar(y=2): # noqa: D207, D213, D406, D407
|
||||
"""Nested function test for docstrings.
|
||||
|
||||
Will this work when referencing x?
|
||||
|
||||
Args:
|
||||
x: Test something
|
||||
that is broken.
|
||||
|
||||
"""
|
||||
print(x)
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) y are missing descriptions in "
|
||||
"'test_missing_google_args' docstring)")
|
||||
def test_missing_google_args(x=1, y=2, _private=3): # noqa: D406, D407
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Args:
|
||||
x (int): The greatest integer.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class TestGoogle: # noqa: D203
|
||||
"""Test class."""
|
||||
|
||||
def test_method(self, test, another_test, _): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Args:
|
||||
test: A parameter.
|
||||
another_test: Another parameter.
|
||||
|
||||
"""
|
||||
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) test, y, z are missing descriptions in "
|
||||
"'test_missing_args' docstring)", arg_count=5)
|
||||
def test_missing_args(self, test, x, y, z=3, _private_arg=3): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Args:
|
||||
x: Another parameter.
|
||||
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) test, y, z are missing descriptions in "
|
||||
"'test_missing_args_class_method' docstring)", arg_count=5)
|
||||
def test_missing_args_class_method(cls, test, x, y, _, z=3): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Args:
|
||||
x: Another parameter. The parameter below is missing description.
|
||||
y:
|
||||
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) a, y, z are missing descriptions in "
|
||||
"'test_missing_args_static_method' docstring)", arg_count=4)
|
||||
def test_missing_args_static_method(a, x, y, _test, z=3): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Args:
|
||||
x: Another parameter.
|
||||
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) a, b are missing descriptions in "
|
||||
"'test_missing_docstring' docstring)", arg_count=2)
|
||||
def test_missing_docstring(a, b): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Args:
|
||||
a:
|
||||
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def test_hanging_indent(skip, verbose): # noqa: D213, D407
|
||||
"""Do stuff.
|
||||
|
||||
Args:
|
||||
skip (:attr:`.Skip`):
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
|
||||
Etiam at tellus a tellus faucibus maximus. Curabitur tellus
|
||||
mauris, semper id vehicula ac, feugiat ut tortor.
|
||||
verbose (bool):
|
||||
If True, print out as much information as possible.
|
||||
If False, print out concise "one-liner" information.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@expect(_D213)
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) y are missing descriptions in "
|
||||
"'test_missing_numpy_args' docstring)")
|
||||
def test_missing_numpy_args(_private_arg=0, x=1, y=2): # noqa: D406, D407
|
||||
"""Toggle the gizmo.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x : int
|
||||
The greatest integer in the history \
|
||||
of the entire world.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class TestNumpy: # noqa: D203
|
||||
"""Test class."""
|
||||
|
||||
def test_method(self, test, another_test, z, _, x=1, y=2, _private_arg=1): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Some long string with a \
|
||||
line continuation.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
test, another_test
|
||||
Some parameters without type.
|
||||
z : some parameter with a very long type description that requires a \
|
||||
line continuation.
|
||||
But no further description.
|
||||
x, y : int
|
||||
Some integer parameters.
|
||||
|
||||
"""
|
||||
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) test, y, z are missing descriptions in "
|
||||
"'test_missing_args' docstring)", arg_count=5)
|
||||
def test_missing_args(self, test, x, y, z=3, t=1, _private=0): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x, t : int
|
||||
Some parameters.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) test, y, z are missing descriptions in "
|
||||
"'test_missing_args_class_method' docstring)", arg_count=4)
|
||||
def test_missing_args_class_method(cls, test, x, y, z=3): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
z
|
||||
x
|
||||
Another parameter. The parameters y, test below are
|
||||
missing descriptions. The parameter z above is also missing
|
||||
a description.
|
||||
y
|
||||
test
|
||||
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) a, z are missing descriptions in "
|
||||
"'test_missing_args_static_method' docstring)", arg_count=3)
|
||||
def test_missing_args_static_method(a, x, y, z=3, t=1): # noqa: D213, D407
|
||||
"""Test a valid args section.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x, y
|
||||
Another parameter.
|
||||
t : int
|
||||
Yet another parameter.
|
||||
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def test_mixing_numpy_and_google(danger): # noqa: D213
|
||||
"""Repro for #388.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
danger
|
||||
Zoneeeeee!
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class TestIncorrectIndent: # noqa: D203
|
||||
"""Test class."""
|
||||
|
||||
@expect("D417: Missing argument descriptions in the docstring "
|
||||
"(argument(s) y are missing descriptions in "
|
||||
"'test_incorrect_indent' docstring)", arg_count=3)
|
||||
def test_incorrect_indent(self, x=1, y=2): # noqa: D207, D213, D407
|
||||
"""Reproducing issue #437.
|
||||
|
||||
Testing this incorrectly indented docstring.
|
||||
|
||||
Args:
|
||||
x: Test argument.
|
||||
|
||||
"""
|
||||
19 scripts/.flake8 (new file)
@@ -0,0 +1,19 @@
|
||||
[flake8]
|
||||
exclude =
|
||||
# Defaults
|
||||
.svn,
|
||||
CVS,
|
||||
.bzr,
|
||||
.hg,
|
||||
.git,
|
||||
__pycache__,
|
||||
.tox,
|
||||
.idea,
|
||||
.mypy_cache,
|
||||
.venv,
|
||||
node_modules,
|
||||
# Custom
|
||||
_state_machine.py,
|
||||
test_fstring.py,
|
||||
bad_coding2.py,
|
||||
badsyntax_*.py
|
||||
23 setup.py (new file)
@@ -0,0 +1,23 @@
|
||||
import sys
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
sys.stderr.write(
|
||||
"""
|
||||
===============================
|
||||
Unsupported installation method
|
||||
===============================
|
||||
ruff no longer supports installation with `python setup.py install`.
|
||||
Please use `python -m pip install .` instead.
|
||||
"""
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
# The below code will never execute, however GitHub is particularly
|
||||
# picky about where it finds Python packaging metadata.
|
||||
# See: https://github.com/github/feedback/discussions/6456
|
||||
#
|
||||
# To be removed once GitHub catches up.
|
||||
|
||||
setup(name="ruff", install_requires=[])
|
||||
@@ -1,4 +1,4 @@
|
||||
pub mod checks;
|
||||
pub mod helpers;
|
||||
pub mod operations;
|
||||
pub mod relocate;
|
||||
pub mod types;
|
||||
|
||||
@@ -1,650 +0,0 @@
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use itertools::izip;
|
||||
use rustpython_parser::ast::{
|
||||
Arg, Arguments, Cmpop, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprKind, Keyword,
|
||||
Location, Stmt, StmtKind, Unaryop,
|
||||
};
|
||||
|
||||
use crate::ast::operations::SourceCodeLocator;
|
||||
use crate::ast::types::{Binding, BindingKind, CheckLocator, FunctionScope, Scope, ScopeKind};
|
||||
use crate::autofix::{fixer, fixes};
|
||||
use crate::checks::{Check, CheckKind, Fix, RejectedCmpop};
|
||||
|
||||
/// Check IfTuple compliance.
|
||||
pub fn check_if_tuple(test: &Expr, location: Location) -> Option<Check> {
|
||||
if let ExprKind::Tuple { elts, .. } = &test.node {
|
||||
if !elts.is_empty() {
|
||||
return Some(Check::new(CheckKind::IfTuple, location));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Check AssertTuple compliance.
|
||||
pub fn check_assert_tuple(test: &Expr, location: Location) -> Option<Check> {
|
||||
if let ExprKind::Tuple { elts, .. } = &test.node {
|
||||
if !elts.is_empty() {
|
||||
return Some(Check::new(CheckKind::AssertTuple, location));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Check NotInTest and NotIsTest compliance.
|
||||
pub fn check_not_tests(
|
||||
op: &Unaryop,
|
||||
operand: &Expr,
|
||||
check_not_in: bool,
|
||||
check_not_is: bool,
|
||||
locator: &dyn CheckLocator,
|
||||
) -> Vec<Check> {
|
||||
let mut checks: Vec<Check> = vec![];
|
||||
|
||||
if matches!(op, Unaryop::Not) {
|
||||
if let ExprKind::Compare { ops, .. } = &operand.node {
|
||||
for op in ops {
|
||||
match op {
|
||||
Cmpop::In => {
|
||||
if check_not_in {
|
||||
checks.push(Check::new(
|
||||
CheckKind::NotInTest,
|
||||
locator.locate_check(operand.location),
|
||||
));
|
||||
}
|
||||
}
|
||||
Cmpop::Is => {
|
||||
if check_not_is {
|
||||
checks.push(Check::new(
|
||||
CheckKind::NotIsTest,
|
||||
locator.locate_check(operand.location),
|
||||
));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checks
|
||||
}
|
||||
|
||||
/// Check UnusedVariable compliance.
|
||||
pub fn check_unused_variables(scope: &Scope, locator: &dyn CheckLocator) -> Vec<Check> {
|
||||
let mut checks: Vec<Check> = vec![];
|
||||
|
||||
if matches!(
|
||||
scope.kind,
|
||||
ScopeKind::Function(FunctionScope { uses_locals: true })
|
||||
) {
|
||||
return checks;
|
||||
}
|
||||
|
||||
for (name, binding) in scope.values.iter() {
|
||||
// TODO(charlie): Ignore if using `locals`.
|
||||
if binding.used.is_none()
|
||||
&& name != "_"
|
||||
&& name != "__tracebackhide__"
|
||||
&& name != "__traceback_info__"
|
||||
&& name != "__traceback_supplement__"
|
||||
&& matches!(binding.kind, BindingKind::Assignment)
|
||||
{
|
||||
checks.push(Check::new(
|
||||
CheckKind::UnusedVariable(name.to_string()),
|
||||
locator.locate_check(binding.location),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
checks
|
||||
}
|
||||
|
||||
/// Check DoNotAssignLambda compliance.
|
||||
pub fn check_do_not_assign_lambda(value: &Expr, location: Location) -> Option<Check> {
|
||||
if let ExprKind::Lambda { .. } = &value.node {
|
||||
Some(Check::new(CheckKind::DoNotAssignLambda, location))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn is_ambiguous_name(name: &str) -> bool {
|
||||
name == "l" || name == "I" || name == "O"
|
||||
}
|
||||
|
||||
/// Check AmbiguousVariableName compliance.
|
||||
pub fn check_ambiguous_variable_name(name: &str, location: Location) -> Option<Check> {
|
||||
if is_ambiguous_name(name) {
|
||||
Some(Check::new(
|
||||
CheckKind::AmbiguousVariableName(name.to_string()),
|
||||
location,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Check AmbiguousClassName compliance.
|
||||
pub fn check_ambiguous_class_name(name: &str, location: Location) -> Option<Check> {
|
||||
if is_ambiguous_name(name) {
|
||||
Some(Check::new(
|
||||
CheckKind::AmbiguousClassName(name.to_string()),
|
||||
location,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Check AmbiguousFunctionName compliance.
|
||||
pub fn check_ambiguous_function_name(name: &str, location: Location) -> Option<Check> {
|
||||
if is_ambiguous_name(name) {
|
||||
Some(Check::new(
|
||||
CheckKind::AmbiguousFunctionName(name.to_string()),
|
||||
location,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Check UselessObjectInheritance compliance.
|
||||
pub fn check_useless_object_inheritance(
|
||||
stmt: &Stmt,
|
||||
name: &str,
|
||||
bases: &[Expr],
|
||||
keywords: &[Keyword],
|
||||
scope: &Scope,
|
||||
locator: &mut SourceCodeLocator,
|
||||
autofix: &fixer::Mode,
|
||||
) -> Option<Check> {
|
||||
for expr in bases {
|
||||
if let ExprKind::Name { id, .. } = &expr.node {
|
||||
if id == "object" {
|
||||
match scope.values.get(id) {
|
||||
None
|
||||
| Some(Binding {
|
||||
kind: BindingKind::Builtin,
|
||||
..
|
||||
}) => {
|
||||
let mut check = Check::new(
|
||||
CheckKind::UselessObjectInheritance(name.to_string()),
|
||||
expr.location,
|
||||
);
|
||||
if matches!(autofix, fixer::Mode::Generate | fixer::Mode::Apply) {
|
||||
if let Some(fix) = fixes::remove_class_def_base(
|
||||
locator,
|
||||
&stmt.location,
|
||||
expr.location,
|
||||
bases,
|
||||
keywords,
|
||||
) {
|
||||
check.amend(fix);
|
||||
}
|
||||
}
|
||||
return Some(check);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Check DefaultExceptNotLast compliance.
|
||||
pub fn check_default_except_not_last(handlers: &Vec<Excepthandler>) -> Option<Check> {
|
||||
for (idx, handler) in handlers.iter().enumerate() {
|
||||
let ExcepthandlerKind::ExceptHandler { type_, .. } = &handler.node;
|
||||
if type_.is_none() && idx < handlers.len() - 1 {
|
||||
return Some(Check::new(
|
||||
CheckKind::DefaultExceptNotLast,
|
||||
handler.location,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Check RaiseNotImplemented compliance.
|
||||
pub fn check_raise_not_implemented(expr: &Expr) -> Option<Check> {
|
||||
match &expr.node {
|
||||
ExprKind::Call { func, .. } => {
|
||||
if let ExprKind::Name { id, .. } = &func.node {
|
||||
if id == "NotImplemented" {
|
||||
return Some(Check::new(CheckKind::RaiseNotImplemented, expr.location));
|
||||
}
|
||||
}
|
||||
}
|
||||
ExprKind::Name { id, .. } => {
|
||||
if id == "NotImplemented" {
|
||||
return Some(Check::new(CheckKind::RaiseNotImplemented, expr.location));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Check DuplicateArgumentName compliance.
|
||||
pub fn check_duplicate_arguments(arguments: &Arguments) -> Vec<Check> {
|
||||
let mut checks: Vec<Check> = vec![];
|
||||
|
||||
// Collect all the arguments into a single vector.
|
||||
let mut all_arguments: Vec<&Arg> = arguments
|
||||
.args
|
||||
.iter()
|
||||
.chain(arguments.posonlyargs.iter())
|
||||
.chain(arguments.kwonlyargs.iter())
|
||||
.collect();
|
||||
if let Some(arg) = &arguments.vararg {
|
||||
all_arguments.push(arg);
|
||||
}
|
||||
if let Some(arg) = &arguments.kwarg {
|
||||
all_arguments.push(arg);
|
||||
}
|
||||
|
||||
// Search for duplicates.
|
||||
let mut idents: BTreeSet<&str> = BTreeSet::new();
|
||||
for arg in all_arguments {
|
||||
let ident = &arg.node.arg;
|
||||
if idents.contains(ident.as_str()) {
|
||||
checks.push(Check::new(CheckKind::DuplicateArgumentName, arg.location));
|
||||
}
|
||||
idents.insert(ident);
|
||||
}
|
||||
|
||||
checks
|
||||
}
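
As a self-contained illustration (the `duplicate_names` helper is invented for this sketch), the core of the duplicate-argument scan above amounts to inserting each parameter name into a `BTreeSet` and flagging any name that was already present:

```rust
use std::collections::BTreeSet;

/// Report every parameter name that repeats an earlier one, mirroring the
/// `BTreeSet`-based scan in `check_duplicate_arguments`.
fn duplicate_names<'a>(names: &[&'a str]) -> Vec<&'a str> {
    let mut seen: BTreeSet<&str> = BTreeSet::new();
    let mut duplicates = vec![];
    for name in names {
        // `insert` returns `false` when the name was already present.
        if !seen.insert(*name) {
            duplicates.push(*name);
        }
    }
    duplicates
}

fn main() {
    // Corresponds to `def f(x, y, x): ...`; the second `x` is flagged.
    assert_eq!(duplicate_names(&["x", "y", "x"]), vec!["x"]);
}
```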
|
||||
|
||||
/// Check AssertEquals compliance.
|
||||
pub fn check_assert_equals(expr: &Expr, autofix: &fixer::Mode) -> Option<Check> {
|
||||
if let ExprKind::Attribute { value, attr, .. } = &expr.node {
|
||||
if attr == "assertEquals" {
|
||||
if let ExprKind::Name { id, .. } = &value.node {
|
||||
if id == "self" {
|
||||
let mut check = Check::new(CheckKind::NoAssertEquals, expr.location);
|
||||
if matches!(autofix, fixer::Mode::Generate | fixer::Mode::Apply) {
|
||||
check.amend(Fix {
|
||||
content: "assertEqual".to_string(),
|
||||
start: Location::new(expr.location.row(), expr.location.column() + 1),
|
||||
end: Location::new(
|
||||
expr.location.row(),
|
||||
expr.location.column() + 1 + "assertEquals".len(),
|
||||
),
|
||||
applied: false,
|
||||
});
|
||||
}
|
||||
return Some(check);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
enum DictionaryKey<'a> {
|
||||
Constant(&'a Constant),
|
||||
Variable(&'a String),
|
||||
}
|
||||
|
||||
fn convert_to_value(expr: &Expr) -> Option<DictionaryKey> {
|
||||
match &expr.node {
|
||||
ExprKind::Constant { value, .. } => Some(DictionaryKey::Constant(value)),
|
||||
ExprKind::Name { id, .. } => Some(DictionaryKey::Variable(id)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check MultiValueRepeatedKeyLiteral and MultiValueRepeatedKeyVariable compliance.
|
||||
pub fn check_repeated_keys(
|
||||
keys: &Vec<Expr>,
|
||||
check_repeated_literals: bool,
|
||||
check_repeated_variables: bool,
|
||||
locator: &dyn CheckLocator,
|
||||
) -> Vec<Check> {
|
||||
let mut checks: Vec<Check> = vec![];
|
||||
|
||||
let num_keys = keys.len();
|
||||
for i in 0..num_keys {
|
||||
let k1 = &keys[i];
|
||||
let v1 = convert_to_value(k1);
|
||||
for k2 in keys.iter().take(num_keys).skip(i + 1) {
|
||||
let v2 = convert_to_value(k2);
|
||||
match (&v1, &v2) {
|
||||
(Some(DictionaryKey::Constant(v1)), Some(DictionaryKey::Constant(v2))) => {
|
||||
if check_repeated_literals && v1 == v2 {
|
||||
checks.push(Check::new(
|
||||
CheckKind::MultiValueRepeatedKeyLiteral,
|
||||
locator.locate_check(k2.location),
|
||||
))
|
||||
}
|
||||
}
|
||||
(Some(DictionaryKey::Variable(v1)), Some(DictionaryKey::Variable(v2))) => {
|
||||
if check_repeated_variables && v1 == v2 {
|
||||
checks.push(Check::new(
|
||||
CheckKind::MultiValueRepeatedKeyVariable((*v2).to_string()),
|
||||
locator.locate_check(k2.location),
|
||||
))
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checks
|
||||
}
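
A minimal model of the pairwise repeated-key scan above, assuming a toy `Key` enum in place of the real AST types; literal keys and bare-name keys are kept in separate comparison domains, just as the check distinguishes `MultiValueRepeatedKeyLiteral` from `MultiValueRepeatedKeyVariable`:

```rust
#[derive(Debug, PartialEq)]
enum Key<'a> {
    Constant(&'a str), // a literal key, compared by value
    Variable(&'a str), // a bare-name key, compared by identifier
}

/// Return every key that repeats an earlier key of the same kind.
fn repeated_keys<'a>(keys: &'a [Key<'a>]) -> Vec<&'a Key<'a>> {
    let mut repeated = vec![];
    for (i, k1) in keys.iter().enumerate() {
        for k2 in &keys[i + 1..] {
            if k1 == k2 {
                repeated.push(k2);
            }
        }
    }
    repeated
}

fn main() {
    // Corresponds to `{"a": 1, x: 2, "a": 3}`: the second `"a"` is reported.
    let keys = [Key::Constant("a"), Key::Variable("x"), Key::Constant("a")];
    assert_eq!(repeated_keys(&keys).len(), 1);
}
```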
|
||||
|
||||
/// Check TrueFalseComparison and NoneComparison compliance.
|
||||
pub fn check_literal_comparisons(
|
||||
left: &Expr,
|
||||
ops: &Vec<Cmpop>,
|
||||
comparators: &Vec<Expr>,
|
||||
check_none_comparisons: bool,
|
||||
check_true_false_comparisons: bool,
|
||||
locator: &dyn CheckLocator,
|
||||
) -> Vec<Check> {
|
||||
let mut checks: Vec<Check> = vec![];
|
||||
|
||||
let op = ops.first().unwrap();
|
||||
let comparator = left;
|
||||
|
||||
// Check `left`.
|
||||
if check_none_comparisons
|
||||
&& matches!(
|
||||
comparator.node,
|
||||
ExprKind::Constant {
|
||||
value: Constant::None,
|
||||
kind: None
|
||||
}
|
||||
)
|
||||
{
|
||||
if matches!(op, Cmpop::Eq) {
|
||||
checks.push(Check::new(
|
||||
CheckKind::NoneComparison(RejectedCmpop::Eq),
|
||||
locator.locate_check(comparator.location),
|
||||
));
|
||||
}
|
||||
if matches!(op, Cmpop::NotEq) {
|
||||
checks.push(Check::new(
|
||||
CheckKind::NoneComparison(RejectedCmpop::NotEq),
|
||||
locator.locate_check(comparator.location),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if check_true_false_comparisons {
|
||||
if let ExprKind::Constant {
|
||||
value: Constant::Bool(value),
|
||||
kind: None,
|
||||
} = comparator.node
|
||||
{
|
||||
if matches!(op, Cmpop::Eq) {
|
||||
checks.push(Check::new(
|
||||
CheckKind::TrueFalseComparison(value, RejectedCmpop::Eq),
|
||||
locator.locate_check(comparator.location),
|
||||
));
|
||||
}
|
||||
if matches!(op, Cmpop::NotEq) {
|
||||
checks.push(Check::new(
|
||||
CheckKind::TrueFalseComparison(value, RejectedCmpop::NotEq),
|
||||
locator.locate_check(comparator.location),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check each comparator in order.
|
||||
for (op, comparator) in izip!(ops, comparators) {
|
||||
if check_none_comparisons
|
||||
&& matches!(
|
||||
comparator.node,
|
||||
ExprKind::Constant {
|
||||
value: Constant::None,
|
||||
kind: None
|
||||
}
|
||||
)
|
||||
{
|
||||
if matches!(op, Cmpop::Eq) {
|
||||
checks.push(Check::new(
|
||||
CheckKind::NoneComparison(RejectedCmpop::Eq),
|
||||
locator.locate_check(comparator.location),
|
||||
));
|
||||
}
|
||||
if matches!(op, Cmpop::NotEq) {
|
||||
checks.push(Check::new(
|
||||
CheckKind::NoneComparison(RejectedCmpop::NotEq),
|
||||
locator.locate_check(comparator.location),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if check_true_false_comparisons {
|
||||
if let ExprKind::Constant {
|
||||
value: Constant::Bool(value),
|
||||
kind: None,
|
||||
} = comparator.node
|
||||
{
|
||||
if matches!(op, Cmpop::Eq) {
|
||||
checks.push(Check::new(
|
||||
CheckKind::TrueFalseComparison(value, RejectedCmpop::Eq),
|
||||
locator.locate_check(comparator.location),
|
||||
));
|
||||
}
|
||||
if matches!(op, Cmpop::NotEq) {
|
||||
checks.push(Check::new(
|
||||
CheckKind::TrueFalseComparison(value, RejectedCmpop::NotEq),
|
||||
locator.locate_check(comparator.location),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checks
|
||||
}
|
||||
|
||||
fn is_constant(expr: &Expr) -> bool {
|
||||
match &expr.node {
|
||||
ExprKind::Constant { .. } => true,
|
||||
ExprKind::Tuple { elts, .. } => elts.iter().all(is_constant),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_singleton(expr: &Expr) -> bool {
|
||||
matches!(
|
||||
expr.node,
|
||||
ExprKind::Constant {
|
||||
value: Constant::None | Constant::Bool(_) | Constant::Ellipsis,
|
||||
..
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
fn is_constant_non_singleton(expr: &Expr) -> bool {
|
||||
is_constant(expr) && !is_singleton(expr)
|
||||
}
|
||||
|
||||
/// Check IsLiteral compliance.
|
||||
pub fn check_is_literal(
|
||||
left: &Expr,
|
||||
ops: &Vec<Cmpop>,
|
||||
comparators: &Vec<Expr>,
|
||||
location: Location,
|
||||
) -> Vec<Check> {
|
||||
let mut checks: Vec<Check> = vec![];
|
||||
|
||||
let mut left = left;
|
||||
for (op, right) in izip!(ops, comparators) {
|
||||
if matches!(op, Cmpop::Is | Cmpop::IsNot)
|
||||
&& (is_constant_non_singleton(left) || is_constant_non_singleton(right))
|
||||
{
|
||||
checks.push(Check::new(CheckKind::IsLiteral, location));
|
||||
}
|
||||
left = right;
|
||||
}
|
||||
|
||||
checks
|
||||
}
|
||||
|
||||
/// Check TypeComparison compliance.
|
||||
pub fn check_type_comparison(
|
||||
ops: &Vec<Cmpop>,
|
||||
comparators: &Vec<Expr>,
|
||||
location: Location,
|
||||
) -> Vec<Check> {
|
||||
let mut checks: Vec<Check> = vec![];
|
||||
|
||||
for (op, right) in izip!(ops, comparators) {
|
||||
if matches!(op, Cmpop::Is | Cmpop::IsNot | Cmpop::Eq | Cmpop::NotEq) {
|
||||
match &right.node {
|
||||
ExprKind::Call { func, args, .. } => {
|
||||
if let ExprKind::Name { id, .. } = &func.node {
|
||||
// Ex) type(False)
|
||||
if id == "type" {
|
||||
if let Some(arg) = args.first() {
|
||||
// Allow comparison for types which are not obvious.
|
||||
if !matches!(arg.node, ExprKind::Name { .. }) {
|
||||
checks.push(Check::new(CheckKind::TypeComparison, location));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ExprKind::Attribute { value, .. } => {
|
||||
if let ExprKind::Name { id, .. } = &value.node {
|
||||
// Ex) types.IntType
|
||||
if id == "types" {
|
||||
checks.push(Check::new(CheckKind::TypeComparison, location));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checks
|
||||
}
|
||||
|
||||
/// Check TwoStarredExpressions and TooManyExpressionsInStarredAssignment compliance.
|
||||
pub fn check_starred_expressions(
|
||||
elts: &[Expr],
|
||||
check_too_many_expressions: bool,
|
||||
check_two_starred_expressions: bool,
|
||||
location: Location,
|
||||
) -> Option<Check> {
|
||||
let mut has_starred: bool = false;
|
||||
let mut starred_index: Option<usize> = None;
|
||||
for (index, elt) in elts.iter().enumerate() {
|
||||
if matches!(elt.node, ExprKind::Starred { .. }) {
|
||||
if has_starred && check_two_starred_expressions {
|
||||
return Some(Check::new(CheckKind::TwoStarredExpressions, location));
|
||||
}
|
||||
has_starred = true;
|
||||
starred_index = Some(index);
|
||||
}
|
||||
}
|
||||
|
||||
if check_too_many_expressions {
|
||||
if let Some(starred_index) = starred_index {
|
||||
if starred_index >= 1 << 8 || elts.len() - starred_index > 1 << 24 {
|
||||
return Some(Check::new(
|
||||
CheckKind::TooManyExpressionsInStarredAssignment,
|
||||
location,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Check BreakOutsideLoop compliance.
|
||||
pub fn check_break_outside_loop(
|
||||
stmt: &Stmt,
|
||||
parents: &[&Stmt],
|
||||
parent_stack: &[usize],
|
||||
locator: &dyn CheckLocator,
|
||||
) -> Option<Check> {
|
||||
let mut allowed: bool = false;
|
||||
let mut parent = stmt;
|
||||
for index in parent_stack.iter().rev() {
|
||||
let child = parent;
|
||||
parent = parents[*index];
|
||||
match &parent.node {
|
||||
StmtKind::For { orelse, .. }
|
||||
| StmtKind::AsyncFor { orelse, .. }
|
||||
| StmtKind::While { orelse, .. } => {
|
||||
if !orelse.contains(child) {
|
||||
allowed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
StmtKind::FunctionDef { .. }
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
| StmtKind::ClassDef { .. } => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if !allowed {
|
||||
Some(Check::new(
|
||||
CheckKind::BreakOutsideLoop,
|
||||
locator.locate_check(stmt.location),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Check ContinueOutsideLoop compliance.
|
||||
pub fn check_continue_outside_loop(
|
||||
stmt: &Stmt,
|
||||
parents: &[&Stmt],
|
||||
parent_stack: &[usize],
|
||||
locator: &dyn CheckLocator,
|
||||
) -> Option<Check> {
|
||||
let mut allowed: bool = false;
|
||||
let mut parent = stmt;
|
||||
for index in parent_stack.iter().rev() {
|
||||
let child = parent;
|
||||
parent = parents[*index];
|
||||
match &parent.node {
|
||||
StmtKind::For { orelse, .. }
|
||||
| StmtKind::AsyncFor { orelse, .. }
|
||||
| StmtKind::While { orelse, .. } => {
|
||||
if !orelse.contains(child) {
|
||||
allowed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
StmtKind::FunctionDef { .. }
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
| StmtKind::ClassDef { .. } => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if !allowed {
|
||||
Some(Check::new(
|
||||
CheckKind::ContinueOutsideLoop,
|
||||
locator.locate_check(stmt.location),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
145 src/ast/helpers.rs (new file)
@@ -0,0 +1,145 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind, Location, StmtKind};
|
||||
|
||||
use crate::python::typing;
|
||||
|
||||
fn compose_call_path_inner<'a>(expr: &'a Expr, parts: &mut Vec<&'a str>) {
|
||||
match &expr.node {
|
||||
ExprKind::Call { func, .. } => {
|
||||
compose_call_path_inner(func, parts);
|
||||
}
|
||||
ExprKind::Attribute { value, attr, .. } => {
|
||||
compose_call_path_inner(value, parts);
|
||||
parts.push(attr);
|
||||
}
|
||||
ExprKind::Name { id, .. } => {
|
||||
parts.push(id);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compose_call_path(expr: &Expr) -> Option<String> {
|
||||
let mut segments = vec![];
|
||||
compose_call_path_inner(expr, &mut segments);
|
||||
if segments.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(segments.join("."))
|
||||
}
|
||||
}
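
Roughly, `compose_call_path` flattens nested attribute access into a dotted path, descending through calls and attributes and collecting names left to right. A sketch with a toy `Expr` enum standing in for the rustpython AST:

```rust
// Toy stand-in for the rustpython expression nodes handled above.
enum Expr<'a> {
    Name(&'a str),
    Attribute { value: Box<Expr<'a>>, attr: &'a str },
    Call { func: Box<Expr<'a>> },
}

/// Collect the dotted path of a (possibly called) attribute chain.
fn call_path(expr: &Expr, parts: &mut Vec<String>) {
    match expr {
        Expr::Call { func } => call_path(func, parts),
        Expr::Attribute { value, attr } => {
            call_path(value, parts);
            parts.push((*attr).to_string());
        }
        Expr::Name(id) => parts.push((*id).to_string()),
    }
}

fn main() {
    // Models the exception type `module.errors.Timeout()`.
    let expr = Expr::Call {
        func: Box::new(Expr::Attribute {
            value: Box::new(Expr::Attribute {
                value: Box::new(Expr::Name("module")),
                attr: "errors",
            }),
            attr: "Timeout",
        }),
    };
    let mut parts = vec![];
    call_path(&expr, &mut parts);
    assert_eq!(parts.join("."), "module.errors.Timeout");
}
```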
|
||||
|
||||
pub fn match_name_or_attr(expr: &Expr, target: &str) -> bool {
|
||||
match &expr.node {
|
||||
ExprKind::Attribute { attr, .. } => target == attr,
|
||||
ExprKind::Name { id, .. } => target == id,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub enum SubscriptKind {
|
||||
AnnotatedSubscript,
|
||||
PEP593AnnotatedSubscript,
|
||||
}
|
||||
|
||||
pub fn match_annotated_subscript(expr: &Expr) -> Option<SubscriptKind> {
|
||||
match &expr.node {
|
||||
ExprKind::Attribute { attr, .. } => {
|
||||
if typing::is_annotated_subscript(attr) {
|
||||
Some(SubscriptKind::AnnotatedSubscript)
|
||||
} else if typing::is_pep593_annotated_subscript(attr) {
|
||||
Some(SubscriptKind::PEP593AnnotatedSubscript)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
ExprKind::Name { id, .. } => {
|
||||
if typing::is_annotated_subscript(id) {
|
||||
Some(SubscriptKind::AnnotatedSubscript)
|
||||
} else if typing::is_pep593_annotated_subscript(id) {
|
||||
Some(SubscriptKind::PEP593AnnotatedSubscript)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
static DUNDER_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"__[^\s]+__").unwrap());
|
||||
|
||||
pub fn is_assignment_to_a_dunder(node: &StmtKind) -> bool {
|
||||
// Check whether it's an assignment to a dunder, with or without a type annotation.
|
||||
// This is what pycodestyle (as of 2.9.1) does.
|
||||
match node {
|
||||
StmtKind::Assign {
|
||||
targets,
|
||||
value: _,
|
||||
type_comment: _,
|
||||
} => {
|
||||
if targets.len() != 1 {
|
||||
return false;
|
||||
}
|
||||
match &targets[0].node {
|
||||
ExprKind::Name { id, ctx: _ } => DUNDER_REGEX.is_match(id),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
StmtKind::AnnAssign {
|
||||
target,
|
||||
annotation: _,
|
||||
value: _,
|
||||
simple: _,
|
||||
} => match &target.node {
|
||||
ExprKind::Name { id, ctx: _ } => DUNDER_REGEX.is_match(id),
|
||||
_ => false,
|
||||
},
|
||||
_ => false,
|
||||
}
|
||||
}
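
A quick sanity check of the `DUNDER_REGEX` pattern itself, using the same `regex` crate the file already imports; note that the pattern is unanchored, so it also matches dunder-looking substrings inside longer identifiers:

```rust
use regex::Regex;

fn main() {
    // The same pattern used by `DUNDER_REGEX` above.
    let dunder = Regex::new(r"__[^\s]+__").unwrap();

    // Typical dunder assignments match...
    assert!(dunder.is_match("__all__"));
    assert!(dunder.is_match("__version__"));

    // ...ordinary names do not...
    assert!(!dunder.is_match("_private"));
    assert!(!dunder.is_match("version"));

    // ...but the pattern is unanchored, so a dunder-shaped substring
    // anywhere in the identifier also matches.
    assert!(dunder.is_match("x__weird__x"));
}
```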
|
||||
|
||||
/// Extract the names of all handled exceptions.
|
||||
pub fn extract_handler_names(handlers: &[Excepthandler]) -> Vec<String> {
|
||||
let mut handler_names = vec![];
|
||||
for handler in handlers {
|
||||
match &handler.node {
|
||||
ExcepthandlerKind::ExceptHandler { type_, .. } => {
|
||||
if let Some(type_) = type_ {
|
||||
if let ExprKind::Tuple { elts, .. } = &type_.node {
|
||||
for type_ in elts {
|
||||
if let Some(name) = compose_call_path(type_) {
|
||||
handler_names.push(name);
|
||||
}
|
||||
}
|
||||
} else if let Some(name) = compose_call_path(type_) {
|
||||
handler_names.push(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
handler_names
|
||||
}
|
||||
|
||||
/// Returns `true` if a call is an argumented `super` invocation.
|
||||
pub fn is_super_call_with_arguments(func: &Expr, args: &[Expr]) -> bool {
|
||||
// Check: is this a `super` call?
|
||||
if let ExprKind::Name { id, .. } = &func.node {
|
||||
id == "super" && !args.is_empty()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a location within a file (relative to `base`) to an absolute position.
|
||||
pub fn to_absolute(relative: &Location, base: &Location) -> Location {
|
||||
if relative.row() == 1 {
|
||||
Location::new(
|
||||
relative.row() + base.row() - 1,
|
||||
relative.column() + base.column() - 1,
|
||||
)
|
||||
} else {
|
||||
Location::new(relative.row() + base.row() - 1, relative.column())
|
||||
}
|
||||
}
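
The row-1 special case in `to_absolute` is easy to invert: a relative location on the first row is shifted by both the base row and the base column, while later rows are shifted by the base row only. A stand-in sketch using plain 1-based `(row, column)` tuples instead of `Location`:

```rust
/// Stand-in for `to_absolute`, using 1-based (row, column) tuples.
fn to_absolute(relative: (usize, usize), base: (usize, usize)) -> (usize, usize) {
    if relative.0 == 1 {
        // Same physical line as the base: both row and column are offset.
        (relative.0 + base.0 - 1, relative.1 + base.1 - 1)
    } else {
        // A later line: only the row is offset; the column is already absolute.
        (relative.0 + base.0 - 1, relative.1)
    }
}

fn main() {
    // Base location (10, 3): the embedded snippet starts at row 10, column 3.
    assert_eq!(to_absolute((1, 5), (10, 3)), (10, 7));
    assert_eq!(to_absolute((2, 4), (10, 3)), (11, 4));
}
```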
|
||||
@@ -1,6 +1,6 @@
|
||||
use rustpython_parser::ast::{Constant, Expr, ExprKind, Location, Stmt, StmtKind};
|
||||
|
||||
use crate::ast::types::{BindingKind, Scope};
|
||||
use crate::ast::types::{BindingKind, Range, Scope};
|
||||
|
||||
/// Extract the names bound to a given __all__ assignment.
|
||||
pub fn extract_all_names(stmt: &Stmt, scope: &Scope) -> Vec<String> {
|
||||
@@ -121,30 +121,78 @@ pub fn is_unpacking_assignment(stmt: &Stmt) -> bool {
|
||||
/// Struct used to efficiently slice source code at (row, column) Locations.
|
||||
pub struct SourceCodeLocator<'a> {
|
||||
content: &'a str,
|
||||
offsets: Vec<usize>,
|
||||
initialized: bool,
|
||||
offsets: Vec<Vec<usize>>,
|
||||
}
|
||||
|
||||
impl<'a> SourceCodeLocator<'a> {
|
||||
pub fn new(content: &'a str) -> Self {
|
||||
SourceCodeLocator {
|
||||
content,
|
||||
offsets: vec![],
|
||||
initialized: false,
|
||||
offsets: Self::compute_offsets(content),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn slice_source_code(&mut self, location: &Location) -> &'a str {
|
||||
if !self.initialized {
|
||||
let mut offset = 0;
|
||||
for i in self.content.lines() {
|
||||
self.offsets.push(offset);
|
||||
offset += i.len();
|
||||
offset += 1;
|
||||
fn compute_offsets(content: &str) -> Vec<Vec<usize>> {
|
||||
let mut offsets = vec![];
|
||||
let mut offset = 0;
|
||||
for line in content.lines() {
|
||||
let mut newline = 0;
|
||||
let mut line_offsets: Vec<usize> = vec![];
|
||||
for (i, char) in line.char_indices() {
|
||||
line_offsets.push(offset + i);
|
||||
newline = i + char.len_utf8();
|
||||
}
|
||||
self.initialized = true;
|
||||
line_offsets.push(offset + newline);
|
||||
offsets.push(line_offsets);
|
||||
offset += newline + 1;
|
||||
}
|
||||
let offset = self.offsets[location.row() - 1] + location.column() - 1;
|
||||
offsets.push(vec![offset]);
|
||||
offsets
|
||||
}
|
||||
|
||||
pub fn slice_source_code_at(&self, location: &Location) -> &'a str {
|
||||
let offset = self.offsets[location.row() - 1][location.column() - 1];
|
||||
&self.content[offset..]
|
||||
}
|
||||
|
||||
pub fn slice_source_code_range(&self, range: &Range) -> &'a str {
|
||||
let start = self.offsets[range.location.row() - 1][range.location.column() - 1];
|
||||
let end = self.offsets[range.end_location.row() - 1][range.end_location.column() - 1];
|
||||
&self.content[start..end]
|
||||
}
|
||||
|
||||
pub fn partition_source_code_at(
|
||||
&self,
|
||||
outer: &Range,
|
||||
inner: &Range,
|
||||
) -> (&'a str, &'a str, &'a str) {
|
||||
let outer_start = self.offsets[outer.location.row() - 1][outer.location.column() - 1];
|
||||
let outer_end = self.offsets[outer.end_location.row() - 1][outer.end_location.column() - 1];
|
||||
let inner_start = self.offsets[inner.location.row() - 1][inner.location.column() - 1];
|
||||
let inner_end = self.offsets[inner.end_location.row() - 1][inner.end_location.column() - 1];
|
||||
(
|
||||
&self.content[outer_start..inner_start],
|
||||
&self.content[inner_start..inner_end],
|
||||
&self.content[inner_end..outer_end],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::SourceCodeLocator;
|
||||
|
||||
#[test]
|
||||
fn source_code_locator_init() {
|
||||
let content = "# \u{4e9c}\nclass Foo:\n \"\"\".\"\"\"";
|
||||
let locator = SourceCodeLocator::new(content);
|
||||
assert_eq!(locator.offsets.len(), 4);
|
||||
assert_eq!(locator.offsets[0], [0, 1, 2, 5]);
|
||||
assert_eq!(locator.offsets[1], [6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
|
||||
assert_eq!(
|
||||
locator.offsets[2],
|
||||
[17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]
|
||||
);
|
||||
assert_eq!(locator.offsets[3], [29]);
|
||||
}
|
||||
}
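
To make the new offset table concrete: every line gets one byte offset per character plus a trailing end-of-line offset, so multi-byte UTF-8 characters widen the gap between consecutive entries. A standalone sketch of the same computation, with the crate types stripped out:

```rust
/// Compute, for each line, the byte offset of every character plus one
/// trailing offset for the end of the line (mirrors `compute_offsets`).
fn compute_line_offsets(content: &str) -> Vec<Vec<usize>> {
    let mut offsets = vec![];
    let mut offset = 0;
    for line in content.lines() {
        let mut end = 0;
        let mut line_offsets: Vec<usize> = vec![];
        for (i, ch) in line.char_indices() {
            line_offsets.push(offset + i);
            end = i + ch.len_utf8();
        }
        line_offsets.push(offset + end);
        offsets.push(line_offsets);
        offset += end + 1; // +1 for the newline
    }
    offsets.push(vec![offset]);
    offsets
}

fn main() {
    // "\u{4e9c}" is three bytes in UTF-8, so the next offset jumps from 2 to 5.
    let offsets = compute_line_offsets("# \u{4e9c}\nclass Foo:");
    assert_eq!(offsets[0], [0, 1, 2, 5]);
    assert_eq!(offsets[1].len(), 11);
}
```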
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
use rustpython_parser::ast::{Expr, ExprKind, Keyword, Location};
|
||||
use rustpython_parser::ast::{Expr, ExprKind, Keyword};
|
||||
|
||||
fn relocate_keyword(keyword: &mut Keyword, location: Location) {
|
||||
keyword.location = location;
|
||||
use crate::ast::types::Range;
|
||||
|
||||
fn relocate_keyword(keyword: &mut Keyword, location: Range) {
|
||||
keyword.location = location.location;
|
||||
keyword.end_location = Some(location.end_location);
|
||||
relocate_expr(&mut keyword.node.value, location);
|
||||
}
|
||||
|
||||
/// Change an expression's location (recursively) to match a desired, fixed location.
|
||||
pub fn relocate_expr(expr: &mut Expr, location: Location) {
|
||||
expr.location = location;
|
||||
pub fn relocate_expr(expr: &mut Expr, location: Range) {
|
||||
expr.location = location.location;
|
||||
expr.end_location = Some(location.end_location);
|
||||
match &mut expr.node {
|
||||
ExprKind::BoolOp { values, .. } => {
|
||||
for expr in values {
|
||||
|
||||
@@ -1,13 +1,30 @@
|
||||
use std::collections::BTreeMap;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::ast::{Located, Location};
|
||||
|
||||
fn id() -> usize {
|
||||
static COUNTER: AtomicUsize = AtomicUsize::new(1);
|
||||
COUNTER.fetch_add(1, Ordering::Relaxed)
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct Range {
|
||||
pub location: Location,
|
||||
pub end_location: Location,
|
||||
}
|
||||
|
||||
impl Range {
|
||||
pub fn from_located<T>(located: &Located<T>) -> Self {
|
||||
Range {
|
||||
location: located.location,
|
||||
end_location: located
|
||||
.end_location
|
||||
.expect("AST nodes should have end_location."),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct FunctionScope {
|
||||
pub uses_locals: bool,
|
||||
@@ -40,6 +57,12 @@ impl Scope {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct BindingContext {
|
||||
pub defined_by: usize,
|
||||
pub defined_in: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum BindingKind {
|
||||
Annotation,
|
||||
@@ -52,20 +75,27 @@ pub enum BindingKind {
|
||||
Definition,
|
||||
Export(Vec<String>),
|
||||
FutureImportation,
|
||||
Importation(String),
|
||||
StarImportation,
|
||||
SubmoduleImportation(String),
|
||||
Importation(String, String, BindingContext),
|
||||
FromImportation(String, String, BindingContext),
|
||||
SubmoduleImportation(String, String, BindingContext),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Binding {
|
||||
pub kind: BindingKind,
|
||||
pub location: Location,
|
||||
/// Tuple of (scope index, location) indicating the scope and location at which the binding was
|
||||
pub range: Range,
|
||||
/// Tuple of (scope index, range) indicating the scope and range at which the binding was
|
||||
/// last used.
|
||||
pub used: Option<(usize, Location)>,
|
||||
pub used: Option<(usize, Range)>,
|
||||
}
|
||||
|
||||
pub trait CheckLocator {
|
||||
fn locate_check(&self, default: Location) -> Location;
|
||||
fn locate_check(&self, default: Range) -> Range;
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum ImportKind {
|
||||
Import,
|
||||
ImportFrom,
|
||||
}
|
||||
|
||||
@@ -4,6 +4,8 @@ use rustpython_parser::ast::{
|
||||
PatternKind, Stmt, StmtKind, Unaryop, Withitem,
|
||||
};
|
||||
|
||||
use crate::ast::helpers::match_name_or_attr;
|
||||
|
||||
pub trait Visitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
walk_stmt(self, stmt);
|
||||
@@ -148,7 +150,11 @@ pub fn walk_stmt<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, stmt: &'a Stmt) {
|
||||
} => {
|
||||
visitor.visit_annotation(annotation);
|
||||
if let Some(expr) = value {
|
||||
visitor.visit_expr(expr);
|
||||
if match_name_or_attr(annotation, "TypeAlias") {
|
||||
visitor.visit_annotation(expr);
|
||||
} else {
|
||||
visitor.visit_expr(expr);
|
||||
}
|
||||
}
|
||||
visitor.visit_expr(target);
|
||||
}
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
pub mod fixer;
|
||||
pub mod fixes;
|
||||
@@ -1,10 +1,11 @@
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
use rustpython_parser::ast::Location;
|
||||
|
||||
use crate::checks::{Check, Fix};
|
||||
use crate::autofix::Fix;
|
||||
use crate::autofix::Patch;
|
||||
use crate::checks::Check;
|
||||
|
||||
#[derive(Hash)]
|
||||
pub enum Mode {
|
||||
@@ -13,6 +14,17 @@ pub enum Mode {
|
||||
None,
|
||||
}
|
||||
|
||||
impl Mode {
|
||||
/// Return `true` if a patch should be generated under the given `Mode`.
|
||||
pub fn patch(&self) -> bool {
|
||||
match &self {
|
||||
Mode::Generate => true,
|
||||
Mode::Apply => true,
|
||||
Mode::None => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<bool> for Mode {
|
||||
fn from(value: bool) -> Self {
|
||||
match value {
|
||||
@@ -23,61 +35,74 @@ impl From<bool> for Mode {
|
||||
}
|
||||
|
||||
/// Auto-fix errors in a file, and write the fixed source code to disk.
|
||||
pub fn fix_file(checks: &mut [Check], contents: &str, path: &Path) -> Result<()> {
|
||||
pub fn fix_file(checks: &mut [Check], contents: &str) -> Option<String> {
|
||||
if checks.iter().all(|check| check.fix.is_none()) {
|
||||
return Ok(());
|
||||
return None;
|
||||
}
|
||||
|
||||
let output = apply_fixes(
|
||||
Some(apply_fixes(
|
||||
checks.iter_mut().filter_map(|check| check.fix.as_mut()),
|
||||
contents,
|
||||
);
|
||||
|
||||
fs::write(path, output).map_err(|e| e.into())
|
||||
))
|
||||
}
|
||||
|
||||
/// Apply a series of fixes.
|
||||
fn apply_fixes<'a>(fixes: impl Iterator<Item = &'a mut Fix>, contents: &str) -> String {
|
||||
let lines: Vec<&str> = contents.lines().collect();
|
||||
|
||||
let mut output = "".to_string();
|
||||
let mut last_pos: Location = Location::new(0, 0);
|
||||
let mut output: String = Default::default();
|
||||
let mut last_pos: Location = Default::default();
|
||||
let mut applied: BTreeSet<&Patch> = Default::default();
|
||||
|
||||
for fix in fixes {
|
||||
// Best-effort approach: if this fix overlaps with a fix we've already applied, skip it.
|
||||
if last_pos > fix.start {
|
||||
for fix in fixes.sorted_by_key(|fix| fix.patch.location) {
|
||||
// If we already applied an identical fix as part of another correction, skip any
|
||||
// re-application.
|
||||
if applied.contains(&fix.patch) {
|
||||
fix.applied = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if fix.start.row() > last_pos.row() {
|
||||
// Best-effort approach: if this fix overlaps with a fix we've already applied, skip it.
|
||||
if last_pos > fix.patch.location {
|
||||
continue;
|
||||
}
|
||||
|
||||
if fix.patch.location.row() > last_pos.row() {
|
||||
if last_pos.row() > 0 || last_pos.column() > 0 {
|
||||
output.push_str(&lines[last_pos.row() - 1][last_pos.column() - 1..]);
|
||||
output.push('\n');
|
||||
}
|
||||
for line in &lines[last_pos.row()..fix.start.row() - 1] {
|
||||
for line in &lines[last_pos.row()..fix.patch.location.row() - 1] {
|
||||
output.push_str(line);
|
||||
output.push('\n');
|
||||
}
|
||||
output.push_str(&lines[fix.start.row() - 1][..fix.start.column() - 1]);
|
||||
output.push_str(&fix.content);
|
||||
output
|
||||
.push_str(&lines[fix.patch.location.row() - 1][..fix.patch.location.column() - 1]);
|
||||
output.push_str(&fix.patch.content);
|
||||
} else {
|
||||
output.push_str(
|
||||
&lines[last_pos.row() - 1][last_pos.column() - 1..fix.start.column() - 1],
|
||||
&lines[last_pos.row() - 1][last_pos.column() - 1..fix.patch.location.column() - 1],
|
||||
);
|
||||
output.push_str(&fix.content);
|
||||
output.push_str(&fix.patch.content);
|
||||
}
|
||||
last_pos = fix.patch.end_location;
|
||||
|
||||
last_pos = fix.end;
|
||||
applied.insert(&fix.patch);
|
||||
fix.applied = true;
|
||||
}
|
||||
|
||||
if last_pos.row() > 0 || last_pos.column() > 0 {
|
||||
if last_pos.row() > 0
|
||||
&& (last_pos.row() - 1) < lines.len()
|
||||
&& (last_pos.row() > 0 || last_pos.column() > 0)
|
||||
{
|
||||
output.push_str(&lines[last_pos.row() - 1][last_pos.column() - 1..]);
|
||||
output.push('\n');
|
||||
}
|
||||
for line in &lines[last_pos.row()..] {
|
||||
output.push_str(line);
|
||||
output.push('\n');
|
||||
if last_pos.row() < lines.len() {
|
||||
for line in &lines[last_pos.row()..] {
|
||||
output.push_str(line);
|
||||
output.push('\n');
|
||||
}
|
||||
}
|
||||
|
||||
output
|
||||
@@ -89,7 +114,8 @@ mod tests {
|
||||
use rustpython_parser::ast::Location;
|
||||
|
||||
use crate::autofix::fixer::apply_fixes;
|
||||
use crate::checks::Fix;
|
||||
use crate::autofix::Fix;
|
||||
use crate::autofix::Patch;
|
||||
|
||||
#[test]
|
||||
fn empty_file() -> Result<()> {
|
||||
@@ -105,9 +131,11 @@ mod tests {
|
||||
#[test]
|
||||
fn apply_single_replacement() -> Result<()> {
|
||||
let mut fixes = vec![Fix {
|
||||
content: "Bar".to_string(),
|
||||
start: Location::new(1, 9),
|
||||
end: Location::new(1, 15),
|
||||
patch: Patch {
|
||||
content: "Bar".to_string(),
|
||||
location: Location::new(1, 9),
|
||||
end_location: Location::new(1, 15),
|
||||
},
|
||||
applied: false,
|
||||
}];
|
||||
let actual = apply_fixes(
|
||||
@@ -129,9 +157,11 @@ mod tests {
|
||||
#[test]
|
||||
fn apply_single_removal() -> Result<()> {
|
||||
let mut fixes = vec![Fix {
|
||||
content: "".to_string(),
|
||||
start: Location::new(1, 8),
|
||||
end: Location::new(1, 16),
|
||||
patch: Patch {
|
||||
content: "".to_string(),
|
||||
location: Location::new(1, 8),
|
||||
end_location: Location::new(1, 16),
|
||||
},
|
||||
applied: false,
|
||||
}];
|
||||
let actual = apply_fixes(
|
||||
@@ -154,15 +184,19 @@ mod tests {
|
||||
fn apply_double_removal() -> Result<()> {
|
||||
let mut fixes = vec![
|
||||
Fix {
|
||||
content: "".to_string(),
|
||||
start: Location::new(1, 8),
|
||||
end: Location::new(1, 17),
|
||||
patch: Patch {
|
||||
content: "".to_string(),
|
||||
location: Location::new(1, 8),
|
||||
end_location: Location::new(1, 17),
|
||||
},
|
||||
applied: false,
|
||||
},
|
||||
Fix {
|
||||
content: "".to_string(),
|
||||
start: Location::new(1, 17),
|
||||
end: Location::new(1, 24),
|
||||
patch: Patch {
|
||||
content: "".to_string(),
|
||||
location: Location::new(1, 17),
|
||||
end_location: Location::new(1, 24),
|
||||
},
|
||||
applied: false,
|
||||
},
|
||||
];
|
||||
@@ -186,15 +220,19 @@ mod tests {
|
||||
fn ignore_overlapping_fixes() -> Result<()> {
|
||||
let mut fixes = vec![
|
||||
Fix {
|
||||
content: "".to_string(),
|
||||
start: Location::new(1, 8),
|
||||
end: Location::new(1, 16),
|
||||
patch: Patch {
|
||||
content: "".to_string(),
|
||||
location: Location::new(1, 8),
|
||||
end_location: Location::new(1, 16),
|
||||
},
|
||||
applied: false,
|
||||
},
|
||||
Fix {
|
||||
content: "ignored".to_string(),
|
||||
start: Location::new(1, 10),
|
||||
end: Location::new(1, 12),
|
||||
patch: Patch {
|
||||
content: "ignored".to_string(),
|
||||
location: Location::new(1, 10),
|
||||
end_location: Location::new(1, 12),
|
||||
},
|
||||
applied: false,
|
||||
},
|
||||
];
|
||||
|
||||
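
A simplified, self-contained sketch of the reworked patch-application loop: patches are applied in order of their start position, any patch overlapping an already-applied one is skipped, and the untouched source between patches is copied through. Byte offsets stand in for the `(row, column)` locations used by the real fixer:

```rust
/// Simplified model of `apply_fixes`, keyed by byte offsets.
struct SimplePatch {
    start: usize,
    end: usize,
    content: String,
}

fn apply_patches(source: &str, mut patches: Vec<SimplePatch>) -> String {
    // Apply patches in source order.
    patches.sort_by_key(|p| p.start);
    let mut output = String::new();
    let mut last = 0;
    for patch in &patches {
        // Best-effort: skip any patch that overlaps one we already applied.
        if patch.start < last {
            continue;
        }
        output.push_str(&source[last..patch.start]);
        output.push_str(&patch.content);
        last = patch.end;
    }
    output.push_str(&source[last..]);
    output
}

fn main() {
    // Drop the useless `object, ` base, as the UselessObjectInheritance fix does.
    let fixed = apply_patches(
        "class Foo(object, int):\n",
        vec![SimplePatch { start: 10, end: 18, content: String::new() }],
    );
    assert_eq!(fixed, "class Foo(int):\n");
}
```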
89 src/autofix/helpers.rs (new file)
@@ -0,0 +1,89 @@
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
use rustpython_parser::ast::{ExcepthandlerKind, Location, Stmt, StmtKind};
|
||||
|
||||
use crate::autofix::Fix;
|
||||
|
||||
/// Determine if a body contains only a single statement, taking into account deleted.
|
||||
fn has_single_child(body: &[Stmt], deleted: &[&Stmt]) -> bool {
|
||||
body.iter().filter(|child| !deleted.contains(child)).count() == 1
|
||||
}
|
||||
|
||||
/// Determine if a child is the only statement in its body.
|
||||
fn is_lone_child(child: &Stmt, parent: &Stmt, deleted: &[&Stmt]) -> Result<bool> {
|
||||
match &parent.node {
|
||||
StmtKind::FunctionDef { body, .. }
|
||||
| StmtKind::AsyncFunctionDef { body, .. }
|
||||
| StmtKind::ClassDef { body, .. }
|
||||
| StmtKind::With { body, .. }
|
||||
| StmtKind::AsyncWith { body, .. } => {
|
||||
if body.iter().contains(child) {
|
||||
Ok(has_single_child(body, deleted))
|
||||
} else {
|
||||
Err(anyhow::anyhow!("Unable to find child in parent body."))
|
||||
}
|
||||
}
|
||||
StmtKind::For { body, orelse, .. }
|
||||
| StmtKind::AsyncFor { body, orelse, .. }
|
||||
| StmtKind::While { body, orelse, .. }
|
||||
| StmtKind::If { body, orelse, .. } => {
|
||||
if body.iter().contains(child) {
|
||||
Ok(has_single_child(body, deleted))
|
||||
} else if orelse.iter().contains(child) {
|
||||
Ok(has_single_child(orelse, deleted))
|
||||
} else {
|
||||
Err(anyhow::anyhow!("Unable to find child in parent body."))
|
||||
}
|
||||
}
|
||||
StmtKind::Try {
|
||||
body,
|
||||
handlers,
|
||||
orelse,
|
||||
finalbody,
|
||||
} => {
|
||||
if body.iter().contains(child) {
|
||||
Ok(has_single_child(body, deleted))
|
||||
} else if orelse.iter().contains(child) {
|
||||
Ok(has_single_child(orelse, deleted))
|
||||
} else if finalbody.iter().contains(child) {
|
||||
Ok(has_single_child(finalbody, deleted))
|
||||
} else if let Some(body) = handlers.iter().find_map(|handler| match &handler.node {
|
||||
ExcepthandlerKind::ExceptHandler { body, .. } => {
|
||||
if body.iter().contains(child) {
|
||||
Some(body)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}) {
|
||||
Ok(has_single_child(body, deleted))
|
||||
} else {
|
||||
Err(anyhow::anyhow!("Unable to find child in parent body."))
|
||||
}
|
||||
}
|
||||
_ => Err(anyhow::anyhow!("Unable to find child in parent body.")),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remove_stmt(stmt: &Stmt, parent: Option<&Stmt>, deleted: &[&Stmt]) -> Result<Fix> {
|
||||
if parent
|
||||
.map(|parent| is_lone_child(stmt, parent, deleted))
|
||||
.map_or(Ok(None), |v| v.map(Some))?
|
||||
.unwrap_or_default()
|
||||
{
|
||||
// If removing this node would lead to an invalid syntax tree, replace
|
||||
// it with a `pass`.
|
||||
Ok(Fix::replacement(
|
||||
"pass".to_string(),
|
||||
stmt.location,
|
||||
stmt.end_location.unwrap(),
|
||||
))
|
||||
} else {
|
||||
// Otherwise, nuke the entire line.
|
||||
// TODO(charlie): This logic assumes that there are no multi-statement physical lines.
|
||||
Ok(Fix::deletion(
|
||||
Location::new(stmt.location.row(), 1),
|
||||
Location::new(stmt.end_location.unwrap().row() + 1, 1),
|
||||
))
|
||||
}
|
||||
}
|
||||
53
src/autofix/mod.rs
Normal file
53
src/autofix/mod.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
use rustpython_ast::Location;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
pub mod fixer;
|
||||
pub mod helpers;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
|
||||
pub struct Patch {
|
||||
pub content: String,
|
||||
pub location: Location,
|
||||
pub end_location: Location,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Fix {
|
||||
pub patch: Patch,
|
||||
pub applied: bool,
|
||||
}
|
||||
|
||||
impl Fix {
|
||||
pub fn deletion(start: Location, end: Location) -> Self {
|
||||
Self {
|
||||
patch: Patch {
|
||||
content: "".to_string(),
|
||||
location: start,
|
||||
end_location: end,
|
||||
},
|
||||
applied: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn replacement(content: String, start: Location, end: Location) -> Self {
|
||||
Self {
|
||||
patch: Patch {
|
||||
content,
|
||||
location: start,
|
||||
end_location: end,
|
||||
},
|
||||
applied: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insertion(content: String, at: Location) -> Self {
|
||||
Self {
|
||||
patch: Patch {
|
||||
content,
|
||||
location: at,
|
||||
end_location: at,
|
||||
},
|
||||
applied: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
10
src/cache.rs
10
src/cache.rs
@@ -1,3 +1,9 @@
|
||||
// cacache uses asyncd-std which has no wasm support, so currently no caching support on wasm
|
||||
#![cfg_attr(
|
||||
target_family = "wasm",
|
||||
allow(unused_imports, unused_variables, dead_code)
|
||||
)]
|
||||
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::fs::{create_dir_all, File, Metadata};
|
||||
use std::hash::{Hash, Hasher};
|
||||
@@ -5,6 +11,7 @@ use std::io::Write;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
use cacache::Error::EntryNotFound;
|
||||
use filetime::FileTime;
|
||||
use log::error;
|
||||
@@ -107,6 +114,7 @@ pub fn get(
|
||||
return None;
|
||||
};
|
||||
|
||||
#[cfg(not(target_family = "wasm"))] // cacache needs async-std which doesn't support wasm
|
||||
match cacache::read_sync(cache_dir(), cache_key(path, settings, autofix)) {
|
||||
Ok(encoded) => match bincode::deserialize::<CheckResult>(&encoded[..]) {
|
||||
Ok(CheckResult {
|
||||
@@ -137,12 +145,14 @@ pub fn set(
|
||||
return;
|
||||
};
|
||||
|
||||
#[cfg(not(target_family = "wasm"))] // modification date not supported on wasm
|
||||
let check_result = CheckResultRef {
|
||||
metadata: &CacheMetadata {
|
||||
mtime: FileTime::from_last_modification_time(metadata).unix_seconds(),
|
||||
},
|
||||
messages,
|
||||
};
|
||||
#[cfg(not(target_family = "wasm"))] // cacache needs async-std which doesn't support wasm
|
||||
if let Err(e) = cacache::write_sync(
|
||||
cache_dir(),
|
||||
cache_key(path, settings, autofix),
|
||||
|
||||
1596
src/check_ast.rs
1596
src/check_ast.rs
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,13 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use rustpython_parser::ast::Location;
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::autofix::fixer;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checks::{Check, CheckCode, CheckKind};
|
||||
use crate::noqa;
|
||||
use crate::noqa::Directive;
|
||||
use crate::settings::Settings;
|
||||
|
||||
/// Whether the given line is too long and should be reported.
|
||||
@@ -19,17 +26,58 @@ fn should_enforce_line_length(line: &str, length: usize, limit: usize) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn check_lines(checks: &mut Vec<Check>, contents: &str, settings: &Settings) {
|
||||
let enforce_line_too_long = settings.select.contains(&CheckCode::E501);
|
||||
pub fn check_lines(
|
||||
checks: &mut Vec<Check>,
|
||||
contents: &str,
|
||||
noqa_line_for: &[usize],
|
||||
settings: &Settings,
|
||||
autofix: &fixer::Mode,
|
||||
) {
|
||||
let enforce_line_too_long = settings.enabled.contains(&CheckCode::E501);
|
||||
let enforce_noqa = settings.enabled.contains(&CheckCode::M001);
|
||||
|
||||
let mut noqa_directives: BTreeMap<usize, (Directive, Vec<&str>)> = BTreeMap::new();
|
||||
|
||||
let mut line_checks = vec![];
|
||||
let mut ignored = vec![];
|
||||
for (row, line) in contents.lines().enumerate() {
|
||||
|
||||
let lines: Vec<&str> = contents.lines().collect();
|
||||
for (lineno, line) in lines.iter().enumerate() {
|
||||
// Grab the noqa (logical) line number for the current (physical) line.
|
||||
// If there are newlines at the end of the file, they won't be represented in
|
||||
// `noqa_line_for`, so fallback to the current line.
|
||||
let noqa_lineno = noqa_line_for
|
||||
.get(lineno)
|
||||
.map(|lineno| lineno - 1)
|
||||
.unwrap_or(lineno);
|
||||
|
||||
if enforce_noqa {
|
||||
noqa_directives
|
||||
.entry(noqa_lineno)
|
||||
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
|
||||
}
|
||||
|
||||
// Remove any ignored checks.
|
||||
// TODO(charlie): Only validate checks for the current line.
|
||||
for (index, check) in checks.iter().enumerate() {
|
||||
if check.location.row() == row + 1 && check.is_inline_ignored(line) {
|
||||
ignored.push(index);
|
||||
if check.location.row() == lineno + 1 {
|
||||
let noqa = noqa_directives
|
||||
.entry(noqa_lineno)
|
||||
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
|
||||
|
||||
match noqa {
|
||||
(Directive::All(_, _), matches) => {
|
||||
matches.push(check.kind.code().as_ref());
|
||||
ignored.push(index)
|
||||
}
|
||||
(Directive::Codes(_, _, codes), matches) => {
|
||||
if codes.contains(&check.kind.code().as_ref()) {
|
||||
matches.push(check.kind.code().as_ref());
|
||||
ignored.push(index);
|
||||
}
|
||||
}
|
||||
(Directive::None, _) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,16 +85,137 @@ pub fn check_lines(checks: &mut Vec<Check>, contents: &str, settings: &Settings)
|
||||
if enforce_line_too_long {
|
||||
let line_length = line.chars().count();
|
||||
if should_enforce_line_length(line, line_length, settings.line_length) {
|
||||
let noqa = noqa_directives
|
||||
.entry(noqa_lineno)
|
||||
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
|
||||
|
||||
let check = Check::new(
|
||||
CheckKind::LineTooLong(line_length, settings.line_length),
|
||||
Location::new(row + 1, settings.line_length + 1),
|
||||
Range {
|
||||
location: Location::new(lineno + 1, 1),
|
||||
end_location: Location::new(lineno + 1, line_length + 1),
|
||||
},
|
||||
);
|
||||
if !check.is_inline_ignored(line) {
|
||||
line_checks.push(check);
|
||||
|
||||
match noqa {
|
||||
(Directive::All(_, _), matches) => {
|
||||
matches.push(check.kind.code().as_ref());
|
||||
}
|
||||
(Directive::Codes(_, _, codes), matches) => {
|
||||
if codes.contains(&check.kind.code().as_ref()) {
|
||||
matches.push(check.kind.code().as_ref());
|
||||
} else {
|
||||
line_checks.push(check);
|
||||
}
|
||||
}
|
||||
(Directive::None, _) => line_checks.push(check),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Enforce newlines at end of files.
|
||||
if settings.enabled.contains(&CheckCode::W292) && !contents.ends_with('\n') {
|
||||
// Note: if `lines.last()` is `None`, then `contents` is empty (and so we don't want to
|
||||
// raise W292 anyway).
|
||||
if let Some(line) = lines.last() {
|
||||
let lineno = lines.len() - 1;
|
||||
let noqa_lineno = noqa_line_for
|
||||
.get(lineno)
|
||||
.map(|lineno| lineno - 1)
|
||||
.unwrap_or(lineno);
|
||||
|
||||
let noqa = noqa_directives
|
||||
.entry(noqa_lineno)
|
||||
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
|
||||
|
||||
let check = Check::new(
|
||||
CheckKind::NoNewLineAtEndOfFile,
|
||||
Range {
|
||||
location: Location::new(lines.len(), line.len() + 1),
|
||||
end_location: Location::new(lines.len(), line.len() + 1),
|
||||
},
|
||||
);
|
||||
|
||||
match noqa {
|
||||
(Directive::All(_, _), matches) => {
|
||||
matches.push(check.kind.code().as_ref());
|
||||
}
|
||||
(Directive::Codes(_, _, codes), matches) => {
|
||||
if codes.contains(&check.kind.code().as_ref()) {
|
||||
matches.push(check.kind.code().as_ref());
|
||||
} else {
|
||||
line_checks.push(check);
|
||||
}
|
||||
}
|
||||
(Directive::None, _) => line_checks.push(check),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Enforce that the noqa directive was actually used.
|
||||
if enforce_noqa {
|
||||
for (row, (directive, matches)) in noqa_directives {
|
||||
match directive {
|
||||
Directive::All(start, end) => {
|
||||
if matches.is_empty() {
|
||||
let mut check = Check::new(
|
||||
CheckKind::UnusedNOQA(None),
|
||||
Range {
|
||||
location: Location::new(row + 1, start + 1),
|
||||
end_location: Location::new(row + 1, end + 1),
|
||||
},
|
||||
);
|
||||
if autofix.patch() {
|
||||
check.amend(Fix::deletion(
|
||||
Location::new(row + 1, start + 1),
|
||||
Location::new(row + 1, lines[row].chars().count() + 1),
|
||||
));
|
||||
}
|
||||
line_checks.push(check);
|
||||
}
|
||||
}
|
||||
Directive::Codes(start, end, codes) => {
|
||||
let mut invalid_codes = vec![];
|
||||
let mut valid_codes = vec![];
|
||||
for code in codes {
|
||||
if !matches.contains(&code) {
|
||||
invalid_codes.push(code.to_string());
|
||||
} else {
|
||||
valid_codes.push(code.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if !invalid_codes.is_empty() {
|
||||
let mut check = Check::new(
|
||||
CheckKind::UnusedNOQA(Some(invalid_codes)),
|
||||
Range {
|
||||
location: Location::new(row + 1, start + 1),
|
||||
end_location: Location::new(row + 1, end + 1),
|
||||
},
|
||||
);
|
||||
if autofix.patch() {
|
||||
if valid_codes.is_empty() {
|
||||
check.amend(Fix::deletion(
|
||||
Location::new(row + 1, start + 1),
|
||||
Location::new(row + 1, lines[row].chars().count() + 1),
|
||||
));
|
||||
} else {
|
||||
check.amend(Fix::replacement(
|
||||
format!(" # noqa: {}", valid_codes.join(", ")),
|
||||
Location::new(row + 1, start + 1),
|
||||
Location::new(row + 1, lines[row].chars().count() + 1),
|
||||
));
|
||||
}
|
||||
}
|
||||
line_checks.push(check);
|
||||
}
|
||||
}
|
||||
Directive::None => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ignored.sort();
|
||||
for index in ignored.iter().rev() {
|
||||
checks.swap_remove(*index);
|
||||
@@ -56,25 +225,28 @@ pub fn check_lines(checks: &mut Vec<Check>, contents: &str, settings: &Settings)
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::BTreeSet;
|
||||
use crate::autofix::fixer;
|
||||
use crate::checks::{Check, CheckCode};
|
||||
use crate::settings;
|
||||
|
||||
use super::check_lines;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn e501_non_ascii_char() {
|
||||
let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.
|
||||
let noqa_line_for: Vec<usize> = vec![1];
|
||||
let check_with_max_line_length = |line_length: usize| {
|
||||
let mut checks: Vec<Check> = vec![];
|
||||
let settings = Settings {
|
||||
pyproject: None,
|
||||
project_root: None,
|
||||
line_length,
|
||||
exclude: vec![],
|
||||
extend_exclude: vec![],
|
||||
select: BTreeSet::from_iter(vec![CheckCode::E501]),
|
||||
};
|
||||
check_lines(&mut checks, line, &settings);
|
||||
check_lines(
|
||||
&mut checks,
|
||||
line,
|
||||
&noqa_line_for,
|
||||
&settings::Settings {
|
||||
line_length,
|
||||
..settings::Settings::for_rule(CheckCode::E501)
|
||||
},
|
||||
&fixer::Mode::Generate,
|
||||
);
|
||||
return checks;
|
||||
};
|
||||
assert!(!check_with_max_line_length(6).is_empty());
|
||||
|
||||
1433
src/checks.rs
1433
src/checks.rs
File diff suppressed because it is too large
Load Diff
142
src/cli.rs
Normal file
142
src/cli.rs
Normal file
@@ -0,0 +1,142 @@
|
||||
use std::fmt;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::{command, Parser};
|
||||
use log::warn;
|
||||
use regex::Regex;
|
||||
|
||||
use crate::checks::CheckCode;
|
||||
use crate::printer::SerializationFormat;
|
||||
use crate::pyproject::StrCheckCodePair;
|
||||
use crate::settings::PythonVersion;
|
||||
use crate::RawSettings;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(author, about = "ruff: An extremely fast Python linter.")]
|
||||
#[command(version)]
|
||||
pub struct Cli {
|
||||
#[arg(required = true)]
|
||||
pub files: Vec<PathBuf>,
|
||||
/// Path to the `pyproject.toml` file to use for configuration.
|
||||
#[arg(long)]
|
||||
pub config: Option<PathBuf>,
|
||||
/// Enable verbose logging.
|
||||
#[arg(short, long)]
|
||||
pub verbose: bool,
|
||||
/// Disable all logging (but still exit with status code "1" upon detecting errors).
|
||||
#[arg(short, long)]
|
||||
pub quiet: bool,
|
||||
/// Exit with status code "0", even upon detecting errors.
|
||||
#[arg(short, long)]
|
||||
pub exit_zero: bool,
|
||||
/// Run in watch mode by re-running whenever files change.
|
||||
#[arg(short, long)]
|
||||
pub watch: bool,
|
||||
/// Attempt to automatically fix lint errors.
|
||||
#[arg(short, long)]
|
||||
pub fix: bool,
|
||||
/// Disable cache reads.
|
||||
#[arg(short, long)]
|
||||
pub no_cache: bool,
|
||||
/// List of error codes to enable.
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
pub select: Vec<CheckCode>,
|
||||
/// Like --select, but adds additional error codes on top of the selected ones.
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
pub extend_select: Vec<CheckCode>,
|
||||
/// List of error codes to ignore.
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
pub ignore: Vec<CheckCode>,
|
||||
/// Like --ignore, but adds additional error codes on top of the ignored ones.
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
pub extend_ignore: Vec<CheckCode>,
|
||||
/// List of paths, used to exclude files and/or directories from checks.
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
pub exclude: Vec<String>,
|
||||
/// Like --exclude, but adds additional files and directories on top of the excluded ones.
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
pub extend_exclude: Vec<String>,
|
||||
/// List of mappings from file pattern to code to exclude
|
||||
#[arg(long, value_delimiter = ',')]
|
||||
pub per_file_ignores: Vec<StrCheckCodePair>,
|
||||
/// Output serialization format for error messages.
|
||||
#[arg(long, value_enum, default_value_t=SerializationFormat::Text)]
|
||||
pub format: SerializationFormat,
|
||||
/// See the files ruff will be run against with the current settings.
|
||||
#[arg(long)]
|
||||
pub show_files: bool,
|
||||
/// See ruff's settings.
|
||||
#[arg(long)]
|
||||
pub show_settings: bool,
|
||||
/// Enable automatic additions of noqa directives to failing lines.
|
||||
#[arg(long)]
|
||||
pub add_noqa: bool,
|
||||
/// Regular expression matching the name of dummy variables.
|
||||
#[arg(long)]
|
||||
pub dummy_variable_rgx: Option<Regex>,
|
||||
/// The minimum Python version that should be supported.
|
||||
#[arg(long)]
|
||||
pub target_version: Option<PythonVersion>,
|
||||
/// Round-trip auto-formatting.
|
||||
// TODO(charlie): This should be a sub-command.
|
||||
#[arg(long, hide = true)]
|
||||
pub autoformat: bool,
|
||||
/// The name of the file when passing it through stdin.
|
||||
#[arg(long)]
|
||||
pub stdin_filename: Option<String>,
|
||||
}
|
||||
|
||||
pub enum Warnable {
|
||||
Select,
|
||||
ExtendSelect,
|
||||
}
|
||||
|
||||
impl fmt::Display for Warnable {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Warnable::Select => fmt.write_str("--select"),
|
||||
Warnable::ExtendSelect => fmt.write_str("--extend-select"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Warn the user if they attempt to enable a code that won't be respected.
|
||||
pub fn warn_on(
|
||||
flag: Warnable,
|
||||
codes: &[CheckCode],
|
||||
cli_ignore: &[CheckCode],
|
||||
cli_extend_ignore: &[CheckCode],
|
||||
pyproject_settings: &RawSettings,
|
||||
pyproject_path: &Option<PathBuf>,
|
||||
) {
|
||||
for code in codes {
|
||||
if !cli_ignore.is_empty() {
|
||||
if cli_ignore.contains(code) {
|
||||
warn!("{code:?} was passed to {flag}, but ignored via --ignore")
|
||||
}
|
||||
} else if pyproject_settings.ignore.contains(code) {
|
||||
if let Some(path) = pyproject_path {
|
||||
warn!(
|
||||
"{code:?} was passed to {flag}, but ignored by the `ignore` field in {}",
|
||||
path.to_string_lossy()
|
||||
)
|
||||
} else {
|
||||
warn!("{code:?} was passed to {flag}, but ignored by the default `ignore` field",)
|
||||
}
|
||||
}
|
||||
if !cli_extend_ignore.is_empty() {
|
||||
if cli_extend_ignore.contains(code) {
|
||||
warn!("{code:?} was passed to {flag}, but ignored via --extend-ignore")
|
||||
}
|
||||
} else if pyproject_settings.extend_ignore.contains(code) {
|
||||
if let Some(path) = pyproject_path {
|
||||
warn!(
|
||||
"{code:?} was passed to {flag}, but ignored by the `extend_ignore` field in {}",
|
||||
path.to_string_lossy()
|
||||
)
|
||||
} else {
|
||||
warn!("{code:?} was passed to {flag}, but ignored by the default `extend_ignore` field")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
1039
src/code_gen.rs
Normal file
1039
src/code_gen.rs
Normal file
File diff suppressed because it is too large
Load Diff
23
src/docstrings/definition.rs
Normal file
23
src/docstrings/definition.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use rustpython_ast::{Expr, Stmt};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum DefinitionKind<'a> {
|
||||
Module,
|
||||
Package,
|
||||
Class(&'a Stmt),
|
||||
NestedClass(&'a Stmt),
|
||||
Function(&'a Stmt),
|
||||
NestedFunction(&'a Stmt),
|
||||
Method(&'a Stmt),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Definition<'a> {
|
||||
pub kind: DefinitionKind<'a>,
|
||||
pub docstring: Option<&'a Expr>,
|
||||
}
|
||||
|
||||
pub enum Documentable {
|
||||
Class,
|
||||
Function,
|
||||
}
|
||||
82
src/docstrings/extraction.rs
Normal file
82
src/docstrings/extraction.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
//! Extract docstrings from an AST.
|
||||
|
||||
use rustpython_ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
|
||||
|
||||
use crate::docstrings::definition::{Definition, DefinitionKind, Documentable};
|
||||
use crate::visibility::{Modifier, VisibleScope};
|
||||
|
||||
/// Extract a docstring from a function or class body.
|
||||
pub fn docstring_from(suite: &[Stmt]) -> Option<&Expr> {
|
||||
if let Some(stmt) = suite.first() {
|
||||
if let StmtKind::Expr { value } = &stmt.node {
|
||||
if matches!(
|
||||
&value.node,
|
||||
ExprKind::Constant {
|
||||
value: Constant::Str(_),
|
||||
..
|
||||
}
|
||||
) {
|
||||
return Some(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Extract a `Definition` from the AST node defined by a `Stmt`.
|
||||
pub fn extract<'a>(
|
||||
scope: &VisibleScope,
|
||||
stmt: &'a Stmt,
|
||||
body: &'a [Stmt],
|
||||
kind: &Documentable,
|
||||
) -> Definition<'a> {
|
||||
let expr = docstring_from(body);
|
||||
match kind {
|
||||
Documentable::Function => match scope {
|
||||
VisibleScope {
|
||||
modifier: Modifier::Module,
|
||||
..
|
||||
} => Definition {
|
||||
kind: DefinitionKind::Function(stmt),
|
||||
docstring: expr,
|
||||
},
|
||||
VisibleScope {
|
||||
modifier: Modifier::Class,
|
||||
..
|
||||
} => Definition {
|
||||
kind: DefinitionKind::Method(stmt),
|
||||
docstring: expr,
|
||||
},
|
||||
VisibleScope {
|
||||
modifier: Modifier::Function,
|
||||
..
|
||||
} => Definition {
|
||||
kind: DefinitionKind::NestedFunction(stmt),
|
||||
docstring: expr,
|
||||
},
|
||||
},
|
||||
Documentable::Class => match scope {
|
||||
VisibleScope {
|
||||
modifier: Modifier::Module,
|
||||
..
|
||||
} => Definition {
|
||||
kind: DefinitionKind::Class(stmt),
|
||||
docstring: expr,
|
||||
},
|
||||
VisibleScope {
|
||||
modifier: Modifier::Class,
|
||||
..
|
||||
} => Definition {
|
||||
kind: DefinitionKind::NestedClass(stmt),
|
||||
docstring: expr,
|
||||
},
|
||||
VisibleScope {
|
||||
modifier: Modifier::Function,
|
||||
..
|
||||
} => Definition {
|
||||
kind: DefinitionKind::NestedClass(stmt),
|
||||
docstring: expr,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user