Compare commits
66 Commits
dhruv/synt...v0.4.6

| Author | SHA1 | Date |
|---|---|---|
| | 49a5a9ccc2 | |
| | 69d9212817 | |
| | 4a305588e9 | |
| | 16acd4913f | |
| | 3989cb8b56 | |
| | a38c05bf13 | |
| | ab107ef1f3 | |
| | b36c713279 | |
| | 34a5063aa2 | |
| | adc0a5d126 | |
| | e28e737296 | |
| | 37ad994318 | |
| | 246a3388ee | |
| | 6be00d5775 | |
| | 9200dfc79f | |
| | 5dcde88099 | |
| | 7794eb2bde | |
| | 40bfae4f99 | |
| | 7b064b25b2 | |
| | 9993115f63 | |
| | f0a21c9161 | |
| | f26c155de5 | |
| | c3fa826b0a | |
| | 8b69794f1d | |
| | 4e7c84df1d | |
| | 99c400000a | |
| | b5d147d219 | |
| | 77da4615c1 | |
| | 627d230688 | |
| | 0eef834e89 | |
| | 650c578e07 | |
| | 9567fddf69 | |
| | ab6d9d4658 | |
| | 677893226a | |
| | 33fd50027c | |
| | 3e30962077 | |
| | 81275a6c3d | |
| | 52c946a4c5 | |
| | ebdaf5765a | |
| | 9a93409e1c | |
| | 102b9d930f | |
| | 550aa871d3 | |
| | 3c22a3bdcc | |
| | 6263923915 | |
| | 94abea4b08 | |
| | 519a65007f | |
| | 573facd2ba | |
| | 3cb2e677aa | |
| | f0046ab28e | |
| | 5bb9720a10 | |
| | 9ff18bf9d3 | |
| | aa906b9c75 | |
| | 3476e2f359 | |
| | 8848eca3c6 | |
| | b0731ef9cb | |
| | 84531d1644 | |
| | 83b8b62e3e | |
| | 7225732859 | |
| | 403f0dccd8 | |
| | 46fcd19ca6 | |
| | d9ec3d56b0 | |
| | cd87b787d9 | |
| | dd6d411026 | |
| | cfceb437a8 | |
| | 48b0660228 | |
| | 24899efe50 | |
.github/workflows/ci.yaml (3 changes; vendored)

@@ -167,6 +167,9 @@ jobs:
      - uses: Swatinem/rust-cache@v2
      - name: "Run tests"
        shell: bash
        env:
          # Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
          RUSTUP_WINDOWS_PATH_ADD_BIN: 1
        run: |
          cargo nextest run --all-features --profile ci
          cargo test --all-features --doc
.vscode/extensions.json (5 changes; vendored; new file)

@@ -0,0 +1,5 @@
{
  "recommendations": [
    "rust-lang.rust-analyzer"
  ]
}
.vscode/settings.json (6 changes; vendored; new file)

@@ -0,0 +1,6 @@
{
  "rust-analyzer.check.extraArgs": [
    "--all-features"
  ],
  "rust-analyzer.check.command": "clippy",
}
CHANGELOG.md (97 changes)

@@ -1,5 +1,102 @@
# Changelog

## 0.4.6

### Breaking changes

- Use project-relative paths when calculating GitLab fingerprints ([#11532](https://github.com/astral-sh/ruff/pull/11532))

### Preview features

- \[`flake8-async`\] Sleep with >24 hour interval should usually sleep forever (`ASYNC116`) ([#11498](https://github.com/astral-sh/ruff/pull/11498))

### Rule changes

- \[`numpy`\] Add missing functions to NumPy 2.0 migration rule ([#11528](https://github.com/astral-sh/ruff/pull/11528))
- \[`mccabe`\] Consider irrefutable pattern similar to `if .. else` for `C901` ([#11565](https://github.com/astral-sh/ruff/pull/11565))
- Consider `match`-`case` statements for `C901`, `PLR0912`, and `PLR0915` ([#11521](https://github.com/astral-sh/ruff/pull/11521))
- Remove empty strings when converting to f-string (`UP032`) ([#11524](https://github.com/astral-sh/ruff/pull/11524))
- \[`flake8-bandit`\] `request-without-timeout` should warn for `requests.request` ([#11548](https://github.com/astral-sh/ruff/pull/11548))
- \[`flake8-self`\] Ignore sunder accesses in `flake8-self` rules ([#11546](https://github.com/astral-sh/ruff/pull/11546))
- \[`pyupgrade`\] Lint for `TypeAliasType` usages (`UP040`) ([#11530](https://github.com/astral-sh/ruff/pull/11530))

### Server

- Respect excludes in `ruff server` configuration discovery ([#11551](https://github.com/astral-sh/ruff/pull/11551))
- Use default settings if initialization options is empty or not provided ([#11566](https://github.com/astral-sh/ruff/pull/11566))
- `ruff server` correctly treats `.pyi` files as stub files ([#11535](https://github.com/astral-sh/ruff/pull/11535))
- `ruff server` searches for configuration in parent directories ([#11537](https://github.com/astral-sh/ruff/pull/11537))
- `ruff server`: An empty code action filter no longer returns notebook source actions ([#11526](https://github.com/astral-sh/ruff/pull/11526))

### Bug fixes

- \[`flake8-logging-format`\] Fix autofix title in `logging-warn` (`G010`) ([#11514](https://github.com/astral-sh/ruff/pull/11514))
- \[`refurb`\] Avoid recommending `operator.itemgetter` with dependence on lambda arguments ([#11574](https://github.com/astral-sh/ruff/pull/11574))
- \[`flake8-simplify`\] Avoid recommending context manager in `__enter__` implementations ([#11575](https://github.com/astral-sh/ruff/pull/11575))
- Create intermediary directories for `--output-file` ([#11550](https://github.com/astral-sh/ruff/pull/11550))
- Propagate reads on global variables ([#11584](https://github.com/astral-sh/ruff/pull/11584))
- Treat all `singledispatch` arguments as runtime-required ([#11523](https://github.com/astral-sh/ruff/pull/11523))

## 0.4.5

### Ruff's language server is now in Beta

`v0.4.5` marks the official Beta release of `ruff server`, an integrated language server built into Ruff.
`ruff server` supports the same feature set as `ruff-lsp`, powering linting, formatting, and
code fixes in Ruff's editor integrations -- but with superior performance and
no installation required. We'd love your feedback!

You can enable `ruff server` in the [VS Code extension](https://github.com/astral-sh/ruff-vscode?tab=readme-ov-file#enabling-the-rust-based-language-server) today.

To read more about this exciting milestone, check out our [blog post](https://astral.sh/blog/ruff-v0.4.5)!
### Rule changes

- \[`flake8-future-annotations`\] Reword `future-rewritable-type-annotation` (`FA100`) message ([#11381](https://github.com/astral-sh/ruff/pull/11381))
- \[`pycodestyle`\] Consider soft keywords for `E27` rules ([#11446](https://github.com/astral-sh/ruff/pull/11446))
- \[`pyflakes`\] Recommend adding unused import bindings to `__all__` ([#11314](https://github.com/astral-sh/ruff/pull/11314))
- \[`pyflakes`\] Update documentation and deprecate `ignore_init_module_imports` ([#11436](https://github.com/astral-sh/ruff/pull/11436))
- \[`pyupgrade`\] Mark quotes as unnecessary for non-evaluated annotations ([#11485](https://github.com/astral-sh/ruff/pull/11485))

### Formatter

- Avoid multiline quotes warning with `quote-style = preserve` ([#11490](https://github.com/astral-sh/ruff/pull/11490))

### Server

- Support Jupyter Notebook files ([#11206](https://github.com/astral-sh/ruff/pull/11206))
- Support `noqa` comment code actions ([#11276](https://github.com/astral-sh/ruff/pull/11276))
- Fix automatic configuration reloading ([#11492](https://github.com/astral-sh/ruff/pull/11492))
- Fix several issues with configuration in Neovim and Helix ([#11497](https://github.com/astral-sh/ruff/pull/11497))

### CLI

- Add `--output-format` as a CLI option for `ruff config` ([#11438](https://github.com/astral-sh/ruff/pull/11438))

### Bug fixes

- Avoid `PLE0237` for property with setter ([#11377](https://github.com/astral-sh/ruff/pull/11377))
- Avoid `TCH005` for `if` stmt with `elif`/`else` block ([#11376](https://github.com/astral-sh/ruff/pull/11376))
- Avoid flagging `__future__` annotations as required for non-evaluated type annotations ([#11414](https://github.com/astral-sh/ruff/pull/11414))
- Check for ruff executable in 'bin' directory as installed by 'pip install --target'. ([#11450](https://github.com/astral-sh/ruff/pull/11450))
- Sort edits prior to deduplicating in quotation fix ([#11452](https://github.com/astral-sh/ruff/pull/11452))
- Treat escaped newline as valid sequence ([#11465](https://github.com/astral-sh/ruff/pull/11465))
- \[`flake8-pie`\] Preserve parentheses in `unnecessary-dict-kwargs` ([#11372](https://github.com/astral-sh/ruff/pull/11372))
- \[`pylint`\] Ignore `__slots__` with dynamic values ([#11488](https://github.com/astral-sh/ruff/pull/11488))
- \[`pylint`\] Remove `try` body from branch counting ([#11487](https://github.com/astral-sh/ruff/pull/11487))
- \[`refurb`\] Respect operator precedence in `FURB110` ([#11464](https://github.com/astral-sh/ruff/pull/11464))

### Documentation

- Add `--preview` to the README ([#11395](https://github.com/astral-sh/ruff/pull/11395))
- Add Python 3.13 to list of allowed Python versions ([#11411](https://github.com/astral-sh/ruff/pull/11411))
- Simplify Neovim setup documentation ([#11489](https://github.com/astral-sh/ruff/pull/11489))
- Update CONTRIBUTING.md to reflect the new parser ([#11434](https://github.com/astral-sh/ruff/pull/11434))
- Update server documentation with new migration guide ([#11499](https://github.com/astral-sh/ruff/pull/11499))
- \[`pycodestyle`\] Clarify motivation for `E713` and `E714` ([#11483](https://github.com/astral-sh/ruff/pull/11483))
- \[`pyflakes`\] Update docs to describe WAI behavior (F541) ([#11362](https://github.com/astral-sh/ruff/pull/11362))
- \[`pylint`\] Clearly indicate what is counted as a branch ([#11423](https://github.com/astral-sh/ruff/pull/11423))

## 0.4.4

### Preview features

@@ -101,6 +101,8 @@ pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting,

These checks will run on GitHub Actions when you open your pull request, but running them locally
will save you time and expedite the merge process.

If you're using VS Code, you can also install the recommended [rust-analyzer](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer) extension to get these checks while editing.

Note that many code changes also require updating the snapshot tests, which is done interactively
after running `cargo test` like so:
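A minimal sketch of that interactive snapshot-review step, assuming the `cargo-insta` helper is installed (the exact commands are the usual insta workflow, not quoted from the hunk above):

```shell
# Run the test suite; failing snapshot tests write pending .snap.new files.
cargo test
# Interactively accept or reject the updated snapshots.
cargo insta review
```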
103
Cargo.lock
generated
103
Cargo.lock
generated
@@ -129,9 +129,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.83"
|
||||
version = "1.0.86"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3"
|
||||
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
|
||||
|
||||
[[package]]
|
||||
name = "argfile"
|
||||
@@ -886,12 +886,6 @@ version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
|
||||
|
||||
[[package]]
|
||||
name = "hexf-parse"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df"
|
||||
|
||||
[[package]]
|
||||
name = "home"
|
||||
version = "0.5.9"
|
||||
@@ -1176,41 +1170,11 @@ version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
|
||||
[[package]]
|
||||
name = "lexical-parse-float"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f"
|
||||
dependencies = [
|
||||
"lexical-parse-integer",
|
||||
"lexical-util",
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lexical-parse-integer"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9"
|
||||
dependencies = [
|
||||
"lexical-util",
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lexical-util"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc"
|
||||
dependencies = [
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.154"
|
||||
version = "0.2.155"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"
|
||||
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
|
||||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
@@ -1239,9 +1203,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "libmimalloc-sys"
|
||||
version = "0.1.37"
|
||||
version = "0.1.38"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "81eb4061c0582dedea1cbc7aff2240300dd6982e0239d1c99e65c1dbf4a30ba7"
|
||||
checksum = "0e7bb23d733dfcc8af652a78b7bf232f0e967710d044732185e561e47c0336b6"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
@@ -1300,8 +1264,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "lsp-types"
|
||||
version = "0.95.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e34d33a8e9b006cd3fc4fe69a921affa097bae4bb65f76271f4644f9a334365"
|
||||
source = "git+https://github.com/astral-sh/lsp-types.git?rev=3512a9f#3512a9f33eadc5402cfab1b8f7340824c8ca1439"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"serde",
|
||||
@@ -1339,9 +1302,9 @@ checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
|
||||
|
||||
[[package]]
|
||||
name = "mimalloc"
|
||||
version = "0.1.41"
|
||||
version = "0.1.42"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9f41a2280ded0da56c8cf898babb86e8f10651a34adcfff190ae9a1159c6908d"
|
||||
checksum = "e9186d86b79b52f4a77af65604b51225e8db1d6ee7e3f41aec1e40829c71a176"
|
||||
dependencies = [
|
||||
"libmimalloc-sys",
|
||||
]
|
||||
@@ -1498,9 +1461,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.12.2"
|
||||
version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb"
|
||||
checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
|
||||
dependencies = [
|
||||
"lock_api",
|
||||
"parking_lot_core",
|
||||
@@ -1707,9 +1670,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.82"
|
||||
version = "1.0.84"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b"
|
||||
checksum = "ec96c6a92621310b51366f1e28d05ef11489516e93be030060e5fc12024a49d6"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
@@ -1940,7 +1903,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.4.4"
|
||||
version = "0.4.6"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2101,7 +2064,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.4.4"
|
||||
version = "0.4.6"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.2",
|
||||
@@ -2278,9 +2241,7 @@ name = "ruff_python_literal"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.5.0",
|
||||
"hexf-parse",
|
||||
"itertools 0.12.1",
|
||||
"lexical-parse-float",
|
||||
"ruff_python_ast",
|
||||
"unic-ucd-category",
|
||||
]
|
||||
@@ -2368,6 +2329,7 @@ version = "0.2.2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"crossbeam",
|
||||
"globset",
|
||||
"insta",
|
||||
"jod-thread",
|
||||
"libc",
|
||||
@@ -2377,6 +2339,7 @@ dependencies = [
|
||||
"ruff_diagnostics",
|
||||
"ruff_formatter",
|
||||
"ruff_linter",
|
||||
"ruff_notebook",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_codegen",
|
||||
"ruff_python_formatter",
|
||||
@@ -2555,9 +2518,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "schemars"
|
||||
version = "0.8.19"
|
||||
version = "0.8.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc6e7ed6919cb46507fb01ff1654309219f62b4d603822501b0b80d42f6f21ef"
|
||||
checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92"
|
||||
dependencies = [
|
||||
"dyn-clone",
|
||||
"schemars_derive",
|
||||
@@ -2567,9 +2530,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "schemars_derive"
|
||||
version = "0.8.19"
|
||||
version = "0.8.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "185f2b7aa7e02d418e453790dde16890256bbd2bcd04b7dc5348811052b53f49"
|
||||
checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2597,9 +2560,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.201"
|
||||
version = "1.0.203"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c"
|
||||
checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
@@ -2617,9 +2580,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.201"
|
||||
version = "1.0.203"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865"
|
||||
checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2744,9 +2707,9 @@ checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c"
|
||||
|
||||
[[package]]
|
||||
name = "smol_str"
|
||||
version = "0.2.1"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e6845563ada680337a52d43bb0b29f396f2d911616f6573012645b9e3d048a49"
|
||||
checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
@@ -2814,9 +2777,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.63"
|
||||
version = "2.0.66"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bf5be731623ca1a1fb7d8be6f261a3be6d3e2337b8a1f97be944d020c8fcb704"
|
||||
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2904,18 +2867,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.60"
|
||||
version = "1.0.61"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18"
|
||||
checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.60"
|
||||
version = "1.0.61"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524"
|
||||
checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
||||
@@ -62,7 +62,6 @@ filetime = { version = "0.2.23" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
hashbrown = "0.14.3"
hexf-parse = { version = "0.2.1" }
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
@@ -76,12 +75,11 @@ is-wsl = { version = "0.4.0" }
itertools = { version = "0.12.1" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
lexical-parse-float = { version = "0.8.0", features = ["format"] }
libc = { version = "0.2.153" }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { version = "0.95.0", features = ["proposed"] }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = ["proposed"] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
@@ -152,7 +152,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.4.4
  rev: v0.4.6
  hooks:
    # Run the linter.
    - id: ruff
@@ -433,6 +433,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
- [Mypy](https://github.com/python/mypy)
- [Nautobot](https://github.com/nautobot/nautobot)
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [Nokia](https://nokia.com/)
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.4.4"
version = "0.4.6"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -857,12 +857,20 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {

        if setting.linter.rules.enabled(Rule::BadQuotesMultilineString)
            && setting.linter.flake8_quotes.multiline_quotes == Quote::Single
            && matches!(
                setting.formatter.quote_style,
                QuoteStyle::Single | QuoteStyle::Double
            )
        {
            warn_user_once!("The `flake8-quotes.multiline-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q001` when using the formatter, which enforces double quotes for multiline strings. Alternatively, set the `flake8-quotes.multiline-quotes` option to `\"double\"`.`");
        }

        if setting.linter.rules.enabled(Rule::BadQuotesDocstring)
            && setting.linter.flake8_quotes.docstring_quotes == Quote::Single
            && matches!(
                setting.formatter.quote_style,
                QuoteStyle::Single | QuoteStyle::Double
            )
        {
            warn_user_once!("The `flake8-quotes.docstring-quotes=\"single\"` option is incompatible with the formatter. We recommend disabling `Q002` when using the formatter, which enforces double quotes for docstrings. Alternatively, set the `flake8-quotes.docstring-quotes` option to `\"double\"`.`");
        }
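For context, a minimal `ruff.toml` sketch (the rule selection and values are illustrative, not taken from the diff) that would trigger these warnings, since the linter demands single quotes while the formatter rewrites them to double quotes:

```toml
# Illustrative configuration: Q001/Q002 expect single quotes, but the
# formatter's "double" quote style will produce double quotes.
[lint]
select = ["Q"]

[lint.flake8-quotes]
multiline-quotes = "single"
docstring-quotes = "single"

[format]
quote-style = "double"
```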
@@ -237,6 +237,9 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
    let mut writer: Box<dyn Write> = match cli.output_file {
        Some(path) if !cli.watch => {
            colored::control::set_override(false);
            if let Some(parent) = path.parent() {
                std::fs::create_dir_all(parent)?;
            }
            let file = File::create(path)?;
            Box::new(BufWriter::new(file))
        }
@@ -1038,6 +1038,48 @@ def say_hy(name: str):
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_linter_options_preserve() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[lint]
|
||||
select = ["Q"]
|
||||
|
||||
[lint.flake8-quotes]
|
||||
inline-quotes = "single"
|
||||
docstring-quotes = "single"
|
||||
multiline-quotes = "single"
|
||||
|
||||
[format]
|
||||
quote-style = "preserve"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let test_path = tempdir.path().join("test.py");
|
||||
fs::write(
|
||||
&test_path,
|
||||
r#"
|
||||
def say_hy(name: str):
|
||||
print(f"Hy {name}")"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--no-cache", "--config"])
|
||||
.arg(&ruff_toml)
|
||||
.arg(test_path), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
1 file reformatted
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_rules_default_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
|
||||
@@ -1414,7 +1414,7 @@ fn check_input_from_argfile() -> Result<()> {
    fs::write(&file_a_path, b"import os")?;
    fs::write(&file_b_path, b"print('hello, world!')")?;

    // Create a the input file for argfile to expand
    // Create the input file for argfile to expand
    let input_file_path = tempdir.path().join("file_paths.txt");
    fs::write(
        &input_file_path,
@@ -34,12 +34,29 @@ marking it as unused, as in:
from module import member as member
```

Alternatively, you can use `__all__` to declare a symbol as part of the module's
interface, as in:

```python
# __init__.py
import some_module

__all__ = [ "some_module"]
```

## Fix safety

When `ignore_init_module_imports` is disabled, fixes can remove for unused imports in `__init__` files.
These fixes are considered unsafe because they can change the public interface.
Fixes to remove unused imports are safe, except in `__init__.py` files.

Applying fixes to `__init__.py` files is currently in preview. The fix offered depends on the
type of the unused import. Ruff will suggest a safe fix to export first-party imports with
either a redundant alias or, if already present in the file, an `__all__` entry. If multiple
`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix
to remove third-party and standard library imports -- the fix is unsafe because the module's
interface changes.

## Example

```python
import numpy as np  # unused import

@@ -49,12 +66,14 @@ def area(radius):
```

Use instead:

```python
def area(radius):
    return 3.14 * radius**2
```

To check the availability of a module, use `importlib.util.find_spec`:

```python
from importlib.util import find_spec
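A short illustrative completion of that `find_spec` pattern (the module name `numpy` is just an example carried over from the doc):

```python
from importlib.util import find_spec

# Query the import system without actually importing the module.
if find_spec("numpy") is None:
    print("numpy is not installed")
```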
@@ -553,11 +553,6 @@ impl PrintedRange {
|
||||
pub fn source_range(&self) -> TextRange {
|
||||
self.source_range
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_code(self, code: String) -> Self {
|
||||
Self { code, ..self }
|
||||
}
|
||||
}
|
||||
|
||||
/// Public return type of the formatter
|
||||
@@ -780,10 +775,6 @@ where
|
||||
self.item = item;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn into_item(self) -> T {
|
||||
self.item
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, R, C> Format<C> for FormatOwnedWithRule<T, R, C>
|
||||
|
||||
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.4.4"
version = "0.4.6"
publish = false
authors = { workspace = true }
edition = { workspace = true }
crates/ruff_linter/resources/test/fixtures/flake8_async/ASYNC116.py (57 changes; vendored; new file)

@@ -0,0 +1,57 @@
# type: ignore
# ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
import math
from math import inf


async def import_trio():
    import trio

    # These examples are probably not meant to ever wake up:
    await trio.sleep(100000)  # error: 116, "async"

    # 'inf literal' overflow trick
    await trio.sleep(1e999)  # error: 116, "async"

    await trio.sleep(86399)
    await trio.sleep(86400)
    await trio.sleep(86400.01)  # error: 116, "async"
    await trio.sleep(86401)  # error: 116, "async"

    await trio.sleep(-1)  # will raise a runtime error
    await trio.sleep(0)  # handled by different check

    # these ones _definitely_ never wake up (TODO)
    await trio.sleep(float("inf"))
    await trio.sleep(math.inf)
    await trio.sleep(inf)

    # don't require inf to be in math (TODO)
    await trio.sleep(np.inf)

    # don't evaluate expressions (TODO)
    one_day = 86401
    await trio.sleep(86400 + 1)
    await trio.sleep(60 * 60 * 24 + 1)
    await trio.sleep(foo())
    await trio.sleep(one_day)
    await trio.sleep(86400 + foo())
    await trio.sleep(86400 + ...)
    await trio.sleep("hello")
    await trio.sleep(...)


def not_async_fun():
    import trio

    # does not require the call to be awaited, nor in an async fun
    trio.sleep(86401)  # error: 116, "async"
    # also checks that we don't break visit_Call
    trio.run(trio.sleep(86401))  # error: 116, "async"


async def import_from_trio():
    from trio import sleep

    # catch from import
    await sleep(86401)  # error: 116, "async"
crates/ruff_linter/resources/test/fixtures/flake8_future_annotations/ok_quoted_type.py (7 changes; vendored; new file)

@@ -0,0 +1,7 @@
def main() -> None:
    a_list: list[str] | None = []
    a_list.append("hello")


def hello(y: "dict[str, int] | None") -> None:
    del y
@@ -77,3 +77,8 @@ print(foo._asdict())
|
||||
import os
|
||||
|
||||
os._exit()
|
||||
|
||||
|
||||
from enum import Enum
|
||||
|
||||
Enum._missing_(1) # OK
|
||||
|
||||
@@ -46,3 +46,15 @@ with contextlib.ExitStack() as exit_stack:
|
||||
# OK (quick one-liner to clear file contents)
|
||||
open("filename", "w").close()
|
||||
pathlib.Path("filename").open("w").close()
|
||||
|
||||
|
||||
# OK (custom context manager)
|
||||
class MyFile:
|
||||
def __init__(self, filename: str):
|
||||
self.filename = filename
|
||||
|
||||
def __enter__(self):
|
||||
self.file = open(self.filename)
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.file.close()
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import singledispatch
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from numpy import asarray
|
||||
@@ -32,3 +33,24 @@ def _(a: spmatrix) -> spmatrix:
|
||||
|
||||
def _(a: DataFrame) -> DataFrame:
|
||||
return a
|
||||
|
||||
|
||||
@singledispatch
|
||||
def process_path(a: int | str, p: Path) -> int:
|
||||
"""Convert arg to array or leaves it as sparse matrix."""
|
||||
msg = f"Unhandled type {type(a)}"
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
|
||||
@process_path.register
|
||||
def _(a: int, p: Path) -> int:
|
||||
return asarray(a)
|
||||
|
||||
|
||||
@process_path.register
|
||||
def _(a: str, p: Path) -> int:
|
||||
return a
|
||||
|
||||
|
||||
def _(a: DataFrame, p: Path) -> DataFrame:
|
||||
return a
|
||||
|
||||
@@ -106,3 +106,11 @@ def func():
|
||||
np.who()
|
||||
|
||||
np.row_stack(([1,2], [3,4]))
|
||||
|
||||
np.alltrue([True, True])
|
||||
|
||||
np.anytrue([True, False])
|
||||
|
||||
np.cumproduct([1, 2, 3])
|
||||
|
||||
np.product([1, 2, 3])
|
||||
|
||||
@@ -63,3 +63,16 @@ if (a and
|
||||
#: Okay
|
||||
def f():
|
||||
return 1
|
||||
|
||||
# Soft keywords
|
||||
|
||||
#: E271
|
||||
type Number = int
|
||||
|
||||
#: E273
|
||||
type Number = int
|
||||
|
||||
#: E275
|
||||
match(foo):
|
||||
case(1):
|
||||
pass
|
||||
|
||||
@@ -46,3 +46,15 @@ regex = '\\\_'
|
||||
|
||||
#: W605:1:7
|
||||
u'foo\ bar'
|
||||
|
||||
#: W605:1:13
|
||||
(
|
||||
"foo \
|
||||
bar \. baz"
|
||||
)
|
||||
|
||||
#: W605:1:6
|
||||
"foo \. bar \t"
|
||||
|
||||
#: W605:1:13
|
||||
"foo \t bar \."
|
||||
|
||||
@@ -82,3 +82,16 @@ class Foo:
|
||||
@qux.setter
|
||||
def qux(self, value):
|
||||
self.bar = value / 2
|
||||
|
||||
|
||||
class StudentG:
|
||||
names = ("surname",)
|
||||
__slots__ = (*names, "a")
|
||||
|
||||
def __init__(self, name, surname):
|
||||
self.name = name
|
||||
self.surname = surname # [assigning-non-slot]
|
||||
self.setup()
|
||||
|
||||
def setup(self):
|
||||
pass
|
||||
|
||||
@@ -21,6 +21,8 @@ def wrong(): # [too-many-branches]
|
||||
pass
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
finally:
|
||||
pass
|
||||
if 2:
|
||||
@@ -56,6 +58,8 @@ def good():
|
||||
pass
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
finally:
|
||||
pass
|
||||
if 1:
|
||||
@@ -90,6 +94,8 @@ def with_statement_wrong():
|
||||
pass
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
finally:
|
||||
pass
|
||||
if 2:
|
||||
|
||||
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP037_1.py (14 changes; vendored; new file)

@@ -0,0 +1,14 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Tuple


def foo():
    # UP037
    x: "Tuple[int, int]" = (0, 0)
    print(x)


# OK
X: "Tuple[int, int]" = (0, 0)
@@ -51,3 +51,37 @@ x: int = 1
|
||||
# type alias.
|
||||
T = typing.TypeVar["T"]
|
||||
Decorator: TypeAlias = typing.Callable[[T], T]
|
||||
|
||||
|
||||
from typing import TypeVar, Annotated, TypeAliasType
|
||||
|
||||
from annotated_types import Gt, SupportGt
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/11422
|
||||
T = TypeVar("T")
|
||||
PositiveList = TypeAliasType(
|
||||
"PositiveList", list[Annotated[T, Gt(0)]], type_params=(T,)
|
||||
)
|
||||
|
||||
# Bound
|
||||
T = TypeVar("T", bound=SupportGt)
|
||||
PositiveList = TypeAliasType(
|
||||
"PositiveList", list[Annotated[T, Gt(0)]], type_params=(T,)
|
||||
)
|
||||
|
||||
# Multiple bounds
|
||||
T1 = TypeVar("T1", bound=SupportGt)
|
||||
T2 = TypeVar("T2")
|
||||
T3 = TypeVar("T3")
|
||||
Tuple3 = TypeAliasType("Tuple3", tuple[T1, T2, T3], type_params=(T1, T2, T3))
|
||||
|
||||
# No type_params
|
||||
PositiveInt = TypeAliasType("PositiveInt", Annotated[int, Gt(0)])
|
||||
PositiveInt = TypeAliasType("PositiveInt", Annotated[int, Gt(0)], type_params=())
|
||||
|
||||
# OK: Other name
|
||||
T = TypeVar("T", bound=SupportGt)
|
||||
PositiveList = TypeAliasType(
|
||||
"PositiveList2", list[Annotated[T, Gt(0)]], type_params=(T,)
|
||||
)
|
||||
|
||||
@@ -38,3 +38,12 @@ z = (
|
||||
else
|
||||
y
|
||||
)
|
||||
|
||||
# FURB110
|
||||
z = (
|
||||
x
|
||||
if x
|
||||
else y
|
||||
if y > 0
|
||||
else None
|
||||
)
|
||||
|
||||
@@ -60,6 +60,7 @@ op_itemgetter = lambda x, y: (x[0], y[0])
|
||||
op_itemgetter = lambda x: ()
|
||||
op_itemgetter = lambda x: (*x[0], x[1])
|
||||
op_itemgetter = lambda x: (x[0],)
|
||||
op_itemgetter = lambda x: x[x]
|
||||
|
||||
|
||||
def op_neg3(x, y):
|
||||
|
||||
@@ -62,6 +62,8 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if !checker.semantic.future_annotations_or_stub()
|
||||
&& checker.settings.target_version < PythonVersion::Py39
|
||||
&& checker.semantic.in_annotation()
|
||||
&& checker.semantic.in_runtime_evaluated_annotation()
|
||||
&& !checker.semantic.in_string_type_definition()
|
||||
&& typing::is_pep585_generic(value, &checker.semantic)
|
||||
{
|
||||
flake8_future_annotations::rules::future_required_type_annotation(
|
||||
@@ -506,6 +508,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::BlockingOsCallInAsyncFunction) {
|
||||
flake8_async::rules::blocking_os_call(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::SleepForeverCall) {
|
||||
flake8_async::rules::sleep_forever_call(checker, call);
|
||||
}
|
||||
if checker.any_enabled(&[Rule::Print, Rule::PPrint]) {
|
||||
flake8_print::rules::print_call(checker, call);
|
||||
}
|
||||
@@ -1195,6 +1200,8 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if !checker.semantic.future_annotations_or_stub()
|
||||
&& checker.settings.target_version < PythonVersion::Py310
|
||||
&& checker.semantic.in_annotation()
|
||||
&& checker.semantic.in_runtime_evaluated_annotation()
|
||||
&& !checker.semantic.in_string_type_definition()
|
||||
{
|
||||
flake8_future_annotations::rules::future_required_type_annotation(
|
||||
checker,
|
||||
|
||||
@@ -1558,6 +1558,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
            if checker.enabled(Rule::ListReverseCopy) {
                refurb::rules::list_assign_reversed(checker, assign);
            }
            if checker.enabled(Rule::NonPEP695TypeAlias) {
                pyupgrade::rules::non_pep695_type_alias_type(checker, assign);
            }
        }
        Stmt::AnnAssign(
            assign_stmt @ ast::StmtAnnAssign {
@@ -588,8 +588,10 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
Stmt::Global(ast::StmtGlobal { names, range: _ }) => {
|
||||
if !self.semantic.scope_id.is_global() {
|
||||
for name in names {
|
||||
if let Some(binding_id) = self.semantic.global_scope().get(name) {
|
||||
// Mark the binding in the global scope as "rebound" in the current scope.
|
||||
let binding_id = self.semantic.global_scope().get(name);
|
||||
|
||||
// Mark the binding in the global scope as "rebound" in the current scope.
|
||||
if let Some(binding_id) = binding_id {
|
||||
self.semantic
|
||||
.add_rebinding_scope(binding_id, self.semantic.scope_id);
|
||||
}
|
||||
@@ -597,7 +599,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
// Add a binding to the current scope.
|
||||
let binding_id = self.semantic.push_binding(
|
||||
name.range(),
|
||||
BindingKind::Global,
|
||||
BindingKind::Global(binding_id),
|
||||
BindingFlags::GLOBAL,
|
||||
);
|
||||
let scope = self.semantic.current_scope_mut();
|
||||
@@ -609,7 +611,8 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
if !self.semantic.scope_id.is_global() {
|
||||
for name in names {
|
||||
if let Some((scope_id, binding_id)) = self.semantic.nonlocal(name) {
|
||||
// Mark the binding as "used".
|
||||
// Mark the binding as "used", since the `nonlocal` requires an existing
|
||||
// binding.
|
||||
self.semantic.add_local_reference(
|
||||
binding_id,
|
||||
ExprContext::Load,
|
||||
@@ -624,7 +627,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
// Add a binding to the current scope.
|
||||
let binding_id = self.semantic.push_binding(
|
||||
name.range(),
|
||||
BindingKind::Nonlocal(scope_id),
|
||||
BindingKind::Nonlocal(binding_id, scope_id),
|
||||
BindingFlags::NONLOCAL,
|
||||
);
|
||||
let scope = self.semantic.current_scope_mut();
|
||||
@@ -661,7 +664,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
AnnotationContext::from_function(function_def, &self.semantic, self.settings);
|
||||
|
||||
// The first parameter may be a single dispatch.
|
||||
let mut singledispatch =
|
||||
let singledispatch =
|
||||
flake8_type_checking::helpers::is_singledispatch_implementation(
|
||||
function_def,
|
||||
self.semantic(),
|
||||
@@ -677,7 +680,6 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
if let Some(expr) = parameter.annotation() {
|
||||
if singledispatch && !parameter.is_variadic() {
|
||||
self.visit_runtime_required_annotation(expr);
|
||||
singledispatch = false;
|
||||
} else {
|
||||
match annotation {
|
||||
AnnotationContext::RuntimeRequired => {
|
||||
@@ -2152,7 +2154,7 @@ impl<'a> Checker<'a> {
|
||||
|
||||
self.semantic.restore(snapshot);
|
||||
|
||||
if self.semantic.in_annotation() && self.semantic.future_annotations_or_stub() {
|
||||
if self.semantic.in_annotation() && self.semantic.in_typing_only_annotation() {
|
||||
if self.enabled(Rule::QuotedAnnotation) {
|
||||
pyupgrade::rules::quoted_annotation(self, value, range);
|
||||
}
|
||||
|
||||
@@ -334,6 +334,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Flake8Async, "100") => (RuleGroup::Stable, rules::flake8_async::rules::BlockingHttpCallInAsyncFunction),
        (Flake8Async, "101") => (RuleGroup::Stable, rules::flake8_async::rules::OpenSleepOrSubprocessInAsyncFunction),
        (Flake8Async, "102") => (RuleGroup::Stable, rules::flake8_async::rules::BlockingOsCallInAsyncFunction),
        (Flake8Async, "116") => (RuleGroup::Preview, rules::flake8_async::rules::SleepForeverCall),

        // flake8-trio
        (Flake8Trio, "100") => (RuleGroup::Stable, rules::flake8_trio::rules::TrioTimeoutWithoutAwait),
@@ -4,6 +4,7 @@ use std::iter::Peekable;
|
||||
use std::str::FromStr;
|
||||
|
||||
use bitflags::bitflags;
|
||||
use ruff_python_ast::StringFlags;
|
||||
use ruff_python_parser::lexer::LexResult;
|
||||
use ruff_python_parser::Tok;
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
@@ -45,22 +46,6 @@ pub struct IsortDirectives {
|
||||
pub skip_file: bool,
|
||||
}
|
||||
|
||||
impl IsortDirectives {
|
||||
pub fn is_excluded(&self, offset: TextSize) -> bool {
|
||||
for range in &self.exclusions {
|
||||
if range.contains(offset) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if range.start() > offset {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Directives {
|
||||
pub noqa_line_for: NoqaMapping,
|
||||
pub isort: IsortDirectives,
|
||||
|
||||
@@ -82,12 +82,12 @@ impl Serialize for SerializedMessages<'_> {
|
||||
|project_dir| relativize_path_to(message.filename(), project_dir),
|
||||
);
|
||||
|
||||
let mut message_fingerprint = fingerprint(message, 0);
|
||||
let mut message_fingerprint = fingerprint(message, &path, 0);
|
||||
|
||||
// Make sure that we do not get a fingerprint that is already in use
|
||||
// by adding in the previously generated one.
|
||||
while fingerprints.contains(&message_fingerprint) {
|
||||
message_fingerprint = fingerprint(message, message_fingerprint);
|
||||
message_fingerprint = fingerprint(message, &path, message_fingerprint);
|
||||
}
|
||||
fingerprints.insert(message_fingerprint);
|
||||
|
||||
@@ -109,12 +109,12 @@ impl Serialize for SerializedMessages<'_> {
|
||||
}
|
||||
|
||||
/// Generate a unique fingerprint to identify a violation.
|
||||
fn fingerprint(message: &Message, salt: u64) -> u64 {
|
||||
fn fingerprint(message: &Message, project_path: &str, salt: u64) -> u64 {
|
||||
let Message {
|
||||
kind,
|
||||
range: _,
|
||||
fix: _fix,
|
||||
file,
|
||||
file: _,
|
||||
noqa_offset: _,
|
||||
} = message;
|
||||
|
||||
@@ -122,7 +122,7 @@ fn fingerprint(message: &Message, salt: u64) -> u64 {
|
||||
|
||||
salt.hash(&mut hasher);
|
||||
kind.name.hash(&mut hasher);
|
||||
file.name().hash(&mut hasher);
|
||||
project_path.hash(&mut hasher);
|
||||
|
||||
hasher.finish()
|
||||
}
|
||||
|
||||
@@ -125,8 +125,8 @@ impl Renamer {
|
||||
let scope_id = scope.get_all(name).find_map(|binding_id| {
|
||||
let binding = semantic.binding(binding_id);
|
||||
match binding.kind {
|
||||
BindingKind::Global => Some(ScopeId::global()),
|
||||
BindingKind::Nonlocal(symbol_id) => Some(symbol_id),
|
||||
BindingKind::Global(_) => Some(ScopeId::global()),
|
||||
BindingKind::Nonlocal(_, scope_id) => Some(scope_id),
|
||||
_ => None,
|
||||
}
|
||||
});
|
||||
@@ -266,8 +266,8 @@ impl Renamer {
|
||||
| BindingKind::LoopVar
|
||||
| BindingKind::ComprehensionVar
|
||||
| BindingKind::WithItemVar
|
||||
| BindingKind::Global
|
||||
| BindingKind::Nonlocal(_)
|
||||
| BindingKind::Global(_)
|
||||
| BindingKind::Nonlocal(_, _)
|
||||
| BindingKind::ClassDefinition(_)
|
||||
| BindingKind::FunctionDefinition(_)
|
||||
| BindingKind::Deletion
|
||||
|
||||
@@ -93,7 +93,7 @@ impl Violation for SysVersion2 {
/// ## Why is this bad?
/// If the current major or minor version consists of multiple digits,
/// `sys.version[0]` will select the first digit of the major version number
/// only (e.g., `"3.10"` would evaluate to `"1"`). This is likely unintended,
/// only (e.g., `"10.2"` would evaluate to `"1"`). This is likely unintended,
/// and can lead to subtle bugs if the version string is used to test against a
/// major version number.
///
@@ -16,6 +16,7 @@ mod tests {
    #[test_case(Rule::BlockingHttpCallInAsyncFunction, Path::new("ASYNC100.py"))]
    #[test_case(Rule::OpenSleepOrSubprocessInAsyncFunction, Path::new("ASYNC101.py"))]
    #[test_case(Rule::BlockingOsCallInAsyncFunction, Path::new("ASYNC102.py"))]
    #[test_case(Rule::SleepForeverCall, Path::new("ASYNC116.py"))]
    fn rules(rule_code: Rule, path: &Path) -> Result<()> {
        let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
        let diagnostics = test_path(
@@ -1,7 +1,9 @@
pub(crate) use blocking_http_call::*;
pub(crate) use blocking_os_call::*;
pub(crate) use open_sleep_or_subprocess_call::*;
pub(crate) use sleep_forever_call::*;

mod blocking_http_call;
mod blocking_os_call;
mod open_sleep_or_subprocess_call;
mod sleep_forever_call;
@@ -0,0 +1,110 @@
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{Expr, ExprCall, ExprNumberLiteral, Number};
use ruff_python_semantic::Modules;
use ruff_text_size::Ranged;

use crate::{checkers::ast::Checker, importer::ImportRequest};

/// ## What it does
/// Checks for uses of `trio.sleep()` with an interval greater than 24 hours.
///
/// ## Why is this bad?
/// `trio.sleep()` with an interval greater than 24 hours is usually intended
/// to sleep indefinitely. Instead of using a large interval,
/// `trio.sleep_forever()` better conveys the intent.
///
///
/// ## Example
/// ```python
/// import trio
///
///
/// async def func():
///     await trio.sleep(86401)
/// ```
///
/// Use instead:
/// ```python
/// import trio
///
///
/// async def func():
///     await trio.sleep_forever()
/// ```
#[violation]
pub struct SleepForeverCall;

impl Violation for SleepForeverCall {
    const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

    #[derive_message_formats]
    fn message(&self) -> String {
        format!("`trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`")
    }

    fn fix_title(&self) -> Option<String> {
        Some(format!("Replace with `trio.sleep_forever()`"))
    }
}

/// ASYNC116
pub(crate) fn sleep_forever_call(checker: &mut Checker, call: &ExprCall) {
    if !checker.semantic().seen_module(Modules::TRIO) {
        return;
    }

    if call.arguments.len() != 1 {
        return;
    }

    let Some(arg) = call.arguments.find_argument("seconds", 0) else {
        return;
    };

    if !checker
        .semantic()
        .resolve_qualified_name(call.func.as_ref())
        .is_some_and(|qualified_name| matches!(qualified_name.segments(), ["trio", "sleep"]))
    {
        return;
    }

    let Expr::NumberLiteral(ExprNumberLiteral { value, .. }) = arg else {
        return;
    };

    // TODO(ekohilas): Replace with Duration::from_days(1).as_secs(); when available.
    let one_day_in_secs = 60 * 60 * 24;
    match value {
        Number::Int(int_value) => {
            let Some(int_value) = int_value.as_u64() else {
                return;
            };
            if int_value <= one_day_in_secs {
                return;
            }
        }
        Number::Float(float_value) =>
        {
            #[allow(clippy::cast_precision_loss)]
            if *float_value <= one_day_in_secs as f64 {
                return;
            }
        }
        Number::Complex { .. } => return,
    }

    let mut diagnostic = Diagnostic::new(SleepForeverCall, call.range());
    let replacement_function = "sleep_forever";
    diagnostic.try_set_fix(|| {
        let (import_edit, binding) = checker.importer().get_or_import_symbol(
            &ImportRequest::import_from("trio", replacement_function),
            call.func.start(),
            checker.semantic(),
        )?;
        let reference_edit = Edit::range_replacement(binding, call.func.range());
        let arg_edit = Edit::range_replacement("()".to_string(), call.arguments.range());
        Ok(Fix::unsafe_edits(import_edit, [reference_edit, arg_edit]))
    });
    checker.diagnostics.push(diagnostic);
}
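Since `ASYNC116` is registered behind preview, one way to try the new rule from the command line (the file name is illustrative):

```shell
# Lint a single file with only the new preview rule enabled.
ruff check --preview --select ASYNC116 example.py
```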
@@ -0,0 +1,145 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
|
||||
---
|
||||
ASYNC116.py:11:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
10 | # These examples are probably not meant to ever wake up:
|
||||
11 | await trio.sleep(100000) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
12 |
|
||||
13 | # 'inf literal' overflow trick
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
8 8 | import trio
|
||||
9 9 |
|
||||
10 10 | # These examples are probably not meant to ever wake up:
|
||||
11 |- await trio.sleep(100000) # error: 116, "async"
|
||||
11 |+ await trio.sleep_forever() # error: 116, "async"
|
||||
12 12 |
|
||||
13 13 | # 'inf literal' overflow trick
|
||||
14 14 | await trio.sleep(1e999) # error: 116, "async"
|
||||
|
||||
ASYNC116.py:14:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
13 | # 'inf literal' overflow trick
|
||||
14 | await trio.sleep(1e999) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
15 |
|
||||
16 | await trio.sleep(86399)
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
11 11 | await trio.sleep(100000) # error: 116, "async"
|
||||
12 12 |
|
||||
13 13 | # 'inf literal' overflow trick
|
||||
14 |- await trio.sleep(1e999) # error: 116, "async"
|
||||
14 |+ await trio.sleep_forever() # error: 116, "async"
|
||||
15 15 |
|
||||
16 16 | await trio.sleep(86399)
|
||||
17 17 | await trio.sleep(86400)
|
||||
|
||||
ASYNC116.py:18:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
16 | await trio.sleep(86399)
|
||||
17 | await trio.sleep(86400)
|
||||
18 | await trio.sleep(86400.01) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
19 | await trio.sleep(86401) # error: 116, "async"
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
15 15 |
|
||||
16 16 | await trio.sleep(86399)
|
||||
17 17 | await trio.sleep(86400)
|
||||
18 |- await trio.sleep(86400.01) # error: 116, "async"
|
||||
18 |+ await trio.sleep_forever() # error: 116, "async"
|
||||
19 19 | await trio.sleep(86401) # error: 116, "async"
|
||||
20 20 |
|
||||
21 21 | await trio.sleep(-1) # will raise a runtime error
|
||||
|
||||
ASYNC116.py:19:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
17 | await trio.sleep(86400)
|
||||
18 | await trio.sleep(86400.01) # error: 116, "async"
|
||||
19 | await trio.sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
20 |
|
||||
21 | await trio.sleep(-1) # will raise a runtime error
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
16 16 | await trio.sleep(86399)
|
||||
17 17 | await trio.sleep(86400)
|
||||
18 18 | await trio.sleep(86400.01) # error: 116, "async"
|
||||
19 |- await trio.sleep(86401) # error: 116, "async"
|
||||
19 |+ await trio.sleep_forever() # error: 116, "async"
|
||||
20 20 |
|
||||
21 21 | await trio.sleep(-1) # will raise a runtime error
|
||||
22 22 | await trio.sleep(0) # handled by different check
|
||||
|
||||
ASYNC116.py:48:5: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
47 | # does not require the call to be awaited, nor in an async fun
|
||||
48 | trio.sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
49 | # also checks that we don't break visit_Call
|
||||
50 | trio.run(trio.sleep(86401)) # error: 116, "async"
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
45 45 | import trio
|
||||
46 46 |
|
||||
47 47 | # does not require the call to be awaited, nor in an async fun
|
||||
48 |- trio.sleep(86401) # error: 116, "async"
|
||||
48 |+ trio.sleep_forever() # error: 116, "async"
|
||||
49 49 | # also checks that we don't break visit_Call
|
||||
50 50 | trio.run(trio.sleep(86401)) # error: 116, "async"
|
||||
51 51 |
|
||||
|
||||
ASYNC116.py:50:14: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
48 | trio.sleep(86401) # error: 116, "async"
|
||||
49 | # also checks that we don't break visit_Call
|
||||
50 | trio.run(trio.sleep(86401)) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
47 47 | # does not require the call to be awaited, nor in an async fun
|
||||
48 48 | trio.sleep(86401) # error: 116, "async"
|
||||
49 49 | # also checks that we don't break visit_Call
|
||||
50 |- trio.run(trio.sleep(86401)) # error: 116, "async"
|
||||
50 |+ trio.run(trio.sleep_forever()) # error: 116, "async"
|
||||
51 51 |
|
||||
52 52 |
|
||||
53 53 | async def import_from_trio():
|
||||
|
||||
ASYNC116.py:57:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
56 | # catch from import
|
||||
57 | await sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^ ASYNC116
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from trio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
54 55 | from trio import sleep
|
||||
55 56 |
|
||||
56 57 | # catch from import
|
||||
57 |- await sleep(86401) # error: 116, "async"
|
||||
58 |+ await sleep_forever() # error: 116, "async"
|
||||
@@ -58,7 +58,7 @@ pub(crate) fn request_without_timeout(checker: &mut Checker, call: &ast::ExprCal
                qualified_name.segments(),
                [
                    "requests",
                    "get" | "options" | "head" | "post" | "put" | "patch" | "delete"
                    "get" | "options" | "head" | "post" | "put" | "patch" | "delete" | "request"
                ]
            )
        })
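In Python terms, the widened match means the rule now also flags the generic `requests.request` entry point when no `timeout` is passed; a small illustrative example (the URL is a placeholder):

```python
import requests

requests.request("GET", "https://example.com")              # now flagged: no timeout
requests.request("GET", "https://example.com", timeout=10)  # OK
```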
@@ -43,6 +43,7 @@ mod tests {
    #[test_case(Path::new("no_future_import_uses_union_inner.py"))]
    #[test_case(Path::new("ok_no_types.py"))]
    #[test_case(Path::new("ok_uses_future.py"))]
    #[test_case(Path::new("ok_quoted_type.py"))]
    fn fa102(path: &Path) -> Result<()> {
        let snapshot = format!("fa102_{}", path.to_string_lossy());
        let diagnostics = test_path(
@@ -7,7 +7,6 @@ use ruff_python_ast::Expr;
 use ruff_text_size::{Ranged, TextSize};
 
 use crate::checkers::ast::Checker;
-use crate::importer::Importer;
 
 /// ## What it does
 /// Checks for uses of PEP 585- and PEP 604-style type annotations in Python

@@ -87,13 +86,11 @@ impl AlwaysFixableViolation for FutureRequiredTypeAnnotation {
 /// FA102
 pub(crate) fn future_required_type_annotation(checker: &mut Checker, expr: &Expr, reason: Reason) {
     let mut diagnostic = Diagnostic::new(FutureRequiredTypeAnnotation { reason }, expr.range());
-    if let Some(python_ast) = checker.semantic().definitions.python_ast() {
-        let required_import =
-            AnyImport::ImportFrom(ImportFrom::member("__future__", "annotations"));
-        diagnostic.set_fix(Fix::unsafe_edit(
-            Importer::new(python_ast, checker.locator(), checker.stylist())
-                .add_import(&required_import, TextSize::default()),
-        ));
-    }
+    let required_import = AnyImport::ImportFrom(ImportFrom::member("__future__", "annotations"));
+    diagnostic.set_fix(Fix::unsafe_edit(
+        checker
+            .importer()
+            .add_import(&required_import, TextSize::default()),
+    ));
     checker.diagnostics.push(diagnostic);
 }
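The refactor above routes the FA102 fix through the checker's shared importer; the user-visible behavior is unchanged. A minimal sketch of the kind of code it applies to (assuming a target Python version where PEP 604 unions are not yet valid at runtime):

```python
from __future__ import annotations  # what the FA102 fix inserts


def first_words(text: str | None) -> list[str] | None:
    # Without the future import, `str | None` and `list[str]` in annotations
    # require newer Python versions at runtime; FA102 guards against that.
    return text.split() if text else None
```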
@@ -1,21 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_future_annotations/mod.rs
|
||||
---
|
||||
no_future_import_uses_lowercase.py:2:13: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
||||
|
|
||||
1 | def main() -> None:
|
||||
2 | a_list: list[str] = []
|
||||
| ^^^^^^^^^ FA102
|
||||
3 | a_list.append("hello")
|
||||
|
|
||||
= help: Add `from __future__ import annotations`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str] = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
no_future_import_uses_lowercase.py:6:14: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
||||
|
|
||||
6 | def hello(y: dict[str, int]) -> None:
|
||||
@@ -29,5 +14,3 @@ no_future_import_uses_lowercase.py:6:14: FA102 [*] Missing `from __future__ impo
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str] = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
|
||||
|
||||
@@ -1,36 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_future_annotations/mod.rs
|
||||
---
|
||||
no_future_import_uses_union.py:2:13: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
||||
|
|
||||
1 | def main() -> None:
|
||||
2 | a_list: list[str] | None = []
|
||||
| ^^^^^^^^^ FA102
|
||||
3 | a_list.append("hello")
|
||||
|
|
||||
= help: Add `from __future__ import annotations`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str] | None = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
no_future_import_uses_union.py:2:13: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 604 union
|
||||
|
|
||||
1 | def main() -> None:
|
||||
2 | a_list: list[str] | None = []
|
||||
| ^^^^^^^^^^^^^^^^ FA102
|
||||
3 | a_list.append("hello")
|
||||
|
|
||||
= help: Add `from __future__ import annotations`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str] | None = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
no_future_import_uses_union.py:6:14: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
||||
|
|
||||
6 | def hello(y: dict[str, int] | None) -> None:
|
||||
@@ -58,5 +28,3 @@ no_future_import_uses_union.py:6:14: FA102 [*] Missing `from __future__ import a
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str] | None = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
|
||||
|
||||
@@ -1,36 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_future_annotations/mod.rs
|
||||
---
|
||||
no_future_import_uses_union_inner.py:2:13: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
||||
|
|
||||
1 | def main() -> None:
|
||||
2 | a_list: list[str | None] = []
|
||||
| ^^^^^^^^^^^^^^^^ FA102
|
||||
3 | a_list.append("hello")
|
||||
|
|
||||
= help: Add `from __future__ import annotations`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str | None] = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
no_future_import_uses_union_inner.py:2:18: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 604 union
|
||||
|
|
||||
1 | def main() -> None:
|
||||
2 | a_list: list[str | None] = []
|
||||
| ^^^^^^^^^^ FA102
|
||||
3 | a_list.append("hello")
|
||||
|
|
||||
= help: Add `from __future__ import annotations`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str | None] = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
no_future_import_uses_union_inner.py:6:14: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
||||
|
|
||||
6 | def hello(y: dict[str | None, int]) -> None:
|
||||
@@ -60,35 +30,3 @@ no_future_import_uses_union_inner.py:6:19: FA102 [*] Missing `from __future__ im
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str | None] = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
no_future_import_uses_union_inner.py:7:8: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 585 collection
|
||||
|
|
||||
6 | def hello(y: dict[str | None, int]) -> None:
|
||||
7 | z: tuple[str, str | None, str] = tuple(y)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ FA102
|
||||
8 | del z
|
||||
|
|
||||
= help: Add `from __future__ import annotations`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str | None] = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
no_future_import_uses_union_inner.py:7:19: FA102 [*] Missing `from __future__ import annotations`, but uses PEP 604 union
|
||||
|
|
||||
6 | def hello(y: dict[str | None, int]) -> None:
|
||||
7 | z: tuple[str, str | None, str] = tuple(y)
|
||||
| ^^^^^^^^^^ FA102
|
||||
8 | del z
|
||||
|
|
||||
= help: Add `from __future__ import annotations`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |+from __future__ import annotations
|
||||
1 2 | def main() -> None:
|
||||
2 3 | a_list: list[str | None] = []
|
||||
3 4 | a_list.append("hello")
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_future_annotations/mod.rs
|
||||
---
|
||||
|
||||
@@ -10,7 +10,7 @@ G010.py:6:9: G010 [*] Logging statement uses `warn` instead of `warning`
|
||||
7 | log.warn("Hello world!") # This shouldn't be considered as a logger candidate
|
||||
8 | logger.warn("Hello world!")
|
||||
|
|
||||
= help: Convert to `warn`
|
||||
= help: Convert to `warning`
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 |
|
||||
@@ -31,7 +31,7 @@ G010.py:8:8: G010 [*] Logging statement uses `warn` instead of `warning`
|
||||
9 |
|
||||
10 | logging . warn("Hello World!")
|
||||
|
|
||||
= help: Convert to `warn`
|
||||
= help: Convert to `warning`
|
||||
|
||||
ℹ Safe fix
|
||||
5 5 |
|
||||
@@ -52,7 +52,7 @@ G010.py:10:11: G010 [*] Logging statement uses `warn` instead of `warning`
|
||||
11 |
|
||||
12 | from logging import warn, warning, exception
|
||||
|
|
||||
= help: Convert to `warn`
|
||||
= help: Convert to `warning`
|
||||
|
||||
ℹ Safe fix
|
||||
7 7 | log.warn("Hello world!") # This shouldn't be considered as a logger candidate
|
||||
@@ -72,7 +72,7 @@ G010.py:13:1: G010 [*] Logging statement uses `warn` instead of `warning`
|
||||
14 | warning("foo")
|
||||
15 | exception("foo")
|
||||
|
|
||||
= help: Convert to `warn`
|
||||
= help: Convert to `warning`
|
||||
|
||||
ℹ Safe fix
|
||||
10 10 | logging . warn("Hello World!")
|
||||
|
||||
@@ -383,7 +383,7 @@ impl AlwaysFixableViolation for LoggingWarn {
     }
 
     fn fix_title(&self) -> String {
-        "Convert to `warn`".to_string()
+        "Convert to `warning`".to_string()
     }
 }
 
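The change above only corrects the fix title shown to users ("Convert to `warning`"); the autofix itself already rewrites the call. For reference:

```python
import logging

logging.warn("disk almost full")     # G010: `warn` is a deprecated alias
logging.warning("disk almost full")  # what the safe fix produces
```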
@@ -384,7 +384,11 @@ pub(crate) fn unittest_raises_assertion(
|
||||
},
|
||||
call.func.range(),
|
||||
);
|
||||
if !checker.indexer().has_comments(call, checker.locator()) {
|
||||
if !checker
|
||||
.indexer()
|
||||
.comment_ranges()
|
||||
.has_comments(call, checker.locator())
|
||||
{
|
||||
if let Some(args) = to_pytest_raises_args(checker, attr.as_str(), &call.arguments) {
|
||||
diagnostic.try_set_fix(|| {
|
||||
let (import_edit, binding) = checker.importer().get_or_import_symbol(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use ruff_python_ast::AnyStringFlags;
|
||||
use ruff_python_ast::{AnyStringFlags, StringFlags};
|
||||
use ruff_text_size::TextLen;
|
||||
|
||||
/// Returns the raw contents of the string given the string's contents and flags.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::visitor::{walk_f_string, Visitor};
|
||||
use ruff_python_ast::{self as ast, AnyStringFlags, StringLike};
|
||||
use ruff_python_ast::{self as ast, AnyStringFlags, StringFlags, StringLike};
|
||||
use ruff_source_file::Locator;
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, AnyStringFlags, StringLike};
|
||||
use ruff_python_ast::{self as ast, AnyStringFlags, StringFlags, StringLike};
|
||||
use ruff_source_file::Locator;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
|
||||
@@ -20,6 +20,9 @@ use crate::checkers::ast::Checker;
|
||||
/// versions, that it will have the same type, or that it will have the same
|
||||
/// behavior. Instead, use the class's public interface.
|
||||
///
|
||||
/// This rule ignores accesses on dunder methods (e.g., `__init__`) and sunder
|
||||
/// methods (e.g., `_missing_`).
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// class Class:
|
||||
@@ -70,128 +73,143 @@ pub(crate) fn private_member_access(checker: &mut Checker, expr: &Expr) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (attr.starts_with("__") && !attr.ends_with("__"))
|
||||
|| (attr.starts_with('_') && !attr.starts_with("__"))
|
||||
// Ignore non-private accesses.
|
||||
if !attr.starts_with('_') {
|
||||
return;
|
||||
}
|
||||
|
||||
// Ignore dunder accesses.
|
||||
let is_dunder = attr.starts_with("__") && attr.ends_with("__");
|
||||
if is_dunder {
|
||||
return;
|
||||
}
|
||||
|
||||
// Ignore sunder accesses.
|
||||
let is_sunder = attr.starts_with('_')
|
||||
&& attr.ends_with('_')
|
||||
&& !attr.starts_with("__")
|
||||
&& !attr.ends_with("__");
|
||||
if is_sunder {
|
||||
return;
|
||||
}
|
||||
|
||||
if checker
|
||||
.settings
|
||||
.flake8_self
|
||||
.ignore_names
|
||||
.contains(attr.as_ref())
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Ignore accesses on instances within special methods (e.g., `__eq__`).
|
||||
if let ScopeKind::Function(ast::StmtFunctionDef { name, .. }) =
|
||||
checker.semantic().current_scope().kind
|
||||
{
|
||||
if matches!(
|
||||
name.as_str(),
|
||||
"__lt__"
|
||||
| "__le__"
|
||||
| "__eq__"
|
||||
| "__ne__"
|
||||
| "__gt__"
|
||||
| "__ge__"
|
||||
| "__add__"
|
||||
| "__sub__"
|
||||
| "__mul__"
|
||||
| "__matmul__"
|
||||
| "__truediv__"
|
||||
| "__floordiv__"
|
||||
| "__mod__"
|
||||
| "__divmod__"
|
||||
| "__pow__"
|
||||
| "__lshift__"
|
||||
| "__rshift__"
|
||||
| "__and__"
|
||||
| "__xor__"
|
||||
| "__or__"
|
||||
| "__radd__"
|
||||
| "__rsub__"
|
||||
| "__rmul__"
|
||||
| "__rmatmul__"
|
||||
| "__rtruediv__"
|
||||
| "__rfloordiv__"
|
||||
| "__rmod__"
|
||||
| "__rdivmod__"
|
||||
| "__rpow__"
|
||||
| "__rlshift__"
|
||||
| "__rrshift__"
|
||||
| "__rand__"
|
||||
| "__rxor__"
|
||||
| "__ror__"
|
||||
| "__iadd__"
|
||||
| "__isub__"
|
||||
| "__imul__"
|
||||
| "__imatmul__"
|
||||
| "__itruediv__"
|
||||
| "__ifloordiv__"
|
||||
| "__imod__"
|
||||
| "__ipow__"
|
||||
| "__ilshift__"
|
||||
| "__irshift__"
|
||||
| "__iand__"
|
||||
| "__ixor__"
|
||||
| "__ior__"
|
||||
) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Allow some documented private methods, like `os._exit()`.
|
||||
if let Some(qualified_name) = checker.semantic().resolve_qualified_name(expr) {
|
||||
if matches!(qualified_name.segments(), ["os", "_exit"]) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() {
|
||||
// Ignore `super()` calls.
|
||||
if let Some(name) = UnqualifiedName::from_expr(func) {
|
||||
if matches!(name.segments(), ["super"]) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(name) = UnqualifiedName::from_expr(value) {
|
||||
// Ignore `self` and `cls` accesses.
|
||||
if matches!(name.segments(), ["self" | "cls" | "mcs"]) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if let Expr::Name(name) = value.as_ref() {
|
||||
// Ignore accesses on class members from _within_ the class.
|
||||
if checker
|
||||
.settings
|
||||
.flake8_self
|
||||
.ignore_names
|
||||
.contains(attr.as_ref())
|
||||
.semantic()
|
||||
.resolve_name(name)
|
||||
.and_then(|id| {
|
||||
if let BindingKind::ClassDefinition(scope) = checker.semantic().binding(id).kind {
|
||||
Some(scope)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.is_some_and(|scope| {
|
||||
checker
|
||||
.semantic()
|
||||
.current_scope_ids()
|
||||
.any(|parent| scope == parent)
|
||||
})
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Ignore accesses on instances within special methods (e.g., `__eq__`).
|
||||
if let ScopeKind::Function(ast::StmtFunctionDef { name, .. }) =
|
||||
checker.semantic().current_scope().kind
|
||||
{
|
||||
if matches!(
|
||||
name.as_str(),
|
||||
"__lt__"
|
||||
| "__le__"
|
||||
| "__eq__"
|
||||
| "__ne__"
|
||||
| "__gt__"
|
||||
| "__ge__"
|
||||
| "__add__"
|
||||
| "__sub__"
|
||||
| "__mul__"
|
||||
| "__matmul__"
|
||||
| "__truediv__"
|
||||
| "__floordiv__"
|
||||
| "__mod__"
|
||||
| "__divmod__"
|
||||
| "__pow__"
|
||||
| "__lshift__"
|
||||
| "__rshift__"
|
||||
| "__and__"
|
||||
| "__xor__"
|
||||
| "__or__"
|
||||
| "__radd__"
|
||||
| "__rsub__"
|
||||
| "__rmul__"
|
||||
| "__rmatmul__"
|
||||
| "__rtruediv__"
|
||||
| "__rfloordiv__"
|
||||
| "__rmod__"
|
||||
| "__rdivmod__"
|
||||
| "__rpow__"
|
||||
| "__rlshift__"
|
||||
| "__rrshift__"
|
||||
| "__rand__"
|
||||
| "__rxor__"
|
||||
| "__ror__"
|
||||
| "__iadd__"
|
||||
| "__isub__"
|
||||
| "__imul__"
|
||||
| "__imatmul__"
|
||||
| "__itruediv__"
|
||||
| "__ifloordiv__"
|
||||
| "__imod__"
|
||||
| "__ipow__"
|
||||
| "__ilshift__"
|
||||
| "__irshift__"
|
||||
| "__iand__"
|
||||
| "__ixor__"
|
||||
| "__ior__"
|
||||
) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Allow some documented private methods, like `os._exit()`.
|
||||
if let Some(qualified_name) = checker.semantic().resolve_qualified_name(expr) {
|
||||
if matches!(qualified_name.segments(), ["os", "_exit"]) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() {
|
||||
// Ignore `super()` calls.
|
||||
if let Some(name) = UnqualifiedName::from_expr(func) {
|
||||
if matches!(name.segments(), ["super"]) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(name) = UnqualifiedName::from_expr(value) {
|
||||
// Ignore `self` and `cls` accesses.
|
||||
if matches!(name.segments(), ["self" | "cls" | "mcs"]) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if let Expr::Name(name) = value.as_ref() {
|
||||
// Ignore accesses on class members from _within_ the class.
|
||||
if checker
|
||||
.semantic()
|
||||
.resolve_name(name)
|
||||
.and_then(|id| {
|
||||
if let BindingKind::ClassDefinition(scope) = checker.semantic().binding(id).kind
|
||||
{
|
||||
Some(scope)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.is_some_and(|scope| {
|
||||
checker
|
||||
.semantic()
|
||||
.current_scope_ids()
|
||||
.any(|parent| scope == parent)
|
||||
})
|
||||
{
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
PrivateMemberAccess {
|
||||
access: attr.to_string(),
|
||||
},
|
||||
expr.range(),
|
||||
));
|
||||
}
|
||||
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
PrivateMemberAccess {
|
||||
access: attr.to_string(),
|
||||
},
|
||||
expr.range(),
|
||||
));
|
||||
}
|
||||
|
||||
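To summarize the restructured checks above: SLF001 now bails out early on non-private names, dunder names, and sunder names. A small sketch of the sunder case the changelog calls out, using `enum`'s documented `_missing_` hook:

```python
import enum


class Color(enum.Enum):
    RED = "red"

    @classmethod
    def _missing_(cls, value):
        # Fallback for unknown values.
        return cls.RED


color = Color("not-a-color")
print(color._value_)  # sunder access: no longer reported by SLF001
print(color._name_)   # likewise
```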
@@ -526,7 +526,11 @@ pub(crate) fn compare_with_tuple(checker: &mut Checker, expr: &Expr) {
|
||||
}
|
||||
|
||||
// Avoid removing comments.
|
||||
if checker.indexer().has_comments(expr, checker.locator()) {
|
||||
if checker
|
||||
.indexer()
|
||||
.comment_ranges()
|
||||
.has_comments(expr, checker.locator())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
@@ -209,7 +209,11 @@ pub(crate) fn if_else_block_instead_of_dict_get(checker: &mut Checker, stmt_if:
|
||||
},
|
||||
stmt_if.range(),
|
||||
);
|
||||
if !checker.indexer().has_comments(stmt_if, checker.locator()) {
|
||||
if !checker
|
||||
.indexer()
|
||||
.comment_ranges()
|
||||
.has_comments(stmt_if, checker.locator())
|
||||
{
|
||||
diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement(
|
||||
contents,
|
||||
stmt_if.range(),
|
||||
@@ -295,7 +299,11 @@ pub(crate) fn if_exp_instead_of_dict_get(
|
||||
},
|
||||
expr.range(),
|
||||
);
|
||||
if !checker.indexer().has_comments(expr, checker.locator()) {
|
||||
if !checker
|
||||
.indexer()
|
||||
.comment_ranges()
|
||||
.has_comments(expr, checker.locator())
|
||||
{
|
||||
diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement(
|
||||
contents,
|
||||
expr.range(),
|
||||
|
||||
@@ -142,7 +142,11 @@ pub(crate) fn if_else_block_instead_of_if_exp(checker: &mut Checker, stmt_if: &a
|
||||
},
|
||||
stmt_if.range(),
|
||||
);
|
||||
if !checker.indexer().has_comments(stmt_if, checker.locator()) {
|
||||
if !checker
|
||||
.indexer()
|
||||
.comment_ranges()
|
||||
.has_comments(stmt_if, checker.locator())
|
||||
{
|
||||
diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement(
|
||||
contents,
|
||||
stmt_if.range(),
|
||||
|
||||
@@ -193,7 +193,11 @@ pub(crate) fn needless_bool(checker: &mut Checker, stmt: &Stmt) {
|
||||
}
|
||||
|
||||
// Generate the replacement condition.
|
||||
let condition = if checker.indexer().has_comments(&range, checker.locator()) {
|
||||
let condition = if checker
|
||||
.indexer()
|
||||
.comment_ranges()
|
||||
.has_comments(&range, checker.locator())
|
||||
{
|
||||
None
|
||||
} else {
|
||||
// If the return values are inverted, wrap the condition in a `not`.
|
||||
|
||||
@@ -2,7 +2,7 @@ use ruff_python_ast::{self as ast, Expr, Stmt};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_python_semantic::{ScopeKind, SemanticModel};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -114,24 +114,27 @@ fn match_exit_stack(semantic: &SemanticModel) -> bool {
|
||||
|
||||
/// Return `true` if `func` is the builtin `open` or `pathlib.Path(...).open`.
|
||||
fn is_open(semantic: &SemanticModel, func: &Expr) -> bool {
|
||||
// open(...)
|
||||
// Ex) `open(...)`
|
||||
if semantic.match_builtin_expr(func, "open") {
|
||||
return true;
|
||||
}
|
||||
|
||||
// pathlib.Path(...).open()
|
||||
// Ex) `pathlib.Path(...).open()`
|
||||
let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = func else {
|
||||
return false;
|
||||
};
|
||||
|
||||
if attr != "open" {
|
||||
return false;
|
||||
}
|
||||
|
||||
let Expr::Call(ast::ExprCall {
|
||||
func: value_func, ..
|
||||
}) = &**value
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
semantic
|
||||
.resolve_qualified_name(value_func)
|
||||
.is_some_and(|qualified_name| matches!(qualified_name.segments(), ["pathlib", "Path"]))
|
||||
@@ -189,6 +192,15 @@ pub(crate) fn open_file_with_context_handler(checker: &mut Checker, func: &Expr)
|
||||
return;
|
||||
}
|
||||
|
||||
// Ex) `def __enter__(self): ...`
|
||||
if let ScopeKind::Function(ast::StmtFunctionDef { name, .. }) =
|
||||
&checker.semantic().current_scope().kind
|
||||
{
|
||||
if name == "__enter__" {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(OpenFileWithContextHandler, func.range()));
|
||||
|
||||
@@ -125,7 +125,11 @@ pub(crate) fn suppressible_exception(
|
||||
},
|
||||
stmt.range(),
|
||||
);
|
||||
if !checker.indexer().has_comments(stmt, checker.locator()) {
|
||||
if !checker
|
||||
.indexer()
|
||||
.comment_ranges()
|
||||
.has_comments(stmt, checker.locator())
|
||||
{
|
||||
diagnostic.try_set_fix(|| {
|
||||
// let range = statement_range(stmt, checker.locator(), checker.indexer());
|
||||
|
||||
|
||||
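Several hunks in this range swap `checker.indexer().has_comments(..)` for `checker.indexer().comment_ranges().has_comments(..)`; the guard's purpose is unchanged: fixes are withheld when they would delete a comment. A sketch of that behavior, with SIM105 as the example:

```python
import contextlib

# A fix to `contextlib.suppress(ImportError)` is offered here.
try:
    import lxml
except ImportError:
    pass

# No fix is offered here: rewriting the statement would drop the comment.
try:
    import lxml  # optional accelerator
except ImportError:
    pass

# What the first block becomes after the fix.
with contextlib.suppress(ImportError):
    import lxml
```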
@@ -1,27 +1,25 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs
|
||||
---
|
||||
singledispatch.py:10:20: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
singledispatch.py:11:20: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
|
||||
|
|
||||
8 | from numpy.typing import ArrayLike
|
||||
9 | from scipy.sparse import spmatrix
|
||||
10 | from pandas import DataFrame
|
||||
9 | from numpy.typing import ArrayLike
|
||||
10 | from scipy.sparse import spmatrix
|
||||
11 | from pandas import DataFrame
|
||||
| ^^^^^^^^^ TCH002
|
||||
11 |
|
||||
12 | if TYPE_CHECKING:
|
||||
12 |
|
||||
13 | if TYPE_CHECKING:
|
||||
|
|
||||
= help: Move into type-checking block
|
||||
|
||||
ℹ Unsafe fix
|
||||
7 7 | from numpy import asarray
|
||||
8 8 | from numpy.typing import ArrayLike
|
||||
9 9 | from scipy.sparse import spmatrix
|
||||
10 |-from pandas import DataFrame
|
||||
11 10 |
|
||||
12 11 | if TYPE_CHECKING:
|
||||
12 |+ from pandas import DataFrame
|
||||
13 13 | from numpy import ndarray
|
||||
14 14 |
|
||||
8 8 | from numpy import asarray
|
||||
9 9 | from numpy.typing import ArrayLike
|
||||
10 10 | from scipy.sparse import spmatrix
|
||||
11 |-from pandas import DataFrame
|
||||
12 11 |
|
||||
13 12 | if TYPE_CHECKING:
|
||||
13 |+ from pandas import DataFrame
|
||||
14 14 | from numpy import ndarray
|
||||
15 15 |
|
||||
|
||||
|
||||
16 16 |
|
||||
|
||||
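The snapshot above only shifts line numbers in the `singledispatch.py` fixture; TCH002 itself is unchanged. A minimal sketch of the rule's fix, with `pandas` standing in for any third-party import that is only needed for annotations:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # After the TCH002 fix: the import no longer runs at module import time.
    from pandas import DataFrame


def summarize(frame: DataFrame) -> int:
    return len(frame)
```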
@@ -383,26 +383,6 @@ impl KnownModules {
|
||||
Some((section, reason))
|
||||
}
|
||||
|
||||
/// Return the list of modules that are known to be of a given type.
|
||||
pub fn modules_for_known_type(
|
||||
&self,
|
||||
import_type: ImportType,
|
||||
) -> impl Iterator<Item = &glob::Pattern> {
|
||||
self.known
|
||||
.iter()
|
||||
.filter_map(move |(module, known_section)| {
|
||||
if let ImportSection::Known(section) = known_section {
|
||||
if *section == import_type {
|
||||
Some(module)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the list of user-defined modules, indexed by section.
|
||||
pub fn user_defined(&self) -> FxHashMap<&str, Vec<&glob::Pattern>> {
|
||||
let mut user_defined: FxHashMap<&str, Vec<&glob::Pattern>> = FxHashMap::default();
|
||||
|
||||
@@ -96,10 +96,27 @@ fn get_complexity_number(stmts: &[Stmt]) -> usize {
             complexity += get_complexity_number(orelse);
         }
         Stmt::Match(ast::StmtMatch { cases, .. }) => {
             complexity += 1;
             for case in cases {
                 complexity += 1;
                 complexity += get_complexity_number(&case.body);
             }
+            if let Some(last_case) = cases.last() {
+                // The complexity of an irrefutable pattern is similar to an `else` block of an `if` statement.
+                //
+                // For example:
+                // ```python
+                // match subject:
+                //     case 1: ...
+                //     case _: ...
+                //
+                // match subject:
+                //     case 1: ...
+                //     case foo: ...
+                // ```
+                if last_case.guard.is_none() && last_case.pattern.is_irrefutable() {
+                    complexity -= 1;
+                }
+            }
         }
         Stmt::Try(ast::StmtTry {
             body,
@@ -424,6 +441,68 @@ def with_lock():
|
||||
with lock:
|
||||
if foo:
|
||||
print('bar')
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_match_case() -> Result<()> {
|
||||
let source = r"
|
||||
def f():
|
||||
match subject:
|
||||
case 2:
|
||||
print('foo')
|
||||
case _:
|
||||
print('bar')
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_match_case() -> Result<()> {
|
||||
let source = r"
|
||||
def f():
|
||||
match subject:
|
||||
case 2:
|
||||
print('foo')
|
||||
case 2:
|
||||
print('bar')
|
||||
case _:
|
||||
print('baz')
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 3);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn named_catch_all_match_case() -> Result<()> {
|
||||
let source = r"
|
||||
def f():
|
||||
match subject:
|
||||
case 2:
|
||||
print('hello')
|
||||
case x:
|
||||
print(x)
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn match_case_catch_all_with_sequence() -> Result<()> {
|
||||
let source = r"
|
||||
def f():
|
||||
match subject:
|
||||
case 2:
|
||||
print('hello')
|
||||
case 5 | _:
|
||||
print(x)
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
|
||||
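A compact illustration of the counting exercised by the tests above (requires Python 3.10+ for `match`): each `case` adds one branch, and a trailing irrefutable pattern without a guard is discounted, mirroring the `else` of an `if` chain.

```python
def dispatch(command):
    # This match contributes 3 to the McCabe count: 1 for the statement,
    # 1 per case (3 cases), minus 1 because the final `case _` is an
    # irrefutable catch-all with no guard.
    match command:
        case "start":
            return "starting"
        case "stop":
            return "stopping"
        case _:
            return f"unknown command: {command}"
```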
@@ -184,6 +184,12 @@ pub(crate) fn numpy_2_0_deprecation(checker: &mut Checker, expr: &Expr) {
|
||||
guideline: Some("`add_newdoc_ufunc` is an internal function."),
|
||||
},
|
||||
}),
|
||||
["numpy", "alltrue"] => Some(Replacement {
|
||||
existing: "alltrue",
|
||||
details: Details::AutoPurePython {
|
||||
python_expr: "all",
|
||||
},
|
||||
}),
|
||||
["numpy", "asfarray"] => Some(Replacement {
|
||||
existing: "asfarray",
|
||||
details: Details::Manual {
|
||||
@@ -234,6 +240,14 @@ pub(crate) fn numpy_2_0_deprecation(checker: &mut Checker, expr: &Expr) {
|
||||
compatibility: Compatibility::BackwardsCompatible,
|
||||
},
|
||||
}),
|
||||
["numpy", "cumproduct"] => Some(Replacement {
|
||||
existing: "cumproduct",
|
||||
details: Details::AutoImport {
|
||||
path: "numpy",
|
||||
name: "cumprod",
|
||||
compatibility: Compatibility::BackwardsCompatible,
|
||||
},
|
||||
}),
|
||||
["numpy", "DataSource"] => Some(Replacement {
|
||||
existing: "DataSource",
|
||||
details: Details::AutoImport {
|
||||
@@ -420,6 +434,14 @@ pub(crate) fn numpy_2_0_deprecation(checker: &mut Checker, expr: &Expr) {
|
||||
compatibility: Compatibility::BackwardsCompatible,
|
||||
},
|
||||
}),
|
||||
["numpy", "product"] => Some(Replacement {
|
||||
existing: "product",
|
||||
details: Details::AutoImport {
|
||||
path: "numpy",
|
||||
name: "prod",
|
||||
compatibility: Compatibility::BackwardsCompatible,
|
||||
},
|
||||
}),
|
||||
["numpy", "PZERO"] => Some(Replacement {
|
||||
existing: "PZERO",
|
||||
details: Details::AutoPurePython { python_expr: "0.0" },
|
||||
@@ -492,6 +514,12 @@ pub(crate) fn numpy_2_0_deprecation(checker: &mut Checker, expr: &Expr) {
|
||||
compatibility: Compatibility::BackwardsCompatible,
|
||||
},
|
||||
}),
|
||||
["numpy", "sometrue"] => Some(Replacement {
|
||||
existing: "sometrue",
|
||||
details: Details::AutoPurePython {
|
||||
python_expr: "any",
|
||||
},
|
||||
}),
|
||||
["numpy", "source"] => Some(Replacement {
|
||||
existing: "source",
|
||||
details: Details::AutoImport {
|
||||
|
||||
@@ -854,6 +854,8 @@ NPY201.py:108:5: NPY201 [*] `np.row_stack` will be removed in NumPy 2.0. Use `nu
|
||||
107 |
|
||||
108 | np.row_stack(([1,2], [3,4]))
|
||||
| ^^^^^^^^^^^^ NPY201
|
||||
109 |
|
||||
110 | np.alltrue([True, True])
|
||||
|
|
||||
= help: Replace with `numpy.vstack`
|
||||
|
||||
@@ -863,5 +865,65 @@ NPY201.py:108:5: NPY201 [*] `np.row_stack` will be removed in NumPy 2.0. Use `nu
|
||||
107 107 |
|
||||
108 |- np.row_stack(([1,2], [3,4]))
|
||||
108 |+ np.vstack(([1,2], [3,4]))
|
||||
109 109 |
|
||||
110 110 | np.alltrue([True, True])
|
||||
111 111 |
|
||||
|
||||
NPY201.py:110:5: NPY201 [*] `np.alltrue` will be removed in NumPy 2.0. Use `all` instead.
|
||||
|
|
||||
108 | np.row_stack(([1,2], [3,4]))
|
||||
109 |
|
||||
110 | np.alltrue([True, True])
|
||||
| ^^^^^^^^^^ NPY201
|
||||
111 |
|
||||
112 | np.anytrue([True, False])
|
||||
|
|
||||
= help: Replace with `all`
|
||||
|
||||
ℹ Safe fix
|
||||
107 107 |
|
||||
108 108 | np.row_stack(([1,2], [3,4]))
|
||||
109 109 |
|
||||
110 |- np.alltrue([True, True])
|
||||
110 |+ all([True, True])
|
||||
111 111 |
|
||||
112 112 | np.anytrue([True, False])
|
||||
113 113 |
|
||||
|
||||
NPY201.py:114:5: NPY201 [*] `np.cumproduct` will be removed in NumPy 2.0. Use `numpy.cumprod` instead.
|
||||
|
|
||||
112 | np.anytrue([True, False])
|
||||
113 |
|
||||
114 | np.cumproduct([1, 2, 3])
|
||||
| ^^^^^^^^^^^^^ NPY201
|
||||
115 |
|
||||
116 | np.product([1, 2, 3])
|
||||
|
|
||||
= help: Replace with `numpy.cumprod`
|
||||
|
||||
ℹ Safe fix
|
||||
111 111 |
|
||||
112 112 | np.anytrue([True, False])
|
||||
113 113 |
|
||||
114 |- np.cumproduct([1, 2, 3])
|
||||
114 |+ np.cumprod([1, 2, 3])
|
||||
115 115 |
|
||||
116 116 | np.product([1, 2, 3])
|
||||
|
||||
NPY201.py:116:5: NPY201 [*] `np.product` will be removed in NumPy 2.0. Use `numpy.prod` instead.
|
||||
|
|
||||
114 | np.cumproduct([1, 2, 3])
|
||||
115 |
|
||||
116 | np.product([1, 2, 3])
|
||||
| ^^^^^^^^^^ NPY201
|
||||
|
|
||||
= help: Replace with `numpy.prod`
|
||||
|
||||
ℹ Safe fix
|
||||
113 113 |
|
||||
114 114 | np.cumproduct([1, 2, 3])
|
||||
115 115 |
|
||||
116 |- np.product([1, 2, 3])
|
||||
116 |+ np.prod([1, 2, 3])
|
||||
|
||||
|
||||
|
||||
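The new NPY201 entries above map removed NumPy 1.x aliases to their replacements. A before/after sketch (assumes `numpy` is importable; on NumPy 2.0 the commented-out forms raise `AttributeError`):

```python
import numpy as np

values = [1, 2, 3]

# Removed alias (flagged by NPY201)   ->  replacement applied by the fix
# np.alltrue(values)                      all(values)
# np.sometrue(values)                     any(values)
# np.cumproduct(values)                   np.cumprod(values)
# np.product(values)                      np.prod(values)

print(all(values), any(values))  # True True
print(np.cumprod(values))        # [1 2 6]
print(np.prod(values))           # 6
```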
@@ -48,8 +48,8 @@ pub(super) fn test_expression(expr: &Expr, semantic: &SemanticModel) -> Resoluti
         | BindingKind::NamedExprAssignment
         | BindingKind::LoopVar
         | BindingKind::ComprehensionVar
-        | BindingKind::Global
-        | BindingKind::Nonlocal(_) => Resolution::RelevantLocal,
+        | BindingKind::Global(_)
+        | BindingKind::Nonlocal(_, _) => Resolution::RelevantLocal,
         BindingKind::Import(import)
             if matches!(import.qualified_name().segments(), ["pandas"]) =>
         {
@@ -181,6 +181,7 @@ fn check(
 
         // If we're at the end of line, skip.
         if matches!(next_char, '\n' | '\r') {
+            contains_valid_escape_sequence = true;
             continue;
         }
 
@@ -52,7 +52,7 @@ pub(crate) fn missing_whitespace_after_keyword(
     let tok0_kind = tok0.kind();
     let tok1_kind = tok1.kind();
 
-    if tok0_kind.is_non_soft_keyword()
+    if tok0_kind.is_keyword()
         && !(tok0_kind.is_singleton()
             || matches!(tok0_kind, TokenKind::Async | TokenKind::Await)
             || tok0_kind == TokenKind::Except && tok1_kind == TokenKind::Star

@@ -445,7 +445,7 @@ impl LogicalLinesBuilder {
 
         if matches!(kind, TokenKind::Comma | TokenKind::Semi | TokenKind::Colon) {
             line.flags.insert(TokenFlags::PUNCTUATION);
-        } else if kind.is_non_soft_keyword() {
+        } else if kind.is_keyword() {
             line.flags.insert(TokenFlags::KEYWORD);
         }
 
@@ -127,8 +127,8 @@ pub(crate) fn whitespace_around_keywords(line: &LogicalLine, context: &mut Logic
     let mut after_keyword = false;
 
     for token in line.tokens() {
-        let is_non_soft_keyword = token.kind().is_non_soft_keyword();
-        if is_non_soft_keyword {
+        let is_keyword = token.kind().is_keyword();
+        if is_keyword {
             if !after_keyword {
                 match line.leading_whitespace(token) {
                     (Whitespace::Tab, offset) => {

@@ -184,6 +184,6 @@ pub(crate) fn whitespace_around_keywords(line: &LogicalLine, context: &mut Logic
             }
         }
 
-        after_keyword = is_non_soft_keyword;
+        after_keyword = is_keyword;
     }
 }
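The switch from `is_non_soft_keyword` to `is_keyword` lets the logical-line rules see soft keywords too, which is what the new `type`, `match`, and `case` snapshot entries below exercise. In Python terms (the `type` alias statement needs Python 3.12+):

```python
command = 1

# E271: multiple spaces after the soft keyword `type`.
type  Number = int

# E275: missing whitespace after the soft keywords `match` and `case`.
match(command):
    case(1):
        pass
```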
@@ -9,10 +9,10 @@ use crate::checkers::ast::Checker;
|
||||
use crate::registry::Rule;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for negative comparison using `not {foo} in {bar}`.
|
||||
/// Checks for membership tests using `not {element} in {collection}`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Negative comparison should be done using `not in`.
|
||||
/// Testing membership with `{element} not in {collection}` is more readable.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
@@ -42,10 +42,11 @@ impl AlwaysFixableViolation for NotInTest {
|
||||
}
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for negative comparison using `not {foo} is {bar}`.
|
||||
/// Checks for identity comparisons using `not {foo} is {bar}`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Negative comparison should be done using `is not`.
|
||||
/// According to [PEP8], testing for an object's identity with `is not` is more
|
||||
/// readable.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
@@ -60,6 +61,8 @@ impl AlwaysFixableViolation for NotInTest {
|
||||
/// pass
|
||||
/// Z = X.B is not Y
|
||||
/// ```
|
||||
///
|
||||
/// [PEP8]: https://peps.python.org/pep-0008/#programming-recommendations
|
||||
#[violation]
|
||||
pub struct NotIsTest;
|
||||
|
||||
|
||||
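The reworded docs above describe the same two checks; a compact example of what they flag and what they prefer:

```python
allowed = {"read", "write"}
action = "read"
marker = None

if not action in allowed:  # E713: prefer `action not in allowed`
    raise ValueError(action)

if not marker is None:     # E714: prefer `marker is not None`
    print(marker)
```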
@@ -11,18 +11,6 @@ use ruff_text_size::{TextRange, TextSize};
|
||||
/// According to [PEP 8], spaces are preferred over tabs (unless used to remain
|
||||
/// consistent with code that is already indented with tabs).
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// if True:
|
||||
/// a = 1
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// if True:
|
||||
/// a = 1
|
||||
/// ```
|
||||
///
|
||||
/// ## Formatter compatibility
|
||||
/// We recommend against using this rule alongside the [formatter]. The
|
||||
/// formatter enforces consistent indentation, making the rule redundant.
|
||||
|
||||
@@ -190,4 +190,22 @@ E27.py:35:14: E271 [*] Multiple spaces after keyword
|
||||
37 37 | from w import(e, f)
|
||||
38 38 | #: E275
|
||||
|
||||
E27.py:70:5: E271 [*] Multiple spaces after keyword
|
||||
|
|
||||
69 | #: E271
|
||||
70 | type Number = int
|
||||
| ^^ E271
|
||||
71 |
|
||||
72 | #: E273
|
||||
|
|
||||
= help: Replace with single space
|
||||
|
||||
ℹ Safe fix
|
||||
67 67 | # Soft keywords
|
||||
68 68 |
|
||||
69 69 | #: E271
|
||||
70 |-type Number = int
|
||||
70 |+type Number = int
|
||||
71 71 |
|
||||
72 72 | #: E273
|
||||
73 73 | type Number = int
|
||||
|
||||
@@ -106,4 +106,22 @@ E27.py:30:10: E273 [*] Tab after keyword
|
||||
32 32 | from u import (a, b)
|
||||
33 33 | from v import c, d
|
||||
|
||||
E27.py:73:5: E273 [*] Tab after keyword
|
||||
|
|
||||
72 | #: E273
|
||||
73 | type Number = int
|
||||
| ^^^^ E273
|
||||
74 |
|
||||
75 | #: E275
|
||||
|
|
||||
= help: Replace with single space
|
||||
|
||||
ℹ Safe fix
|
||||
70 70 | type Number = int
|
||||
71 71 |
|
||||
72 72 | #: E273
|
||||
73 |-type Number = int
|
||||
73 |+type Number = int
|
||||
74 74 |
|
||||
75 75 | #: E275
|
||||
76 76 | match(foo):
|
||||
|
||||
@@ -106,4 +106,39 @@ E27.py:54:5: E275 [*] Missing whitespace after keyword
|
||||
56 56 | def f():
|
||||
57 57 | print((yield))
|
||||
|
||||
E27.py:76:1: E275 [*] Missing whitespace after keyword
|
||||
|
|
||||
75 | #: E275
|
||||
76 | match(foo):
|
||||
| ^^^^^ E275
|
||||
77 | case(1):
|
||||
78 | pass
|
||||
|
|
||||
= help: Added missing whitespace after keyword
|
||||
|
||||
ℹ Safe fix
|
||||
73 73 | type Number = int
|
||||
74 74 |
|
||||
75 75 | #: E275
|
||||
76 |-match(foo):
|
||||
76 |+match (foo):
|
||||
77 77 | case(1):
|
||||
78 78 | pass
|
||||
|
||||
E27.py:77:5: E275 [*] Missing whitespace after keyword
|
||||
|
|
||||
75 | #: E275
|
||||
76 | match(foo):
|
||||
77 | case(1):
|
||||
| ^^^^ E275
|
||||
78 | pass
|
||||
|
|
||||
= help: Added missing whitespace after keyword
|
||||
|
||||
ℹ Safe fix
|
||||
74 74 |
|
||||
75 75 | #: E275
|
||||
76 76 | match(foo):
|
||||
77 |- case(1):
|
||||
77 |+ case (1):
|
||||
78 78 | pass
|
||||
|
||||
@@ -145,6 +145,8 @@ W605_0.py:48:6: W605 [*] Invalid escape sequence: `\ `
|
||||
47 | #: W605:1:7
|
||||
48 | u'foo\ bar'
|
||||
| ^^ W605
|
||||
49 |
|
||||
50 | #: W605:1:13
|
||||
|
|
||||
= help: Use a raw string literal
|
||||
|
||||
@@ -154,5 +156,61 @@ W605_0.py:48:6: W605 [*] Invalid escape sequence: `\ `
|
||||
47 47 | #: W605:1:7
|
||||
48 |-u'foo\ bar'
|
||||
48 |+r'foo\ bar'
|
||||
49 49 |
|
||||
50 50 | #: W605:1:13
|
||||
51 51 | (
|
||||
|
||||
W605_0.py:53:9: W605 [*] Invalid escape sequence: `\.`
|
||||
|
|
||||
51 | (
|
||||
52 | "foo \
|
||||
53 | bar \. baz"
|
||||
| ^^ W605
|
||||
54 | )
|
||||
|
|
||||
= help: Add backslash to escape sequence
|
||||
|
||||
ℹ Safe fix
|
||||
50 50 | #: W605:1:13
|
||||
51 51 | (
|
||||
52 52 | "foo \
|
||||
53 |- bar \. baz"
|
||||
53 |+ bar \\. baz"
|
||||
54 54 | )
|
||||
55 55 |
|
||||
56 56 | #: W605:1:6
|
||||
|
||||
W605_0.py:57:6: W605 [*] Invalid escape sequence: `\.`
|
||||
|
|
||||
56 | #: W605:1:6
|
||||
57 | "foo \. bar \t"
|
||||
| ^^ W605
|
||||
58 |
|
||||
59 | #: W605:1:13
|
||||
|
|
||||
= help: Add backslash to escape sequence
|
||||
|
||||
ℹ Safe fix
|
||||
54 54 | )
|
||||
55 55 |
|
||||
56 56 | #: W605:1:6
|
||||
57 |-"foo \. bar \t"
|
||||
57 |+"foo \\. bar \t"
|
||||
58 58 |
|
||||
59 59 | #: W605:1:13
|
||||
60 60 | "foo \t bar \."
|
||||
|
||||
W605_0.py:60:13: W605 [*] Invalid escape sequence: `\.`
|
||||
|
|
||||
59 | #: W605:1:13
|
||||
60 | "foo \t bar \."
|
||||
| ^^ W605
|
||||
|
|
||||
= help: Add backslash to escape sequence
|
||||
|
||||
ℹ Safe fix
|
||||
57 57 | "foo \. bar \t"
|
||||
58 58 |
|
||||
59 59 | #: W605:1:13
|
||||
60 |-"foo \t bar \."
|
||||
60 |+"foo \t bar \\."
|
||||
|
||||
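The extended W605 snapshot covers escapes in `u`-prefixed and implicitly concatenated strings; the two fix strategies shown there are a raw-string prefix or a doubled backslash. In short:

```python
# W605: `\.` is not a recognized escape sequence in a normal string literal.
pattern = "foo \. bar"   # flagged

pattern = r"foo \. bar"  # fix option 1: raw string
pattern = "foo \\. bar"  # fix option 2: escape the backslash
print(pattern)
```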
@@ -229,6 +229,49 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_24/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_25__all_nonempty/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_26__all_empty/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_27__all_mistyped/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_28__all_multiple/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_29__all_conditional/__init__.py"))]
|
||||
fn f401_stable(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"{}_stable_{}",
|
||||
rule_code.noqa_code(),
|
||||
path.to_string_lossy()
|
||||
);
|
||||
let diagnostics = test_path(
|
||||
Path::new("pyflakes").join(path).as_path(),
|
||||
&LinterSettings::for_rule(rule_code),
|
||||
)?;
|
||||
assert_messages!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_24/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_25__all_nonempty/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_26__all_empty/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_27__all_mistyped/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_28__all_multiple/__init__.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_29__all_conditional/__init__.py"))]
|
||||
fn f401_deprecated_option(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"{}_deprecated_option_{}",
|
||||
rule_code.noqa_code(),
|
||||
path.to_string_lossy()
|
||||
);
|
||||
let diagnostics = test_path(
|
||||
Path::new("pyflakes").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
ignore_init_module_imports: false,
|
||||
..LinterSettings::for_rule(rule_code)
|
||||
},
|
||||
)?;
|
||||
assert_messages!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn f841_dummy_variable_rgx() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -24,6 +24,7 @@ enum UnusedImportContext {
|
||||
Init {
|
||||
first_party: bool,
|
||||
dunder_all_count: usize,
|
||||
ignore_init_module_imports: bool,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -46,12 +47,29 @@ enum UnusedImportContext {
|
||||
/// from module import member as member
|
||||
/// ```
|
||||
///
|
||||
/// Alternatively, you can use `__all__` to declare a symbol as part of the module's
|
||||
/// interface, as in:
|
||||
///
|
||||
/// ```python
|
||||
/// # __init__.py
|
||||
/// import some_module
|
||||
///
|
||||
/// __all__ = [ "some_module"]
|
||||
/// ```
|
||||
///
|
||||
/// ## Fix safety
|
||||
///
|
||||
/// When `ignore_init_module_imports` is disabled, fixes can remove unused imports in `__init__` files.
|
||||
/// These fixes are considered unsafe because they can change the public interface.
|
||||
/// Fixes to remove unused imports are safe, except in `__init__.py` files.
|
||||
///
|
||||
/// Applying fixes to `__init__.py` files is currently in preview. The fix offered depends on the
|
||||
/// type of the unused import. Ruff will suggest a safe fix to export first-party imports with
|
||||
/// either a redundant alias or, if already present in the file, an `__all__` entry. If multiple
|
||||
/// `__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix
|
||||
/// to remove third-party and standard library imports -- the fix is unsafe because the module's
|
||||
/// interface changes.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```python
|
||||
/// import numpy as np # unused import
|
||||
///
|
||||
@@ -61,12 +79,14 @@ enum UnusedImportContext {
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
///
|
||||
/// ```python
|
||||
/// def area(radius):
|
||||
/// return 3.14 * radius**2
|
||||
/// ```
|
||||
///
|
||||
/// To check the availability of a module, use `importlib.util.find_spec`:
|
||||
///
|
||||
/// ```python
|
||||
/// from importlib.util import find_spec
|
||||
///
|
||||
@@ -87,6 +107,8 @@ enum UnusedImportContext {
|
||||
pub struct UnusedImport {
|
||||
/// Qualified name of the import
|
||||
name: String,
|
||||
/// Unqualified name of the import
|
||||
module: String,
|
||||
/// Name of the import binding
|
||||
binding: String,
|
||||
context: Option<UnusedImportContext>,
|
||||
@@ -117,6 +139,7 @@ impl Violation for UnusedImport {
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
let UnusedImport {
|
||||
name,
|
||||
module,
|
||||
binding,
|
||||
multiple,
|
||||
..
|
||||
@@ -125,14 +148,14 @@ impl Violation for UnusedImport {
|
||||
Some(UnusedImportContext::Init {
|
||||
first_party: true,
|
||||
dunder_all_count: 1,
|
||||
ignore_init_module_imports: true,
|
||||
}) => Some(format!("Add unused import `{binding}` to __all__")),
|
||||
|
||||
Some(UnusedImportContext::Init {
|
||||
first_party: true,
|
||||
dunder_all_count: 0,
|
||||
}) => Some(format!(
|
||||
"Use an explicit re-export: `{binding} as {binding}`"
|
||||
)),
|
||||
ignore_init_module_imports: true,
|
||||
}) => Some(format!("Use an explicit re-export: `{module} as {module}`")),
|
||||
|
||||
_ => Some(if *multiple {
|
||||
"Remove unused import".to_string()
|
||||
@@ -244,7 +267,8 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
}
|
||||
|
||||
let in_init = checker.path().ends_with("__init__.py");
|
||||
let fix_init = checker.settings.preview.is_enabled();
|
||||
let fix_init = !checker.settings.ignore_init_module_imports;
|
||||
let preview_mode = checker.settings.preview.is_enabled();
|
||||
let dunder_all_exprs = find_dunder_all_exprs(checker.semantic());
|
||||
|
||||
// Generate a diagnostic for every import, but share fixes across all imports within the same
|
||||
@@ -275,6 +299,7 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
checker,
|
||||
),
|
||||
dunder_all_count: dunder_all_exprs.len(),
|
||||
ignore_init_module_imports: !fix_init,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
@@ -288,30 +313,31 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
first_party: true,
|
||||
..
|
||||
})
|
||||
)
|
||||
) && preview_mode
|
||||
});
|
||||
|
||||
// generate fixes that are shared across bindings in the statement
|
||||
let (fix_remove, fix_reexport) = if (!in_init || fix_init) && !in_except_handler {
|
||||
(
|
||||
fix_by_removing_imports(
|
||||
checker,
|
||||
import_statement,
|
||||
to_remove.iter().map(|(binding, _)| binding),
|
||||
in_init,
|
||||
let (fix_remove, fix_reexport) =
|
||||
if (!in_init || fix_init || preview_mode) && !in_except_handler {
|
||||
(
|
||||
fix_by_removing_imports(
|
||||
checker,
|
||||
import_statement,
|
||||
to_remove.iter().map(|(binding, _)| binding),
|
||||
in_init,
|
||||
)
|
||||
.ok(),
|
||||
fix_by_reexporting(
|
||||
checker,
|
||||
import_statement,
|
||||
&to_reexport.iter().map(|(b, _)| b).collect::<Vec<_>>(),
|
||||
&dunder_all_exprs,
|
||||
)
|
||||
.ok(),
|
||||
)
|
||||
.ok(),
|
||||
fix_by_reexporting(
|
||||
checker,
|
||||
import_statement,
|
||||
&to_reexport.iter().map(|(b, _)| b).collect::<Vec<_>>(),
|
||||
&dunder_all_exprs,
|
||||
)
|
||||
.ok(),
|
||||
)
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
|
||||
for ((binding, context), fix) in iter::Iterator::chain(
|
||||
iter::zip(to_remove, iter::repeat(fix_remove)),
|
||||
@@ -320,6 +346,7 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
UnusedImport {
|
||||
name: binding.import.qualified_name().to_string(),
|
||||
module: binding.import.member_name().to_string(),
|
||||
binding: binding.name.to_string(),
|
||||
context,
|
||||
multiple,
|
||||
@@ -344,6 +371,7 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
UnusedImport {
|
||||
name: binding.import.qualified_name().to_string(),
|
||||
module: binding.import.member_name().to_string(),
|
||||
binding: binding.name.to_string(),
|
||||
context: None,
|
||||
multiple: false,
|
||||
|
||||
@@ -0,0 +1,47 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:19:8: F401 [*] `sys` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
19 | import sys # F401: remove unused
|
||||
| ^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `sys`
|
||||
|
||||
ℹ Unsafe fix
|
||||
16 16 | import argparse as argparse # Ok: is redundant alias
|
||||
17 17 |
|
||||
18 18 |
|
||||
19 |-import sys # F401: remove unused
|
||||
20 19 |
|
||||
21 20 |
|
||||
22 21 | # first-party
|
||||
|
||||
__init__.py:33:15: F401 [*] `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
33 | from . import unused # F401: change to redundant alias
|
||||
| ^^^^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `.unused`
|
||||
|
||||
ℹ Unsafe fix
|
||||
30 30 | from . import aliased as aliased # Ok: is redundant alias
|
||||
31 31 |
|
||||
32 32 |
|
||||
33 |-from . import unused # F401: change to redundant alias
|
||||
34 33 |
|
||||
35 34 |
|
||||
36 35 | from . import renamed as bees # F401: no fix
|
||||
|
||||
__init__.py:36:26: F401 [*] `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
36 | from . import renamed as bees # F401: no fix
|
||||
| ^^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `.renamed`
|
||||
|
||||
ℹ Unsafe fix
|
||||
33 33 | from . import unused # F401: change to redundant alias
|
||||
34 34 |
|
||||
35 35 |
|
||||
36 |-from . import renamed as bees # F401: no fix
|
||||
@@ -0,0 +1,50 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:19:8: F401 [*] `sys` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
19 | import sys # F401: remove unused
|
||||
| ^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `sys`
|
||||
|
||||
ℹ Unsafe fix
|
||||
16 16 | import argparse # Ok: is exported in __all__
|
||||
17 17 |
|
||||
18 18 |
|
||||
19 |-import sys # F401: remove unused
|
||||
20 19 |
|
||||
21 20 |
|
||||
22 21 | # first-party
|
||||
|
||||
__init__.py:36:15: F401 [*] `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
36 | from . import unused # F401: add to __all__
|
||||
| ^^^^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `.unused`
|
||||
|
||||
ℹ Unsafe fix
|
||||
33 33 | from . import exported # Ok: is exported in __all__
|
||||
34 34 |
|
||||
35 35 |
|
||||
36 |-from . import unused # F401: add to __all__
|
||||
37 36 |
|
||||
38 37 |
|
||||
39 38 | from . import renamed as bees # F401: add to __all__
|
||||
|
||||
__init__.py:39:26: F401 [*] `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
39 | from . import renamed as bees # F401: add to __all__
|
||||
| ^^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `.renamed`
|
||||
|
||||
ℹ Unsafe fix
|
||||
36 36 | from . import unused # F401: add to __all__
|
||||
37 37 |
|
||||
38 38 |
|
||||
39 |-from . import renamed as bees # F401: add to __all__
|
||||
40 39 |
|
||||
41 40 |
|
||||
42 41 | __all__ = ["argparse", "exported"]
|
||||
@@ -0,0 +1,34 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:5:15: F401 [*] `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
5 | from . import unused # F401: add to __all__
|
||||
| ^^^^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `.unused`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | """
|
||||
3 3 |
|
||||
4 4 |
|
||||
5 |-from . import unused # F401: add to __all__
|
||||
6 5 |
|
||||
7 6 |
|
||||
8 7 | from . import renamed as bees # F401: add to __all__
|
||||
|
||||
__init__.py:8:26: F401 [*] `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
8 | from . import renamed as bees # F401: add to __all__
|
||||
| ^^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `.renamed`
|
||||
|
||||
ℹ Unsafe fix
|
||||
5 5 | from . import unused # F401: add to __all__
|
||||
6 6 |
|
||||
7 7 |
|
||||
8 |-from . import renamed as bees # F401: add to __all__
|
||||
9 8 |
|
||||
10 9 |
|
||||
11 10 | __all__ = []
|
||||
@@ -0,0 +1,34 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:5:15: F401 [*] `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
5 | from . import unused # F401: recommend add to all w/o fix
|
||||
| ^^^^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `.unused`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | """
|
||||
3 3 |
|
||||
4 4 |
|
||||
5 |-from . import unused # F401: recommend add to all w/o fix
|
||||
6 5 |
|
||||
7 6 |
|
||||
8 7 | from . import renamed as bees # F401: recommend add to all w/o fix
|
||||
|
||||
__init__.py:8:26: F401 [*] `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
8 | from . import renamed as bees # F401: recommend add to all w/o fix
|
||||
| ^^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `.renamed`
|
||||
|
||||
ℹ Unsafe fix
|
||||
5 5 | from . import unused # F401: recommend add to all w/o fix
|
||||
6 6 |
|
||||
7 7 |
|
||||
8 |-from . import renamed as bees # F401: recommend add to all w/o fix
|
||||
9 8 |
|
||||
10 9 |
|
||||
11 10 | __all__ = None
|
||||
@@ -0,0 +1,34 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:5:15: F401 [*] `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
5 | from . import unused, renamed as bees # F401: add to __all__
|
||||
| ^^^^^^ F401
|
||||
|
|
||||
= help: Remove unused import
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | """
|
||||
3 3 |
|
||||
4 4 |
|
||||
5 |-from . import unused, renamed as bees # F401: add to __all__
|
||||
6 5 |
|
||||
7 6 |
|
||||
8 7 | __all__ = [];
|
||||
|
||||
__init__.py:5:34: F401 [*] `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
5 | from . import unused, renamed as bees # F401: add to __all__
|
||||
| ^^^^ F401
|
||||
|
|
||||
= help: Remove unused import
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | """
|
||||
3 3 |
|
||||
4 4 |
|
||||
5 |-from . import unused, renamed as bees # F401: add to __all__
|
||||
6 5 |
|
||||
7 6 |
|
||||
8 7 | __all__ = [];
|
||||
@@ -0,0 +1,44 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:8:15: F401 [*] `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
6 | import sys
|
||||
7 |
|
||||
8 | from . import unused, exported, renamed as bees
|
||||
| ^^^^^^ F401
|
||||
9 |
|
||||
10 | if sys.version_info > (3, 9):
|
||||
|
|
||||
= help: Remove unused import
|
||||
|
||||
ℹ Unsafe fix
|
||||
5 5 |
|
||||
6 6 | import sys
|
||||
7 7 |
|
||||
8 |-from . import unused, exported, renamed as bees
|
||||
8 |+from . import exported
|
||||
9 9 |
|
||||
10 10 | if sys.version_info > (3, 9):
|
||||
11 11 | from . import also_exported
|
||||
|
||||
__init__.py:8:44: F401 [*] `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
6 | import sys
|
||||
7 |
|
||||
8 | from . import unused, exported, renamed as bees
|
||||
| ^^^^ F401
|
||||
9 |
|
||||
10 | if sys.version_info > (3, 9):
|
||||
|
|
||||
= help: Remove unused import
|
||||
|
||||
ℹ Unsafe fix
|
||||
5 5 |
|
||||
6 6 | import sys
|
||||
7 7 |
|
||||
8 |-from . import unused, exported, renamed as bees
|
||||
8 |+from . import exported
|
||||
9 9 |
|
||||
10 10 | if sys.version_info > (3, 9):
|
||||
11 11 | from . import also_exported
|
||||
@@ -0,0 +1,23 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:19:8: F401 `sys` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
19 | import sys # F401: remove unused
|
||||
| ^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `sys`
|
||||
|
||||
__init__.py:33:15: F401 `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
33 | from . import unused # F401: change to redundant alias
|
||||
| ^^^^^^ F401
|
||||
|
|
||||
= help: Use an explicit re-export: `unused as unused`
|
||||
|
||||
__init__.py:36:26: F401 `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
36 | from . import renamed as bees # F401: no fix
|
||||
| ^^^^ F401
|
||||
|
|
||||
= help: Use an explicit re-export: `renamed as renamed`
|
||||
@@ -0,0 +1,23 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:19:8: F401 `sys` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
19 | import sys # F401: remove unused
|
||||
| ^^^ F401
|
||||
|
|
||||
= help: Remove unused import: `sys`
|
||||
|
||||
__init__.py:36:15: F401 `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
36 | from . import unused # F401: add to __all__
|
||||
| ^^^^^^ F401
|
||||
|
|
||||
= help: Add unused import `unused` to __all__
|
||||
|
||||
__init__.py:39:26: F401 `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
39 | from . import renamed as bees # F401: add to __all__
|
||||
| ^^^^ F401
|
||||
|
|
||||
= help: Add unused import `bees` to __all__
|
||||
@@ -0,0 +1,16 @@
---
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
---
__init__.py:5:15: F401 `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
  |
5 | from . import unused # F401: add to __all__
  |               ^^^^^^ F401
  |
  = help: Add unused import `unused` to __all__

__init__.py:8:26: F401 `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
  |
8 | from . import renamed as bees # F401: add to __all__
  |                           ^^^^ F401
  |
  = help: Add unused import `bees` to __all__
@@ -0,0 +1,16 @@
---
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
---
__init__.py:5:15: F401 `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
  |
5 | from . import unused # F401: recommend add to all w/o fix
  |               ^^^^^^ F401
  |
  = help: Add unused import `unused` to __all__

__init__.py:8:26: F401 `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
  |
8 | from . import renamed as bees # F401: recommend add to all w/o fix
  |                           ^^^^ F401
  |
  = help: Add unused import `bees` to __all__
@@ -0,0 +1,16 @@
---
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
---
__init__.py:5:15: F401 `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
  |
5 | from . import unused, renamed as bees # F401: add to __all__
  |               ^^^^^^ F401
  |
  = help: Add unused import `unused` to __all__

__init__.py:5:34: F401 `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
  |
5 | from . import unused, renamed as bees # F401: add to __all__
  |                                   ^^^^ F401
  |
  = help: Add unused import `bees` to __all__
@@ -0,0 +1,24 @@
---
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
---
__init__.py:8:15: F401 `.unused` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
   |
 6 | import sys
 7 |
 8 | from . import unused, exported, renamed as bees
   |               ^^^^^^ F401
 9 |
10 | if sys.version_info > (3, 9):
   |
   = help: Remove unused import

__init__.py:8:44: F401 `.renamed` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
   |
 6 | import sys
 7 |
 8 | from . import unused, exported, renamed as bees
   |                                            ^^^^ F401
 9 |
10 | if sys.version_info > (3, 9):
   |
   = help: Remove unused import
@@ -39,4 +39,4 @@ __init__.py:36:26: F401 `.renamed` imported but unused; consider removing, addin
 36 | from . import renamed as bees # F401: no fix
    |                           ^^^^ F401
    |
-   = help: Use an explicit re-export: `bees as bees`
+   = help: Use an explicit re-export: `renamed as renamed`
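Taken together, these F401 snapshots exercise the three remediations named in the diagnostic's help text. As a rough illustration only (a hypothetical `__init__.py` with made-up module and symbol names, not one of the test fixtures above), the suggested fixes correspond to code along these lines:

```python
# Hypothetical package __init__.py illustrating the three F401 remediations.
import sys  # unused: the autofix simply deletes this import

# Re-export explicitly via a redundant alias, marking the name as
# intentionally public.
from .submodule import helper as helper

# Or keep the plain import and declare the name in __all__ instead.
from .submodule import util

__all__ = ["util"]
```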
@@ -2,7 +2,7 @@ use std::str::FromStr;
 
 use ruff_diagnostics::{Diagnostic, Violation};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::{AnyStringFlags, Expr, ExprStringLiteral};
+use ruff_python_ast::{Expr, ExprStringLiteral, StringFlags, StringLiteral};
 use ruff_python_literal::{
     cformat::{CFormatErrorType, CFormatString},
     format::FormatPart,
@@ -90,9 +90,13 @@ pub(crate) fn call(checker: &mut Checker, string: &str, range: TextRange) {
 /// PLE1300
 /// Ex) `"%z" % "1"`
 pub(crate) fn percent(checker: &mut Checker, expr: &Expr, format_string: &ExprStringLiteral) {
-    for string_literal in &format_string.value {
-        let string = checker.locator().slice(string_literal);
-        let flags = AnyStringFlags::from(string_literal.flags);
+    for StringLiteral {
+        value: _,
+        range,
+        flags,
+    } in &format_string.value
+    {
+        let string = checker.locator().slice(range);
         let string = &string
             [usize::from(flags.opener_len())..(string.len() - usize::from(flags.closer_len()))];
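The hunk above only changes how each string segment and its prefix/quote characters are sliced out of the source; the rule's behavior is unchanged. For context, a minimal illustration of what PLE1300 reports, built around the `"%z" % "1"` example from the doc comment (the surrounding code is my own):

```python
# `%z` is not a valid printf-style conversion character, so a checker
# implementing PLE1300 (bad-string-format-character) flags this expression.
try:
    bad = "%z" % "1"
except ValueError as err:
    print(err)  # at runtime: unsupported format character 'z'

# A valid conversion for comparison: not reported.
ok = "%s" % "1"
```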
@@ -1,6 +1,6 @@
 use std::str::FromStr;
 
-use ruff_python_ast::{self as ast, AnyStringFlags, Expr};
+use ruff_python_ast::{self as ast, Expr, StringFlags, StringLiteral};
 use ruff_python_literal::cformat::{CFormatPart, CFormatSpec, CFormatStrOrBytes, CFormatString};
 use ruff_text_size::Ranged;
 use rustc_hash::FxHashMap;
@@ -217,12 +217,15 @@ pub(crate) fn bad_string_format_type(
 ) {
     // Parse each string segment.
     let mut format_strings = vec![];
-    for string_literal in &format_string.value {
-        let string = checker.locator().slice(string_literal);
-        let flags = AnyStringFlags::from(string_literal.flags);
-        let quote_len = usize::from(flags.quote_len());
-        let string =
-            &string[(usize::from(flags.prefix_len()) + quote_len)..(string.len() - quote_len)];
+    for StringLiteral {
+        value: _,
+        range,
+        flags,
+    } in &format_string.value
+    {
+        let string = checker.locator().slice(range);
+        let string = &string
+            [usize::from(flags.opener_len())..(string.len() - usize::from(flags.closer_len()))];
 
         // Parse the format string (e.g. `"%s"`) into a list of `PercentFormat`.
         if let Ok(format_string) = CFormatString::from_str(string) {
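As in the previous file, this is a mechanical switch to slicing each literal via `opener_len`/`closer_len`; the bad-string-format-type check itself is untouched. A small, illustrative example of what that rule reports (mine, not taken from the repository's fixtures):

```python
# The %d conversion expects a number, but the operand is a string, so the
# bad-string-format-type rule reports the expression.
try:
    label = "%d" % "1"
except TypeError as err:
    print(err)  # at runtime: %d format requires a number, not str

# Matching the conversion to the operand type avoids the diagnostic.
label = "%d" % 1
```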
@@ -155,7 +155,11 @@ pub(crate) fn nested_min_max(
         MinMax::try_from_call(func.as_ref(), keywords.as_ref(), checker.semantic()) == Some(min_max)
     }) {
         let mut diagnostic = Diagnostic::new(NestedMinMax { func: min_max }, expr.range());
-        if !checker.indexer().has_comments(expr, checker.locator()) {
+        if !checker
+            .indexer()
+            .comment_ranges()
+            .has_comments(expr, checker.locator())
+        {
             let flattened_expr = Expr::Call(ast::ExprCall {
                 func: Box::new(func.clone()),
                 arguments: Arguments {
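This hunk only routes the comment lookup through `comment_ranges()`; the guard's purpose stays the same: the nested-min-max rewrite is attached only when the expression contains no comments. A quick sketch of the behavior being guarded (my own illustration of the rule's semantics, not code from these files):

```python
# Nested calls to the same function collapse into one call; the suggested
# fix rewrites this as `min(1, 2, 3)`.
lowest = min(1, min(2, 3))

# When the expression contains a comment, the diagnostic is still emitted,
# but the automatic rewrite is withheld so the comment is not lost.
lowest = min(
    1,
    min(2, 3),  # keep the inner grouping for readability
)
```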
@@ -54,8 +54,8 @@ pub(crate) fn non_ascii_name(binding: &Binding, locator: &Locator) -> Option<Dia
         BindingKind::LoopVar => Kind::LoopVar,
         BindingKind::ComprehensionVar => Kind::ComprenhensionVar,
         BindingKind::WithItemVar => Kind::WithItemVar,
-        BindingKind::Global => Kind::Global,
-        BindingKind::Nonlocal(_) => Kind::Nonlocal,
+        BindingKind::Global(_) => Kind::Global,
+        BindingKind::Nonlocal(_, _) => Kind::Nonlocal,
         BindingKind::ClassDefinition(_) => Kind::ClassDefinition,
         BindingKind::FunctionDefinition(_) => Kind::FunctionDefinition,
         BindingKind::BoundException => Kind::BoundException,
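This match only tracks new payloads on the `Global` and `Nonlocal` binding kinds; the rule's output is unaffected. For orientation, the kind of code the non-ASCII-name rule reports looks roughly like this (identifiers are my own illustration):

```python
# Non-ASCII identifiers are valid Python, but the non-ASCII-name rule
# reports bindings like these so a codebase can stay ASCII-only.
número = 10  # flagged: assignment binding


def bump() -> None:
    global número  # flagged: global binding
    número += 1
```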
@@ -98,6 +98,8 @@ impl Ranged for AttributeAssignment<'_> {
 }
 
 /// Return a list of attributes that are assigned to but not included in `__slots__`.
+///
+/// If the `__slots__` attribute cannot be statically determined, returns an empty vector.
 fn is_attributes_not_in_slots(body: &[Stmt]) -> Vec<AttributeAssignment> {
     // First, collect all the attributes that are assigned to `__slots__`.
     let mut slots = FxHashSet::default();
@@ -110,7 +112,13 @@ fn is_attributes_not_in_slots(body: &[Stmt]) -> Vec<AttributeAssignment> {
                 };
 
                 if id == "__slots__" {
-                    slots.extend(slots_attributes(value));
+                    for attribute in slots_attributes(value) {
+                        if let Some(attribute) = attribute {
+                            slots.insert(attribute);
+                        } else {
+                            return vec![];
+                        }
+                    }
                 }
             }
@@ -125,7 +133,13 @@ fn is_attributes_not_in_slots(body: &[Stmt]) -> Vec<AttributeAssignment> {
                 };
 
                 if id == "__slots__" {
-                    slots.extend(slots_attributes(value));
+                    for attribute in slots_attributes(value) {
+                        if let Some(attribute) = attribute {
+                            slots.insert(attribute);
+                        } else {
+                            return vec![];
+                        }
+                    }
                 }
             }
@@ -136,7 +150,13 @@ fn is_attributes_not_in_slots(body: &[Stmt]) -> Vec<AttributeAssignment> {
                 };
 
                 if id == "__slots__" {
-                    slots.extend(slots_attributes(value));
+                    for attribute in slots_attributes(value) {
+                        if let Some(attribute) = attribute {
+                            slots.insert(attribute);
+                        } else {
+                            return vec![];
+                        }
+                    }
                 }
             }
             _ => {}
@@ -237,12 +257,14 @@ fn is_attributes_not_in_slots(body: &[Stmt]) -> Vec<AttributeAssignment> {
 }
 
 /// Return an iterator over the attributes enumerated in the given `__slots__` value.
-fn slots_attributes(expr: &Expr) -> impl Iterator<Item = &str> {
+///
+/// If an attribute can't be statically determined, it will be `None`.
+fn slots_attributes(expr: &Expr) -> impl Iterator<Item = Option<&str>> {
     // Ex) `__slots__ = ("name",)`
     let elts_iter = match expr {
         Expr::Tuple(ast::ExprTuple { elts, .. })
         | Expr::List(ast::ExprList { elts, .. })
-        | Expr::Set(ast::ExprSet { elts, .. }) => Some(elts.iter().filter_map(|elt| match elt {
+        | Expr::Set(ast::ExprSet { elts, .. }) => Some(elts.iter().map(|elt| match elt {
             Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => Some(value.to_str()),
             _ => None,
         })),
@@ -251,7 +273,7 @@ fn slots_attributes(expr: &Expr) -> impl Iterator<Item = &str> {
 
     // Ex) `__slots__ = {"name": ...}`
     let keys_iter = match expr {
-        Expr::Dict(dict) => Some(dict.iter_keys().filter_map(|key| match key {
+        Expr::Dict(dict) => Some(dict.iter_keys().map(|key| match key {
             Some(Expr::StringLiteral(ast::ExprStringLiteral { value, .. })) => Some(value.to_str()),
             _ => None,
         })),
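The net effect of these hunks: `slots_attributes` now yields `None` for any `__slots__` entry that is not a string literal, and the caller returns an empty list in that case, so classes whose slot set cannot be statically determined are skipped rather than checked against a partial set. A sketch of the two situations in Python (my own illustration, with invented names):

```python
class Point:
    # Every entry is a string literal, so the declared slots are fully known
    # and an assignment to an undeclared attribute can be reported.
    __slots__ = ("x", "y")

    def __init__(self) -> None:
        self.x = 0
        self.y = 0
        self.z = 0  # not listed in __slots__: reported


EXTRA_SLOT = "z"


class Fuzzy:
    # One entry is not a string literal, so the slot set cannot be statically
    # determined; with this change the class is skipped entirely.
    __slots__ = ("x", EXTRA_SLOT)

    def __init__(self) -> None:
        self.y = 0  # not reported, avoiding a false positive
```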
@@ -176,10 +176,11 @@ fn num_branches(stmts: &[Stmt]) -> usize {
                     .sum::<usize>()
             }
             Stmt::Match(ast::StmtMatch { cases, .. }) => {
-                1 + cases
-                    .iter()
-                    .map(|case| num_branches(&case.body))
-                    .sum::<usize>()
+                cases.len()
+                    + cases
+                        .iter()
+                        .map(|case| num_branches(&case.body))
+                        .sum::<usize>()
             }
             // The `with` statement is not considered a branch but the statements inside the `with` should be counted.
             Stmt::With(ast::StmtWith { body, .. }) => num_branches(body),
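In other words, a `match` statement now contributes one branch per `case` arm (plus whatever is nested inside the arms) instead of a flat `1` plus the nested branches. Worked against the new `match_case` test added further below, which expects a count of 2:

```python
x = 0

# Two `case` arms with nothing nested inside them: 2 + 0 = 2 branches under
# the updated counting (the old counting would have reported 1 + 0 = 1).
match x:
    case 0:
        pass
    case 1:
        pass
```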
@@ -199,7 +200,9 @@
                 finalbody,
                 ..
             }) => {
-                1 + num_branches(body)
+                // Count each `except` clause as a branch; the `else` and `finally` clauses also
+                // count, but the `try` clause itself does not.
+                num_branches(body)
                     + (if orelse.is_empty() {
                         0
                     } else {
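Under the revised counting, the `try` clause itself no longer adds a branch; each `except` handler, a non-empty `else`, and a non-empty `finally` each add one. A rough worked example, following the expectations encoded in the new tests below:

```python
# 1 except handler + 1 else + 1 finally = 3 branches under the new counting
# (the old counting also charged the `try` clause itself, giving 4).
try:
    value = int("3")
except ValueError:
    value = 0
else:
    print(value)
finally:
    print("done")
```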
@@ -276,6 +279,19 @@ else:
         Ok(())
     }
 
+    #[test]
+    fn match_case() -> Result<()> {
+        let source: &str = r"
+match x: # 2
+    case 0:
+        pass
+    case 1:
+        pass
+";
+        test_helper(source, 2)?;
+        Ok(())
+    }
+
     #[test]
     fn for_else() -> Result<()> {
         let source: &str = r"
@@ -323,6 +339,47 @@ return 1
         Ok(())
     }
 
+    #[test]
+    fn try_except() -> Result<()> {
+        let source: &str = r"
+try:
+    pass
+except:
+    pass
+";
+
+        test_helper(source, 1)?;
+        Ok(())
+    }
+
+    #[test]
+    fn try_except_else() -> Result<()> {
+        let source: &str = r"
+try:
+    pass
+except:
+    pass
+else:
+    pass
+";
+
+        test_helper(source, 2)?;
+        Ok(())
+    }
+
+    #[test]
+    fn try_finally() -> Result<()> {
+        let source: &str = r"
+try:
+    pass
+finally:
+    pass
+";
+
+        test_helper(source, 1)?;
+        Ok(())
+    }
+
     #[test]
     fn try_except_except_else_finally() -> Result<()> {
         let source: &str = r"
@@ -338,7 +395,7 @@ finally:
     pass
 ";
 
-        test_helper(source, 5)?;
+        test_helper(source, 4)?;
         Ok(())
     }
Some files were not shown because too many files have changed in this diff.