Compare commits

11 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 0a60eb0aca | |
| | 3e96803033 | |
| | c59035139c | |
| | 7632d7eda7 | |
| | b4dbe62da0 | |
| | 9106d5338b | |
| | 534d8d049c | |
| | e692c4a2cc | |
| | e0b39fa63e | |
| | 320a48977b | |
| | 0d05aaeb6e | |
.github/workflows/ci.yaml (19 changes, vendored)

@@ -36,18 +36,12 @@ jobs:
${{ runner.os }}-build-
${{ runner.os }}-
- run: cargo build --all --release
- run: ./target/release/ruff_dev generate-rules-table
- run: ./target/release/ruff_dev generate-options
- run: git diff --quiet README.md || echo "::error file=README.md::This file is outdated. You may have to rerun 'cargo dev generate-options' and/or 'cargo dev generate-rules-table'."
- run: ./target/release/ruff_dev generate-check-code-prefix
- run: git diff --quiet src/checks_gen.rs || echo "::error file=src/checks_gen.rs::This file is outdated. You may have to rerun 'cargo dev generate-check-code-prefix'."
- run: git diff --exit-code -- README.md src/checks_gen.rs
- run: ./target/release/ruff_dev generate-json-schema
- run: git diff --quiet ruff.schema.json || echo "::error file=ruff.schema.json::This file is outdated. You may have to rerun 'cargo dev generate-json-schema'."
- run: git diff --exit-code -- ruff.schema.json
- run: ./target/release/ruff_dev generate-playground-options
- run: git diff --quiet playground/src/ruff_options.rs || echo "::error file=playground/src/ruff_options.ts::This file is outdated. You may have to rerun 'cargo dev generate-playground-options'."
- run: git diff --exit-code -- README.md src/checks_gen.rs playground/src/ruff_options.ts
- run: ./target/release/ruff_dev generate-all
- run: git diff --quiet README.md || echo "::error file=README.md::This file is outdated. Run 'cargo +nightly dev generate-all'."
- run: git diff --quiet src/checks_gen.rs || echo "::error file=src/checks_gen.rs::This file is outdated. Run 'cargo +nightly dev generate-all'."
- run: git diff --quiet ruff.schema.json || echo "::error file=ruff.schema.json::This file is outdated. Run 'cargo +nightly dev generate-all'."
- run: git diff --quiet playground/src/ruff_options.ts || echo "::error file=playground/src/ruff_options.ts::This file is outdated. Run 'cargo +nightly dev generate-all'."
- run: git diff --exit-code -- README.md src/checks_gen.rs ruff.schema.json playground/src/ruff_options.ts

cargo-fmt:
name: "cargo fmt"
@@ -150,6 +144,7 @@ jobs:
${{ runner.os }}-build-
${{ runner.os }}-
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- run: wasm-pack test --node

maturin-build:

.github/workflows/playground.yaml (9 changes, vendored)

@@ -4,8 +4,6 @@ on:
workflow_dispatch:
push:
branches: [main]
pull_request:
branches: [main]

env:
CARGO_INCREMENTAL: 0
@@ -15,6 +13,8 @@ env:
jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
@@ -29,6 +29,7 @@ jobs:
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack"
run: wasm-pack build --target web --out-dir playground/src/pkg
- name: "Install Node dependencies"
@@ -41,9 +42,7 @@ jobs:
run: npm run build
working-directory: playground
- name: "Deploy to Cloudflare Pages"
env:
CF_API_TOKEN: ${{ secrets.CF_API_TOKEN }}
if: ${{ env.CF_API_TOKEN }} != null
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@2.0.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.195
rev: v0.0.196
hooks:
- id: ruff

@@ -59,9 +59,9 @@ pattern implemented therein.

To trigger the rule, you'll likely want to augment the logic in `src/check_ast.rs`, which defines
the Python AST visitor, responsible for iterating over the abstract syntax tree and collecting
lint-rule violations as it goes. If you need to inspect the AST, you can run `cargo dev print-ast`
with a Python file. Grep for the `Check::new` invocations to understand how other, similar rules
are implemented.
lint-rule violations as it goes. If you need to inspect the AST, you can run
`cargo +nightly dev print-ast` with a Python file. Grep for the `Check::new` invocations to
understand how other, similar rules are implemented.

To add a test fixture, create a file under `resources/test/fixtures`, named to match the `CheckCode`
you defined earlier (e.g., `E402.py`). This file should contain a variety of violations and
@@ -79,9 +79,7 @@ Then, run `cargo test`. Your test will fail, but you'll be prompted to follow-up
`cargo insta review`. Accept the generated snapshot, then commit the snapshot file alongside the
rest of your changes.

Finally, to update the documentation, run `cargo dev generate-rules-table` from the repo root. To
update the generated prefix map, run `cargo dev generate-check-code-prefix`. Both of these commands
should be run whenever a new check is added to the codebase.
Finally, regenerate the documentation and generated code with `cargo +nightly dev generate-all`.

### Example: Adding a new configuration option

@@ -105,8 +103,7 @@ You may also want to add the new configuration option to the `flake8-to-ruff` to
responsible for converting `flake8` configuration files to Ruff's TOML format. This logic
lives in `flake8_to_ruff/src/converter.rs`.

Run `cargo dev generate-options` to update the documentation for supported configuration options,
and `cargo dev generate-json-schema` to update the JSON schema for `tool.ruff` in `pyproject.toml`.
Finally, regenerate the documentation and generated code with `cargo +nightly dev generate-all`.

## Release process

16
Cargo.lock
generated
16
Cargo.lock
generated
@@ -750,7 +750,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.195-dev.0"
|
||||
version = "0.0.196-dev.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.0.32",
|
||||
@@ -1878,7 +1878,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.195"
|
||||
version = "0.0.196"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
@@ -1945,7 +1945,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_dev"
|
||||
version = "0.0.195"
|
||||
version = "0.0.196"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.0.32",
|
||||
@@ -1966,7 +1966,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_macros"
|
||||
version = "0.0.195"
|
||||
version = "0.0.196"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2009,7 +2009,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-ast"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=247e815880766d556ef1ca6f0af69daf1a5fe59a#247e815880766d556ef1ca6f0af69daf1a5fe59a"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=68d26955b3e24198a150315e7959719b03709dee#68d26955b3e24198a150315e7959719b03709dee"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"rustpython-common",
|
||||
@@ -2019,7 +2019,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-common"
|
||||
version = "0.0.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=247e815880766d556ef1ca6f0af69daf1a5fe59a#247e815880766d556ef1ca6f0af69daf1a5fe59a"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=68d26955b3e24198a150315e7959719b03709dee#68d26955b3e24198a150315e7959719b03709dee"
|
||||
dependencies = [
|
||||
"ascii",
|
||||
"cfg-if 1.0.0",
|
||||
@@ -2042,7 +2042,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-compiler-core"
|
||||
version = "0.1.2"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=247e815880766d556ef1ca6f0af69daf1a5fe59a#247e815880766d556ef1ca6f0af69daf1a5fe59a"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=68d26955b3e24198a150315e7959719b03709dee#68d26955b3e24198a150315e7959719b03709dee"
|
||||
dependencies = [
|
||||
"bincode",
|
||||
"bitflags",
|
||||
@@ -2059,7 +2059,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-parser"
|
||||
version = "0.1.2"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=247e815880766d556ef1ca6f0af69daf1a5fe59a#247e815880766d556ef1ca6f0af69daf1a5fe59a"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=68d26955b3e24198a150315e7959719b03709dee#68d26955b3e24198a150315e7959719b03709dee"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"anyhow",
|
||||
|
||||
Cargo.toml (10 changes)

@@ -6,7 +6,7 @@ members = [

[package]
name = "ruff"
version = "0.0.195"
version = "0.0.196"
edition = "2021"
rust-version = "1.65.0"

@@ -45,11 +45,11 @@ path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix
quick-junit = { version = "0.3.2" }
regex = { version = "1.6.0" }
ropey = { version = "1.5.0", features = ["cr_lines", "simd"], default-features = false }
ruff_macros = { version = "0.0.195", path = "ruff_macros" }
ruff_macros = { version = "0.0.196", path = "ruff_macros" }
rustc-hash = { version = "1.1.0" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "247e815880766d556ef1ca6f0af69daf1a5fe59a" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "247e815880766d556ef1ca6f0af69daf1a5fe59a" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "247e815880766d556ef1ca6f0af69daf1a5fe59a" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "68d26955b3e24198a150315e7959719b03709dee" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "68d26955b3e24198a150315e7959719b03709dee" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "68d26955b3e24198a150315e7959719b03709dee" }
schemars = { version = "0.8.11" }
semver = { version = "1.0.16" }
serde = { version = "1.0.147", features = ["derive"] }

README.md (101 changes)

@@ -23,15 +23,20 @@ An extremely fast Python linter, written in Rust.
- 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
- ⚖️ [Near-parity](#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
- 🔌 Native re-implementations of popular Flake8 plugins, like [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/)
- 🌎 Monorepo-friendly configuration via hierarchical and cascading settings

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface. Ruff can be used to replace Flake8 (plus a variety
of plugins), [`isort`](https://pypi.org/project/isort/), [`pydocstyle`](https://pypi.org/project/pydocstyle/),
[`yesqa`](https://github.com/asottile/yesqa), [`eradicate`](https://pypi.org/project/eradicate/),
and even a subset of [`pyupgrade`](https://pypi.org/project/pyupgrade/) and [`autoflake`](https://pypi.org/project/autoflake/)
all while executing tens or hundreds of times faster than any individual tool. Ruff goes beyond the
responsibilities of a traditional linter, instead functioning as an advanced code transformation
tool capable of upgrading type annotations, rewriting class definitions, sorting imports, and more.
functionality behind a single, common interface.

Ruff can be used to replace Flake8 (plus a variety of plugins), [`isort`](https://pypi.org/project/isort/),
[`pydocstyle`](https://pypi.org/project/pydocstyle/), [`yesqa`](https://github.com/asottile/yesqa),
[`eradicate`](https://pypi.org/project/eradicate/), [`pyupgrade`](https://pypi.org/project/pyupgrade/),
and [`autoflake`](https://pypi.org/project/autoflake/), all while executing tens or hundreds of
times faster than any individual tool.

Ruff goes beyond the responsibilities of a traditional linter, instead functioning as an advanced
code transformation tool capable of upgrading type annotations, rewriting class definitions, sorting
imports, and more.

Ruff is extremely actively developed and used in major open-source projects like:

@@ -162,7 +167,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.195'
rev: 'v0.0.196'
hooks:
- id: ruff
# Respect `exclude` and `extend-exclude` settings.
@@ -639,7 +644,7 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
| UP001 | UselessMetaclassType | `__metaclass__ = type` is implied | 🛠 |
| UP003 | TypeOfPrimitive | Use `str` instead of `type(...)` | 🛠 |
| UP004 | UselessObjectInheritance | Class `...` inherits from object | 🛠 |
| UP005 | DeprecatedUnittestAlias | `assertEquals` is deprecated, use `assertEqual` instead | 🛠 |
| UP005 | DeprecatedUnittestAlias | `assertEquals` is deprecated, use `assertEqual` | 🛠 |
| UP006 | UsePEP585Annotation | Use `list` instead of `List` for type annotations | 🛠 |
| UP007 | UsePEP604Annotation | Use `X \| Y` for type annotations | 🛠 |
| UP008 | SuperCallWithParameters | Use `super()` instead of `super(__class__, self)` | 🛠 |
@@ -654,6 +659,8 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
| UP017 | DatetimeTimezoneUTC | Use `datetime.UTC` alias | 🛠 |
| UP018 | NativeLiterals | Unnecessary call to `str` and `bytes` | 🛠 |
| UP019 | TypingTextStrAlias | `typing.Text` is deprecated, use `str` | 🛠 |
| UP020 | OpenAlias | Use builtin `open` | 🛠 |
| UP021 | ReplaceUniversalNewlines | `universal_newlines` is deprecated, use `text` | 🛠 |

### pep8-naming (N)

@@ -1257,7 +1264,7 @@ natively, including:
- [`pep8-naming`](https://pypi.org/project/pep8-naming/)
- [`pydocstyle`](https://pypi.org/project/pydocstyle/)
- [`pygrep-hooks`](https://github.com/pre-commit/pygrep-hooks) (3/10)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (19/33)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (20/33)
- [`yesqa`](https://github.com/asottile/yesqa)

Note that, in some cases, Ruff uses different error code prefixes than would be found in the
@@ -1314,7 +1321,7 @@ Today, Ruff can be used to replace Flake8 when used with any of the following pl
Ruff can also replace [`isort`](https://pypi.org/project/isort/),
[`yesqa`](https://github.com/asottile/yesqa), [`eradicate`](https://pypi.org/project/eradicate/),
[`pygrep-hooks`](https://github.com/pre-commit/pygrep-hooks) (3/10), and a subset of the rules
implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (19/33).
implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (20/33).

If you're looking to use Ruff, but rely on an unsupported Flake8 plugin, feel free to file an Issue.

@@ -1431,6 +1438,18 @@ extend-ignore = [
]
```

Note that Ruff _also_ supports a [`convention`](#convention) setting:

```toml
[tool.ruff.pydocstyle]
convention = "google"
```

However, this setting is purely used to implement robust detection of Google and NumPy-style
sections, and thus avoid the [false negatives](https://github.com/PyCQA/pydocstyle/issues/459) seen
in `pydocstyle`; it does not affect which errors are enabled, which is driven by the `select` and
`ignore` settings, as described above.
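
For illustration, here is a minimal sketch (example code, not taken from the repository) of the two docstring styles the `convention` setting distinguishes: with `convention = "google"` Ruff parses only Google-style sections such as `Args:`, with `convention = "numpy"` only NumPy-style underlined sections, and with the setting omitted the style is inferred from whichever sections are found.

```python
def scale(values, factor):
    """Scale each value (Google-style sections).

    Args:
        values: The numbers to scale.
        factor: The multiplier to apply.

    Returns:
        A list of scaled numbers.
    """
    return [v * factor for v in values]


def shift(values, offset):
    """Shift each value (NumPy-style sections).

    Parameters
    ----------
    values : list of float
        The numbers to shift.
    offset : float
        The amount to add.

    Returns
    -------
    list of float
    """
    return [v + offset for v in values]
```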

## Development

Ruff is written in Rust (1.65.0). You'll need to install the [Rust toolchain](https://www.rust-lang.org/tools/install)
@@ -1446,8 +1465,8 @@ For development, we use [nightly Rust](https://rust-lang.github.io/rustup/concep

```shell
cargo +nightly fmt
cargo +nightly clippy
cargo +nightly test
cargo +nightly clippy --fix --workspace --all-targets --all-features -- -W clippy::pedantic
cargo +nightly test --all
```

## Releases
@@ -2462,6 +2481,23 @@ extra-standard-library = ["path"]

---

#### [`force-single-line`](#force-single-line)

Forces all from imports to appear on their own line.

**Default value**: `false`

**Type**: `bool`

**Example usage**:

```toml
[tool.ruff.isort]
force-single-line = true
```
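
As a rough sketch of the effect (module names are illustrative; the `force_single_line.py` fixture added in this diff exercises the same behavior), enabling the option splits combined `from` imports into one import per line:

```python
# Before (force-single-line = false):
from third_party import lib1, lib2

# After (force-single-line = true):
from third_party import lib1
from third_party import lib2
```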

---

#### [`force-wrap-aliases`](#force-wrap-aliases)

Force `import from` statements with multiple members and at least one
@@ -2531,6 +2567,23 @@ known-third-party = ["src"]

---

#### [`single-line-exclusions`](#single-line-exclusions)

One or more modules to exclude from the single line rule.

**Default value**: `[]`

**Type**: `Vec<String>`

**Example usage**:

```toml
[tool.ruff.isort]
single-line-exclusions = ["os", "json"]
```
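
For example (a sketch based on the fixture and snapshot added later in this diff), modules listed here keep their combined imports even when `force-single-line` is enabled:

```python
# With force-single-line = true and single-line-exclusions = ["os"]:
from os import path, uname    # excluded module: left on one line
from third_party import lib1  # other modules are still split up
from third_party import lib2
```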

---

#### [`split-on-trailing-comma`](#split-on-trailing-comma)

If a comma is placed after the last member in a multi-line import, then
@@ -2632,6 +2685,28 @@ staticmethod-decorators = ["staticmethod", "stcmthd"]

---

### `pydocstyle`

#### [`convention`](#convention)

Whether to use Google-style or Numpy-style conventions when detecting
docstring sections. By default, conventions will be inferred from
the available sections.

**Default value**: `"convention"`

**Type**: `Convention`

**Example usage**:

```toml
[tool.ruff.pydocstyle]
# Use Google-style docstrings.
convention = "google"
```

---

### `pyupgrade`

#### [`keep-runtime-typing`](#keep-runtime-typing)

4
flake8_to_ruff/Cargo.lock
generated
4
flake8_to_ruff/Cargo.lock
generated
@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8_to_ruff"
|
||||
version = "0.0.195"
|
||||
version = "0.0.196"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -1975,7 +1975,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.195"
|
||||
version = "0.0.196"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bincode",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.195-dev.0"
|
||||
version = "0.0.196-dev.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
|
||||
@@ -325,6 +325,7 @@ mod tests {
|
||||
isort: None,
|
||||
mccabe: None,
|
||||
pep8_naming: None,
|
||||
pydocstyle: None,
|
||||
pyupgrade: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -382,6 +383,7 @@ mod tests {
|
||||
isort: None,
|
||||
mccabe: None,
|
||||
pep8_naming: None,
|
||||
pydocstyle: None,
|
||||
pyupgrade: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -439,6 +441,7 @@ mod tests {
|
||||
isort: None,
|
||||
mccabe: None,
|
||||
pep8_naming: None,
|
||||
pydocstyle: None,
|
||||
pyupgrade: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -496,6 +499,7 @@ mod tests {
|
||||
isort: None,
|
||||
mccabe: None,
|
||||
pep8_naming: None,
|
||||
pydocstyle: None,
|
||||
pyupgrade: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -558,6 +562,7 @@ mod tests {
|
||||
isort: None,
|
||||
mccabe: None,
|
||||
pep8_naming: None,
|
||||
pydocstyle: None,
|
||||
pyupgrade: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -654,6 +659,7 @@ mod tests {
|
||||
isort: None,
|
||||
mccabe: None,
|
||||
pep8_naming: None,
|
||||
pydocstyle: None,
|
||||
pyupgrade: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
@@ -717,6 +723,7 @@ mod tests {
|
||||
isort: None,
|
||||
mccabe: None,
|
||||
pep8_naming: None,
|
||||
pydocstyle: None,
|
||||
pyupgrade: None,
|
||||
});
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
@@ -167,6 +167,11 @@ export const AVAILABLE_OPTIONS: OptionGroup[] = [
|
||||
"default": '[]',
|
||||
"type": 'Vec<String>',
|
||||
},
|
||||
{
|
||||
"name": "force-single-line",
|
||||
"default": 'false',
|
||||
"type": 'bool',
|
||||
},
|
||||
{
|
||||
"name": "force-wrap-aliases",
|
||||
"default": 'false',
|
||||
@@ -182,6 +187,11 @@ export const AVAILABLE_OPTIONS: OptionGroup[] = [
|
||||
"default": '[]',
|
||||
"type": 'Vec<String>',
|
||||
},
|
||||
{
|
||||
"name": "single-line-exclusions",
|
||||
"default": '[]',
|
||||
"type": 'Vec<String>',
|
||||
},
|
||||
{
|
||||
"name": "split-on-trailing-comma",
|
||||
"default": 'true',
|
||||
@@ -212,6 +222,13 @@ export const AVAILABLE_OPTIONS: OptionGroup[] = [
|
||||
"type": 'Vec<String>',
|
||||
},
|
||||
]},
|
||||
{"name": "pydocstyle", "fields": [
|
||||
{
|
||||
"name": "convention",
|
||||
"default": '"convention"',
|
||||
"type": 'Convention',
|
||||
},
|
||||
]},
|
||||
{"name": "pyupgrade", "fields": [
|
||||
{
|
||||
"name": "keep-runtime-typing",
|
||||
|
||||
@@ -38,3 +38,8 @@ strip = true
[tool.ruff.isort]
force-wrap-aliases = true
combine-as-imports = true
force-single-line = true
single-line-exclusions = ["os", "logging.handlers"]

[tool.ruff.pydocstyle]
convention = "google"

resources/test/fixtures/isort/force_single_line.py (18 changes, new file, vendored)

@@ -0,0 +1,18 @@
import sys, math
from os import path, uname
from logging.handlers import StreamHandler, FileHandler

# comment 1
from third_party import lib1, lib2, \
    lib3, lib7, lib5, lib6
# comment 2
from third_party import lib4

from foo import bar # comment 3
from foo2 import bar2 # comment 4

# comment 5
from bar import (
    a, # comment 6
    b, # comment 7
)
84
resources/test/fixtures/pydocstyle/D417.py
vendored
Normal file
84
resources/test/fixtures/pydocstyle/D417.py
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
def f(x, y, z):
|
||||
"""Do f.
|
||||
|
||||
Args:
|
||||
x: the value
|
||||
with a hanging indent
|
||||
|
||||
Returns:
|
||||
the value
|
||||
"""
|
||||
return x
|
||||
|
||||
|
||||
def f(x, y, z):
|
||||
"""Do f.
|
||||
|
||||
Args:
|
||||
x:
|
||||
The whole thing has a hanging indent.
|
||||
|
||||
Returns:
|
||||
the value
|
||||
"""
|
||||
return x
|
||||
|
||||
|
||||
def f(x, y, z):
|
||||
"""Do f.
|
||||
|
||||
Args:
|
||||
x:
|
||||
The whole thing has a hanging indent.
|
||||
|
||||
Returns: the value
|
||||
"""
|
||||
return x
|
||||
|
||||
|
||||
def f(x, y, z):
|
||||
"""Do f.
|
||||
|
||||
Args:
|
||||
x: the value def
|
||||
ghi
|
||||
|
||||
Returns:
|
||||
the value
|
||||
"""
|
||||
return x
|
||||
|
||||
|
||||
def f(x, y, z):
|
||||
"""Do f.
|
||||
|
||||
Args:
|
||||
x: the value
|
||||
z: A final argument
|
||||
|
||||
Returns:
|
||||
the value
|
||||
"""
|
||||
return x
|
||||
|
||||
|
||||
def f(x, y, z):
|
||||
"""Do g.
|
||||
|
||||
Args:
|
||||
x: the value
|
||||
z: A final argument
|
||||
|
||||
Returns: the value
|
||||
"""
|
||||
return x
|
||||
|
||||
|
||||
def f(x, y, z):
|
||||
"""Do h.
|
||||
|
||||
Args:
|
||||
x: the value
|
||||
z: A final argument
|
||||
"""
|
||||
return x
|
||||
resources/test/fixtures/pyupgrade/UP020.py (9 changes, new file, vendored)

@@ -0,0 +1,9 @@
from io import open

with open("f.txt") as f:
    print(f.read())

import io

with io.open("f.txt", mode="r", buffering=-1, **kwargs) as f:
    print(f.read())
resources/test/fixtures/pyupgrade/UP021.py (12 changes, new file, vendored)

@@ -0,0 +1,12 @@
import subprocess
import subprocess as somename
from subprocess import run
from subprocess import run as anothername

subprocess.run(["foo"], universal_newlines=True, check=True)
somename.run(["foo"], universal_newlines=True)

run(["foo"], universal_newlines=True, check=False)
anothername(["foo"], universal_newlines=True)

subprocess.run(["foo"], check=True)
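
For context, UP021 flags the `universal_newlines=True` keyword arguments above; after the suggested fix, such a call would presumably read as follows (illustrative, not part of the diff):

```python
# Hypothetical post-fix form of the first flagged call above.
subprocess.run(["foo"], text=True, check=True)
```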
@@ -277,6 +277,17 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"pydocstyle": {
|
||||
"description": "Options for the `pydocstyle` plugin.",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Pydocstyle"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"pyupgrade": {
|
||||
"description": "Options for the `pyupgrade` plugin.",
|
||||
"anyOf": [
|
||||
@@ -865,6 +876,9 @@
|
||||
"UP017",
|
||||
"UP018",
|
||||
"UP019",
|
||||
"UP02",
|
||||
"UP020",
|
||||
"UP021",
|
||||
"W",
|
||||
"W2",
|
||||
"W29",
|
||||
@@ -891,6 +905,13 @@
|
||||
"YTT303"
|
||||
]
|
||||
},
|
||||
"Convention": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"google",
|
||||
"numpy"
|
||||
]
|
||||
},
|
||||
"Flake8AnnotationsOptions": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -1078,6 +1099,13 @@
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"force-single-line": {
|
||||
"description": "Forces all from imports to appear on their own line.",
|
||||
"type": [
|
||||
"boolean",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"force-wrap-aliases": {
|
||||
"description": "Force `import from` statements with multiple members and at least one alias (e.g., `import A as B`) to wrap such that every line contains exactly one member. For example, this formatting would be retained, rather than condensing to a single line:\n\n```py from .utils import ( test_directory as test_directory, test_id as test_id ) ```\n\nNote that this setting is only effective when combined with `combine-as-imports = true`. When `combine-as-imports` isn't enabled, every aliased `import from` will be given its own line, in which case, wrapping is not necessary.",
|
||||
"type": [
|
||||
@@ -1105,6 +1133,16 @@
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"single-line-exclusions": {
|
||||
"description": "One or more modules to exclude from the single line rule.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"split-on-trailing-comma": {
|
||||
"description": "If a comma is placed after the last member in a multi-line import, then the imports will never be folded into one line.\n\nSee isort's [`split-on-trailing-comma`](https://pycqa.github.io/isort/docs/configuration/options.html#split-on-trailing-comma) option.",
|
||||
"type": [
|
||||
@@ -1179,6 +1217,23 @@
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"Pydocstyle": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"convention": {
|
||||
"description": "Whether to use Google-style or Numpy-style conventions when detecting docstring sections. By default, conventions will be inferred from the available sections.",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Convention"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"PythonVersion": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_dev"
|
||||
version = "0.0.195"
|
||||
version = "0.0.196"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
@@ -11,9 +11,9 @@ itertools = { version = "0.10.5" }
|
||||
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "f2f0b7a487a8725d161fe8b3ed73a6758b21e177" }
|
||||
once_cell = { version = "1.16.0" }
|
||||
ruff = { path = ".." }
|
||||
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "247e815880766d556ef1ca6f0af69daf1a5fe59a" }
|
||||
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "247e815880766d556ef1ca6f0af69daf1a5fe59a" }
|
||||
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "247e815880766d556ef1ca6f0af69daf1a5fe59a" }
|
||||
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "68d26955b3e24198a150315e7959719b03709dee" }
|
||||
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "68d26955b3e24198a150315e7959719b03709dee" }
|
||||
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "68d26955b3e24198a150315e7959719b03709dee" }
|
||||
schemars = { version = "0.8.11" }
|
||||
serde_json = {version="1.0.91"}
|
||||
strum = { version = "0.24.1", features = ["strum_macros"] }
|
||||
|
||||
ruff_dev/src/generate_all.rs (35 changes, new file)

@@ -0,0 +1,35 @@
//! Run all code and documentation generation steps.

use anyhow::Result;
use clap::Args;

use crate::{
    generate_check_code_prefix, generate_json_schema, generate_options,
    generate_playground_options, generate_rules_table,
};

#[derive(Args)]
pub struct Cli {
    /// Write the generated artifacts to stdout (rather than to the filesystem).
    #[arg(long)]
    dry_run: bool,
}

pub fn main(cli: &Cli) -> Result<()> {
    generate_check_code_prefix::main(&generate_check_code_prefix::Cli {
        dry_run: cli.dry_run,
    })?;
    generate_json_schema::main(&generate_json_schema::Cli {
        dry_run: cli.dry_run,
    })?;
    generate_rules_table::main(&generate_rules_table::Cli {
        dry_run: cli.dry_run,
    })?;
    generate_options::main(&generate_options::Cli {
        dry_run: cli.dry_run,
    })?;
    generate_playground_options::main(&generate_playground_options::Cli {
        dry_run: cli.dry_run,
    })?;
    Ok(())
}
@@ -19,7 +19,7 @@ pub struct Cli {
|
||||
/// Write the generated source code to stdout (rather than to
|
||||
/// `src/checks_gen.rs`).
|
||||
#[arg(long)]
|
||||
dry_run: bool,
|
||||
pub(crate) dry_run: bool,
|
||||
}
|
||||
|
||||
pub fn main(cli: &Cli) -> Result<()> {
|
||||
|
||||
@@ -10,7 +10,7 @@ use schemars::schema_for;
|
||||
pub struct Cli {
|
||||
/// Write the generated table to stdout (rather than to `ruff.schema.json`).
|
||||
#[arg(long)]
|
||||
dry_run: bool,
|
||||
pub(crate) dry_run: bool,
|
||||
}
|
||||
|
||||
pub fn main(cli: &Cli) -> Result<()> {
|
||||
|
||||
@@ -18,7 +18,7 @@ const END_PRAGMA: &str = "<!-- End auto-generated options sections. -->";
|
||||
pub struct Cli {
|
||||
/// Write the generated table to stdout (rather than to `README.md`).
|
||||
#[arg(long)]
|
||||
dry_run: bool,
|
||||
pub(crate) dry_run: bool,
|
||||
}
|
||||
|
||||
fn emit_field(output: &mut String, field: &OptionField, group_name: Option<&str>) {
|
||||
|
||||
@@ -14,7 +14,7 @@ use ruff::settings::options_base::{ConfigurationOptions, OptionEntry, OptionFiel
|
||||
pub struct Cli {
|
||||
/// Write the generated table to stdout (rather than to `TODO`).
|
||||
#[arg(long)]
|
||||
dry_run: bool,
|
||||
pub(crate) dry_run: bool,
|
||||
}
|
||||
|
||||
fn emit_field(output: &mut String, field: &OptionField) {
|
||||
|
||||
@@ -21,7 +21,7 @@ const TOC_END_PRAGMA: &str = "<!-- End auto-generated table of contents. -->";
|
||||
pub struct Cli {
|
||||
/// Write the generated table to stdout (rather than to `README.md`).
|
||||
#[arg(long)]
|
||||
dry_run: bool,
|
||||
pub(crate) dry_run: bool,
|
||||
}
|
||||
|
||||
pub fn main(cli: &Cli) -> Result<()> {
|
||||
|
||||
@@ -11,12 +11,13 @@
|
||||
clippy::too_many_lines
|
||||
)]
|
||||
|
||||
pub mod generate_all;
|
||||
pub mod generate_check_code_prefix;
|
||||
pub mod generate_json_schema;
|
||||
pub mod generate_options;
|
||||
pub mod generate_playground_options;
|
||||
pub mod generate_rules_table;
|
||||
pub mod generate_source_code;
|
||||
pub mod print_ast;
|
||||
pub mod print_cst;
|
||||
pub mod print_tokens;
|
||||
pub mod round_trip;
|
||||
|
||||
@@ -14,9 +14,9 @@
|
||||
use anyhow::Result;
|
||||
use clap::{Parser, Subcommand};
|
||||
use ruff_dev::{
|
||||
generate_check_code_prefix, generate_json_schema, generate_options,
|
||||
generate_playground_options, generate_rules_table, generate_source_code, print_ast, print_cst,
|
||||
print_tokens,
|
||||
generate_all, generate_check_code_prefix, generate_json_schema, generate_options,
|
||||
generate_playground_options, generate_rules_table, print_ast, print_cst, print_tokens,
|
||||
round_trip,
|
||||
};
|
||||
|
||||
#[derive(Parser)]
|
||||
@@ -29,6 +29,8 @@ struct Cli {
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum Commands {
|
||||
/// Run all code and documentation generation steps.
|
||||
GenerateAll(generate_all::Cli),
|
||||
/// Generate the `CheckCodePrefix` enum.
|
||||
GenerateCheckCodePrefix(generate_check_code_prefix::Cli),
|
||||
/// Generate JSON schema for the TOML configuration file.
|
||||
@@ -40,28 +42,29 @@ enum Commands {
|
||||
/// Generate typescript file defining options to be used by the web
|
||||
/// playground.
|
||||
GeneratePlaygroundOptions(generate_playground_options::Cli),
|
||||
/// Run round-trip source code generation on a given Python file.
|
||||
GenerateSourceCode(generate_source_code::Cli),
|
||||
/// Print the AST for a given Python file.
|
||||
PrintAST(print_ast::Cli),
|
||||
/// Print the LibCST CST for a given Python file.
|
||||
PrintCST(print_cst::Cli),
|
||||
/// Print the token stream for a given Python file.
|
||||
PrintTokens(print_tokens::Cli),
|
||||
/// Run round-trip source code generation on a given Python file.
|
||||
RoundTrip(round_trip::Cli),
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let cli = Cli::parse();
|
||||
match &cli.command {
|
||||
Commands::GenerateAll(args) => generate_all::main(args)?,
|
||||
Commands::GenerateCheckCodePrefix(args) => generate_check_code_prefix::main(args)?,
|
||||
Commands::GenerateJSONSchema(args) => generate_json_schema::main(args)?,
|
||||
Commands::GenerateRulesTable(args) => generate_rules_table::main(args)?,
|
||||
Commands::GenerateSourceCode(args) => generate_source_code::main(args)?,
|
||||
Commands::GenerateOptions(args) => generate_options::main(args)?,
|
||||
Commands::GeneratePlaygroundOptions(args) => generate_playground_options::main(args)?,
|
||||
Commands::PrintAST(args) => print_ast::main(args)?,
|
||||
Commands::PrintCST(args) => print_cst::main(args)?,
|
||||
Commands::PrintTokens(args) => print_tokens::main(args)?,
|
||||
Commands::RoundTrip(args) => round_trip::main(args)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_macros"
|
||||
version = "0.0.195"
|
||||
version = "0.0.196"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
|
||||
@@ -382,10 +382,8 @@ pub fn identifier_range(stmt: &Stmt, locator: &SourceCodeLocator) -> Range {
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
) {
|
||||
let contents = locator.slice_source_code_range(&Range::from_located(stmt));
|
||||
for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(&contents, stmt.location).flatten() {
|
||||
if matches!(tok, Tok::Name { .. }) {
|
||||
let start = to_absolute(start, stmt.location);
|
||||
let end = to_absolute(end, stmt.location);
|
||||
return Range {
|
||||
location: start,
|
||||
end_location: end,
|
||||
@@ -412,15 +410,15 @@ pub fn excepthandler_name_range(
|
||||
location: type_end_location,
|
||||
end_location: body[0].location,
|
||||
});
|
||||
let range = lexer::make_tokenizer(&contents)
|
||||
let range = lexer::make_tokenizer_located(&contents, type_end_location)
|
||||
.flatten()
|
||||
.tuple_windows()
|
||||
.find(|(tok, next_tok)| {
|
||||
matches!(tok.1, Tok::As) && matches!(next_tok.1, Tok::Name { .. })
|
||||
})
|
||||
.map(|((..), (start, _, end))| Range {
|
||||
location: to_absolute(start, type_end_location),
|
||||
end_location: to_absolute(end, type_end_location),
|
||||
.map(|((..), (location, _, end_location))| Range {
|
||||
location,
|
||||
end_location,
|
||||
});
|
||||
range
|
||||
}
|
||||
|
||||
@@ -1654,6 +1654,9 @@ where
|
||||
if self.settings.enabled.contains(&CheckCode::UP018) {
|
||||
pyupgrade::plugins::native_literals(self, expr, func, args, keywords);
|
||||
}
|
||||
if self.settings.enabled.contains(&CheckCode::UP021) {
|
||||
pyupgrade::plugins::replace_universal_newlines(self, expr, keywords);
|
||||
}
|
||||
|
||||
// flake8-super
|
||||
if self.settings.enabled.contains(&CheckCode::UP008) {
|
||||
@@ -1926,6 +1929,10 @@ where
|
||||
pyupgrade::plugins::redundant_open_modes(self, expr);
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&CheckCode::UP020) {
|
||||
pyupgrade::plugins::open_alias(self, expr, func);
|
||||
}
|
||||
|
||||
// flake8-boolean-trap
|
||||
if self.settings.enabled.contains(&CheckCode::FBT003) {
|
||||
flake8_boolean_trap::plugins::check_boolean_positional_value_in_function_call(
|
||||
@@ -3872,7 +3879,11 @@ impl<'a> Checker<'a> {
|
||||
|| self.settings.enabled.contains(&CheckCode::D416)
|
||||
|| self.settings.enabled.contains(&CheckCode::D417)
|
||||
{
|
||||
pydocstyle::plugins::sections(self, &docstring);
|
||||
pydocstyle::plugins::sections(
|
||||
self,
|
||||
&docstring,
|
||||
self.settings.pydocstyle.convention.as_ref(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -227,6 +227,8 @@ pub enum CheckCode {
|
||||
UP017,
|
||||
UP018,
|
||||
UP019,
|
||||
UP020,
|
||||
UP021,
|
||||
// pydocstyle
|
||||
D100,
|
||||
D101,
|
||||
@@ -839,6 +841,8 @@ pub enum CheckKind {
|
||||
RemoveSixCompat,
|
||||
DatetimeTimezoneUTC,
|
||||
NativeLiterals,
|
||||
OpenAlias,
|
||||
ReplaceUniversalNewlines,
|
||||
// pydocstyle
|
||||
BlankLineAfterLastSection(String),
|
||||
BlankLineAfterSection(String),
|
||||
@@ -1215,6 +1219,8 @@ impl CheckCode {
|
||||
CheckCode::UP017 => CheckKind::DatetimeTimezoneUTC,
|
||||
CheckCode::UP018 => CheckKind::NativeLiterals,
|
||||
CheckCode::UP019 => CheckKind::TypingTextStrAlias,
|
||||
CheckCode::UP020 => CheckKind::OpenAlias,
|
||||
CheckCode::UP021 => CheckKind::ReplaceUniversalNewlines,
|
||||
// pydocstyle
|
||||
CheckCode::D100 => CheckKind::PublicModule,
|
||||
CheckCode::D101 => CheckKind::PublicClass,
|
||||
@@ -1635,6 +1641,8 @@ impl CheckCode {
|
||||
CheckCode::UP017 => CheckCategory::Pyupgrade,
|
||||
CheckCode::UP018 => CheckCategory::Pyupgrade,
|
||||
CheckCode::UP019 => CheckCategory::Pyupgrade,
|
||||
CheckCode::UP020 => CheckCategory::Pyupgrade,
|
||||
CheckCode::UP021 => CheckCategory::Pyupgrade,
|
||||
CheckCode::W292 => CheckCategory::Pycodestyle,
|
||||
CheckCode::W605 => CheckCategory::Pycodestyle,
|
||||
CheckCode::YTT101 => CheckCategory::Flake82020,
|
||||
@@ -1848,6 +1856,8 @@ impl CheckKind {
|
||||
CheckKind::DatetimeTimezoneUTC => &CheckCode::UP017,
|
||||
CheckKind::NativeLiterals => &CheckCode::UP018,
|
||||
CheckKind::TypingTextStrAlias => &CheckCode::UP019,
|
||||
CheckKind::OpenAlias => &CheckCode::UP020,
|
||||
CheckKind::ReplaceUniversalNewlines => &CheckCode::UP021,
|
||||
// pydocstyle
|
||||
CheckKind::BlankLineAfterLastSection(..) => &CheckCode::D413,
|
||||
CheckKind::BlankLineAfterSection(..) => &CheckCode::D410,
|
||||
@@ -2540,7 +2550,7 @@ impl CheckKind {
|
||||
CheckKind::UselessMetaclassType => "`__metaclass__ = type` is implied".to_string(),
|
||||
CheckKind::TypingTextStrAlias => "`typing.Text` is deprecated, use `str`".to_string(),
|
||||
CheckKind::DeprecatedUnittestAlias(alias, target) => {
|
||||
format!("`{alias}` is deprecated, use `{target}` instead")
|
||||
format!("`{alias}` is deprecated, use `{target}`")
|
||||
}
|
||||
CheckKind::UselessObjectInheritance(name) => {
|
||||
format!("Class `{name}` inherits from object")
|
||||
@@ -2573,9 +2583,13 @@ impl CheckKind {
|
||||
CheckKind::RemoveSixCompat => "Unnecessary `six` compatibility usage".to_string(),
|
||||
CheckKind::DatetimeTimezoneUTC => "Use `datetime.UTC` alias".to_string(),
|
||||
CheckKind::NativeLiterals => "Unnecessary call to `str` and `bytes`".to_string(),
|
||||
CheckKind::OpenAlias => "Use builtin `open`".to_string(),
|
||||
CheckKind::ConvertTypedDictFunctionalToClass(name) => {
|
||||
format!("Convert `{name}` from `TypedDict` functional to class syntax")
|
||||
}
|
||||
CheckKind::ReplaceUniversalNewlines => {
|
||||
"`universal_newlines` is deprecated, use `text`".to_string()
|
||||
}
|
||||
CheckKind::ConvertNamedTupleFunctionalToClass(name) => {
|
||||
format!("Convert `{name}` from `NamedTuple` functional to class syntax")
|
||||
}
|
||||
@@ -3015,7 +3029,9 @@ impl CheckKind {
|
||||
| CheckKind::MisplacedComparisonConstant(..)
|
||||
| CheckKind::MissingReturnTypeSpecialMethod(..)
|
||||
| CheckKind::NativeLiterals
|
||||
| CheckKind::OpenAlias
|
||||
| CheckKind::NewLineAfterLastParagraph
|
||||
| CheckKind::ReplaceUniversalNewlines
|
||||
| CheckKind::NewLineAfterSectionName(..)
|
||||
| CheckKind::NoBlankLineAfterFunction(..)
|
||||
| CheckKind::NoBlankLineBeforeClass(..)
|
||||
|
||||
@@ -527,6 +527,9 @@ pub enum CheckCodePrefix {
|
||||
UP017,
|
||||
UP018,
|
||||
UP019,
|
||||
UP02,
|
||||
UP020,
|
||||
UP021,
|
||||
W,
|
||||
W2,
|
||||
W29,
|
||||
@@ -2104,6 +2107,8 @@ impl CheckCodePrefix {
|
||||
CheckCode::UP017,
|
||||
CheckCode::UP018,
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
]
|
||||
}
|
||||
CheckCodePrefix::U0 => {
|
||||
@@ -2132,6 +2137,8 @@ impl CheckCodePrefix {
|
||||
CheckCode::UP017,
|
||||
CheckCode::UP018,
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
]
|
||||
}
|
||||
CheckCodePrefix::U00 => {
|
||||
@@ -2344,6 +2351,8 @@ impl CheckCodePrefix {
|
||||
CheckCode::UP017,
|
||||
CheckCode::UP018,
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
],
|
||||
CheckCodePrefix::UP0 => vec![
|
||||
CheckCode::UP001,
|
||||
@@ -2364,6 +2373,8 @@ impl CheckCodePrefix {
|
||||
CheckCode::UP017,
|
||||
CheckCode::UP018,
|
||||
CheckCode::UP019,
|
||||
CheckCode::UP020,
|
||||
CheckCode::UP021,
|
||||
],
|
||||
CheckCodePrefix::UP00 => vec![
|
||||
CheckCode::UP001,
|
||||
@@ -2405,6 +2416,9 @@ impl CheckCodePrefix {
|
||||
CheckCodePrefix::UP017 => vec![CheckCode::UP017],
|
||||
CheckCodePrefix::UP018 => vec![CheckCode::UP018],
|
||||
CheckCodePrefix::UP019 => vec![CheckCode::UP019],
|
||||
CheckCodePrefix::UP02 => vec![CheckCode::UP020, CheckCode::UP021],
|
||||
CheckCodePrefix::UP020 => vec![CheckCode::UP020],
|
||||
CheckCodePrefix::UP021 => vec![CheckCode::UP021],
|
||||
CheckCodePrefix::W => vec![CheckCode::W292, CheckCode::W605],
|
||||
CheckCodePrefix::W2 => vec![CheckCode::W292],
|
||||
CheckCodePrefix::W29 => vec![CheckCode::W292],
|
||||
@@ -2963,6 +2977,9 @@ impl CheckCodePrefix {
|
||||
CheckCodePrefix::UP017 => SuffixLength::Three,
|
||||
CheckCodePrefix::UP018 => SuffixLength::Three,
|
||||
CheckCodePrefix::UP019 => SuffixLength::Three,
|
||||
CheckCodePrefix::UP02 => SuffixLength::Two,
|
||||
CheckCodePrefix::UP020 => SuffixLength::Three,
|
||||
CheckCodePrefix::UP021 => SuffixLength::Three,
|
||||
CheckCodePrefix::W => SuffixLength::Zero,
|
||||
CheckCodePrefix::W2 => SuffixLength::One,
|
||||
CheckCodePrefix::W29 => SuffixLength::Two,
|
||||
|
||||
@@ -3,7 +3,6 @@ use rustpython_ast::Stmt;
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
|
||||
use crate::ast::helpers;
|
||||
use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
@@ -17,13 +16,10 @@ pub fn add_return_none_annotation(locator: &SourceCodeLocator, stmt: &Stmt) -> R
|
||||
let mut seen_lpar = false;
|
||||
let mut seen_rpar = false;
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, ..) in lexer::make_tokenizer(&contents).flatten() {
|
||||
for (start, tok, ..) in lexer::make_tokenizer_located(&contents, range.location).flatten() {
|
||||
if seen_lpar && seen_rpar {
|
||||
if matches!(tok, Tok::Colon) {
|
||||
return Ok(Fix::insertion(
|
||||
" -> None".to_string(),
|
||||
helpers::to_absolute(start, range.location),
|
||||
));
|
||||
return Ok(Fix::insertion(" -> None".to_string(), start));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ use rustpython_ast::Location;
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
|
||||
use crate::ast::helpers;
|
||||
use crate::ast::types::Range;
|
||||
use crate::SourceCodeLocator;
|
||||
|
||||
@@ -18,12 +17,10 @@ pub struct Comment<'a> {
|
||||
/// Collect all comments in an import block.
|
||||
pub fn collect_comments<'a>(range: &Range, locator: &'a SourceCodeLocator) -> Vec<Comment<'a>> {
|
||||
let contents = locator.slice_source_code_range(range);
|
||||
lexer::make_tokenizer(&contents)
|
||||
lexer::make_tokenizer_located(&contents, range.location)
|
||||
.flatten()
|
||||
.filter_map(|(start, tok, end)| {
|
||||
if matches!(tok, Tok::Comment) {
|
||||
let start = helpers::to_absolute(start, range.location);
|
||||
let end = helpers::to_absolute(end, range.location);
|
||||
Some(Comment {
|
||||
value: locator.slice_source_code_range(&Range {
|
||||
location: start,
|
||||
|
||||
@@ -2,6 +2,7 @@ use std::cmp::Ordering;
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use itertools::Either::{Left, Right};
|
||||
use itertools::Itertools;
|
||||
use ropey::RopeBuilder;
|
||||
use rustc_hash::FxHashMap;
|
||||
@@ -495,7 +496,54 @@ fn sort_imports(block: ImportBlock) -> OrderedImportBlock {
|
||||
ordered
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn force_single_line_imports<'a>(
|
||||
block: OrderedImportBlock<'a>,
|
||||
single_line_exclusions: &BTreeSet<String>,
|
||||
) -> OrderedImportBlock<'a> {
|
||||
OrderedImportBlock {
|
||||
import: block.import,
|
||||
import_from: block
|
||||
.import_from
|
||||
.into_iter()
|
||||
.flat_map(|(from_data, comment_set, trailing_comma, alias_data)| {
|
||||
if from_data
|
||||
.module
|
||||
.map_or(false, |module| single_line_exclusions.contains(module))
|
||||
{
|
||||
Left(std::iter::once((
|
||||
from_data,
|
||||
comment_set,
|
||||
trailing_comma,
|
||||
alias_data,
|
||||
)))
|
||||
} else {
|
||||
Right(
|
||||
alias_data
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(move |(index, alias_data)| {
|
||||
(
|
||||
from_data.clone(),
|
||||
if index == 0 {
|
||||
comment_set.clone()
|
||||
} else {
|
||||
CommentSet {
|
||||
atop: vec![],
|
||||
inline: vec![],
|
||||
}
|
||||
},
|
||||
TrailingComma::Absent,
|
||||
vec![alias_data],
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments, clippy::fn_params_excessive_bools)]
|
||||
pub fn format_imports(
|
||||
block: &Block,
|
||||
comments: Vec<Comment>,
|
||||
@@ -509,6 +557,8 @@ pub fn format_imports(
|
||||
combine_as_imports: bool,
|
||||
force_wrap_aliases: bool,
|
||||
split_on_trailing_comma: bool,
|
||||
force_single_line: bool,
|
||||
single_line_exclusions: &BTreeSet<String>,
|
||||
) -> String {
|
||||
let trailer = &block.trailer;
|
||||
let block = annotate_imports(&block.imports, comments, locator, split_on_trailing_comma);
|
||||
@@ -531,7 +581,10 @@ pub fn format_imports(
|
||||
// Generate replacement source code.
|
||||
let mut is_first_block = true;
|
||||
for import_block in block_by_type.into_values() {
|
||||
let import_block = sort_imports(import_block);
|
||||
let mut import_block = sort_imports(import_block);
|
||||
if force_single_line {
|
||||
import_block = force_single_line_imports(import_block, single_line_exclusions);
|
||||
}
|
||||
|
||||
// Add a blank line between every section.
|
||||
if is_first_block {
|
||||
@@ -577,6 +630,7 @@ pub fn format_imports(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::BTreeSet;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
@@ -697,4 +751,28 @@ mod tests {
|
||||
insta::assert_yaml_snapshot!(snapshot, checks);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("force_single_line.py"))]
|
||||
fn force_single_line(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("force_single_line_{}", path.to_string_lossy());
|
||||
let mut checks = test_path(
|
||||
Path::new("./resources/test/fixtures/isort")
|
||||
.join(path)
|
||||
.as_path(),
|
||||
&Settings {
|
||||
isort: isort::settings::Settings {
|
||||
force_single_line: true,
|
||||
single_line_exclusions: vec!["os".to_string(), "logging.handlers".to_string()]
|
||||
.into_iter()
|
||||
.collect::<BTreeSet<_>>(),
|
||||
..isort::settings::Settings::default()
|
||||
},
|
||||
src: vec![Path::new("resources/test/fixtures/isort").to_path_buf()],
|
||||
..Settings::for_rule(CheckCode::I001)
|
||||
},
|
||||
)?;
|
||||
checks.sort_by_key(|check| check.location);
|
||||
insta::assert_yaml_snapshot!(snapshot, checks);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -82,6 +82,8 @@ pub fn check_imports(
|
||||
settings.isort.combine_as_imports,
|
||||
settings.isort.force_wrap_aliases,
|
||||
settings.isort.split_on_trailing_comma,
|
||||
settings.isort.force_single_line,
|
||||
&settings.isort.single_line_exclusions,
|
||||
);
|
||||
|
||||
// Expand the span the entire range, including leading and trailing space.
|
||||
|
||||
@@ -40,6 +40,22 @@ pub struct Options {
|
||||
/// enabled, every aliased `import from` will be given its own line, in
|
||||
/// which case, wrapping is not necessary.
|
||||
pub force_wrap_aliases: Option<bool>,
|
||||
#[option(
|
||||
default = r#"false"#,
|
||||
value_type = "bool",
|
||||
example = r#"force-single-line = true"#
|
||||
)]
|
||||
/// Forces all from imports to appear on their own line.
|
||||
pub force_single_line: Option<bool>,
|
||||
#[option(
|
||||
default = r#"[]"#,
|
||||
value_type = "Vec<String>",
|
||||
example = r#"
|
||||
single-line-exclusions = ["os", "json"]
|
||||
"#
|
||||
)]
|
||||
/// One or more modules to exclude from the single line rule.
|
||||
pub single_line_exclusions: Option<Vec<String>>,
|
||||
#[option(
|
||||
default = r#"false"#,
|
||||
value_type = "bool",
|
||||
@@ -95,10 +111,13 @@ pub struct Options {
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct Settings {
|
||||
pub combine_as_imports: bool,
|
||||
pub force_wrap_aliases: bool,
|
||||
pub split_on_trailing_comma: bool,
|
||||
pub force_single_line: bool,
|
||||
pub single_line_exclusions: BTreeSet<String>,
|
||||
pub known_first_party: BTreeSet<String>,
|
||||
pub known_third_party: BTreeSet<String>,
|
||||
pub extra_standard_library: BTreeSet<String>,
|
||||
@@ -110,6 +129,10 @@ impl Settings {
|
||||
combine_as_imports: options.combine_as_imports.unwrap_or(false),
|
||||
force_wrap_aliases: options.force_wrap_aliases.unwrap_or(false),
|
||||
split_on_trailing_comma: options.split_on_trailing_comma.unwrap_or(true),
|
||||
force_single_line: options.force_single_line.unwrap_or(false),
|
||||
single_line_exclusions: BTreeSet::from_iter(
|
||||
options.single_line_exclusions.unwrap_or_default(),
|
||||
),
|
||||
known_first_party: BTreeSet::from_iter(options.known_first_party.unwrap_or_default()),
|
||||
known_third_party: BTreeSet::from_iter(options.known_third_party.unwrap_or_default()),
|
||||
extra_standard_library: BTreeSet::from_iter(
|
||||
@@ -125,6 +148,8 @@ impl Default for Settings {
|
||||
combine_as_imports: false,
|
||||
force_wrap_aliases: false,
|
||||
split_on_trailing_comma: true,
|
||||
force_single_line: false,
|
||||
single_line_exclusions: BTreeSet::new(),
|
||||
known_first_party: BTreeSet::new(),
|
||||
known_third_party: BTreeSet::new(),
|
||||
extra_standard_library: BTreeSet::new(),
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: checks
|
||||
---
|
||||
- kind: UnsortedImports
|
||||
location:
|
||||
row: 1
|
||||
column: 0
|
||||
end_location:
|
||||
row: 19
|
||||
column: 0
|
||||
fix:
|
||||
content: "import math\nimport sys\nfrom logging.handlers import FileHandler, StreamHandler\nfrom os import path, uname\n\n# comment 5\nfrom bar import a # comment 6\nfrom bar import b # comment 7\nfrom foo import bar # comment 3\nfrom foo2 import bar2 # comment 4\n\n# comment 1\n# comment 2\nfrom third_party import lib1\nfrom third_party import lib2\nfrom third_party import lib3\nfrom third_party import lib4\nfrom third_party import lib5\nfrom third_party import lib6\nfrom third_party import lib7\n"
|
||||
location:
|
||||
row: 1
|
||||
column: 0
|
||||
end_location:
|
||||
row: 19
|
||||
column: 0
|
||||
|
||||
@@ -16,7 +16,7 @@ impl Default for TrailingComma {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash, Ord, PartialOrd, Eq, PartialEq)]
|
||||
#[derive(Debug, Hash, Ord, PartialOrd, Eq, PartialEq, Clone)]
|
||||
pub struct ImportFromData<'a> {
|
||||
pub module: Option<&'a String>,
|
||||
pub level: Option<&'a usize>,
|
||||
@@ -28,7 +28,7 @@ pub struct AliasData<'a> {
|
||||
pub asname: Option<&'a String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct CommentSet<'a> {
|
||||
pub atop: Vec<Cow<'a, str>>,
|
||||
pub inline: Vec<Cow<'a, str>>,
|
||||
|
||||
@@ -1,5 +1,6 @@
pub mod helpers;
pub mod plugins;
pub mod settings;

#[cfg(test)]
mod tests {
@@ -11,6 +12,7 @@ mod tests {

    use crate::checks::CheckCode;
    use crate::linter::test_path;
    use crate::pydocstyle::settings::{Convention, Settings};
    use crate::settings;

    #[test_case(CheckCode::D100, Path::new("D.py"); "D100")]
@@ -72,4 +74,54 @@ mod tests {
        insta::assert_yaml_snapshot!(snapshot, checks);
        Ok(())
    }

    #[test]
    fn d417_unspecified() -> Result<()> {
        let mut checks = test_path(
            Path::new("./resources/test/fixtures/pydocstyle/D417.py"),
            &settings::Settings {
                // When inferring the convention, we'll see a few false negatives.
                // See: https://github.com/PyCQA/pydocstyle/issues/459.
                pydocstyle: Settings { convention: None },
                ..settings::Settings::for_rule(CheckCode::D417)
            },
        )?;
        checks.sort_by_key(|check| check.location);
        insta::assert_yaml_snapshot!(checks);
        Ok(())
    }

    #[test]
    fn d417_google() -> Result<()> {
        let mut checks = test_path(
            Path::new("./resources/test/fixtures/pydocstyle/D417.py"),
            &settings::Settings {
                // With the explicit Google convention, we should flag every function.
                pydocstyle: Settings {
                    convention: Some(Convention::Google),
                },
                ..settings::Settings::for_rule(CheckCode::D417)
            },
        )?;
        checks.sort_by_key(|check| check.location);
        insta::assert_yaml_snapshot!(checks);
        Ok(())
    }

    #[test]
    fn d417_numpy() -> Result<()> {
        let mut checks = test_path(
            Path::new("./resources/test/fixtures/pydocstyle/D417.py"),
            &settings::Settings {
                // With the explicit NumPy convention, we shouldn't flag anything.
                pydocstyle: Settings {
                    convention: Some(Convention::Numpy),
                },
                ..settings::Settings::for_rule(CheckCode::D417)
            },
        )?;
        checks.sort_by_key(|check| check.location);
        insta::assert_yaml_snapshot!(checks);
        Ok(())
    }
}
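
The three tests above exercise the new `convention` setting against the same D417 fixture: with no convention the section style is inferred, while `Convention::Google` and `Convention::Numpy` force one interpretation. For orientation, the kind of Python function that D417 (`DocumentAllArguments`) flags under the Google convention looks roughly like the following; this is an illustrative sketch, not the actual D417.py fixture:

    def add(x, y, z):
        """Add three numbers.

        Args:
            x: The first operand.
        """
        # `y` and `z` are missing from the Args section, so D417 reports them.
        return x + y + z
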
@@ -15,6 +15,7 @@ use crate::docstrings::definition::{Definition, DefinitionKind, Docstring};
use crate::docstrings::sections::{section_contexts, SectionContext};
use crate::docstrings::styles::SectionStyle;
use crate::pydocstyle::helpers::{leading_quote, logical_line};
use crate::pydocstyle::settings::Convention;
use crate::visibility::{is_init, is_magic, is_overload, is_override, is_staticmethod, Visibility};

/// D100, D101, D102, D103, D104, D105, D106, D107
@@ -855,7 +856,7 @@ pub fn not_empty(checker: &mut Checker, docstring: &Docstring) -> bool {

/// D212, D214, D215, D405, D406, D407, D408, D409, D410, D411, D412, D413,
/// D414, D416, D417
pub fn sections(checker: &mut Checker, docstring: &Docstring) {
pub fn sections(checker: &mut Checker, docstring: &Docstring, convention: Option<&Convention>) {
    let body = docstring.body;

    let lines: Vec<&str> = LinesWithTrailingNewline::from(body).collect();
@@ -863,17 +864,31 @@ pub fn sections(checker: &mut Checker, docstring: &Docstring) {
        return;
    }

    // First, interpret as NumPy-style sections.
    let mut found_numpy_section = false;
    for context in &section_contexts(&lines, &SectionStyle::NumPy) {
        found_numpy_section = true;
        numpy_section(checker, docstring, context);
    }
    match convention {
        Some(Convention::Google) => {
            for context in &section_contexts(&lines, &SectionStyle::Google) {
                google_section(checker, docstring, context);
            }
        }
        Some(Convention::Numpy) => {
            for context in &section_contexts(&lines, &SectionStyle::NumPy) {
                numpy_section(checker, docstring, context);
            }
        }
        None => {
            // First, interpret as NumPy-style sections.
            let mut found_numpy_section = false;
            for context in &section_contexts(&lines, &SectionStyle::NumPy) {
                found_numpy_section = true;
                numpy_section(checker, docstring, context);
            }

    // If no such sections were identified, interpret as Google-style sections.
    if !found_numpy_section {
        for context in &section_contexts(&lines, &SectionStyle::Google) {
            google_section(checker, docstring, context);
            // If no such sections were identified, interpret as Google-style sections.
            if !found_numpy_section {
                for context in &section_contexts(&lines, &SectionStyle::Google) {
                    google_section(checker, docstring, context);
                }
            }
        }
    }
}
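
The match above dispatches on the configured convention; when it is `None`, the function keeps the old behavior of trying NumPy-style sections first and falling back to Google-style ones. The two conventions mark up their sections differently, which is why inference can misfire. A rough Python illustration of the two styles (assumed for illustration, not taken from the repository's fixtures):

    def numpy_style(x):
        """Frobnicate x.

        Parameters
        ----------
        x : int
            The value to frobnicate.
        """


    def google_style(x):
        """Frobnicate x.

        Args:
            x: The value to frobnicate.
        """
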
44
src/pydocstyle/settings.rs
Normal file
@@ -0,0 +1,44 @@
//! Settings for the `pydocstyle` plugin.

use ruff_macros::ConfigurationOptions;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub enum Convention {
    Google,
    Numpy,
}

#[derive(
    Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, JsonSchema,
)]
#[serde(deny_unknown_fields, rename_all = "kebab-case", rename = "Pydocstyle")]
pub struct Options {
    #[option(
        default = r#""convention""#,
        value_type = "Convention",
        example = r#"
            # Use Google-style docstrings.
            convention = "google"
        "#
    )]
    /// Whether to use Google-style or Numpy-style conventions when detecting
    /// docstring sections. By default, conventions will be inferred from
    /// the available sections.
    pub convention: Option<Convention>,
}

#[derive(Debug, Default, Hash)]
pub struct Settings {
    pub convention: Option<Convention>,
}

impl Settings {
    pub fn from_options(options: Options) -> Self {
        Self {
            convention: options.convention,
        }
    }
}
@@ -0,0 +1,81 @@
---
source: src/pydocstyle/mod.rs
expression: checks
---
- kind:
    DocumentAllArguments:
      - y
      - z
  location:
    row: 1
    column: 0
  end_location:
    row: 11
    column: 12
  fix: ~
- kind:
    DocumentAllArguments:
      - x
      - y
      - z
  location:
    row: 14
    column: 0
  end_location:
    row: 24
    column: 12
  fix: ~
- kind:
    DocumentAllArguments:
      - x
      - y
      - z
  location:
    row: 27
    column: 0
  end_location:
    row: 36
    column: 12
  fix: ~
- kind:
    DocumentAllArguments:
      - y
      - z
  location:
    row: 39
    column: 0
  end_location:
    row: 49
    column: 12
  fix: ~
- kind:
    DocumentAllArguments:
      - y
  location:
    row: 52
    column: 0
  end_location:
    row: 62
    column: 12
  fix: ~
- kind:
    DocumentAllArguments:
      - y
  location:
    row: 65
    column: 0
  end_location:
    row: 74
    column: 12
  fix: ~
- kind:
    DocumentAllArguments:
      - y
  location:
    row: 77
    column: 0
  end_location:
    row: 84
    column: 12
  fix: ~
@@ -0,0 +1,6 @@
---
source: src/pydocstyle/mod.rs
expression: checks
---
[]
@@ -0,0 +1,37 @@
---
source: src/pydocstyle/mod.rs
expression: checks
---
- kind:
    DocumentAllArguments:
      - x
      - y
      - z
  location:
    row: 27
    column: 0
  end_location:
    row: 36
    column: 12
  fix: ~
- kind:
    DocumentAllArguments:
      - y
  location:
    row: 65
    column: 0
  end_location:
    row: 74
    column: 12
  fix: ~
- kind:
    DocumentAllArguments:
      - y
  location:
    row: 77
    column: 0
  end_location:
    row: 84
    column: 12
  fix: ~
@@ -7,7 +7,6 @@ use rustpython_ast::{Expr, Keyword, Location, Stmt};
use rustpython_parser::lexer;
use rustpython_parser::lexer::Tok;

use crate::ast::helpers;
use crate::ast::types::Range;
use crate::autofix::{self, Fix};
use crate::cst::matchers::match_module;
@@ -28,10 +27,10 @@ pub fn remove_class_def_base(
    let mut fix_start = None;
    let mut fix_end = None;
    let mut count: usize = 0;
    for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
    for (start, tok, end) in lexer::make_tokenizer_located(&contents, stmt_at).flatten() {
        if matches!(tok, Tok::Lpar) {
            if count == 0 {
                fix_start = Some(helpers::to_absolute(start, stmt_at));
                fix_start = Some(start);
            }
            count += 1;
        }
@@ -39,7 +38,7 @@ pub fn remove_class_def_base(
        if matches!(tok, Tok::Rpar) {
            count -= 1;
            if count == 0 {
                fix_end = Some(helpers::to_absolute(end, stmt_at));
                fix_end = Some(end);
                break;
            }
        }
@@ -61,8 +60,7 @@ pub fn remove_class_def_base(
    let mut fix_start: Option<Location> = None;
    let mut fix_end: Option<Location> = None;
    let mut seen_comma = false;
    for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
        let start = helpers::to_absolute(start, stmt_at);
    for (start, tok, end) in lexer::make_tokenizer_located(&contents, stmt_at).flatten() {
        if seen_comma {
            if matches!(tok, Tok::Newline) {
                fix_end = Some(end);
@@ -88,9 +86,7 @@ pub fn remove_class_def_base(
    // isn't a comma.
    let mut fix_start: Option<Location> = None;
    let mut fix_end: Option<Location> = None;
    for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
        let start = helpers::to_absolute(start, stmt_at);
        let end = helpers::to_absolute(end, stmt_at);
    for (start, tok, end) in lexer::make_tokenizer_located(&contents, stmt_at).flatten() {
        if start == expr_at {
            fix_end = Some(end);
            break;
@@ -39,6 +39,7 @@ mod tests {
    #[test_case(CheckCode::UP016, Path::new("UP016.py"); "UP016")]
    #[test_case(CheckCode::UP018, Path::new("UP018.py"); "UP018")]
    #[test_case(CheckCode::UP019, Path::new("UP019.py"); "UP019")]
    #[test_case(CheckCode::UP021, Path::new("UP021.py"); "UP021")]
    fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
        let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
        let mut checks = test_path(
@@ -3,8 +3,10 @@ pub use convert_typed_dict_functional_to_class::convert_typed_dict_functional_to
pub use datetime_utc_alias::datetime_utc_alias;
pub use deprecated_unittest_alias::deprecated_unittest_alias;
pub use native_literals::native_literals;
pub use open_alias::open_alias;
pub use redundant_open_modes::redundant_open_modes;
pub use remove_six_compat::remove_six_compat;
pub use replace_universal_newlines::replace_universal_newlines;
pub use super_call_with_parameters::super_call_with_parameters;
pub use type_of_primitive::type_of_primitive;
pub use typing_text_str_alias::typing_text_str_alias;
@@ -21,8 +23,10 @@ mod convert_typed_dict_functional_to_class;
mod datetime_utc_alias;
mod deprecated_unittest_alias;
mod native_literals;
mod open_alias;
mod redundant_open_modes;
mod remove_six_compat;
mod replace_universal_newlines;
mod super_call_with_parameters;
mod type_of_primitive;
mod typing_text_str_alias;
24
src/pyupgrade/plugins/open_alias.rs
Normal file
@@ -0,0 +1,24 @@
use rustpython_ast::Expr;

use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::checks::{Check, CheckCode, CheckKind};

/// UP020
pub fn open_alias(checker: &mut Checker, expr: &Expr, func: &Expr) {
    let call_path = dealias_call_path(collect_call_paths(expr), &checker.import_aliases);

    if match_call_path(&call_path, "io", "open", &checker.from_imports) {
        let mut check = Check::new(CheckKind::OpenAlias, Range::from_located(expr));
        if checker.patch(&CheckCode::UP020) {
            check.amend(Fix::replacement(
                "open".to_string(),
                func.location,
                func.end_location.unwrap(),
            ));
        }
        checker.add_check(check);
    }
}
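
UP020 (`OpenAlias`) matches calls to `io.open` (including aliased imports, via `dealias_call_path`) and, when autofix is enabled, rewrites the callee to the built-in `open`. A hypothetical before/after, not taken from the UP020 fixture:

    import io

    # Before: flagged by UP020.
    with io.open("data.txt") as f:
        contents = f.read()

    # After the fix: only the callee changes.
    with open("data.txt") as f:
        contents = f.read()
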
@@ -6,7 +6,6 @@ use rustpython_ast::{Constant, Expr, ExprKind, Keyword, KeywordData, Location};
use rustpython_parser::lexer;
use rustpython_parser::token::Tok;

use crate::ast::helpers;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
@@ -112,9 +111,7 @@ fn create_remove_param_fix(
    let mut fix_end: Option<Location> = None;
    let mut is_first_arg: bool = false;
    let mut delete_first_arg: bool = false;
    for (start, tok, end) in lexer::make_tokenizer(&content).flatten() {
        let start = helpers::to_absolute(start, expr.location);
        let end = helpers::to_absolute(end, expr.location);
    for (start, tok, end) in lexer::make_tokenizer_located(&content, expr.location).flatten() {
        if start == mode_param.location {
            if is_first_arg {
                delete_first_arg = true;
36
src/pyupgrade/plugins/replace_universal_newlines.rs
Normal file
@@ -0,0 +1,36 @@
use rustpython_ast::{Expr, Keyword, Location};

use crate::ast::helpers::{find_keyword, match_module_member};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::checks::{Check, CheckKind};

/// UP021
pub fn replace_universal_newlines(checker: &mut Checker, expr: &Expr, kwargs: &[Keyword]) {
    if match_module_member(
        expr,
        "subprocess",
        "run",
        &checker.from_imports,
        &checker.import_aliases,
    ) {
        let Some(kwarg) = find_keyword(kwargs, "universal_newlines") else { return; };
        let range = Range {
            location: kwarg.location,
            end_location: Location::new(
                kwarg.location.row(),
                kwarg.location.column() + "universal_newlines".len(),
            ),
        };
        let mut check = Check::new(CheckKind::ReplaceUniversalNewlines, range);
        if checker.patch(check.kind.code()) {
            check.amend(Fix::replacement(
                "text".to_string(),
                range.location,
                range.end_location,
            ));
        }
        checker.add_check(check);
    }
}
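
UP021 (`ReplaceUniversalNewlines`) looks for the `universal_newlines` keyword on `subprocess.run` and replaces just the keyword name with its modern alias `text`; the computed range spans exactly the length of the old name, so the argument's value is untouched. A hypothetical before/after, not taken from the UP021 fixture:

    import subprocess

    # Before: flagged by UP021.
    subprocess.run(["ls", "-l"], capture_output=True, universal_newlines=True)

    # After the fix: only the keyword name is rewritten.
    subprocess.run(["ls", "-l"], capture_output=True, text=True)
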
@@ -0,0 +1,65 @@
---
source: src/pyupgrade/mod.rs
expression: checks
---
- kind: ReplaceUniversalNewlines
  location:
    row: 6
    column: 24
  end_location:
    row: 6
    column: 42
  fix:
    content: text
    location:
      row: 6
      column: 24
    end_location:
      row: 6
      column: 42
- kind: ReplaceUniversalNewlines
  location:
    row: 7
    column: 22
  end_location:
    row: 7
    column: 40
  fix:
    content: text
    location:
      row: 7
      column: 22
    end_location:
      row: 7
      column: 40
- kind: ReplaceUniversalNewlines
  location:
    row: 9
    column: 13
  end_location:
    row: 9
    column: 31
  fix:
    content: text
    location:
      row: 9
      column: 13
    end_location:
      row: 9
      column: 31
- kind: ReplaceUniversalNewlines
  location:
    row: 10
    column: 21
  end_location:
    row: 10
    column: 39
  fix:
    content: text
    location:
      row: 10
      column: 21
    end_location:
      row: 10
      column: 39
@@ -21,7 +21,8 @@ use crate::settings::types::{
};
use crate::{
    flake8_annotations, flake8_bugbear, flake8_errmsg, flake8_import_conventions, flake8_quotes,
    flake8_tidy_imports, flake8_unused_arguments, fs, isort, mccabe, pep8_naming, pyupgrade,
    flake8_tidy_imports, flake8_unused_arguments, fs, isort, mccabe, pep8_naming, pydocstyle,
    pyupgrade,
};

#[derive(Debug, Default)]
@@ -62,6 +63,7 @@ pub struct Configuration {
    pub isort: Option<isort::settings::Options>,
    pub mccabe: Option<mccabe::settings::Options>,
    pub pep8_naming: Option<pep8_naming::settings::Options>,
    pub pydocstyle: Option<pydocstyle::settings::Options>,
    pub pyupgrade: Option<pyupgrade::settings::Options>,
}

@@ -156,6 +158,7 @@ impl Configuration {
            isort: options.isort,
            mccabe: options.mccabe,
            pep8_naming: options.pep8_naming,
            pydocstyle: options.pydocstyle,
            pyupgrade: options.pyupgrade,
        })
    }
@@ -217,6 +220,7 @@ impl Configuration {
            isort: self.isort.or(config.isort),
            mccabe: self.mccabe.or(config.mccabe),
            pep8_naming: self.pep8_naming.or(config.pep8_naming),
            pydocstyle: self.pydocstyle.or(config.pydocstyle),
            pyupgrade: self.pyupgrade.or(config.pyupgrade),
        }
    }
@@ -22,7 +22,8 @@ use crate::settings::types::{
};
use crate::{
    flake8_annotations, flake8_bugbear, flake8_errmsg, flake8_import_conventions, flake8_quotes,
    flake8_tidy_imports, flake8_unused_arguments, isort, mccabe, pep8_naming, pyupgrade,
    flake8_tidy_imports, flake8_unused_arguments, isort, mccabe, pep8_naming, pydocstyle,
    pyupgrade,
};

pub mod configuration;
@@ -68,6 +69,7 @@ pub struct Settings {
    pub isort: isort::settings::Settings,
    pub mccabe: mccabe::settings::Settings,
    pub pep8_naming: pep8_naming::settings::Settings,
    pub pydocstyle: pydocstyle::settings::Settings,
    pub pyupgrade: pyupgrade::settings::Settings,
}

@@ -193,6 +195,10 @@ impl Settings {
                .pep8_naming
                .map(pep8_naming::settings::Settings::from_options)
                .unwrap_or_default(),
            pydocstyle: config
                .pydocstyle
                .map(pydocstyle::settings::Settings::from_options)
                .unwrap_or_default(),
            pyupgrade: config
                .pyupgrade
                .as_ref()
@@ -233,6 +239,7 @@ impl Settings {
            isort: isort::settings::Settings::default(),
            mccabe: mccabe::settings::Settings::default(),
            pep8_naming: pep8_naming::settings::Settings::default(),
            pydocstyle: pydocstyle::settings::Settings::default(),
            pyupgrade: pyupgrade::settings::Settings::default(),
        }
    }
@@ -269,6 +276,7 @@ impl Settings {
            isort: isort::settings::Settings::default(),
            mccabe: mccabe::settings::Settings::default(),
            pep8_naming: pep8_naming::settings::Settings::default(),
            pydocstyle: pydocstyle::settings::Settings::default(),
            pyupgrade: pyupgrade::settings::Settings::default(),
        }
    }
@@ -326,6 +334,7 @@ impl Hash for Settings {
        self.isort.hash(state);
        self.mccabe.hash(state);
        self.pep8_naming.hash(state);
        self.pydocstyle.hash(state);
        self.pyupgrade.hash(state);
    }
}
@@ -9,7 +9,8 @@ use crate::checks_gen::CheckCodePrefix;
use crate::settings::types::{PythonVersion, SerializationFormat, Version};
use crate::{
    flake8_annotations, flake8_bugbear, flake8_errmsg, flake8_import_conventions, flake8_quotes,
    flake8_tidy_imports, flake8_unused_arguments, isort, mccabe, pep8_naming, pyupgrade,
    flake8_tidy_imports, flake8_unused_arguments, isort, mccabe, pep8_naming, pydocstyle,
    pyupgrade,
};

#[derive(
@@ -369,6 +370,9 @@ pub struct Options {
    /// Options for the `pep8-naming` plugin.
    pub pep8_naming: Option<pep8_naming::settings::Options>,
    #[option_group]
    /// Options for the `pydocstyle` plugin.
    pub pydocstyle: Option<pydocstyle::settings::Options>,
    #[option_group]
    /// Options for the `pyupgrade` plugin.
    pub pyupgrade: Option<pyupgrade::settings::Options>,
    // Tables are required to go last.
@@ -198,6 +198,7 @@ mod tests {
                isort: None,
                mccabe: None,
                pep8_naming: None,
                pydocstyle: None,
                pyupgrade: None,
            })
        })
@@ -249,6 +250,7 @@ line-length = 79
                isort: None,
                mccabe: None,
                pep8_naming: None,
                pydocstyle: None,
                pyupgrade: None,
            })
        })
@@ -300,6 +302,7 @@ exclude = ["foo.py"]
                isort: None,
                mccabe: None,
                pep8_naming: None,
                pydocstyle: None,
                pyupgrade: None,
            })
        })
@@ -351,6 +354,7 @@ select = ["E501"]
                isort: None,
                mccabe: None,
                pep8_naming: None,
                pydocstyle: None,
                pyupgrade: None,
            })
        })
@@ -403,6 +407,7 @@ ignore = ["E501"]
                isort: None,
                mccabe: None,
                pep8_naming: None,
                pydocstyle: None,
                pyupgrade: None,
            })
        })
@@ -541,6 +546,7 @@ other-attribute = 1
                ]),
                staticmethod_decorators: Some(vec!["staticmethod".to_string()]),
            }),
            pydocstyle: None,
            pyupgrade: None,
        }
    );