Compare commits

..

8 Commits

Author SHA1 Message Date
Charlie Marsh
7460ca28dd Fixup 2023-03-09 16:25:38 -05:00
AreamanM
56e8a4fd14 cleanup plc1901 impl 2023-03-09 15:49:09 -05:00
AreamanM
67444143b5 merge with main 2023-03-09 15:49:09 -05:00
AreamanM
daeb3ff37e fix cargo fmt warning 2023-03-09 15:49:09 -05:00
AreamanM
2f3734dd22 update test for plc1901 2023-03-09 15:49:09 -05:00
AreamanM
fc50d28fcf cleanup impl for plc1901 2023-03-09 15:49:09 -05:00
AreamanM
158dc5e7d4 update crate::ast:: to ruff_python_ast:: 2023-03-09 15:49:09 -05:00
AreamanM
9a42be8a90 rough implementation of c1901 2023-03-09 15:49:09 -05:00
303 changed files with 3163 additions and 8498 deletions

View File

@@ -1,6 +1,5 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
benchmark = "bench -p ruff_benchmark --"
[target.'cfg(all())']
rustflags = [

View File

@@ -1,11 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "12:00"
timezone: "America/New_York"
commit-message:
prefix: "ci(deps)"

View File

@@ -1,133 +0,0 @@
name: Benchmark
on:
pull_request:
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
jobs:
run-benchmark:
if: github.event_name == 'pull_request'
name: "Run | ${{ matrix.os }}"
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- name: "PR - Checkout Branch"
uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: "PR - Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v1
- name: "PR - Build benchmarks"
uses: actions-rs/cargo@v1
with:
command: bench
args: -p ruff_benchmark --no-run
- name: "PR - Run benchmarks"
run: cargo benchmark --save-baseline=pr
- name: "Main - Checkout Branch"
uses: actions/checkout@v3
with:
clean: false
ref: main
- name: "Main - Install Rust toolchain"
run: rustup show
- name: "Main - Build benchmarks"
uses: actions-rs/cargo@v1
with:
command: bench
args: -p ruff_benchmark --no-run
- name: "Main - Run benchmarks"
run: cargo benchmark --save-baseline=main
- name: "Upload benchmark results"
uses: actions/upload-artifact@v3
with:
name: benchmark-results-${{ matrix.os }}
path: ./target/criterion
# Cleanup
- name: Remove Criterion Artifact
uses: JesseTG/rm@v1.0.3
with:
path: ./target/criterion
benchmark-compare:
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
name: Compare
needs:
- run-benchmark
steps:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install critcmp"
# Use debug build: Building takes much longer than the "slowness" of using the debug build.
run: cargo install --debug critcmp
- name: "Linux | Download PR benchmark results"
uses: actions/download-artifact@v3
with:
name: benchmark-results-ubuntu-latest
path: ./target/criterion
- name: "Linux | Compare benchmark results"
shell: bash
run: |
echo "### Benchmark" >> summary.md
echo "#### Linux" >> summary.md
echo "\`\`\`" >> summary.md
critcmp main pr >> summary.md
echo "\`\`\`" >> summary.md
echo "" >> summary.md
- name: "Linux | Cleanup benchmark results"
run: rm -rf ./target/criterion
- name: "Windows | Download PR benchmark results"
uses: actions/download-artifact@v3
with:
name: benchmark-results-windows-latest
path: ./target/criterion
- name: "Windows | Compare benchmark results"
shell: bash
run: |
echo "#### Windows" >> summary.md
echo "\`\`\`" >> summary.md
critcmp main pr >> summary.md
echo "\`\`\`" >> summary.md
echo "" >> summary.md
echo ${{ github.event.pull_request.number }} > pr-number
cat summary.md > $GITHUB_STEP_SUMMARY
- uses: actions/upload-artifact@v3
name: Upload PR Number
with:
name: pr-number
path: pr-number
- uses: actions/upload-artifact@v3
name: Upload Summary
with:
name: summary
path: summary.md

View File

@@ -34,7 +34,7 @@ jobs:
- name: "Install Rust toolchain"
run: |
rustup component add clippy
- uses: Swatinem/rust-cache@v2
- uses: Swatinem/rust-cache@v1
- run: cargo clippy --workspace --all-targets --all-features -- -D warnings
cargo-clippy-wasm:
@@ -46,7 +46,7 @@ jobs:
run: |
rustup component add clippy
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- uses: Swatinem/rust-cache@v1
- run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features -- -D warnings
cargo-test:
@@ -59,9 +59,9 @@ jobs:
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- uses: Swatinem/rust-cache@v1
- run: cargo install cargo-insta
- run: pip install black[d]==23.1.0
- run: pip install black[d]==22.12.0
- name: "Run tests (Ubuntu)"
if: ${{ matrix.os == 'ubuntu-latest' }}
run: |
@@ -79,11 +79,6 @@ jobs:
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
- uses: actions/upload-artifact@v3
if: ${{ matrix.os == 'ubuntu-latest' }}
with:
name: ruff
path: target/debug/ruff
cargo-test-wasm:
@@ -99,7 +94,7 @@ jobs:
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
- uses: Swatinem/rust-cache@v2
- uses: Swatinem/rust-cache@v1
- name: "Run wasm-pack"
run: |
cd crates/ruff_wasm
@@ -112,7 +107,7 @@ jobs:
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- uses: Swatinem/rust-cache@v1
- run: ./scripts/add_rule.py --name DoTheThing --code PLC999 --linter pylint
- run: cargo check
- run: |
@@ -128,51 +123,3 @@ jobs:
- uses: crate-ci/typos@master
with:
files: .
ecosystem:
name: "ecosystem"
runs-on: ubuntu-latest
needs: cargo-test
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
if: github.event_name == 'pull_request'
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.11"
- uses: actions/download-artifact@v3
name: Download Ruff binary
id: ruff-target
with:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@v2
name: Download base results
with:
name: ruff
branch: ${{ github.event.pull_request.base.ref }}
check_artifacts: true
- name: Run ecosystem check
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
scripts/check_ecosystem.py ruff ${{ steps.ruff-target.outputs.download-path }}/ruff | tee ecosystem-result
cat ecosystem-result > $GITHUB_STEP_SUMMARY
echo ${{ github.event.number }} > pr-number
- uses: actions/upload-artifact@v3
name: Upload PR Number
with:
name: pr-number
path: pr-number
- uses: actions/upload-artifact@v3
name: Upload Results
with:
name: ecosystem-result
path: ecosystem-result

View File

@@ -15,7 +15,7 @@ jobs:
- uses: actions/setup-python@v4
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- uses: Swatinem/rust-cache@v1
- name: "Install dependencies"
run: |
pip install -r docs/requirements.txt

View File

@@ -133,7 +133,7 @@ jobs:
target: ${{ matrix.target }}
manylinux: auto
args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
- uses: uraimo/run-on-arch-action@v2.5.0
if: matrix.target != 'ppc64'
name: Install built wheel
with:
@@ -206,7 +206,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- uses: uraimo/run-on-arch-action@v2
- uses: uraimo/run-on-arch-action@master
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}

View File

@@ -1,83 +0,0 @@
name: PR Check Comment
on:
workflow_run:
workflows: [CI, Benchmark]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The ecosystem workflow that triggers the workflow run
required: true
permissions:
pull-requests: write
jobs:
comment:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v2
name: Download PR Number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
- name: Extract PR Number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi
- uses: dawidd6/action-download-artifact@v2
name: "Download Ecosystem Result"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
with:
name: ecosystem-result
workflow: ci.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/ecosystem
if_no_artifact_found: ignore
- uses: dawidd6/action-download-artifact@v2
name: "Download Benchmark Result"
id: download-benchmark-result
if: steps.pr-number.outputs.pr-number
with:
name: summary
workflow: benchmark.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/benchmark
if_no_artifact_found: ignore
- name: Generate Comment
id: generate-comment
if: steps.download-ecosystem-result.outputs.found_artifact == 'true' || steps.download-benchmark-result.outputs.found_artifact == 'true'
run: |
echo 'comment<<EOF' >> $GITHUB_OUTPUT
echo '## PR Check Results' >> $GITHUB_OUTPUT
if [[ -f pr/ecosystem/ecosystem-result ]]
then
echo "### Ecosystem" >> $GITHUB_OUTPUT
cat pr/ecosystem/ecosystem-result >> $GITHUB_OUTPUT
fi
if [[ -f pr/benchmark/summary.md ]]
then
cat pr/benchmark/summary.md >> $GITHUB_OUTPUT
fi
echo 'EOF' >> $GITHUB_OUTPUT
- name: Create or update comment
if: steps.generate-comment.outputs.comment
uses: thollander/actions-comment-pull-request@v2
with:
pr_number: ${{ steps.pr-number.outputs.pr-number }}
message: ${{ steps.generate-comment.outputs.comment }}
comment_tag: PR Check Results

View File

@@ -208,7 +208,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: auto
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
- uses: uraimo/run-on-arch-action@v2.5.0
if: matrix.platform.arch != 'ppc64'
name: Install built wheel
with:
@@ -309,7 +309,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
- uses: uraimo/run-on-arch-action@master
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}

View File

@@ -1,7 +1,7 @@
fail_fast: true
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.12.1
rev: v0.10.1
hooks:
- id: validate-pyproject

77
Cargo.lock generated
View File

@@ -313,14 +313,13 @@ dependencies = [
[[package]]
name = "clap_complete_command"
version = "0.5.1"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "183495371ea78d4c9ff638bfc6497d46fed2396e4f9c50aebc1278a4a9919a3d"
checksum = "4160b4a4f72ef58bd766bad27c09e6ef1cc9d82a22f6a0f55d152985a4a48e31"
dependencies = [
"clap 4.1.8",
"clap_complete",
"clap_complete_fig",
"clap_complete_nushell",
]
[[package]]
@@ -333,16 +332,6 @@ dependencies = [
"clap_complete",
]
[[package]]
name = "clap_complete_nushell"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7fa41f5e6aa83bd151b70fd0ceaee703d68cd669522795dc812df9edad1252c"
dependencies = [
"clap 4.1.8",
"clap_complete",
]
[[package]]
name = "clap_derive"
version = "4.1.8"
@@ -780,7 +769,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.256"
version = "0.0.254"
dependencies = [
"anyhow",
"clap 4.1.8",
@@ -1514,12 +1503,6 @@ dependencies = [
"once_cell",
]
[[package]]
name = "pathdiff"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
[[package]]
name = "peg"
version = "0.8.1"
@@ -1547,18 +1530,6 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa00462b37ead6d11a82c9d568b26682d78e0477dc02d1966c013af80969739"
[[package]]
name = "pep440_rs"
version = "0.2.0"
source = "git+https://github.com/konstin/pep440-rs.git?rev=a8fef4ec47f4c25b070b39cdbe6a0b9847e49941#a8fef4ec47f4c25b070b39cdbe6a0b9847e49941"
dependencies = [
"lazy_static",
"regex",
"serde",
"tracing",
"unicode-width",
]
[[package]]
name = "percent-encoding"
version = "2.2.0"
@@ -1982,7 +1953,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.256"
version = "0.0.254"
dependencies = [
"anyhow",
"bisection",
@@ -2008,9 +1979,6 @@ dependencies = [
"num-traits",
"once_cell",
"path-absolutize",
"pathdiff",
"pep440_rs",
"pretty_assertions",
"regex",
"result-like",
"ruff_cache",
@@ -2035,21 +2003,6 @@ dependencies = [
"toml",
]
[[package]]
name = "ruff_benchmark"
version = "0.0.0"
dependencies = [
"criterion",
"mimalloc",
"once_cell",
"ruff",
"serde",
"serde_json",
"tikv-jemallocator",
"ureq",
"url",
]
[[package]]
name = "ruff_cache"
version = "0.0.0"
@@ -2063,7 +2016,7 @@ dependencies = [
[[package]]
name = "ruff_cli"
version = "0.0.256"
version = "0.0.254"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -2308,7 +2261,7 @@ dependencies = [
[[package]]
name = "rustpython-ast"
version = "0.2.0"
source = "git+https://github.com/RustPython/RustPython.git?rev=c15f670f2c30cfae6b41a1874893590148c74bc4#c15f670f2c30cfae6b41a1874893590148c74bc4"
source = "git+https://github.com/RustPython/RustPython.git?rev=1871a1632e310985414211222f5bf8069678892f#1871a1632e310985414211222f5bf8069678892f"
dependencies = [
"num-bigint",
"rustpython-compiler-core",
@@ -2317,7 +2270,7 @@ dependencies = [
[[package]]
name = "rustpython-common"
version = "0.2.0"
source = "git+https://github.com/RustPython/RustPython.git?rev=c15f670f2c30cfae6b41a1874893590148c74bc4#c15f670f2c30cfae6b41a1874893590148c74bc4"
source = "git+https://github.com/RustPython/RustPython.git?rev=1871a1632e310985414211222f5bf8069678892f#1871a1632e310985414211222f5bf8069678892f"
dependencies = [
"ascii",
"bitflags",
@@ -2342,7 +2295,7 @@ dependencies = [
[[package]]
name = "rustpython-compiler-core"
version = "0.2.0"
source = "git+https://github.com/RustPython/RustPython.git?rev=c15f670f2c30cfae6b41a1874893590148c74bc4#c15f670f2c30cfae6b41a1874893590148c74bc4"
source = "git+https://github.com/RustPython/RustPython.git?rev=1871a1632e310985414211222f5bf8069678892f#1871a1632e310985414211222f5bf8069678892f"
dependencies = [
"bitflags",
"bstr 0.2.17",
@@ -2356,7 +2309,7 @@ dependencies = [
[[package]]
name = "rustpython-parser"
version = "0.2.0"
source = "git+https://github.com/RustPython/RustPython.git?rev=c15f670f2c30cfae6b41a1874893590148c74bc4#c15f670f2c30cfae6b41a1874893590148c74bc4"
source = "git+https://github.com/RustPython/RustPython.git?rev=1871a1632e310985414211222f5bf8069678892f#1871a1632e310985414211222f5bf8069678892f"
dependencies = [
"ahash",
"anyhow",
@@ -2881,21 +2834,9 @@ checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
dependencies = [
"cfg-if",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tracing-core"
version = "0.1.30"

View File

@@ -4,10 +4,6 @@ members = ["crates/*"]
[workspace.package]
edition = "2021"
rust-version = "1.67"
homepage = "https://beta.ruff.rs/docs/"
documentation = "https://beta.ruff.rs/docs/"
repository = "https://github.com/charliermarsh/ruff"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
[workspace.dependencies]
anyhow = { version = "1.0.69" }
@@ -30,11 +26,11 @@ proc-macro2 = { version = "1.0.51" }
quote = { version = "1.0.23" }
regex = { version = "1.7.1" }
rustc-hash = { version = "1.1.0" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "c15f670f2c30cfae6b41a1874893590148c74bc4" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "1871a1632e310985414211222f5bf8069678892f" }
rustpython-parser = { features = [
"lalrpop",
"serde",
], git = "https://github.com/RustPython/RustPython.git", rev = "c15f670f2c30cfae6b41a1874893590148c74bc4" }
], git = "https://github.com/RustPython/RustPython.git", rev = "1871a1632e310985414211222f5bf8069678892f" }
schemars = { version = "0.8.12" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.93" }
@@ -63,9 +59,3 @@ opt-level = 3
# https://github.com/bytecodealliance/wasm-tools/blob/b5c3d98e40590512a3b12470ef358d5c7b983b15/crates/wasmparser/src/limits.rs#L29
[profile.dev.package.rustpython-parser]
opt-level = 1
# Use the `--profile release-debug` flag to show symbols in release mode.
# e.g. `cargo build --profile release-debug`
[profile.release-debug]
inherits = "release"
debug = 1

View File

@@ -137,7 +137,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:
```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.256'
rev: 'v0.0.254'
hooks:
- id: ruff
```
@@ -306,13 +306,6 @@ Ruff is used in a number of major open-source projects, including:
- [meson-python](https://github.com/mesonbuild/meson-python)
- [ZenML](https://github.com/zenml-io/zenml)
- [delta-rs](https://github.com/delta-io/delta-rs)
- [Starlite](https://github.com/starlite-api/starlite)
- [telemetry-airflow (Mozilla)](https://github.com/mozilla/telemetry-airflow)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
- [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
- [nox](https://github.com/wntrblm/nox)
- [Neon](https://github.com/neondatabase/neon)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
## License

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.256"
version = "0.0.254"
edition = { workspace = true }
rust-version = { workspace = true }

View File

@@ -46,15 +46,8 @@ fn main() -> Result<()> {
.map(|tool| ExternalConfig {
black: tool.black.as_ref(),
isort: tool.isort.as_ref(),
..Default::default()
})
.unwrap_or_default();
let external_config = ExternalConfig {
project: pyproject
.as_ref()
.and_then(|pyproject| pyproject.project.as_ref()),
..external_config
};
// Create Ruff's pyproject.toml section.
let pyproject = flake8_to_ruff::convert(&config, &external_config, args.plugin)?;

View File

@@ -1,17 +1,19 @@
[package]
name = "ruff"
version = "0.0.256"
authors.workspace = true
edition.workspace = true
rust-version.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
version = "0.0.254"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = { workspace = true }
rust-version = { workspace = true }
documentation = "https://github.com/charliermarsh/ruff"
homepage = "https://github.com/charliermarsh/ruff"
repository = "https://github.com/charliermarsh/ruff"
readme = "README.md"
license = "MIT"
[lib]
name = "ruff"
crate-type = ["cdylib", "rlib"]
doctest = false
[dependencies]
ruff_cache = { path = "../ruff_cache" }
@@ -25,7 +27,7 @@ anyhow = { workspace = true }
bisection = { version = "0.1.0" }
bitflags = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "string"], optional = true }
clap = { workspace = true, features = ["derive", "env", "string"] }
colored = { workspace = true }
dirs = { version = "4.0.0" }
fern = { version = "0.6.1" }
@@ -46,10 +48,6 @@ path-absolutize = { workspace = true, features = [
"once_cell_cache",
"use_unix_paths_on_wasm",
] }
pathdiff = { version = "0.2.1" }
pep440_rs = { git = "https://github.com/konstin/pep440-rs.git", features = [
"serde",
], rev = "a8fef4ec47f4c25b070b39cdbe6a0b9847e49941" }
regex = { workspace = true }
result-like = { version = "0.4.6" }
rustc-hash = { workspace = true }
@@ -67,10 +65,10 @@ thiserror = { version = "1.0.38" }
toml = { workspace = true }
[dev-dependencies]
criterion = { version = "0.4.0" }
insta = { workspace = true, features = ["yaml", "redactions"] }
pretty_assertions = "1.3.0"
test-case = { workspace = true }
criterion = { version = "0.4.0" }
[features]
default = []

View File

@@ -1,11 +0,0 @@
import warnings
"""
Should emit:
B028 - on lines 8 and 9
"""
warnings.warn(DeprecationWarning("test"))
warnings.warn(DeprecationWarning("test"), source=None)
warnings.warn(DeprecationWarning("test"), source=None, stacklevel=2)
warnings.warn(DeprecationWarning("test"), stacklevel=1)

View File

@@ -6,8 +6,6 @@ obj.endswith("foo") or obj.endswith("bar")
obj.startswith(foo) or obj.startswith(bar)
# error
obj.startswith(foo) or obj.startswith("foo")
# error
obj.endswith(foo) or obj.startswith(foo) or obj.startswith("foo")
# ok
obj.startswith(("foo", "bar"))

View File

@@ -1,10 +1,3 @@
import math
import os
import sys
from math import inf
import numpy as np
def f12(
x,
y: str = os.pathsep, # Error PYI011 Only simple default values allowed for typed arguments
@@ -68,49 +61,3 @@ def f22(
x: complex = -42.5j # Error PYI011 Only simple default values allowed for typed arguments
+ 4.3j,
) -> None: ...
def f23(
x: bool = True, # OK
) -> None: ...
def f24(
x: float = 3.14, # OK
) -> None: ...
def f25(
x: float = -3.14, # OK
) -> None: ...
def f26(
x: complex = -3.14j, # OK
) -> None: ...
def f27(
x: complex = -3 - 3.14j, # OK
) -> None: ...
def f28(
x: float = math.tau, # OK
) -> None: ...
def f29(
x: float = math.inf, # OK
) -> None: ...
def f30(
x: float = -math.inf, # OK
) -> None: ...
def f31(
x: float = inf, # Error PYI011 Only simple default values allowed for typed arguments
) -> None: ...
def f32(
x: float = np.inf, # Error PYI011 Only simple default values allowed for typed arguments
) -> None: ...
def f33(
x: float = math.nan, # OK
) -> None: ...
def f34(
x: float = -math.nan, # Error PYI011 Only simple default values allowed for typed arguments
) -> None: ...
def f35(
x: complex = math.inf # Error PYI011 Only simple default values allowed for typed arguments
+ 1j,
) -> None: ...
def f36(
*, x: str = sys.version, # OK
) -> None: ...
def f37(
*, x: str = "" + "", # Error PYI011 Only simple default values allowed for typed arguments
) -> None: ...

View File

@@ -43,6 +43,3 @@ def f21(
def f22(
x=-42.5j + 4.3j, # Error PYI014
) -> None: ...
def f23(
x=True, # OK
) -> None: ...

View File

@@ -1,10 +1,5 @@
import sys, math
from os import path, uname
from json import detect_encoding
from json import dump
from json import dumps as json_dumps
from json import load
from json import loads as json_loads
from logging.handlers import StreamHandler, FileHandler
# comment 1
@@ -15,10 +10,9 @@ from third_party import lib4
from foo import bar # comment 3
from foo2 import bar2 # comment 4
from foo3 import bar3, baz3 # comment 5
# comment 6
# comment 5
from bar import (
a, # comment 7
b, # comment 8
)
a, # comment 6
b, # comment 7
)

View File

@@ -1,10 +0,0 @@
# ruff: isort: skip_file
import e
import f
# isort: split
import a
import b
import c
import d

View File

@@ -6,14 +6,6 @@ def f():
# isort: on
def f():
# ruff: isort: off
import sys
import os
import collections
# ruff: isort: on
def f():
import sys
import os # isort: skip

View File

@@ -54,7 +54,3 @@ if type(a) != type(b) or type(a) == type(ccc):
pass
assert type(res) == type(None)
types = StrEnum
if x == types.X:
pass

View File

@@ -1,3 +0,0 @@
def lorem():
"""lorem ipsum dolor sit amet consectetur adipiscing elit
sed do eiusmod tempor incididunt ut labore et dolore magna aliqua"""

View File

@@ -58,15 +58,6 @@ def no_underline_and_no_description(): # noqa: D416
"""
@expect(_D213)
@expect("D407: Missing dashed underline after section ('Returns')")
@expect("D414: Section has no content ('Returns')")
def no_underline_and_no_newline(): # noqa: D416
"""Toggle the gizmo.
Returns"""
@expect(_D213)
@expect("D410: Missing blank line after section ('Returns')")
@expect("D414: Section has no content ('Returns')")

View File

@@ -17,7 +17,3 @@ def errors():
def ok():
if x and not y:
print("x is not an empty string, but y is an empty string")
data.loc[data["a"] != ""]
data.loc[data["a"] != "", :]

View File

@@ -1,95 +0,0 @@
while True:
try:
pass
finally:
continue # [continue-in-finally]
while True:
try:
pass
except Exception:
continue
finally:
try:
pass
finally:
continue # [continue-in-finally]
pass
while True:
try:
pass
finally:
test = "aa"
match test:
case "aa":
continue # [continue-in-finally]
while True:
try:
pass
finally:
with "aa" as f:
continue # [continue-in-finally]
while True:
try:
pass
finally:
if True:
continue # [continue-in-finally]
continue # [continue-in-finally]
def test():
while True:
continue
try:
pass
finally:
continue # [continue-in-finally]
while True:
try:
pass
finally:
continue # [continue-in-finally]
def test():
while True:
continue
while True:
try:
pass
finally:
for i in range(12):
continue
continue # [continue-in-finally]
while True:
pass
else:
continue # [continue-in-finally]
def test():
continue
while True:
continue
while True:
try:
pass
finally:
if True:
pass
elif False:
continue # [continue-in-finally]
else:
continue # [continue-in-finally]
for i in range(10):
pass
else:
continue # [continue-in-finally]

View File

@@ -1,12 +0,0 @@
import os
tempVar = os.getenv("TEST", 12) # [invalid-envvar-default]
goodVar = os.getenv("TESTING", None)
dictVarBad = os.getenv("AAA", {"a", 7}) # [invalid-envvar-default]
print(os.getenv("TEST", False)) # [invalid-envvar-default]
os.getenv("AA", "GOOD")
os.getenv("AA", f"GOOD")
os.getenv("AA", "GOOD" + "BAD")
os.getenv("AA", "GOOD" + 1)
os.getenv("AA", "GOOD %s" % "BAD")
os.getenv("B", Z)

View File

@@ -1,15 +0,0 @@
import os
os.getenv(1) # [invalid-envvar-value]
os.getenv("a")
os.getenv("test")
os.getenv(key="testingAgain")
os.getenv(key=11) # [invalid-envvar-value]
os.getenv(["hello"]) # [invalid-envvar-value]
os.getenv(key="foo", default="bar")
os.getenv(key=f"foo", default="bar")
os.getenv(key="foo" + "bar", default=1)
os.getenv(key=1 + "bar", default=1) # [invalid-envvar-value]
AA = "aa"
os.getenv(AA)

View File

@@ -1,39 +0,0 @@
import sys
def print_python_version():
print(sys.version)
return None # [useless-return]
def print_python_version():
print(sys.version)
return None # [useless-return]
def print_python_version():
print(sys.version)
return None # [useless-return]
class SomeClass:
def print_python_version(self):
print(sys.version)
return None # [useless-return]
def print_python_version():
if 2 * 2 == 4:
return
print(sys.version)
def print_python_version():
if 2 * 2 == 4:
return None
return
def print_python_version():
if 2 * 2 == 4:
return None

View File

@@ -1,10 +0,0 @@
import socket
from kombu import Connection, exceptions
try:
conn = Connection(settings.CELERY_BROKER_URL)
conn.ensure_connection(max_retries=2)
conn._close()
except (socket.error, exceptions.OperationalError):
return HttpResponseServerError("cache: cannot connect to broker.")

View File

@@ -1,29 +1,24 @@
# Error (`from unittest import mock`)
# These should be changed
if True:
import mock
# Error (`from unittest import mock`)
if True:
import mock, sys
# Error (`from unittest.mock import *`)
if True:
from mock import *
# Error (`from unittest import mock`)
# This goes to from unittest import mock
import mock.mock
# Error (`from unittest import mock`)
# Mock should go on a new line as `from unittest import mock`
import contextlib, mock, sys
# Error (`from unittest import mock`)
# Mock should go on a new line as `from unittest import mock`
import mock, sys
x = "This code should be preserved one line below the mock"
# Error (`from unittest import mock`)
# Mock should go on a new line as `from unittest import mock`
from mock import mock
# Error (keep trailing comma)
# Should keep trailing comma
from mock import (
mock,
a,
@@ -37,7 +32,7 @@ from mock import (
mock,
)
# Error (avoid trailing comma)
# Should not get a trailing comma
from mock import (
mock,
a,
@@ -62,16 +57,16 @@ if True:
c
)
# OK
# These should not change:
import os, io
# Error (`from unittest import mock`)
# Mock should go on a new line as `from unittest import mock`
import mock, mock
# Error (`from unittest import mock as foo`)
# Mock should go on a new line as `from unittest import mock as foo`
import mock as foo
# Error (`from unittest import mock as foo`)
# Mock should go on a new line as `from unittest import mock as foo`
from mock import mock as foo
if True:
@@ -86,8 +81,8 @@ if True:
from mock import mock as foo, mock as bar, mock
# OK.
# This should be unchanged.
x = mock.Mock()
# Error (`mock.Mock()`).
# This should change to `mock.Mock()`.
x = mock.mock.Mock()

View File

@@ -41,7 +41,7 @@ if True:
Good,
)
from typing import Callable, Match, Pattern, List, OrderedDict, AbstractSet, ContextManager
from typing import Callable, Match, Pattern, List, OrderedDict
if True: from collections import (
Mapping, Counter)

View File

@@ -5,5 +5,3 @@ isinstance(1, int) # OK
issubclass("yes", int) # OK
isinstance(1, int | float) # OK
issubclass("yes", int | str) # OK
isinstance(1, ()) # OK
isinstance(1, (int, *(str, bytes))) # OK

View File

@@ -1,19 +0,0 @@
input = [1, 2, 3]
otherInput = [2, 3, 4]
# OK
zip(input, otherInput) # different inputs
zip(input, otherInput[1:]) # different inputs
zip(input, input[2:]) # not successive
zip(input[:-1], input[2:]) # not successive
list(zip(input, otherInput)) # nested call
zip(input, input[1::2]) # not successive
# Errors
zip(input, input[1:])
zip(input, input[1::1])
zip(input[:-1], input[1:])
zip(input[1:], input[2:])
zip(input[1:-1], input[2:])
list(zip(input, input[1:]))
list(zip(input[:-1], input[1:]))

View File

@@ -1,13 +0,0 @@
# noqa
# noqa # comment
print() # noqa
print() # noqa # comment
print(a) # noqa
print(a) # noqa # comment
# noqa: E501, F821
# noqa: E501, F821 # comment
print() # noqa: E501, F821
print() # noqa: E501, F821 # comment
print(a) # noqa: E501, F821
print(a) # noqa: E501, F821 # comment

View File

@@ -9,8 +9,8 @@ use rustpython_parser::{lexer, Mode, Tok};
use ruff_diagnostics::Fix;
use ruff_python_ast::helpers;
use ruff_python_ast::helpers::to_absolute;
use ruff_python_ast::newlines::NewlineWithTrailingNewline;
use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
use ruff_python_ast::whitespace::LinesWithTrailingNewline;
use crate::cst::helpers::compose_module_path;
use crate::cst::matchers::match_module;
@@ -100,7 +100,7 @@ fn is_lone_child(child: &Stmt, parent: &Stmt, deleted: &[&Stmt]) -> Result<bool>
/// of a multi-statement line.
fn trailing_semicolon(stmt: &Stmt, locator: &Locator) -> Option<Location> {
let contents = locator.skip(stmt.end_location.unwrap());
for (row, line) in NewlineWithTrailingNewline::from(contents).enumerate() {
for (row, line) in LinesWithTrailingNewline::from(contents).enumerate() {
let trimmed = line.trim();
if trimmed.starts_with(';') {
let column = line
@@ -123,7 +123,7 @@ fn trailing_semicolon(stmt: &Stmt, locator: &Locator) -> Option<Location> {
fn next_stmt_break(semicolon: Location, locator: &Locator) -> Location {
let start_location = Location::new(semicolon.row(), semicolon.column() + 1);
let contents = locator.skip(start_location);
for (row, line) in NewlineWithTrailingNewline::from(contents).enumerate() {
for (row, line) in LinesWithTrailingNewline::from(contents).enumerate() {
let trimmed = line.trim();
// Skip past any continuations.
if trimmed.starts_with('\\') {

View File

@@ -9,7 +9,7 @@ use ruff_python_ast::source_code::Locator;
use ruff_python_ast::types::Range;
use crate::linter::FixTable;
use crate::registry::{AsRule, Rule};
use crate::registry::AsRule;
pub mod helpers;
@@ -39,7 +39,7 @@ fn apply_fixes<'a>(
.as_ref()
.map(|fix| (diagnostic.kind.rule(), fix))
})
.sorted_by(|(rule1, fix1), (rule2, fix2)| cmp_fix(*rule1, *rule2, fix1, fix2))
.sorted_by_key(|(.., fix)| fix.location)
{
// If we already applied an identical fix as part of another correction, skip
// any re-application.
@@ -92,18 +92,6 @@ pub(crate) fn apply_fix(fix: &Fix, locator: &Locator) -> String {
output
}
/// Compare two fixes.
fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Ordering {
fix1.location
.cmp(&fix2.location)
.then_with(|| match (&rule1, &rule2) {
// Apply `EndsInPeriod` fixes before `NewLineAfterLastParagraph` fixes.
(Rule::EndsInPeriod, Rule::NewLineAfterLastParagraph) => std::cmp::Ordering::Less,
(Rule::NewLineAfterLastParagraph, Rule::EndsInPeriod) => std::cmp::Ordering::Greater,
_ => std::cmp::Ordering::Equal,
})
}
#[cfg(test)]
mod tests {
use rustpython_parser::ast::Location;

View File

@@ -1,4 +1,3 @@
use ruff_python_ast::scope::ScopeStack;
use rustpython_parser::ast::{Expr, Stmt};
use ruff_python_ast::types::Range;
@@ -8,7 +7,7 @@ use ruff_python_ast::visibility::{Visibility, VisibleScope};
use crate::checkers::ast::AnnotationContext;
use crate::docstrings::definition::Definition;
type Context<'a> = (ScopeStack, Vec<RefEquality<'a, Stmt>>);
type Context<'a> = (Vec<usize>, Vec<RefEquality<'a, Stmt>>);
/// A collection of AST nodes that are deferred for later analysis.
/// Used to, e.g., store functions, whose bodies shouldn't be analyzed until all

File diff suppressed because it is too large Load Diff

View File

@@ -15,7 +15,7 @@ pub fn check_file_path(
let mut diagnostics: Vec<Diagnostic> = vec![];
// flake8-no-pep420
if settings.rules.enabled(Rule::ImplicitNamespacePackage) {
if settings.rules.enabled(&Rule::ImplicitNamespacePackage) {
if let Some(diagnostic) =
implicit_namespace_package(path, package, &settings.project_root, &settings.src)
{
@@ -24,7 +24,7 @@ pub fn check_file_path(
}
// pep8-naming
if settings.rules.enabled(Rule::InvalidModuleName) {
if settings.rules.enabled(&Rule::InvalidModuleName) {
if let Some(diagnostic) = invalid_module_name(path, package) {
diagnostics.push(diagnostic);
}

View File

@@ -38,7 +38,7 @@ pub fn check_imports(
// Enforce import rules.
let mut diagnostics = vec![];
if settings.rules.enabled(Rule::UnsortedImports) {
if settings.rules.enabled(&Rule::UnsortedImports) {
for block in &blocks {
if !block.imports.is_empty() {
if let Some(diagnostic) = isort::rules::organize_imports(
@@ -49,7 +49,7 @@ pub fn check_imports(
}
}
}
if settings.rules.enabled(Rule::MissingRequiredImport) {
if settings.rules.enabled(&Rule::MissingRequiredImport) {
diagnostics.extend(isort::rules::add_required_imports(
&blocks, python_ast, locator, stylist, settings, autofix,
));

View File

@@ -166,7 +166,7 @@ pub fn check_logical_lines(
}
#[cfg(feature = "logical_lines")]
let should_fix = autofix.into() && settings.rules.should_fix(Rule::MissingWhitespace);
let should_fix = autofix.into() && settings.rules.should_fix(&Rule::MissingWhitespace);
#[cfg(not(feature = "logical_lines"))]
let should_fix = false;
@@ -181,7 +181,7 @@ pub fn check_logical_lines(
if line.flags.contains(TokenFlags::BRACKET) {
#[cfg(feature = "logical_lines")]
let should_fix =
autofix.into() && settings.rules.should_fix(Rule::WhitespaceBeforeParameters);
autofix.into() && settings.rules.should_fix(&Rule::WhitespaceBeforeParameters);
#[cfg(not(feature = "logical_lines"))]
let should_fix = false;

View File

@@ -5,7 +5,6 @@ use nohash_hasher::IntMap;
use rustpython_parser::ast::Location;
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_python_ast::newlines::StrExt;
use ruff_python_ast::types::Range;
use crate::codes::NoqaCode;
@@ -23,8 +22,8 @@ pub fn check_noqa(
noqa_line_for: &IntMap<usize, usize>,
settings: &Settings,
autofix: flags::Autofix,
) -> Vec<usize> {
let enforce_noqa = settings.rules.enabled(Rule::UnusedNOQA);
) {
let enforce_noqa = settings.rules.enabled(&Rule::UnusedNOQA);
// Whether the file is exempted from all checks.
let mut file_exempted = false;
@@ -39,7 +38,7 @@ pub fn check_noqa(
// Indices of diagnostics that were ignored by a `noqa` directive.
let mut ignored_diagnostics = vec![];
let lines: Vec<&str> = contents.universal_newlines().collect();
let lines: Vec<&str> = contents.lines().collect();
for lineno in commented_lines {
match extract_file_exemption(lines[lineno - 1]) {
Exemption::All => {
@@ -98,7 +97,7 @@ pub fn check_noqa(
ignored_diagnostics.push(index);
continue;
}
(Directive::Codes(.., codes, _), matches) => {
(Directive::Codes(.., codes), matches) => {
if noqa::includes(diagnostic.kind.rule(), codes) {
matches.push(diagnostic.kind.rule().noqa_code());
ignored_diagnostics.push(index);
@@ -125,7 +124,7 @@ pub fn check_noqa(
ignored_diagnostics.push(index);
continue;
}
(Directive::Codes(.., codes, _), matches) => {
(Directive::Codes(.., codes), matches) => {
if noqa::includes(diagnostic.kind.rule(), codes) {
matches.push(diagnostic.kind.rule().noqa_code());
ignored_diagnostics.push(index);
@@ -141,7 +140,7 @@ pub fn check_noqa(
if enforce_noqa {
for (row, (directive, matches)) in noqa_directives {
match directive {
Directive::All(leading_spaces, start_byte, end_byte, trailing_spaces) => {
Directive::All(spaces, start_byte, end_byte) => {
if matches.is_empty() {
let start = lines[row][..start_byte].chars().count();
let end = start + lines[row][start_byte..end_byte].chars().count();
@@ -151,27 +150,15 @@ pub fn check_noqa(
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
);
if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
if start - leading_spaces == 0 && end == lines[row].chars().count() {
diagnostic.amend(Fix::deletion(
Location::new(row + 1, 0),
Location::new(row + 2, 0),
));
} else if end == lines[row].chars().count() {
diagnostic.amend(Fix::deletion(
Location::new(row + 1, start - leading_spaces),
Location::new(row + 1, end + trailing_spaces),
));
} else {
diagnostic.amend(Fix::deletion(
Location::new(row + 1, start),
Location::new(row + 1, end + trailing_spaces),
));
}
diagnostic.amend(Fix::deletion(
Location::new(row + 1, start - spaces),
Location::new(row + 1, lines[row].chars().count()),
));
}
diagnostics.push(diagnostic);
}
}
Directive::Codes(leading_spaces, start_byte, end_byte, codes, trailing_spaces) => {
Directive::Codes(spaces, start_byte, end_byte, codes) => {
let mut disabled_codes = vec![];
let mut unknown_codes = vec![];
let mut unmatched_codes = vec![];
@@ -188,7 +175,7 @@ pub fn check_noqa(
valid_codes.push(code);
} else {
if let Ok(rule) = Rule::from_code(code) {
if settings.rules.enabled(rule) {
if settings.rules.enabled(&rule) {
unmatched_codes.push(code);
} else {
disabled_codes.push(code);
@@ -231,28 +218,15 @@ pub fn check_noqa(
);
if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
if valid_codes.is_empty() {
if start - leading_spaces == 0 && end == lines[row].chars().count()
{
diagnostic.amend(Fix::deletion(
Location::new(row + 1, 0),
Location::new(row + 2, 0),
));
} else if end == lines[row].chars().count() {
diagnostic.amend(Fix::deletion(
Location::new(row + 1, start - leading_spaces),
Location::new(row + 1, end + trailing_spaces),
));
} else {
diagnostic.amend(Fix::deletion(
Location::new(row + 1, start),
Location::new(row + 1, end + trailing_spaces),
));
}
diagnostic.amend(Fix::deletion(
Location::new(row + 1, start - spaces),
Location::new(row + 1, lines[row].chars().count()),
));
} else {
diagnostic.amend(Fix::replacement(
format!("# noqa: {}", valid_codes.join(", ")),
Location::new(row + 1, start),
Location::new(row + 1, end),
Location::new(row + 1, lines[row].chars().count()),
));
}
}
@@ -265,5 +239,7 @@ pub fn check_noqa(
}
ignored_diagnostics.sort_unstable();
ignored_diagnostics
for index in ignored_diagnostics.iter().rev() {
diagnostics.swap_remove(*index);
}
}

View File

@@ -3,8 +3,7 @@
use std::path::Path;
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::newlines::StrExt;
use ruff_python_ast::source_code::{Locator, Stylist};
use ruff_python_ast::source_code::Stylist;
use crate::registry::Rule;
use crate::rules::flake8_executable::helpers::{extract_shebang, ShebangDirective};
@@ -22,8 +21,8 @@ use crate::settings::{flags, Settings};
pub fn check_physical_lines(
path: &Path,
locator: &Locator,
stylist: &Stylist,
contents: &str,
commented_lines: &[usize],
doc_lines: &[usize],
settings: &Settings,
@@ -32,32 +31,32 @@ pub fn check_physical_lines(
let mut diagnostics: Vec<Diagnostic> = vec![];
let mut has_any_shebang = false;
let enforce_blanket_noqa = settings.rules.enabled(Rule::BlanketNOQA);
let enforce_shebang_not_executable = settings.rules.enabled(Rule::ShebangNotExecutable);
let enforce_shebang_missing = settings.rules.enabled(Rule::ShebangMissingExecutableFile);
let enforce_shebang_whitespace = settings.rules.enabled(Rule::ShebangWhitespace);
let enforce_shebang_newline = settings.rules.enabled(Rule::ShebangNewline);
let enforce_shebang_python = settings.rules.enabled(Rule::ShebangPython);
let enforce_blanket_type_ignore = settings.rules.enabled(Rule::BlanketTypeIgnore);
let enforce_doc_line_too_long = settings.rules.enabled(Rule::DocLineTooLong);
let enforce_line_too_long = settings.rules.enabled(Rule::LineTooLong);
let enforce_no_newline_at_end_of_file = settings.rules.enabled(Rule::NoNewLineAtEndOfFile);
let enforce_unnecessary_coding_comment = settings.rules.enabled(Rule::UTF8EncodingDeclaration);
let enforce_mixed_spaces_and_tabs = settings.rules.enabled(Rule::MixedSpacesAndTabs);
let enforce_bidirectional_unicode = settings.rules.enabled(Rule::BidirectionalUnicode);
let enforce_trailing_whitespace = settings.rules.enabled(Rule::TrailingWhitespace);
let enforce_blanket_noqa = settings.rules.enabled(&Rule::BlanketNOQA);
let enforce_shebang_not_executable = settings.rules.enabled(&Rule::ShebangNotExecutable);
let enforce_shebang_missing = settings.rules.enabled(&Rule::ShebangMissingExecutableFile);
let enforce_shebang_whitespace = settings.rules.enabled(&Rule::ShebangWhitespace);
let enforce_shebang_newline = settings.rules.enabled(&Rule::ShebangNewline);
let enforce_shebang_python = settings.rules.enabled(&Rule::ShebangPython);
let enforce_blanket_type_ignore = settings.rules.enabled(&Rule::BlanketTypeIgnore);
let enforce_doc_line_too_long = settings.rules.enabled(&Rule::DocLineTooLong);
let enforce_line_too_long = settings.rules.enabled(&Rule::LineTooLong);
let enforce_no_newline_at_end_of_file = settings.rules.enabled(&Rule::NoNewLineAtEndOfFile);
let enforce_unnecessary_coding_comment = settings.rules.enabled(&Rule::UTF8EncodingDeclaration);
let enforce_mixed_spaces_and_tabs = settings.rules.enabled(&Rule::MixedSpacesAndTabs);
let enforce_bidirectional_unicode = settings.rules.enabled(&Rule::BidirectionalUnicode);
let enforce_trailing_whitespace = settings.rules.enabled(&Rule::TrailingWhitespace);
let enforce_blank_line_contains_whitespace =
settings.rules.enabled(Rule::BlankLineContainsWhitespace);
let enforce_indentation_contains_tabs = settings.rules.enabled(Rule::IndentationContainsTabs);
settings.rules.enabled(&Rule::BlankLineContainsWhitespace);
let enforce_indentation_contains_tabs = settings.rules.enabled(&Rule::IndentationContainsTabs);
let fix_unnecessary_coding_comment =
autofix.into() && settings.rules.should_fix(Rule::UTF8EncodingDeclaration);
autofix.into() && settings.rules.should_fix(&Rule::UTF8EncodingDeclaration);
let fix_shebang_whitespace =
autofix.into() && settings.rules.should_fix(Rule::ShebangWhitespace);
autofix.into() && settings.rules.should_fix(&Rule::ShebangWhitespace);
let mut commented_lines_iter = commented_lines.iter().peekable();
let mut doc_lines_iter = doc_lines.iter().peekable();
for (index, line) in locator.contents().universal_newlines().enumerate() {
for (index, line) in contents.lines().enumerate() {
while commented_lines_iter
.next_if(|lineno| &(index + 1) == *lineno)
.is_some()
@@ -163,9 +162,9 @@ pub fn check_physical_lines(
if enforce_no_newline_at_end_of_file {
if let Some(diagnostic) = no_newline_at_end_of_file(
locator,
stylist,
autofix.into() && settings.rules.should_fix(Rule::NoNewLineAtEndOfFile),
contents,
autofix.into() && settings.rules.should_fix(&Rule::NoNewLineAtEndOfFile),
) {
diagnostics.push(diagnostic);
}
@@ -200,8 +199,8 @@ mod tests {
let check_with_max_line_length = |line_length: usize| {
check_physical_lines(
Path::new("foo.py"),
&locator,
&stylist,
line,
&[],
&[],
&Settings {

View File

@@ -8,7 +8,7 @@ use crate::registry::{AsRule, Rule};
use crate::rules::ruff::rules::Context;
use crate::rules::{
eradicate, flake8_commas, flake8_implicit_str_concat, flake8_pyi, flake8_quotes, pycodestyle,
pylint, pyupgrade, ruff,
pyupgrade, ruff,
};
use crate::settings::{flags, Settings};
use ruff_diagnostics::Diagnostic;
@@ -23,43 +23,41 @@ pub fn check_tokens(
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
let enforce_ambiguous_unicode_character = settings.rules.any_enabled(&[
Rule::AmbiguousUnicodeCharacterString,
Rule::AmbiguousUnicodeCharacterDocstring,
Rule::AmbiguousUnicodeCharacterComment,
]);
let enforce_invalid_string_character = settings.rules.any_enabled(&[
Rule::InvalidCharacterBackspace,
Rule::InvalidCharacterSub,
Rule::InvalidCharacterEsc,
Rule::InvalidCharacterNul,
Rule::InvalidCharacterZeroWidthSpace,
]);
let enforce_quotes = settings.rules.any_enabled(&[
Rule::BadQuotesInlineString,
Rule::BadQuotesMultilineString,
Rule::BadQuotesDocstring,
Rule::AvoidableEscapedQuote,
]);
let enforce_commented_out_code = settings.rules.enabled(Rule::CommentedOutCode);
let enforce_compound_statements = settings.rules.any_enabled(&[
Rule::MultipleStatementsOnOneLineColon,
Rule::MultipleStatementsOnOneLineSemicolon,
Rule::UselessSemicolon,
]);
let enforce_invalid_escape_sequence = settings.rules.enabled(Rule::InvalidEscapeSequence);
let enforce_implicit_string_concatenation = settings.rules.any_enabled(&[
Rule::SingleLineImplicitStringConcatenation,
Rule::MultiLineImplicitStringConcatenation,
]);
let enforce_trailing_comma = settings.rules.any_enabled(&[
Rule::TrailingCommaMissing,
Rule::TrailingCommaOnBareTupleProhibited,
Rule::TrailingCommaProhibited,
]);
let enforce_extraneous_parenthesis = settings.rules.enabled(Rule::ExtraneousParentheses);
let enforce_type_comment_in_stub = settings.rules.enabled(Rule::TypeCommentInStub);
let enforce_ambiguous_unicode_character = settings
.rules
.enabled(&Rule::AmbiguousUnicodeCharacterString)
|| settings
.rules
.enabled(&Rule::AmbiguousUnicodeCharacterDocstring)
|| settings
.rules
.enabled(&Rule::AmbiguousUnicodeCharacterComment);
let enforce_quotes = settings.rules.enabled(&Rule::BadQuotesInlineString)
|| settings.rules.enabled(&Rule::BadQuotesMultilineString)
|| settings.rules.enabled(&Rule::BadQuotesDocstring)
|| settings.rules.enabled(&Rule::AvoidableEscapedQuote);
let enforce_commented_out_code = settings.rules.enabled(&Rule::CommentedOutCode);
let enforce_compound_statements = settings
.rules
.enabled(&Rule::MultipleStatementsOnOneLineColon)
|| settings
.rules
.enabled(&Rule::MultipleStatementsOnOneLineSemicolon)
|| settings.rules.enabled(&Rule::UselessSemicolon);
let enforce_invalid_escape_sequence = settings.rules.enabled(&Rule::InvalidEscapeSequence);
let enforce_implicit_string_concatenation = settings
.rules
.enabled(&Rule::SingleLineImplicitStringConcatenation)
|| settings
.rules
.enabled(&Rule::MultiLineImplicitStringConcatenation);
let enforce_trailing_comma = settings.rules.enabled(&Rule::TrailingCommaMissing)
|| settings
.rules
.enabled(&Rule::TrailingCommaOnBareTupleProhibited)
|| settings.rules.enabled(&Rule::TrailingCommaProhibited);
let enforce_extraneous_parenthesis = settings.rules.enabled(&Rule::ExtraneousParentheses);
let enforce_type_comment_in_stub = settings.rules.enabled(&Rule::TypeCommentInStub);
// RUF001, RUF002, RUF003
if enforce_ambiguous_unicode_character {
@@ -113,23 +111,11 @@ pub fn check_tokens(
locator,
*start,
*end,
autofix.into() && settings.rules.should_fix(Rule::InvalidEscapeSequence),
autofix.into() && settings.rules.should_fix(&Rule::InvalidEscapeSequence),
));
}
}
}
// PLE2510, PLE2512, PLE2513
if enforce_invalid_string_character {
for (start, tok, end) in tokens.iter().flatten() {
if matches!(tok, Tok::String { .. }) {
diagnostics.extend(
pylint::rules::invalid_string_characters(locator, *start, *end, autofix.into())
.into_iter()
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
);
}
}
}
// E701, E702, E703
if enforce_compound_statements {

View File

@@ -161,44 +161,35 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Pyflakes, "901") => Rule::RaiseNotImplemented,
// pylint
(Pylint, "C0414") => Rule::UselessImportAlias,
(Pylint, "C1901") => Rule::CompareToEmptyString,
(Pylint, "C3002") => Rule::UnnecessaryDirectLambdaCall,
(Pylint, "E0100") => Rule::YieldInInit,
(Pylint, "E0101") => Rule::ReturnInInit,
(Pylint, "E0116") => Rule::ContinueInFinally,
(Pylint, "E0117") => Rule::NonlocalWithoutBinding,
(Pylint, "E0118") => Rule::UsedPriorGlobalDeclaration,
(Pylint, "E0604") => Rule::InvalidAllObject,
(Pylint, "E0605") => Rule::InvalidAllFormat,
(Pylint, "E1142") => Rule::AwaitOutsideAsync,
(Pylint, "E1205") => Rule::LoggingTooManyArgs,
(Pylint, "E1206") => Rule::LoggingTooFewArgs,
(Pylint, "E1307") => Rule::BadStringFormatType,
(Pylint, "E1310") => Rule::BadStrStripCall,
(Pylint, "E1507") => Rule::InvalidEnvvarValue,
(Pylint, "E2502") => Rule::BidirectionalUnicode,
(Pylint, "E2510") => Rule::InvalidCharacterBackspace,
(Pylint, "E2512") => Rule::InvalidCharacterSub,
(Pylint, "E2513") => Rule::InvalidCharacterEsc,
(Pylint, "E2514") => Rule::InvalidCharacterNul,
(Pylint, "E2515") => Rule::InvalidCharacterZeroWidthSpace,
(Pylint, "R0133") => Rule::ComparisonOfConstant,
(Pylint, "E1310") => Rule::BadStrStripCall,
(Pylint, "C0414") => Rule::UselessImportAlias,
(Pylint, "C3002") => Rule::UnnecessaryDirectLambdaCall,
(Pylint, "E0117") => Rule::NonlocalWithoutBinding,
(Pylint, "E0118") => Rule::UsedPriorGlobalDeclaration,
(Pylint, "E1142") => Rule::AwaitOutsideAsync,
(Pylint, "R0206") => Rule::PropertyWithParameters,
(Pylint, "R0402") => Rule::ConsiderUsingFromImport,
(Pylint, "R0911") => Rule::TooManyReturnStatements,
(Pylint, "R0912") => Rule::TooManyBranches,
(Pylint, "R0913") => Rule::TooManyArguments,
(Pylint, "R0915") => Rule::TooManyStatements,
(Pylint, "C1901") => Rule::CompareToEmptyString,
(Pylint, "R0133") => Rule::ComparisonOfConstant,
(Pylint, "R1701") => Rule::ConsiderMergingIsinstance,
(Pylint, "R1711") => Rule::UselessReturn,
(Pylint, "R1722") => Rule::ConsiderUsingSysExit,
(Pylint, "R2004") => Rule::MagicValueComparison,
(Pylint, "R5501") => Rule::CollapsibleElseIf,
(Pylint, "R2004") => Rule::MagicValueComparison,
(Pylint, "W0120") => Rule::UselessElseOnLoop,
(Pylint, "W0602") => Rule::GlobalVariableNotAssigned,
(Pylint, "W0603") => Rule::GlobalStatement,
(Pylint, "W1508") => Rule::InvalidEnvvarDefault,
(Pylint, "R0911") => Rule::TooManyReturnStatements,
(Pylint, "R0913") => Rule::TooManyArguments,
(Pylint, "R0912") => Rule::TooManyBranches,
(Pylint, "R0915") => Rule::TooManyStatements,
(Pylint, "W2901") => Rule::RedefinedLoopName,
// flake8-builtins
@@ -233,7 +224,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Flake8Bugbear, "025") => Rule::DuplicateTryBlockException,
(Flake8Bugbear, "026") => Rule::StarArgUnpackingAfterKeywordArg,
(Flake8Bugbear, "027") => Rule::EmptyMethodWithoutAbstractDecorator,
(Flake8Bugbear, "028") => Rule::NoExplicitStacklevel,
(Flake8Bugbear, "029") => Rule::ExceptWithEmptyTuple,
(Flake8Bugbear, "030") => Rule::ExceptWithNonExceptionClasses,
(Flake8Bugbear, "032") => Rule::UnintentionalTypeAnnotation,
@@ -383,7 +373,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Pyupgrade, "032") => Rule::FString,
(Pyupgrade, "033") => Rule::FunctoolsCache,
(Pyupgrade, "034") => Rule::ExtraneousParentheses,
(Pyupgrade, "035") => Rule::DeprecatedImport,
(Pyupgrade, "035") => Rule::ImportReplacements,
(Pyupgrade, "036") => Rule::OutdatedVersionBlock,
(Pyupgrade, "037") => Rule::QuotedAnnotation,
(Pyupgrade, "038") => Rule::IsinstanceWithTuple,
@@ -668,7 +658,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Ruff, "003") => Rule::AmbiguousUnicodeCharacterComment,
(Ruff, "005") => Rule::UnpackInsteadOfConcatenatingToCollectionLiteral,
(Ruff, "006") => Rule::AsyncioDanglingTask,
(Ruff, "007") => Rule::PairwiseOverZipped,
(Ruff, "100") => Rule::UnusedNOQA,
// flake8-django

View File

@@ -107,21 +107,15 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
// omit a space after the colon. The remaining action comments are
// required to include the space, and must appear on their own lines.
let comment_text = comment_text.trim_end();
if matches!(comment_text, "# isort: split" | "# ruff: isort: split") {
if comment_text == "# isort: split" {
splits.push(start.row());
} else if matches!(
comment_text,
"# isort: skip_file"
| "# isort:skip_file"
| "# ruff: isort: skip_file"
| "# ruff: isort:skip_file"
) {
} else if comment_text == "# isort: skip_file" || comment_text == "# isort:skip_file" {
return IsortDirectives {
skip_file: true,
..IsortDirectives::default()
};
} else if off.is_some() {
if comment_text == "# isort: on" || comment_text == "# ruff: isort: on" {
if comment_text == "# isort: on" {
if let Some(start) = off {
for row in start.row() + 1..=end.row() {
exclusions.insert(row);
@@ -132,7 +126,7 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
} else {
if comment_text.contains("isort: skip") || comment_text.contains("isort:skip") {
exclusions.insert(start.row());
} else if comment_text == "# isort: off" || comment_text == "# ruff: isort: off" {
} else if comment_text == "# isort: off" {
off = Some(start);
}
}

View File

@@ -20,7 +20,6 @@ use crate::rules::{
};
use crate::settings::options::Options;
use crate::settings::pyproject::Pyproject;
use crate::settings::types::PythonVersion;
use crate::warn_user;
const DEFAULT_SELECTORS: &[RuleSelector] = &[
@@ -425,15 +424,6 @@ pub fn convert(
}
}
if let Some(project) = &external_config.project {
if let Some(requires_python) = &project.requires_python {
if options.target_version.is_none() {
options.target_version =
PythonVersion::get_minimum_supported_version(requires_python);
}
}
}
// Create the pyproject.toml.
Ok(Pyproject::new(options))
}
@@ -449,17 +439,13 @@ fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::str::FromStr;
use anyhow::Result;
use itertools::Itertools;
use pep440_rs::VersionSpecifiers;
use pretty_assertions::assert_eq;
use super::super::plugin::Plugin;
use super::convert;
use crate::flake8_to_ruff::converter::DEFAULT_SELECTORS;
use crate::flake8_to_ruff::pep621::Project;
use crate::flake8_to_ruff::ExternalConfig;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
@@ -467,7 +453,6 @@ mod tests {
use crate::rules::{flake8_quotes, pydocstyle};
use crate::settings::options::Options;
use crate::settings::pyproject::Pyproject;
use crate::settings::types::PythonVersion;
fn default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> Options {
Options {
@@ -624,25 +609,4 @@ mod tests {
Ok(())
}
#[test]
fn it_converts_project_requires_python() -> Result<()> {
let actual = convert(
&HashMap::from([("flake8".to_string(), HashMap::default())]),
&ExternalConfig {
project: Some(&Project {
requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
}),
..ExternalConfig::default()
},
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
target_version: Some(PythonVersion::Py38),
..default_options([])
});
assert_eq!(actual, expected);
Ok(())
}
}

View File

@@ -1,10 +1,8 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Default)]
pub struct ExternalConfig<'a> {
pub black: Option<&'a Black>,
pub isort: Option<&'a Isort>,
pub project: Option<&'a Project>,
}

View File

@@ -3,7 +3,6 @@ mod converter;
mod external_config;
mod isort;
mod parser;
pub mod pep621;
mod plugin;
mod pyproject;

View File

@@ -1,10 +0,0 @@
//! Extract PEP 621 configuration settings from a pyproject.toml.
use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct Project {
#[serde(alias = "requires-python", alias = "requires_python")]
pub requires_python: Option<VersionSpecifiers>,
}

View File

@@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize};
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Tools {
@@ -16,7 +15,6 @@ pub struct Tools {
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Pyproject {
pub tool: Option<Tools>,
pub project: Option<Project>,
}
pub fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {

View File

@@ -22,12 +22,11 @@ pub fn extract_path_names(path: &Path) -> Result<(&str, &str)> {
}
/// Create a set with codes matching the pattern/code pairs.
pub(crate) fn ignores_from_path(
pub(crate) fn ignores_from_path<'a>(
path: &Path,
pattern_code_pairs: &[(GlobMatcher, GlobMatcher, FxHashSet<Rule>)],
) -> FxHashSet<Rule> {
pattern_code_pairs: &'a [(GlobMatcher, GlobMatcher, FxHashSet<Rule>)],
) -> FxHashSet<&'a Rule> {
let (file_path, file_basename) = extract_path_names(path).expect("Unable to parse filename");
pattern_code_pairs
.iter()
.filter_map(|(absolute, basename, codes)| {
@@ -38,21 +37,20 @@ pub(crate) fn ignores_from_path(
basename.glob().regex(),
codes
);
Some(codes)
} else if absolute.is_match(file_path) {
return Some(codes.iter());
}
if absolute.is_match(file_path) {
debug!(
"Adding per-file ignores for {:?} due to absolute match on {:?}: {:?}",
path,
absolute.glob().regex(),
codes
);
Some(codes)
} else {
None
return Some(codes.iter());
}
None
})
.flatten()
.copied()
.collect()
}
@@ -76,20 +74,10 @@ pub fn normalize_path_to<P: AsRef<Path>, R: AsRef<Path>>(path: P, project_root:
}
/// Convert an absolute path to be relative to the current working directory.
pub fn relativize_path<P: AsRef<Path>>(path: P) -> String {
pub fn relativize_path(path: impl AsRef<Path>) -> String {
let path = path.as_ref();
if let Ok(path) = path.strip_prefix(&*path_dedot::CWD) {
return format!("{}", path.display());
}
format!("{}", path.display())
}
/// Convert an absolute path to be relative to the specified project root.
pub fn relativize_path_to<P: AsRef<Path>, R: AsRef<Path>>(path: P, project_root: R) -> String {
format!(
"{}",
pathdiff::diff_paths(&path, project_root)
.expect("Could not diff paths")
.display()
)
}

View File

@@ -50,7 +50,7 @@ impl<T> LinterResult<T> {
}
}
pub type FixTable = FxHashMap<Rule, usize>;
pub type FixTable = FxHashMap<&'static Rule, usize>;
/// Generate `Diagnostic`s from the source code contents at the
/// given `Path`.
@@ -74,7 +74,7 @@ pub fn check_path(
// Collect doc lines. This requires a rare mix of tokens (for comments) and AST
// (for docstrings), which demands special-casing at this level.
let use_doc_lines = settings.rules.enabled(Rule::DocLineTooLong);
let use_doc_lines = settings.rules.enabled(&Rule::DocLineTooLong);
let mut doc_lines = vec![];
if use_doc_lines {
doc_lines.extend(doc_lines_from_tokens(&tokens));
@@ -159,14 +159,14 @@ pub fn check_path(
}
}
Err(parse_error) => {
if settings.rules.enabled(Rule::SyntaxError) {
if settings.rules.enabled(&Rule::SyntaxError) {
pycodestyle::rules::syntax_error(&mut diagnostics, &parse_error);
}
// If the syntax error is ignored, suppress it (regardless of whether
// `Rule::SyntaxError` is enabled).
if !rule_is_ignored(
Rule::SyntaxError,
&Rule::SyntaxError,
parse_error.location.row(),
&directives.noqa_line_for,
locator,
@@ -191,8 +191,8 @@ pub fn check_path(
{
diagnostics.extend(check_physical_lines(
path,
locator,
stylist,
contents,
indexer.commented_lines(),
&doc_lines,
settings,
@@ -204,7 +204,7 @@ pub fn check_path(
if !diagnostics.is_empty() && !settings.per_file_ignores.is_empty() {
let ignores = fs::ignores_from_path(path, &settings.per_file_ignores);
if !ignores.is_empty() {
diagnostics.retain(|diagnostic| !ignores.contains(&diagnostic.kind.rule()));
diagnostics.retain(|diagnostic| !ignores.contains(diagnostic.kind.rule()));
}
};
@@ -215,7 +215,7 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_noqa())
{
let ignored = check_noqa(
check_noqa(
&mut diagnostics,
contents,
indexer.commented_lines(),
@@ -223,11 +223,6 @@ pub fn check_path(
settings,
error.as_ref().map_or(autofix, |_| flags::Autofix::Disabled),
);
if noqa.into() {
for index in ignored.iter().rev() {
diagnostics.swap_remove(*index);
}
}
}
LinterResult::new(diagnostics, error)

View File

@@ -1,31 +1,8 @@
use std::sync::Mutex;
use anyhow::Result;
use colored::Colorize;
use fern;
use log::Level;
use once_cell::sync::Lazy;
pub(crate) static WARNINGS: Lazy<Mutex<Vec<&'static str>>> = Lazy::new(Mutex::default);
/// Warn a user once, with uniqueness determined by the given ID.
#[macro_export]
macro_rules! warn_user_once_by_id {
($id:expr, $($arg:tt)*) => {
use colored::Colorize;
use log::warn;
if let Ok(mut states) = $crate::logging::WARNINGS.lock() {
if !states.contains(&$id) {
let message = format!("{}", format_args!($($arg)*));
warn!("{}", message.bold());
states.push($id);
}
}
};
}
/// Warn a user once, with uniqueness determined by the calling location itself.
#[macro_export]
macro_rules! warn_user_once {
($($arg:tt)*) => {

View File

@@ -12,7 +12,6 @@ use rustc_hash::{FxHashMap, FxHashSet};
use rustpython_parser::ast::Location;
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::newlines::StrExt;
use ruff_python_ast::source_code::{LineEnding, Locator};
use ruff_python_ast::types::Range;
@@ -22,7 +21,7 @@ use crate::rule_redirects::get_redirect_target;
static NOQA_LINE_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(
r"(?P<leading_spaces>\s*)(?P<noqa>(?i:# noqa)(?::\s?(?P<codes>(?:[A-Z]+[0-9]+)(?:[,\s]+[A-Z]+[0-9]+)*))?)(?P<trailing_spaces>\s*)",
r"(?P<spaces>\s*)(?P<noqa>(?i:# noqa)(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)",
)
.unwrap()
});
@@ -74,42 +73,35 @@ pub fn extract_file_exemption(line: &str) -> Exemption {
#[derive(Debug)]
pub enum Directive<'a> {
None,
All(usize, usize, usize, usize),
Codes(usize, usize, usize, Vec<&'a str>, usize),
All(usize, usize, usize),
Codes(usize, usize, usize, Vec<&'a str>),
}
/// Extract the noqa `Directive` from a line of Python source code.
pub fn extract_noqa_directive(line: &str) -> Directive {
match NOQA_LINE_REGEX.captures(line) {
Some(caps) => match caps.name("leading_spaces") {
Some(leading_spaces) => match caps.name("trailing_spaces") {
Some(trailing_spaces) => match caps.name("noqa") {
Some(noqa) => match caps.name("codes") {
Some(codes) => {
let codes: Vec<&str> = SPLIT_COMMA_REGEX
.split(codes.as_str().trim())
.map(str::trim)
.filter(|code| !code.is_empty())
.collect();
if codes.is_empty() {
warn!("Expected rule codes on `noqa` directive: \"{line}\"");
}
Directive::Codes(
leading_spaces.as_str().chars().count(),
noqa.start(),
noqa.end(),
codes,
trailing_spaces.as_str().chars().count(),
)
Some(caps) => match caps.name("spaces") {
Some(spaces) => match caps.name("noqa") {
Some(noqa) => match caps.name("codes") {
Some(codes) => {
let codes: Vec<&str> = SPLIT_COMMA_REGEX
.split(codes.as_str().trim())
.map(str::trim)
.filter(|code| !code.is_empty())
.collect();
if codes.is_empty() {
warn!("Expected rule codes on `noqa` directive: \"{line}\"");
}
None => Directive::All(
leading_spaces.as_str().chars().count(),
Directive::Codes(
spaces.as_str().chars().count(),
noqa.start(),
noqa.end(),
trailing_spaces.as_str().chars().count(),
),
},
None => Directive::None,
codes,
)
}
None => {
Directive::All(spaces.as_str().chars().count(), noqa.start(), noqa.end())
}
},
None => Directive::None,
},
@@ -121,7 +113,7 @@ pub fn extract_noqa_directive(line: &str) -> Directive {
/// Returns `true` if the string list of `codes` includes `code` (or an alias
/// thereof).
pub fn includes(needle: Rule, haystack: &[&str]) -> bool {
pub fn includes(needle: &Rule, haystack: &[&str]) -> bool {
let needle = needle.noqa_code();
haystack
.iter()
@@ -130,7 +122,7 @@ pub fn includes(needle: Rule, haystack: &[&str]) -> bool {
/// Returns `true` if the given [`Rule`] is ignored at the specified `lineno`.
pub fn rule_is_ignored(
code: Rule,
code: &Rule,
lineno: usize,
noqa_line_for: &IntMap<usize, usize>,
locator: &Locator,
@@ -143,7 +135,7 @@ pub fn rule_is_ignored(
match extract_noqa_directive(line) {
Directive::None => false,
Directive::All(..) => true,
Directive::Codes(.., codes, _) => includes(code, &codes),
Directive::Codes(.., codes) => includes(code, &codes),
}
}
@@ -174,7 +166,7 @@ fn add_noqa_inner(
line_ending: &LineEnding,
) -> (usize, String) {
// Map of line number to set of (non-ignored) diagnostic codes that are triggered on that line.
let mut matches_by_line: FxHashMap<usize, FxHashSet<Rule>> = FxHashMap::default();
let mut matches_by_line: FxHashMap<usize, FxHashSet<&Rule>> = FxHashMap::default();
// Whether the file is exempted from all checks.
let mut file_exempted = false;
@@ -182,7 +174,7 @@ fn add_noqa_inner(
// Codes that are globally exempted (within the current file).
let mut file_exemptions: Vec<NoqaCode> = vec![];
let lines: Vec<&str> = contents.universal_newlines().collect();
let lines: Vec<&str> = contents.lines().collect();
for lineno in commented_lines {
match extract_file_exemption(lines[lineno - 1]) {
Exemption::All => {
@@ -224,7 +216,7 @@ fn add_noqa_inner(
Directive::All(..) => {
continue;
}
Directive::Codes(.., codes, _) => {
Directive::Codes(.., codes) => {
if includes(diagnostic.kind.rule(), &codes) {
continue;
}
@@ -244,7 +236,7 @@ fn add_noqa_inner(
Directive::All(..) => {
continue;
}
Directive::Codes(.., codes, _) => {
Directive::Codes(.., codes) => {
if includes(diagnostic.kind.rule(), &codes) {
continue;
}
@@ -264,7 +256,7 @@ fn add_noqa_inner(
let mut count: usize = 0;
let mut output = String::new();
for (lineno, line) in lines.into_iter().enumerate() {
for (lineno, line) in contents.lines().enumerate() {
match matches_by_line.get(&lineno) {
None => {
output.push_str(line);
@@ -280,7 +272,7 @@ fn add_noqa_inner(
output.push_str(" # noqa: ");
// Add codes.
push_codes(&mut output, rules.iter().map(Rule::noqa_code));
push_codes(&mut output, rules.iter().map(|r| r.noqa_code()));
output.push_str(line_ending);
count += 1;
}
@@ -289,7 +281,7 @@ fn add_noqa_inner(
output.push_str(line);
output.push_str(line_ending);
}
Directive::Codes(_, start_byte, _, existing, _) => {
Directive::Codes(_, start_byte, _, existing) => {
// Reconstruct the line based on the preserved rule codes.
// This enables us to tally the number of edits.
let mut formatted = String::with_capacity(line.len());

View File

@@ -143,22 +143,13 @@ ruff_macros::register_rules!(
rules::pyflakes::rules::UnusedAnnotation,
rules::pyflakes::rules::RaiseNotImplemented,
// pylint
rules::pylint::rules::UselessReturn,
rules::pylint::rules::YieldInInit,
rules::pylint::rules::InvalidAllObject,
rules::pylint::rules::InvalidAllFormat,
rules::pylint::rules::InvalidEnvvarDefault,
rules::pylint::rules::InvalidEnvvarValue,
rules::pylint::rules::BadStringFormatType,
rules::pylint::rules::BidirectionalUnicode,
rules::pylint::rules::InvalidCharacterBackspace,
rules::pylint::rules::InvalidCharacterSub,
rules::pylint::rules::InvalidCharacterEsc,
rules::pylint::rules::InvalidCharacterNul,
rules::pylint::rules::InvalidCharacterZeroWidthSpace,
rules::pylint::rules::BadStrStripCall,
rules::pylint::rules::CollapsibleElseIf,
rules::pylint::rules::ContinueInFinally,
rules::pylint::rules::UselessImportAlias,
rules::pylint::rules::UnnecessaryDirectLambdaCall,
rules::pylint::rules::NonlocalWithoutBinding,
@@ -192,7 +183,6 @@ ruff_macros::register_rules!(
rules::flake8_bugbear::rules::UnreliableCallableCheck,
rules::flake8_bugbear::rules::StripWithMultiCharacters,
rules::flake8_bugbear::rules::MutableArgumentDefault,
rules::flake8_bugbear::rules::NoExplicitStacklevel,
rules::flake8_bugbear::rules::UnusedLoopControlVariable,
rules::flake8_bugbear::rules::FunctionCallArgumentDefault,
rules::flake8_bugbear::rules::GetAttrWithConstant,
@@ -349,7 +339,7 @@ ruff_macros::register_rules!(
rules::pyupgrade::rules::FString,
rules::pyupgrade::rules::FunctoolsCache,
rules::pyupgrade::rules::ExtraneousParentheses,
rules::pyupgrade::rules::DeprecatedImport,
rules::pyupgrade::rules::ImportReplacements,
rules::pyupgrade::rules::OutdatedVersionBlock,
rules::pyupgrade::rules::QuotedAnnotation,
rules::pyupgrade::rules::IsinstanceWithTuple,
@@ -609,7 +599,6 @@ ruff_macros::register_rules!(
rules::ruff::rules::UnpackInsteadOfConcatenatingToCollectionLiteral,
rules::ruff::rules::AsyncioDanglingTask,
rules::ruff::rules::UnusedNOQA,
rules::ruff::rules::PairwiseOverZipped,
// flake8-django
rules::flake8_django::rules::NullableModelStringField,
rules::flake8_django::rules::LocalsInRenderFunction,
@@ -619,10 +608,6 @@ ruff_macros::register_rules!(
rules::flake8_django::rules::NonLeadingReceiverDecorator,
);
/// Conversion into the [`Rule`] associated with a value (e.g., a diagnostic
/// kind, as in `diagnostic.kind.rule()` elsewhere in this file).
pub trait AsRule {
    /// Return the [`Rule`] that this value corresponds to.
    fn rule(&self) -> Rule;
}
impl Rule {
pub fn from_code(code: &str) -> Result<Self, FromCodeError> {
let (linter, code) = Linter::parse_code(code).ok_or(FromCodeError::Unknown)?;
@@ -818,7 +803,7 @@ impl Linter {
}
}
#[derive(is_macro::Is, Copy, Clone)]
#[derive(is_macro::Is)]
pub enum LintSource {
Ast,
Io,
@@ -833,9 +818,9 @@ pub enum LintSource {
impl Rule {
/// The source for the diagnostic (either the AST, the filesystem, or the
/// physical lines).
pub const fn lint_source(&self) -> LintSource {
pub const fn lint_source(&self) -> &'static LintSource {
match self {
Rule::UnusedNOQA => LintSource::Noqa,
Rule::UnusedNOQA => &LintSource::Noqa,
Rule::BlanketNOQA
| Rule::BlanketTypeIgnore
| Rule::DocLineTooLong
@@ -851,7 +836,7 @@ impl Rule {
| Rule::ShebangWhitespace
| Rule::TrailingWhitespace
| Rule::IndentationContainsTabs
| Rule::BlankLineContainsWhitespace => LintSource::PhysicalLines,
| Rule::BlankLineContainsWhitespace => &LintSource::PhysicalLines,
Rule::AmbiguousUnicodeCharacterComment
| Rule::AmbiguousUnicodeCharacterDocstring
| Rule::AmbiguousUnicodeCharacterString
@@ -861,11 +846,6 @@ impl Rule {
| Rule::BadQuotesMultilineString
| Rule::CommentedOutCode
| Rule::MultiLineImplicitStringConcatenation
| Rule::InvalidCharacterBackspace
| Rule::InvalidCharacterSub
| Rule::InvalidCharacterEsc
| Rule::InvalidCharacterNul
| Rule::InvalidCharacterZeroWidthSpace
| Rule::ExtraneousParentheses
| Rule::InvalidEscapeSequence
| Rule::SingleLineImplicitStringConcatenation
@@ -875,10 +855,10 @@ impl Rule {
| Rule::UselessSemicolon
| Rule::MultipleStatementsOnOneLineSemicolon
| Rule::TrailingCommaProhibited
| Rule::TypeCommentInStub => LintSource::Tokens,
Rule::IOError => LintSource::Io,
Rule::UnsortedImports | Rule::MissingRequiredImport => LintSource::Imports,
Rule::ImplicitNamespacePackage | Rule::InvalidModuleName => LintSource::Filesystem,
| Rule::TypeCommentInStub => &LintSource::Tokens,
Rule::IOError => &LintSource::Io,
Rule::UnsortedImports | Rule::MissingRequiredImport => &LintSource::Imports,
Rule::ImplicitNamespacePackage | Rule::InvalidModuleName => &LintSource::Filesystem,
#[cfg(feature = "logical_lines")]
Rule::IndentationWithInvalidMultiple
| Rule::IndentationWithInvalidMultipleComment
@@ -910,8 +890,8 @@ impl Rule {
| Rule::WhitespaceAfterOpenBracket
| Rule::WhitespaceBeforeCloseBracket
| Rule::WhitespaceBeforeParameters
| Rule::WhitespaceBeforePunctuation => LintSource::LogicalLines,
_ => LintSource::Ast,
| Rule::WhitespaceBeforePunctuation => &LintSource::LogicalLines,
_ => &LintSource::Ast,
}
}
}
@@ -934,7 +914,6 @@ pub const INCOMPATIBLE_CODES: &[(Rule, Rule, &str); 2] = &[
#[cfg(test)]
mod tests {
use std::mem::size_of;
use strum::IntoEnumIterator;
use super::{Linter, Rule, RuleNamespace};
@@ -980,9 +959,4 @@ mod tests {
assert_eq!(code, format!("{}{rest}", linter.common_prefix()));
}
}
#[test]
fn rule_size() {
assert_eq!(2, size_of::<Rule>());
}
}

View File

@@ -128,7 +128,7 @@ pub fn resolve_configuration(
// Resolve the current path.
let options = pyproject::load_options(&path)
.map_err(|err| anyhow!("Failed to parse `{}`: {}", path.display(), err))?;
.map_err(|err| anyhow!("Failed to parse `{}`: {}", path.to_string_lossy(), err))?;
let project_root = relativity.resolve(&path);
let configuration = Configuration::from_options(options, &project_root)?;

View File

@@ -18,7 +18,9 @@ static REDIRECTS: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
// The following are here because we don't yet have the many-to-one mapping enabled.
("SIM111", "SIM110"),
// The following are deprecated.
("C", "C4"),
("C9", "C90"),
("T", "T10"),
("T1", "T10"),
("T2", "T20"),
// TODO(charlie): Remove by 2023-02-01.

View File

@@ -15,17 +15,9 @@ use crate::rule_redirects::get_redirect;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum RuleSelector {
/// Select all rules.
/// All rules
All,
/// Legacy category to select both the `mccabe` and `flake8-comprehensions` linters
/// via a single selector.
C,
/// Legacy category to select both the `flake8-debugger` and `flake8-print` linters
/// via a single selector.
T,
/// Select all rules for a given linter.
Linter(Linter),
/// Select all rules for a given linter with a given prefix.
Prefix {
prefix: RuleCodePrefix,
redirected_from: Option<&'static str>,
@@ -44,10 +36,6 @@ impl FromStr for RuleSelector {
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s == "ALL" {
Ok(Self::All)
} else if s == "C" {
Ok(Self::C)
} else if s == "T" {
Ok(Self::T)
} else {
let (s, redirected_from) = match get_redirect(s) {
Some((from, target)) => (target, Some(from)),
@@ -82,8 +70,6 @@ impl RuleSelector {
pub fn prefix_and_code(&self) -> (&'static str, &'static str) {
match self {
RuleSelector::All => ("", "ALL"),
RuleSelector::C => ("", "C"),
RuleSelector::T => ("", "T"),
RuleSelector::Prefix { prefix, .. } => {
(prefix.linter().common_prefix(), prefix.short_code())
}
@@ -152,16 +138,6 @@ impl IntoIterator for &RuleSelector {
fn into_iter(self) -> Self::IntoIter {
match self {
RuleSelector::All => RuleSelectorIter::All(Rule::iter()),
RuleSelector::C => RuleSelectorIter::Chain(
Linter::Flake8Comprehensions
.into_iter()
.chain(Linter::McCabe.into_iter()),
),
RuleSelector::T => RuleSelectorIter::Chain(
Linter::Flake8Debugger
.into_iter()
.chain(Linter::Flake8Print.into_iter()),
),
RuleSelector::Linter(linter) => RuleSelectorIter::Vec(linter.into_iter()),
RuleSelector::Prefix { prefix, .. } => RuleSelectorIter::Vec(prefix.into_iter()),
}
@@ -170,7 +146,6 @@ impl IntoIterator for &RuleSelector {
pub enum RuleSelectorIter {
All(RuleIter),
Chain(std::iter::Chain<std::vec::IntoIter<Rule>, std::vec::IntoIter<Rule>>),
Vec(std::vec::IntoIter<Rule>),
}
@@ -180,14 +155,13 @@ impl Iterator for RuleSelectorIter {
fn next(&mut self) -> Option<Self::Item> {
match self {
RuleSelectorIter::All(iter) => iter.next(),
RuleSelectorIter::Chain(iter) => iter.next(),
RuleSelectorIter::Vec(iter) => iter.next(),
}
}
}
/// A const alternative to the `impl From<RuleCodePrefix> for RuleSelector`
/// to let us keep the fields of [`RuleSelector`] private.
// to let us keep the fields of RuleSelector private.
// Note that Rust doesn't yet support `impl const From<RuleCodePrefix> for
// RuleSelector` (see https://github.com/rust-lang/rust/issues/67792).
// TODO(martin): Remove once RuleSelector is an enum with Linter & Rule variants
@@ -203,7 +177,7 @@ impl JsonSchema for RuleSelector {
"RuleSelector".to_string()
}
fn json_schema(_gen: &mut schemars::gen::SchemaGenerator) -> Schema {
fn json_schema(_gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
Schema::Object(SchemaObject {
instance_type: Some(InstanceType::String.into()),
enum_values: Some(
@@ -247,8 +221,6 @@ impl RuleSelector {
pub(crate) fn specificity(&self) -> Specificity {
match self {
RuleSelector::All => Specificity::All,
RuleSelector::T => Specificity::LinterGroup,
RuleSelector::C => Specificity::LinterGroup,
RuleSelector::Linter(..) => Specificity::Linter,
RuleSelector::Prefix { prefix, .. } => {
let prefix: &'static str = prefix.short_code();
@@ -268,7 +240,6 @@ impl RuleSelector {
#[derive(EnumIter, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) enum Specificity {
All,
LinterGroup,
Linter,
Code1Char,
Code2Chars,
@@ -277,7 +248,6 @@ pub(crate) enum Specificity {
Code5Chars,
}
#[cfg(feature = "clap")]
mod clap_completion {
use clap::builder::{PossibleValue, TypedValueParser, ValueParserFactory};
use strum::IntoEnumIterator;
@@ -317,7 +287,9 @@ mod clap_completion {
.map_err(|e| clap::Error::raw(clap::error::ErrorKind::InvalidValue, e))
}
fn possible_values(&self) -> Option<Box<dyn Iterator<Item = PossibleValue> + '_>> {
fn possible_values(
&self,
) -> Option<Box<dyn Iterator<Item = clap::builder::PossibleValue> + '_>> {
Some(Box::new(
std::iter::once(PossibleValue::new("ALL").help("all rules")).chain(
Linter::iter()

View File

@@ -61,7 +61,7 @@ pub fn commented_out_code(
// Verify that the comment is on its own line, and that it contains code.
if is_standalone_comment(line) && comment_contains_code(line, &settings.task_tags[..]) {
let mut diagnostic = Diagnostic::new(CommentedOutCode, Range::new(start, end));
if autofix.into() && settings.rules.should_fix(Rule::CommentedOutCode) {
if autofix.into() && settings.rules.should_fix(&Rule::CommentedOutCode) {
diagnostic.amend(Fix::deletion(location, end_location));
}
Some(diagnostic)

View File

@@ -140,7 +140,7 @@ pub fn subscript(checker: &mut Checker, value: &Expr, slice: &Expr) {
&& checker
.settings
.rules
.enabled(Rule::SysVersionSlice1Referenced)
.enabled(&Rule::SysVersionSlice1Referenced)
{
checker.diagnostics.push(Diagnostic::new(
SysVersionSlice1Referenced,
@@ -150,7 +150,7 @@ pub fn subscript(checker: &mut Checker, value: &Expr, slice: &Expr) {
&& checker
.settings
.rules
.enabled(Rule::SysVersionSlice3Referenced)
.enabled(&Rule::SysVersionSlice3Referenced)
{
checker.diagnostics.push(Diagnostic::new(
SysVersionSlice3Referenced,
@@ -165,13 +165,13 @@ pub fn subscript(checker: &mut Checker, value: &Expr, slice: &Expr) {
..
} => {
if *i == BigInt::from(2)
&& checker.settings.rules.enabled(Rule::SysVersion2Referenced)
&& checker.settings.rules.enabled(&Rule::SysVersion2Referenced)
{
checker
.diagnostics
.push(Diagnostic::new(SysVersion2Referenced, Range::from(value)));
} else if *i == BigInt::from(0)
&& checker.settings.rules.enabled(Rule::SysVersion0Referenced)
&& checker.settings.rules.enabled(&Rule::SysVersion0Referenced)
{
checker
.diagnostics
@@ -210,7 +210,7 @@ pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &
&& checker
.settings
.rules
.enabled(Rule::SysVersionInfo0Eq3Referenced)
.enabled(&Rule::SysVersionInfo0Eq3Referenced)
{
checker.diagnostics.push(Diagnostic::new(
SysVersionInfo0Eq3Referenced,
@@ -231,7 +231,7 @@ pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &
}],
) = (ops, comparators)
{
if checker.settings.rules.enabled(Rule::SysVersionInfo1CmpInt) {
if checker.settings.rules.enabled(&Rule::SysVersionInfo1CmpInt) {
checker
.diagnostics
.push(Diagnostic::new(SysVersionInfo1CmpInt, Range::from(left)));
@@ -259,7 +259,7 @@ pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &
if checker
.settings
.rules
.enabled(Rule::SysVersionInfoMinorCmpInt)
.enabled(&Rule::SysVersionInfoMinorCmpInt)
{
checker.diagnostics.push(Diagnostic::new(
SysVersionInfoMinorCmpInt,
@@ -286,12 +286,12 @@ pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &
) = (ops, comparators)
{
if s.len() == 1 {
if checker.settings.rules.enabled(Rule::SysVersionCmpStr10) {
if checker.settings.rules.enabled(&Rule::SysVersionCmpStr10) {
checker
.diagnostics
.push(Diagnostic::new(SysVersionCmpStr10, Range::from(left)));
}
} else if checker.settings.rules.enabled(Rule::SysVersionCmpStr3) {
} else if checker.settings.rules.enabled(&Rule::SysVersionCmpStr3) {
checker
.diagnostics
.push(Diagnostic::new(SysVersionCmpStr3, Range::from(left)));

View File

@@ -492,7 +492,7 @@ pub fn definition(
// ANN401 for dynamically typed arguments
if let Some(annotation) = &arg.node.annotation {
has_any_typed_arg = true;
if checker.settings.rules.enabled(Rule::AnyType) {
if checker.settings.rules.enabled(&Rule::AnyType) {
check_dynamically_typed(
checker,
annotation,
@@ -507,7 +507,7 @@ pub fn definition(
if checker
.settings
.rules
.enabled(Rule::MissingTypeFunctionArgument)
.enabled(&Rule::MissingTypeFunctionArgument)
{
diagnostics.push(Diagnostic::new(
MissingTypeFunctionArgument {
@@ -525,7 +525,7 @@ pub fn definition(
if let Some(expr) = &arg.node.annotation {
has_any_typed_arg = true;
if !checker.settings.flake8_annotations.allow_star_arg_any {
if checker.settings.rules.enabled(Rule::AnyType) {
if checker.settings.rules.enabled(&Rule::AnyType) {
let name = &arg.node.arg;
check_dynamically_typed(
checker,
@@ -539,7 +539,7 @@ pub fn definition(
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.rules.enabled(Rule::MissingTypeArgs) {
if checker.settings.rules.enabled(&Rule::MissingTypeArgs) {
diagnostics.push(Diagnostic::new(
MissingTypeArgs {
name: arg.node.arg.to_string(),
@@ -556,7 +556,7 @@ pub fn definition(
if let Some(expr) = &arg.node.annotation {
has_any_typed_arg = true;
if !checker.settings.flake8_annotations.allow_star_arg_any {
if checker.settings.rules.enabled(Rule::AnyType) {
if checker.settings.rules.enabled(&Rule::AnyType) {
let name = &arg.node.arg;
check_dynamically_typed(
checker,
@@ -570,7 +570,7 @@ pub fn definition(
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.rules.enabled(Rule::MissingTypeKwargs) {
if checker.settings.rules.enabled(&Rule::MissingTypeKwargs) {
diagnostics.push(Diagnostic::new(
MissingTypeKwargs {
name: arg.node.arg.to_string(),
@@ -587,7 +587,7 @@ pub fn definition(
if let Some(arg) = args.posonlyargs.first().or_else(|| args.args.first()) {
if arg.node.annotation.is_none() {
if visibility::is_classmethod(&checker.ctx, cast::decorator_list(stmt)) {
if checker.settings.rules.enabled(Rule::MissingTypeCls) {
if checker.settings.rules.enabled(&Rule::MissingTypeCls) {
diagnostics.push(Diagnostic::new(
MissingTypeCls {
name: arg.node.arg.to_string(),
@@ -596,7 +596,7 @@ pub fn definition(
));
}
} else {
if checker.settings.rules.enabled(Rule::MissingTypeSelf) {
if checker.settings.rules.enabled(&Rule::MissingTypeSelf) {
diagnostics.push(Diagnostic::new(
MissingTypeSelf {
name: arg.node.arg.to_string(),
@@ -614,7 +614,7 @@ pub fn definition(
// ANN201, ANN202, ANN401
if let Some(expr) = &returns {
has_typed_return = true;
if checker.settings.rules.enabled(Rule::AnyType) {
if checker.settings.rules.enabled(&Rule::AnyType) {
check_dynamically_typed(checker, expr, || name.to_string(), &mut diagnostics);
}
} else if !(
@@ -626,7 +626,7 @@ pub fn definition(
if checker
.settings
.rules
.enabled(Rule::MissingReturnTypeClassMethod)
.enabled(&Rule::MissingReturnTypeClassMethod)
{
diagnostics.push(Diagnostic::new(
MissingReturnTypeClassMethod {
@@ -641,7 +641,7 @@ pub fn definition(
if checker
.settings
.rules
.enabled(Rule::MissingReturnTypeStaticMethod)
.enabled(&Rule::MissingReturnTypeStaticMethod)
{
diagnostics.push(Diagnostic::new(
MissingReturnTypeStaticMethod {
@@ -656,7 +656,7 @@ pub fn definition(
if checker
.settings
.rules
.enabled(Rule::MissingReturnTypeSpecialMethod)
.enabled(&Rule::MissingReturnTypeSpecialMethod)
{
if !(checker.settings.flake8_annotations.mypy_init_return && has_any_typed_arg)
{
@@ -681,7 +681,7 @@ pub fn definition(
if checker
.settings
.rules
.enabled(Rule::MissingReturnTypeSpecialMethod)
.enabled(&Rule::MissingReturnTypeSpecialMethod)
{
diagnostics.push(Diagnostic::new(
MissingReturnTypeSpecialMethod {
@@ -696,7 +696,7 @@ pub fn definition(
if checker
.settings
.rules
.enabled(Rule::MissingReturnTypePublicFunction)
.enabled(&Rule::MissingReturnTypePublicFunction)
{
diagnostics.push(Diagnostic::new(
MissingReturnTypePublicFunction {
@@ -710,7 +710,7 @@ pub fn definition(
if checker
.settings
.rules
.enabled(Rule::MissingReturnTypePrivateFunction)
.enabled(&Rule::MissingReturnTypePrivateFunction)
{
diagnostics.push(Diagnostic::new(
MissingReturnTypePrivateFunction {

View File

@@ -108,7 +108,7 @@ pub fn bad_file_permissions(
.map_or(false, |call_path| call_path.as_slice() == ["os", "chmod"])
{
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(mode_arg) = call_args.argument("mode", 1) {
if let Some(mode_arg) = call_args.get_argument("mode", Some(1)) {
if let Some(int_value) = get_int_value(mode_arg) {
if (int_value & WRITE_WORLD > 0) || (int_value & EXECUTE_GROUP > 0) {
checker.diagnostics.push(Diagnostic::new(

View File

@@ -25,7 +25,7 @@ impl Violation for HashlibInsecureHashFunction {
const WEAK_HASHES: [&str; 4] = ["md4", "md5", "sha", "sha1"];
fn is_used_for_security(call_args: &SimpleCallArgs) -> bool {
match call_args.keyword_argument("usedforsecurity") {
match call_args.get_argument("usedforsecurity", None) {
Some(expr) => !matches!(
&expr.node,
ExprKind::Constant {
@@ -67,7 +67,7 @@ pub fn hashlib_insecure_hash_functions(
return;
}
if let Some(name_arg) = call_args.argument("name", 0) {
if let Some(name_arg) = call_args.get_argument("name", Some(0)) {
if let Some(hash_func_name) = string_literal(name_arg) {
if WEAK_HASHES.contains(&hash_func_name.to_lowercase().as_str()) {
checker.diagnostics.push(Diagnostic::new(

View File

@@ -46,7 +46,7 @@ pub fn jinja2_autoescape_false(
{
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(autoescape_arg) = call_args.keyword_argument("autoescape") {
if let Some(autoescape_arg) = call_args.get_argument("autoescape", None) {
match &autoescape_arg.node {
ExprKind::Constant {
value: Constant::Bool(true),

View File

@@ -33,7 +33,7 @@ pub fn logging_config_insecure_listen(
{
let call_args = SimpleCallArgs::new(args, keywords);
if call_args.keyword_argument("verify").is_none() {
if call_args.get_argument("verify", None).is_none() {
checker.diagnostics.push(Diagnostic::new(
LoggingConfigInsecureListen,
Range::from(func),

View File

@@ -56,7 +56,7 @@ pub fn request_with_no_cert_validation(
None
}) {
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(verify_arg) = call_args.keyword_argument("verify") {
if let Some(verify_arg) = call_args.get_argument("verify", None) {
if let ExprKind::Constant {
value: Constant::Bool(false),
..

View File

@@ -44,7 +44,7 @@ pub fn request_without_timeout(
})
{
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(timeout_arg) = call_args.keyword_argument("timeout") {
if let Some(timeout_arg) = call_args.get_argument("timeout", None) {
if let Some(timeout) = match &timeout_arg.node {
ExprKind::Constant {
value: value @ Constant::None,

View File

@@ -33,7 +33,7 @@ pub fn snmp_insecure_version(
})
{
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(mp_model_arg) = call_args.keyword_argument("mpModel") {
if let Some(mp_model_arg) = call_args.get_argument("mpModel", None) {
if let ExprKind::Constant {
value: Constant::Int(value),
..

View File

@@ -39,7 +39,7 @@ pub fn unsafe_yaml_load(checker: &mut Checker, func: &Expr, args: &[Expr], keywo
.map_or(false, |call_path| call_path.as_slice() == ["yaml", "load"])
{
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(loader_arg) = call_args.argument("Loader", 1) {
if let Some(loader_arg) = call_args.get_argument("Loader", Some(1)) {
if !checker
.ctx
.resolve_call_path(loader_arg)

View File

@@ -41,7 +41,6 @@ mod tests {
#[test_case(Rule::StarArgUnpackingAfterKeywordArg, Path::new("B026.py"); "B026")]
#[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027.py"); "B027")]
#[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027.pyi"); "B027_pyi")]
#[test_case(Rule::NoExplicitStacklevel, Path::new("B028.py"); "B028")]
#[test_case(Rule::ExceptWithEmptyTuple, Path::new("B029.py"); "B029")]
#[test_case(Rule::ExceptWithNonExceptionClasses, Path::new("B030.py"); "B030")]
#[test_case(Rule::UnintentionalTypeAnnotation, Path::new("B032.py"); "B032")]

View File

@@ -115,7 +115,7 @@ pub fn abstract_base_class(
if !checker
.settings
.rules
.enabled(Rule::EmptyMethodWithoutAbstractDecorator)
.enabled(&Rule::EmptyMethodWithoutAbstractDecorator)
{
continue;
}
@@ -135,7 +135,7 @@ pub fn abstract_base_class(
if checker
.settings
.rules
.enabled(Rule::AbstractBaseClassWithoutAbstractMethod)
.enabled(&Rule::AbstractBaseClassWithoutAbstractMethod)
{
if !has_abstract_method {
checker.diagnostics.push(Diagnostic::new(

View File

@@ -2,8 +2,7 @@ use rustpython_parser::ast::{Expr, ExprKind};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::scope::ScopeKind;
use ruff_python_ast::types::Range;
use ruff_python_ast::types::{Range, ScopeKind};
use crate::checkers::ast::Checker;
@@ -31,7 +30,7 @@ fn is_cache_func(checker: &Checker, expr: &Expr) -> bool {
/// B019
pub fn cached_instance_method(checker: &mut Checker, decorator_list: &[Expr]) {
if !matches!(checker.ctx.scope().kind, ScopeKind::Class(_)) {
if !matches!(checker.ctx.current_scope().kind, ScopeKind::Class(_)) {
return;
}
for decorator in decorator_list {

View File

@@ -83,7 +83,7 @@ fn duplicate_handler_exceptions<'a>(
if checker
.settings
.rules
.enabled(Rule::DuplicateHandlerException)
.enabled(&Rule::DuplicateHandlerException)
{
// TODO(charlie): Handle "BaseException" and redundant exception aliases.
if !duplicates.is_empty() {
@@ -149,7 +149,7 @@ pub fn duplicate_exceptions(checker: &mut Checker, handlers: &[Excepthandler]) {
if checker
.settings
.rules
.enabled(Rule::DuplicateTryBlockException)
.enabled(&Rule::DuplicateTryBlockException)
{
for (name, exprs) in duplicates {
for expr in exprs {

View File

@@ -25,7 +25,6 @@ pub use loop_variable_overrides_iterator::{
loop_variable_overrides_iterator, LoopVariableOverridesIterator,
};
pub use mutable_argument_default::{mutable_argument_default, MutableArgumentDefault};
pub use no_explicit_stacklevel::{no_explicit_stacklevel, NoExplicitStacklevel};
pub use raise_without_from_inside_except::{
raise_without_from_inside_except, RaiseWithoutFromInsideExcept,
};
@@ -64,7 +63,6 @@ mod getattr_with_constant;
mod jump_statement_in_finally;
mod loop_variable_overrides_iterator;
mod mutable_argument_default;
mod no_explicit_stacklevel;
mod raise_without_from_inside_except;
mod redundant_tuple_in_exception_handler;
mod setattr_with_constant;

View File

@@ -1,68 +0,0 @@
use rustpython_parser::ast::{Expr, Keyword};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::SimpleCallArgs;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
/// ## What it does
/// Checks for `warnings.warn` calls without an explicit `stacklevel` keyword
/// argument.
///
/// ## Why is this bad?
/// The `warnings.warn` method uses a `stacklevel` of 1 by default, which
/// limits the rendered stack trace to that of the line on which the
/// `warn` method is called.
///
/// It's recommended to use a `stacklevel` of 2 or higher, to give the caller
/// more context about the warning.
///
/// ## Example
/// ```python
/// warnings.warn("This is a warning")
/// ```
///
/// Use instead:
/// ```python
/// warnings.warn("This is a warning", stacklevel=2)
/// ```
#[violation]
pub struct NoExplicitStacklevel;
// Message rendered when the B028 violation is reported.
impl Violation for NoExplicitStacklevel {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("No explicit `stacklevel` keyword argument found")
    }
}
/// B028: report `warnings.warn(...)` calls that omit an explicit
/// `stacklevel` keyword argument.
///
/// A diagnostic spanning the called function expression is pushed onto
/// `checker.diagnostics` when the call resolves to `warnings.warn` and no
/// `stacklevel` keyword is present.
pub fn no_explicit_stacklevel(
    checker: &mut Checker,
    func: &Expr,
    args: &[Expr],
    keywords: &[Keyword],
) {
    // Only `warnings.warn` (resolved through imports/aliases) is of interest.
    let is_warnings_warn = checker
        .ctx
        .resolve_call_path(func)
        .map_or(false, |call_path| {
            call_path.as_slice() == ["warnings", "warn"]
        });
    if !is_warnings_warn {
        return;
    }

    let call_args = SimpleCallArgs::new(args, keywords);
    // An explicit `stacklevel=` keyword satisfies the rule.
    if call_args.keyword_argument("stacklevel").is_none() {
        checker
            .diagnostics
            .push(Diagnostic::new(NoExplicitStacklevel, Range::from(func)));
    }
}

View File

@@ -163,8 +163,9 @@ pub fn unused_loop_control_variable(
if let Some(rename) = rename {
if certainty.into() && checker.patch(diagnostic.kind.rule()) {
// Find the `BindingKind::LoopVar` corresponding to the name.
let scope = checker.ctx.scope();
let scope = checker.ctx.current_scope();
let binding = scope
.bindings
.get(name)
.into_iter()
.chain(scope.rebounds.get(name).into_iter().flatten())

View File

@@ -1,31 +0,0 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
expression: diagnostics
---
- kind:
name: NoExplicitStacklevel
body: "No explicit `stacklevel` keyword argument found"
suggestion: ~
fixable: false
location:
row: 8
column: 0
end_location:
row: 8
column: 13
fix: ~
parent: ~
- kind:
name: NoExplicitStacklevel
body: "No explicit `stacklevel` keyword argument found"
suggestion: ~
fixable: false
location:
row: 9
column: 0
end_location:
row: 9
column: 13
fix: ~
parent: ~

View File

@@ -260,7 +260,7 @@ pub fn trailing_commas(
end_location: comma.2,
},
);
if autofix.into() && settings.rules.should_fix(Rule::TrailingCommaProhibited) {
if autofix.into() && settings.rules.should_fix(&Rule::TrailingCommaProhibited) {
diagnostic.amend(Fix::deletion(comma.0, comma.2));
}
diagnostics.push(diagnostic);
@@ -304,7 +304,7 @@ pub fn trailing_commas(
end_location: missing_comma.2,
},
);
if autofix.into() && settings.rules.should_fix(Rule::TrailingCommaMissing) {
if autofix.into() && settings.rules.should_fix(&Rule::TrailingCommaMissing) {
// Create a replacement that includes the final bracket (or other token),
// rather than just inserting a comma at the end. This prevents the UP034 autofix
// removing any brackets in the same linter pass - doing both at the same time could

View File

@@ -1,4 +1,6 @@
#[derive(Debug, PartialEq, Eq)]
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DebuggerUsingType {
Call(String),
Import(String),

View File

@@ -162,7 +162,7 @@ pub fn string_in_exception(checker: &mut Checker, exc: &Expr) {
value: Constant::Str(string),
..
} => {
if checker.settings.rules.enabled(Rule::RawStringInException) {
if checker.settings.rules.enabled(&Rule::RawStringInException) {
if string.len() > checker.settings.flake8_errmsg.max_string_length {
checker
.diagnostics
@@ -172,7 +172,7 @@ pub fn string_in_exception(checker: &mut Checker, exc: &Expr) {
}
// Check for f-strings
ExprKind::JoinedStr { .. } => {
if checker.settings.rules.enabled(Rule::FStringInException) {
if checker.settings.rules.enabled(&Rule::FStringInException) {
checker
.diagnostics
.push(Diagnostic::new(FStringInException, Range::from(first)));
@@ -180,7 +180,7 @@ pub fn string_in_exception(checker: &mut Checker, exc: &Expr) {
}
// Check for .format() calls
ExprKind::Call { func, .. } => {
if checker.settings.rules.enabled(Rule::DotFormatInException) {
if checker.settings.rules.enabled(&Rule::DotFormatInException) {
if let ExprKind::Attribute { value, attr, .. } = &func.node {
if attr == "format" && matches!(value.node, ExprKind::Constant { .. }) {
checker.diagnostics.push(Diagnostic::new(

View File

@@ -43,14 +43,14 @@ fn check_msg(checker: &mut Checker, msg: &Expr) {
// Check for string concatenation and percent format.
ExprKind::BinOp { op, .. } => match op {
Operator::Add => {
if checker.settings.rules.enabled(Rule::LoggingStringConcat) {
if checker.settings.rules.enabled(&Rule::LoggingStringConcat) {
checker
.diagnostics
.push(Diagnostic::new(LoggingStringConcat, Range::from(msg)));
}
}
Operator::Mod => {
if checker.settings.rules.enabled(Rule::LoggingPercentFormat) {
if checker.settings.rules.enabled(&Rule::LoggingPercentFormat) {
checker
.diagnostics
.push(Diagnostic::new(LoggingPercentFormat, Range::from(msg)));
@@ -60,7 +60,7 @@ fn check_msg(checker: &mut Checker, msg: &Expr) {
},
// Check for f-strings.
ExprKind::JoinedStr { .. } => {
if checker.settings.rules.enabled(Rule::LoggingFString) {
if checker.settings.rules.enabled(&Rule::LoggingFString) {
checker
.diagnostics
.push(Diagnostic::new(LoggingFString, Range::from(msg)));
@@ -68,7 +68,7 @@ fn check_msg(checker: &mut Checker, msg: &Expr) {
}
// Check for .format() calls.
ExprKind::Call { func, .. } => {
if checker.settings.rules.enabled(Rule::LoggingStringFormat) {
if checker.settings.rules.enabled(&Rule::LoggingStringFormat) {
if let ExprKind::Attribute { value, attr, .. } = &func.node {
if attr == "format" && matches!(value.node, ExprKind::Constant { .. }) {
checker
@@ -146,12 +146,12 @@ pub fn logging_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords:
);
// G001 - G004
if let Some(format_arg) = call_args.argument("msg", 0) {
if let Some(format_arg) = call_args.get_argument("msg", Some(0)) {
check_msg(checker, format_arg);
}
// G010
if checker.settings.rules.enabled(Rule::LoggingWarn)
if checker.settings.rules.enabled(&Rule::LoggingWarn)
&& matches!(logging_level, LoggingLevel::Warn)
{
let mut diagnostic = Diagnostic::new(LoggingWarn, level_call_range);
@@ -166,18 +166,18 @@ pub fn logging_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords:
}
// G101
if checker.settings.rules.enabled(Rule::LoggingExtraAttrClash) {
if checker.settings.rules.enabled(&Rule::LoggingExtraAttrClash) {
if let Some(extra) = find_keyword(keywords, "extra") {
check_log_record_attr_clash(checker, extra);
}
}
// G201, G202
if checker.settings.rules.enabled(Rule::LoggingExcInfo)
if checker.settings.rules.enabled(&Rule::LoggingExcInfo)
|| checker
.settings
.rules
.enabled(Rule::LoggingRedundantExcInfo)
.enabled(&Rule::LoggingRedundantExcInfo)
{
if !checker.ctx.in_exception_handler() {
return;
@@ -206,7 +206,7 @@ pub fn logging_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords:
match logging_level {
LoggingLevel::Error => {
if checker.settings.rules.enabled(Rule::LoggingExcInfo) {
if checker.settings.rules.enabled(&Rule::LoggingExcInfo) {
checker
.diagnostics
.push(Diagnostic::new(LoggingExcInfo, level_call_range));
@@ -216,7 +216,7 @@ pub fn logging_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords:
if checker
.settings
.rules
.enabled(Rule::LoggingRedundantExcInfo)
.enabled(&Rule::LoggingRedundantExcInfo)
{
checker.diagnostics.push(Diagnostic::new(
LoggingRedundantExcInfo,

View File

@@ -1,18 +1,12 @@
use itertools::Either::{Left, Right};
use std::collections::BTreeMap;
use std::iter;
use log::error;
use rustc_hash::FxHashSet;
use rustpython_parser::ast::{
Boolop, Constant, Expr, ExprContext, ExprKind, Keyword, Stmt, StmtKind,
};
use rustpython_parser::ast::{Boolop, Constant, Expr, ExprKind, Keyword, Stmt, StmtKind};
use ruff_diagnostics::{AlwaysAutofixableViolation, Violation};
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::helpers::{create_expr, match_trailing_comment, unparse_expr};
use ruff_python_ast::helpers::{match_trailing_comment, unparse_expr};
use ruff_python_ast::types::{Range, RefEquality};
use ruff_python_stdlib::identifiers::is_identifier;
use ruff_python_stdlib::keyword::KWLIST;
@@ -126,17 +120,12 @@ pub struct SingleStartsEndsWith {
pub attr: String,
}
impl AlwaysAutofixableViolation for SingleStartsEndsWith {
impl Violation for SingleStartsEndsWith {
#[derive_message_formats]
fn message(&self) -> String {
let SingleStartsEndsWith { attr } = self;
format!("Call `{attr}` once with a `tuple`")
}
fn autofix_title(&self) -> String {
let SingleStartsEndsWith { attr } = self;
format!("Merge into a single `{attr}` call")
}
}
#[violation]
@@ -403,116 +392,39 @@ pub fn no_unnecessary_dict_kwargs(checker: &mut Checker, expr: &Expr, kwargs: &[
}
/// PIE810
pub fn single_starts_ends_with(checker: &mut Checker, expr: &Expr) {
let ExprKind::BoolOp { op: Boolop::Or, values } = &expr.node else {
pub fn single_starts_ends_with(checker: &mut Checker, values: &[Expr], node: &Boolop) {
if *node != Boolop::Or {
return;
};
}
let mut duplicates = BTreeMap::new();
for (index, call) in values.iter().enumerate() {
let ExprKind::Call {
// Given `foo.startswith`, insert ("foo", "startswith") into the set.
let mut seen = FxHashSet::default();
for expr in values {
if let ExprKind::Call {
func,
args,
keywords,
..
} = &call.node else {
continue
};
if !(args.len() == 1 && keywords.is_empty()) {
continue;
}
let ExprKind::Attribute { value, attr, .. } = &func.node else {
continue
};
if attr != "startswith" && attr != "endswith" {
continue;
}
let ExprKind::Name { id: arg_name, .. } = &value.node else {
continue
};
duplicates
.entry((attr.as_str(), arg_name.as_str()))
.or_insert_with(Vec::new)
.push(index);
}
// Generate a `Diagnostic` for each duplicate.
for ((attr_name, arg_name), indices) in duplicates {
if indices.len() > 1 {
let mut diagnostic = Diagnostic::new(
SingleStartsEndsWith {
attr: attr_name.to_string(),
},
Range::from(expr),
);
if checker.patch(diagnostic.kind.rule()) {
let words: Vec<&Expr> = indices
.iter()
.map(|index| &values[*index])
.map(|expr| {
let ExprKind::Call { func: _, args, keywords: _} = &expr.node else {
unreachable!("{}", format!("Indices should only contain `{attr_name}` calls"))
};
args.get(0)
.unwrap_or_else(|| panic!("`{attr_name}` should have one argument"))
})
.collect();
let call = create_expr(ExprKind::Call {
func: Box::new(create_expr(ExprKind::Attribute {
value: Box::new(create_expr(ExprKind::Name {
id: arg_name.to_string(),
ctx: ExprContext::Load,
})),
attr: attr_name.to_string(),
ctx: ExprContext::Load,
})),
args: vec![create_expr(ExprKind::Tuple {
elts: words
.iter()
.flat_map(|value| {
if let ExprKind::Tuple { elts, .. } = &value.node {
Left(elts.iter())
} else {
Right(iter::once(*value))
}
})
.map(Clone::clone)
.collect(),
ctx: ExprContext::Load,
})],
keywords: vec![],
});
// Generate the combined `BoolOp`.
let mut call = Some(call);
let bool_op = create_expr(ExprKind::BoolOp {
op: Boolop::Or,
values: values
.iter()
.enumerate()
.filter_map(|(index, elt)| {
if indices.contains(&index) {
std::mem::take(&mut call)
} else {
Some(elt.clone())
}
})
.collect(),
});
diagnostic.amend(Fix::replacement(
unparse_expr(&bool_op, checker.stylist),
expr.location,
expr.end_location.unwrap(),
));
} = &expr.node
{
if !(args.len() == 1 && keywords.is_empty()) {
continue;
}
if let ExprKind::Attribute { value, attr, .. } = &func.node {
if attr != "startswith" && attr != "endswith" {
continue;
}
if let ExprKind::Name { id, .. } = &value.node {
if !seen.insert((id, attr)) {
checker.diagnostics.push(Diagnostic::new(
SingleStartsEndsWith {
attr: attr.to_string(),
},
Range::from(value),
));
}
}
}
checker.diagnostics.push(diagnostic);
}
}
}

View File

@@ -5,101 +5,53 @@ expression: diagnostics
- kind:
name: SingleStartsEndsWith
body: "Call `startswith` once with a `tuple`"
suggestion: "Merge into a single `startswith` call"
fixable: true
suggestion: ~
fixable: false
location:
row: 2
column: 0
column: 25
end_location:
row: 2
column: 46
fix:
content: "obj.startswith((\"foo\", \"bar\"))"
location:
row: 2
column: 0
end_location:
row: 2
column: 46
column: 28
fix: ~
parent: ~
- kind:
name: SingleStartsEndsWith
body: "Call `endswith` once with a `tuple`"
suggestion: "Merge into a single `endswith` call"
fixable: true
suggestion: ~
fixable: false
location:
row: 4
column: 0
column: 23
end_location:
row: 4
column: 42
fix:
content: "obj.endswith((\"foo\", \"bar\"))"
location:
row: 4
column: 0
end_location:
row: 4
column: 42
column: 26
fix: ~
parent: ~
- kind:
name: SingleStartsEndsWith
body: "Call `startswith` once with a `tuple`"
suggestion: "Merge into a single `startswith` call"
fixable: true
suggestion: ~
fixable: false
location:
row: 6
column: 0
column: 23
end_location:
row: 6
column: 42
fix:
content: "obj.startswith((foo, bar))"
location:
row: 6
column: 0
end_location:
row: 6
column: 42
column: 26
fix: ~
parent: ~
- kind:
name: SingleStartsEndsWith
body: "Call `startswith` once with a `tuple`"
suggestion: "Merge into a single `startswith` call"
fixable: true
suggestion: ~
fixable: false
location:
row: 8
column: 0
column: 23
end_location:
row: 8
column: 44
fix:
content: "obj.startswith((foo, \"foo\"))"
location:
row: 8
column: 0
end_location:
row: 8
column: 44
parent: ~
- kind:
name: SingleStartsEndsWith
body: "Call `startswith` once with a `tuple`"
suggestion: "Merge into a single `startswith` call"
fixable: true
location:
row: 10
column: 0
end_location:
row: 10
column: 65
fix:
content: "obj.endswith(foo) or obj.startswith((foo, \"foo\"))"
location:
row: 10
column: 0
end_location:
row: 10
column: 65
column: 26
fix: ~
parent: ~

View File

@@ -1,6 +1,7 @@
use std::fmt;
use rustpython_parser::ast::{Expr, ExprKind};
use serde::{Deserialize, Serialize};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -8,7 +9,7 @@ use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum VarKind {
TypeVar,
ParamSpec,

View File

@@ -1,25 +1,20 @@
use rustpython_parser::ast::{Arguments, Constant, Expr, ExprKind, Operator, Unaryop};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Fix, Violation};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
#[violation]
pub struct TypedArgumentSimpleDefaults;
/// PYI011
impl AlwaysAutofixableViolation for TypedArgumentSimpleDefaults {
impl Violation for TypedArgumentSimpleDefaults {
#[derive_message_formats]
fn message(&self) -> String {
format!("Only simple default values allowed for typed arguments")
}
fn autofix_title(&self) -> String {
"Replace default value by `...`".to_string()
}
}
#[violation]
@@ -33,14 +28,6 @@ impl Violation for ArgumentSimpleDefaults {
}
}
const ALLOWED_MATH_ATTRIBUTES_IN_DEFAULTS: &[&[&str]] = &[
&["math", "inf"],
&["math", "nan"],
&["math", "e"],
&["math", "pi"],
&["math", "tau"],
];
const ALLOWED_ATTRIBUTES_IN_DEFAULTS: &[&[&str]] = &[
&["sys", "stdin"],
&["sys", "stdout"],
@@ -74,74 +61,42 @@ fn is_valid_default_value_with_annotation(default: &Expr, checker: &Checker) ->
value: Constant::Bytes(..),
..
} => return checker.locator.slice(default).len() <= 50,
// Ex) `123`, `True`, `False`, `3.14`
ExprKind::Constant {
value: Constant::Int(..) | Constant::Bool(..) | Constant::Float(..),
value: Constant::Int(..),
..
} => {
return checker.locator.slice(default).len() <= 10;
}
// Ex) `2j`
ExprKind::Constant {
value: Constant::Complex { real, .. },
..
} => {
if *real == 0.0 {
return checker.locator.slice(default).len() <= 10;
}
}
ExprKind::UnaryOp {
op: Unaryop::USub,
operand,
} => {
// Ex) `-1`, `-3.14`
if let ExprKind::Constant {
value: Constant::Int(..) | Constant::Float(..),
value: Constant::Int(..),
..
} = &operand.node
{
return checker.locator.slice(operand).len() <= 10;
}
// Ex) `-2j`
if let ExprKind::Constant {
value: Constant::Complex { real, .. },
..
} = &operand.node
{
if *real == 0.0 {
return checker.locator.slice(operand).len() <= 10;
}
}
// Ex) `-math.inf`, `-math.pi`, etc.
if let ExprKind::Attribute { .. } = &operand.node {
if checker
.ctx
.resolve_call_path(operand)
.map_or(false, |call_path| {
ALLOWED_MATH_ATTRIBUTES_IN_DEFAULTS.iter().any(|target| {
// reject `-math.nan`
call_path.as_slice() == *target && *target != ["math", "nan"]
})
})
{
return true;
}
}
}
ExprKind::BinOp {
left,
op: Operator::Add | Operator::Sub,
right,
} => {
// Ex) `1 + 2j`, `1 - 2j`, `-1 - 2j`, `-1 + 2j`
// 1 + 2j
// 1 - 2j
// -1 - 2j
// -1 + 2j
if let ExprKind::Constant {
value: Constant::Complex { .. },
..
} = right.node
{
// Ex) `1 + 2j`, `1 - 2j`
// 1 + 2j
// 1 - 2j
if let ExprKind::Constant {
value: Constant::Int(..) | Constant::Float(..),
value: Constant::Int(..),
..
} = &left.node
{
@@ -151,9 +106,10 @@ fn is_valid_default_value_with_annotation(default: &Expr, checker: &Checker) ->
operand,
} = &left.node
{
// Ex) `-1 + 2j`, `-1 - 2j`
// -1 + 2j
// -1 - 2j
if let ExprKind::Constant {
value: Constant::Int(..) | Constant::Float(..),
value: Constant::Int(..),
..
} = &operand.node
{
@@ -162,15 +118,14 @@ fn is_valid_default_value_with_annotation(default: &Expr, checker: &Checker) ->
}
}
}
// Ex) `math.inf`, `sys.stdin`, etc.
// `sys.stdin`, etc.
ExprKind::Attribute { .. } => {
if checker
.ctx
.resolve_call_path(default)
.map_or(false, |call_path| {
ALLOWED_MATH_ATTRIBUTES_IN_DEFAULTS
ALLOWED_ATTRIBUTES_IN_DEFAULTS
.iter()
.chain(ALLOWED_ATTRIBUTES_IN_DEFAULTS.iter())
.any(|target| call_path.as_slice() == *target)
})
{
@@ -193,18 +148,10 @@ pub fn typed_argument_simple_defaults(checker: &mut Checker, args: &Arguments) {
{
if arg.node.annotation.is_some() {
if !is_valid_default_value_with_annotation(default, checker) {
let mut diagnostic =
Diagnostic::new(TypedArgumentSimpleDefaults, Range::from(default));
if checker.patch(diagnostic.kind.rule()) {
diagnostic.amend(Fix::replacement(
"...".to_string(),
default.location,
default.end_location.unwrap(),
));
}
checker.diagnostics.push(diagnostic);
checker.diagnostics.push(Diagnostic::new(
TypedArgumentSimpleDefaults,
Range::from(default),
));
}
}
}
@@ -220,18 +167,10 @@ pub fn typed_argument_simple_defaults(checker: &mut Checker, args: &Arguments) {
{
if kwarg.node.annotation.is_some() {
if !is_valid_default_value_with_annotation(default, checker) {
let mut diagnostic =
Diagnostic::new(TypedArgumentSimpleDefaults, Range::from(default));
if checker.patch(diagnostic.kind.rule()) {
diagnostic.amend(Fix::replacement(
"...".to_string(),
default.location,
default.end_location.unwrap(),
));
}
checker.diagnostics.push(diagnostic);
checker.diagnostics.push(Diagnostic::new(
TypedArgumentSimpleDefaults,
Range::from(default),
));
}
}
}

View File

@@ -118,7 +118,7 @@ pub fn unrecognized_platform(
&& checker
.settings
.rules
.enabled(Rule::UnrecognizedPlatformCheck)
.enabled(&Rule::UnrecognizedPlatformCheck)
{
checker
.diagnostics
@@ -137,7 +137,7 @@ pub fn unrecognized_platform(
&& checker
.settings
.rules
.enabled(Rule::UnrecognizedPlatformName)
.enabled(&Rule::UnrecognizedPlatformName)
{
checker.diagnostics.push(Diagnostic::new(
UnrecognizedPlatformName {
@@ -151,7 +151,7 @@ pub fn unrecognized_platform(
if checker
.settings
.rules
.enabled(Rule::UnrecognizedPlatformCheck)
.enabled(&Rule::UnrecognizedPlatformCheck)
{
checker
.diagnostics

View File

@@ -5,321 +5,144 @@ expression: diagnostics
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 10
row: 3
column: 13
end_location:
row: 10
row: 3
column: 23
fix:
content: "..."
location:
row: 10
column: 13
end_location:
row: 10
column: 23
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 16
row: 9
column: 8
end_location:
row: 20
row: 13
column: 5
fix:
content: "..."
location:
row: 16
column: 8
end_location:
row: 20
column: 5
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 25
row: 18
column: 8
end_location:
row: 29
row: 22
column: 5
fix:
content: "..."
location:
row: 25
column: 8
end_location:
row: 29
column: 5
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 34
row: 27
column: 8
end_location:
row: 31
column: 5
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: ~
fixable: false
location:
row: 36
column: 8
end_location:
row: 38
column: 5
fix:
content: "..."
location:
row: 34
column: 8
end_location:
row: 38
column: 5
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 43
column: 8
end_location:
row: 45
column: 5
fix:
content: "..."
location:
row: 43
column: 8
end_location:
row: 45
column: 5
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
location:
row: 48
row: 41
column: 13
end_location:
row: 49
row: 42
column: 11
fix:
content: "..."
location:
row: 48
column: 13
end_location:
row: 49
column: 11
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 52
row: 45
column: 13
end_location:
row: 53
row: 46
column: 12
fix:
content: "..."
location:
row: 52
column: 13
end_location:
row: 53
column: 12
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 56
row: 49
column: 16
end_location:
row: 57
row: 50
column: 7
fix:
content: "..."
location:
row: 56
column: 16
end_location:
row: 57
column: 7
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 60
row: 53
column: 13
end_location:
row: 61
row: 54
column: 7
fix:
content: "..."
location:
row: 60
column: 13
end_location:
row: 61
column: 7
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 64
row: 57
column: 17
end_location:
row: 65
row: 58
column: 8
fix:
content: "..."
location:
row: 64
column: 17
end_location:
row: 65
column: 8
fix: ~
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
suggestion: ~
fixable: false
location:
row: 68
row: 61
column: 17
end_location:
row: 69
row: 62
column: 10
fix:
content: "..."
location:
row: 68
column: 17
end_location:
row: 69
column: 10
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
location:
row: 96
column: 15
end_location:
row: 96
column: 18
fix:
content: "..."
location:
row: 96
column: 15
end_location:
row: 96
column: 18
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
location:
row: 99
column: 15
end_location:
row: 99
column: 21
fix:
content: "..."
location:
row: 99
column: 15
end_location:
row: 99
column: 21
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
location:
row: 105
column: 15
end_location:
row: 105
column: 24
fix:
content: "..."
location:
row: 105
column: 15
end_location:
row: 105
column: 24
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
location:
row: 108
column: 17
end_location:
row: 109
column: 8
fix:
content: "..."
location:
row: 108
column: 17
end_location:
row: 109
column: 8
parent: ~
- kind:
name: TypedArgumentSimpleDefaults
body: Only simple default values allowed for typed arguments
suggestion: "Replace default value by `...`"
fixable: true
location:
row: 115
column: 16
end_location:
row: 115
column: 23
fix:
content: "..."
location:
row: 115
column: 16
end_location:
row: 115
column: 23
fix: ~
parent: ~

View File

@@ -22,7 +22,7 @@ impl Violation for FailWithoutMessage {
pub fn fail_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords: &[Keyword]) {
if is_pytest_fail(func, checker) {
let call_args = SimpleCallArgs::new(args, keywords);
let msg = call_args.argument("msg", 0);
let msg = call_args.get_argument("msg", Some(0));
if let Some(msg) = msg {
if is_empty_or_null_string(msg) {

View File

@@ -277,7 +277,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &E
if checker
.settings
.rules
.enabled(Rule::IncorrectFixtureParenthesesStyle)
.enabled(&Rule::IncorrectFixtureParenthesesStyle)
&& !checker.settings.flake8_pytest_style.fixture_parentheses
&& args.is_empty()
&& keywords.is_empty()
@@ -287,7 +287,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &E
pytest_fixture_parentheses(checker, decorator, fix, "", "()");
}
if checker.settings.rules.enabled(Rule::FixturePositionalArgs) && !args.is_empty() {
if checker.settings.rules.enabled(&Rule::FixturePositionalArgs) && !args.is_empty() {
checker.diagnostics.push(Diagnostic::new(
FixturePositionalArgs {
function: func_name.to_string(),
@@ -299,7 +299,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &E
if checker
.settings
.rules
.enabled(Rule::ExtraneousScopeFunction)
.enabled(&Rule::ExtraneousScopeFunction)
{
let scope_keyword = keywords
.iter()
@@ -333,7 +333,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &E
if checker
.settings
.rules
.enabled(Rule::IncorrectFixtureParenthesesStyle)
.enabled(&Rule::IncorrectFixtureParenthesesStyle)
&& checker.settings.flake8_pytest_style.fixture_parentheses
{
let fix = Fix::insertion("()".to_string(), decorator.end_location.unwrap());
@@ -354,7 +354,7 @@ fn check_fixture_returns(checker: &mut Checker, func: &Stmt, func_name: &str, bo
if checker
.settings
.rules
.enabled(Rule::IncorrectFixtureNameUnderscore)
.enabled(&Rule::IncorrectFixtureNameUnderscore)
&& visitor.has_return_with_value
&& func_name.starts_with('_')
{
@@ -367,7 +367,7 @@ fn check_fixture_returns(checker: &mut Checker, func: &Stmt, func_name: &str, bo
} else if checker
.settings
.rules
.enabled(Rule::MissingFixtureNameUnderscore)
.enabled(&Rule::MissingFixtureNameUnderscore)
&& !visitor.has_return_with_value
&& !visitor.has_yield_from
&& !func_name.starts_with('_')
@@ -380,7 +380,7 @@ fn check_fixture_returns(checker: &mut Checker, func: &Stmt, func_name: &str, bo
));
}
if checker.settings.rules.enabled(Rule::UselessYieldFixture) {
if checker.settings.rules.enabled(&Rule::UselessYieldFixture) {
if let Some(stmt) = body.last() {
if let StmtKind::Expr { value, .. } = &stmt.node {
if let ExprKind::Yield { .. } = value.node {
@@ -462,7 +462,7 @@ fn check_fixture_marks(checker: &mut Checker, decorators: &[Expr]) {
if checker
.settings
.rules
.enabled(Rule::UnnecessaryAsyncioMarkOnFixture)
.enabled(&Rule::UnnecessaryAsyncioMarkOnFixture)
{
if name == "asyncio" {
let mut diagnostic =
@@ -479,7 +479,7 @@ fn check_fixture_marks(checker: &mut Checker, decorators: &[Expr]) {
if checker
.settings
.rules
.enabled(Rule::ErroneousUseFixturesOnFixture)
.enabled(&Rule::ErroneousUseFixturesOnFixture)
{
if name == "usefixtures" {
let mut diagnostic =
@@ -508,17 +508,20 @@ pub fn fixture(
if checker
.settings
.rules
.enabled(Rule::IncorrectFixtureParenthesesStyle)
|| checker.settings.rules.enabled(Rule::FixturePositionalArgs)
.enabled(&Rule::IncorrectFixtureParenthesesStyle)
|| checker.settings.rules.enabled(&Rule::FixturePositionalArgs)
|| checker
.settings
.rules
.enabled(Rule::ExtraneousScopeFunction)
.enabled(&Rule::ExtraneousScopeFunction)
{
check_fixture_decorator(checker, func_name, decorator);
}
if checker.settings.rules.enabled(Rule::DeprecatedYieldFixture)
if checker
.settings
.rules
.enabled(&Rule::DeprecatedYieldFixture)
&& checker.settings.flake8_pytest_style.fixture_parentheses
{
check_fixture_decorator_name(checker, decorator);
@@ -527,12 +530,12 @@ pub fn fixture(
if (checker
.settings
.rules
.enabled(Rule::MissingFixtureNameUnderscore)
.enabled(&Rule::MissingFixtureNameUnderscore)
|| checker
.settings
.rules
.enabled(Rule::IncorrectFixtureNameUnderscore)
|| checker.settings.rules.enabled(Rule::UselessYieldFixture))
.enabled(&Rule::IncorrectFixtureNameUnderscore)
|| checker.settings.rules.enabled(&Rule::UselessYieldFixture))
&& !has_abstractmethod_decorator(decorators, checker)
{
check_fixture_returns(checker, func, func_name, body);
@@ -541,7 +544,7 @@ pub fn fixture(
if checker
.settings
.rules
.enabled(Rule::FixtureFinalizerCallback)
.enabled(&Rule::FixtureFinalizerCallback)
{
check_fixture_addfinalizer(checker, args, body);
}
@@ -549,11 +552,11 @@ pub fn fixture(
if checker
.settings
.rules
.enabled(Rule::UnnecessaryAsyncioMarkOnFixture)
.enabled(&Rule::UnnecessaryAsyncioMarkOnFixture)
|| checker
.settings
.rules
.enabled(Rule::ErroneousUseFixturesOnFixture)
.enabled(&Rule::ErroneousUseFixturesOnFixture)
{
check_fixture_marks(checker, decorators);
}
@@ -562,7 +565,7 @@ pub fn fixture(
if checker
.settings
.rules
.enabled(Rule::FixtureParamWithoutValue)
.enabled(&Rule::FixtureParamWithoutValue)
&& func_name.starts_with("test_")
{
check_test_function_args(checker, args);

View File

@@ -123,11 +123,11 @@ pub fn marks(checker: &mut Checker, decorators: &[Expr]) {
let enforce_parentheses = checker
.settings
.rules
.enabled(Rule::IncorrectMarkParenthesesStyle);
.enabled(&Rule::IncorrectMarkParenthesesStyle);
let enforce_useless_usefixtures = checker
.settings
.rules
.enabled(Rule::UseFixturesWithoutParameters);
.enabled(&Rule::UseFixturesWithoutParameters);
for mark in get_mark_decorators(decorators) {
if enforce_parentheses {

View File

@@ -380,7 +380,7 @@ pub fn parametrize(checker: &mut Checker, decorators: &[Expr]) {
if checker
.settings
.rules
.enabled(Rule::ParametrizeNamesWrongType)
.enabled(&Rule::ParametrizeNamesWrongType)
{
if let Some(names) = args.get(0) {
check_names(checker, names);
@@ -389,7 +389,7 @@ pub fn parametrize(checker: &mut Checker, decorators: &[Expr]) {
if checker
.settings
.rules
.enabled(Rule::ParametrizeValuesWrongType)
.enabled(&Rule::ParametrizeValuesWrongType)
{
if let Some(names) = args.get(0) {
if let Some(values) = args.get(1) {

View File

@@ -68,11 +68,11 @@ fn check_patch_call(
new_arg_number: usize,
) -> Option<Diagnostic> {
let simple_args = SimpleCallArgs::new(args, keywords);
if simple_args.keyword_argument("return_value").is_some() {
if simple_args.get_argument("return_value", None).is_some() {
return None;
}
if let Some(new_arg) = simple_args.argument("new", new_arg_number) {
if let Some(new_arg) = simple_args.get_argument("new", Some(new_arg_number)) {
if let ExprKind::Lambda { args, body } = &new_arg.node {
// Walk the lambda body.
let mut visitor = LambdaBodyVisitor {

View File

@@ -67,7 +67,11 @@ const fn is_non_trivial_with_body(body: &[Stmt]) -> bool {
pub fn raises_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords: &[Keyword]) {
if is_pytest_raises(checker, func) {
if checker.settings.rules.enabled(Rule::RaisesWithoutException) {
if checker
.settings
.rules
.enabled(&Rule::RaisesWithoutException)
{
if args.is_empty() && keywords.is_empty() {
checker
.diagnostics
@@ -75,7 +79,7 @@ pub fn raises_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords:
}
}
if checker.settings.rules.enabled(Rule::RaisesTooBroad) {
if checker.settings.rules.enabled(&Rule::RaisesTooBroad) {
let match_keyword = keywords
.iter()
.find(|kw| kw.node.arg == Some("match".to_string()));

View File

@@ -281,7 +281,7 @@ fn docstring(
},
Range::new(start, end),
);
if autofix.into() && settings.rules.should_fix(Rule::BadQuotesDocstring) {
if autofix.into() && settings.rules.should_fix(&Rule::BadQuotesDocstring) {
let quote_count = if trivia.is_multiline { 3 } else { 1 };
let string_contents = &trivia.raw_text[quote_count..trivia.raw_text.len() - quote_count];
let quote = good_docstring(&quotes_settings.docstring_quotes).repeat(quote_count);
@@ -356,7 +356,7 @@ fn strings(
Range::new(*start, *end),
);
if autofix.into() && settings.rules.should_fix(Rule::BadQuotesMultilineString) {
if autofix.into() && settings.rules.should_fix(&Rule::BadQuotesMultilineString) {
let string_contents = &trivia.raw_text[3..trivia.raw_text.len() - 3];
let quote = good_multiline(&quotes_settings.multiline_quotes);
let mut fixed_contents = String::with_capacity(
@@ -386,7 +386,7 @@ fn strings(
{
let mut diagnostic =
Diagnostic::new(AvoidableEscapedQuote, Range::new(*start, *end));
if autofix.into() && settings.rules.should_fix(Rule::AvoidableEscapedQuote) {
if autofix.into() && settings.rules.should_fix(&Rule::AvoidableEscapedQuote) {
let quote = bad_single(&quotes_settings.inline_quotes);
let mut fixed_contents =
@@ -445,7 +445,7 @@ fn strings(
},
Range::new(*start, *end),
);
if autofix.into() && settings.rules.should_fix(Rule::BadQuotesInlineString) {
if autofix.into() && settings.rules.should_fix(&Rule::BadQuotesInlineString) {
let quote = good_single(&quotes_settings.inline_quotes);
let mut fixed_contents =
String::with_capacity(trivia.prefix.len() + string_contents.len() + 2);

View File

@@ -1,6 +1,8 @@
use std::fmt;
#[derive(Debug, PartialEq, Eq)]
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum Branch {
Elif,
Else,

Some files were not shown because too many files have changed in this diff Show More