Compare commits

2 Commits

alex/submo...brent/lamb
| Author | SHA1 | Date |
|---|---|---|
|  | 79d526cd91 |  |
|  | 376571eed1 |  |
@@ -7,10 +7,6 @@ serial = { max-threads = 1 }
filter = 'binary(file_watching)'
test-group = 'serial'

[[profile.default.overrides]]
filter = 'binary(e2e)'
test-group = 'serial'

[profile.ci]
# Print out output for failing tests as soon as they fail, and also at the end
# of the run (for easy scrollability).

54  .github/workflows/ci.yaml  (vendored)
@@ -261,15 +261,15 @@ jobs:
- name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-insta
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
with:
enable-cache: "true"
- name: ty mdtests (GitHub annotations)
@@ -319,17 +319,19 @@ jobs:
- name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-insta
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
with:
enable-cache: "true"
- name: "Run tests"
run: cargo nextest run --cargo-profile profiling --all-features
- name: "Run doctests"
run: cargo test --doc --profile profiling --all-features
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest

cargo-test-other:
strategy:
@@ -352,11 +354,11 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-nextest
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
with:
enable-cache: "true"
- name: "Run tests"
@@ -462,7 +464,7 @@ jobs:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
shared-key: ruff-linux-debug
@@ -497,7 +499,7 @@ jobs:
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: "Install Rust toolchain"
run: rustup component add rustfmt
# Run all code generation scripts, and verify that the current output is
@@ -532,7 +534,7 @@ jobs:
ref: ${{ github.event.pull_request.base.ref }}
persist-credentials: false

- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
with:
python-version: ${{ env.PYTHON_VERSION }}
activate-environment: true
@@ -638,7 +640,7 @@ jobs:
with:
fetch-depth: 0
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
@@ -697,7 +699,7 @@ jobs:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
@@ -748,7 +750,7 @@ jobs:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
@@ -792,7 +794,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
with:
python-version: 3.13
activate-environment: true
@@ -947,13 +949,13 @@ jobs:
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

- name: "Install Rust toolchain"
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-codspeed

@@ -961,7 +963,7 @@ jobs:
run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser

- name: "Run benchmarks"
uses: CodSpeedHQ/action@6a8e2b874c338bf81cc5e8be715ada75908d3871 # v4.3.4
uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
with:
mode: instrumentation
run: cargo codspeed run
@@ -987,13 +989,13 @@ jobs:
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

- name: "Install Rust toolchain"
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-codspeed

@@ -1001,7 +1003,7 @@ jobs:
run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty

- name: "Run benchmarks"
uses: CodSpeedHQ/action@6a8e2b874c338bf81cc5e8be715ada75908d3871 # v4.3.4
uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
with:
mode: instrumentation
run: cargo codspeed run
@@ -1027,13 +1029,13 @@ jobs:
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

- name: "Install Rust toolchain"
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-codspeed

@@ -1041,7 +1043,7 @@ jobs:
run: cargo codspeed build --features "codspeed,walltime" --profile profiling --no-default-features -p ruff_benchmark

- name: "Run benchmarks"
uses: CodSpeedHQ/action@6a8e2b874c338bf81cc5e8be715ada75908d3871 # v4.3.4
uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
env:
# enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't
# appear to provide much useful insight for our walltime benchmarks right now

2  .github/workflows/daily_fuzz.yaml  (vendored)
@@ -34,7 +34,7 @@ jobs:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"

5  .github/workflows/mypy_primer.yaml  (vendored)
@@ -43,7 +43,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:
@@ -55,7 +55,6 @@ jobs:
- name: Run mypy_primer
env:
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
CLICOLOR_FORCE: "1"
DIFF_FILE: mypy_primer.diff
run: |
cd ruff
@@ -81,7 +80,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with:

2  .github/workflows/publish-pypi.yml  (vendored)
@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
pattern: wheels-*

8  .github/workflows/release.yml  (vendored)
@@ -60,7 +60,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
persist-credentials: false
submodules: recursive
@@ -123,7 +123,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
persist-credentials: false
submodules: recursive
@@ -174,7 +174,7 @@ jobs:
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
persist-credentials: false
submodules: recursive
@@ -250,7 +250,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
persist-credentials: false
submodules: recursive

15  .github/workflows/sync_typeshed.yaml  (vendored)
@@ -77,7 +77,7 @@ jobs:
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: Sync typeshed stubs
run: |
rm -rf "ruff/${VENDORED_TYPESHED}"
@@ -131,7 +131,7 @@ jobs:
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: Setup git
run: |
git config --global user.name typeshedbot
@@ -170,7 +170,7 @@ jobs:
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: Setup git
run: |
git config --global user.name typeshedbot
@@ -207,22 +207,17 @@ jobs:
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
- name: "Install cargo nextest"
if: ${{ success() }}
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-nextest
- name: "Install cargo insta"
if: ${{ success() }}
uses: taiki-e/install-action@f79fe7514db78f0a7bdba3cb6dd9c1baa7d046d9 # v2.62.56
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
with:
tool: cargo-insta
- name: Update snapshots
if: ${{ success() }}
run: |
cargo r \
--profile=profiling \
-p ty_completion_eval \
-- all --tasks ./crates/ty_completion_eval/completion-evaluation-tasks.csv

# The `cargo insta` docs indicate that `--unreferenced=delete` might be a good option,
# but from local testing it appears to just revert all changes made by `cargo insta test --accept`.
#

4  .github/workflows/ty-ecosystem-analyzer.yaml  (vendored)
@@ -33,7 +33,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
with:
enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact

@@ -67,7 +67,7 @@ jobs:

cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@e26ebfb78d372b8b091e1cb1d6fc522e135474c1"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"

ecosystem-analyzer \
--repository ruff \

4  .github/workflows/ty-ecosystem-report.yaml  (vendored)
@@ -29,7 +29,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
with:
enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact

@@ -52,7 +52,7 @@ jobs:

cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@e26ebfb78d372b8b091e1cb1d6fc522e135474c1"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"

ecosystem-analyzer \
--verbose \

96  CHANGELOG.md
@@ -1,101 +1,5 @@
# Changelog

## 0.14.6

Released on 2025-11-21.

### Preview features

- \[`flake8-bandit`\] Support new PySNMP API paths (`S508`, `S509`) ([#21374](https://github.com/astral-sh/ruff/pull/21374))

### Bug fixes

- Adjust own-line comment placement between branches ([#21185](https://github.com/astral-sh/ruff/pull/21185))
- Avoid syntax error when formatting attribute expressions with outer parentheses, parenthesized value, and trailing comment on value ([#20418](https://github.com/astral-sh/ruff/pull/20418))
- Fix panic when formatting comments in unary expressions ([#21501](https://github.com/astral-sh/ruff/pull/21501))
- Respect `fmt: skip` for compound statements on a single line ([#20633](https://github.com/astral-sh/ruff/pull/20633))
- \[`refurb`\] Fix `FURB103` autofix ([#21454](https://github.com/astral-sh/ruff/pull/21454))
- \[`ruff`\] Fix false positive for complex conversion specifiers in `logging-eager-conversion` (`RUF065`) ([#21464](https://github.com/astral-sh/ruff/pull/21464))

### Rule changes

- \[`ruff`\] Avoid false positive on `ClassVar` reassignment (`RUF012`) ([#21478](https://github.com/astral-sh/ruff/pull/21478))

### CLI

- Render hyperlinks for lint errors ([#21514](https://github.com/astral-sh/ruff/pull/21514))
- Add a `ruff analyze` option to skip over imports in `TYPE_CHECKING` blocks ([#21472](https://github.com/astral-sh/ruff/pull/21472))

### Documentation

- Limit `eglot-format` hook to eglot-managed Python buffers ([#21459](https://github.com/astral-sh/ruff/pull/21459))
- Mention `force-exclude` in "Configuration > Python file discovery" ([#21500](https://github.com/astral-sh/ruff/pull/21500))

### Contributors

- [@ntBre](https://github.com/ntBre)
- [@dylwil3](https://github.com/dylwil3)
- [@gauthsvenkat](https://github.com/gauthsvenkat)
- [@MichaReiser](https://github.com/MichaReiser)
- [@thamer](https://github.com/thamer)
- [@Ruchir28](https://github.com/Ruchir28)
- [@thejcannon](https://github.com/thejcannon)
- [@danparizher](https://github.com/danparizher)
- [@chirizxc](https://github.com/chirizxc)

## 0.14.5

Released on 2025-11-13.

### Preview features

- \[`flake8-simplify`\] Apply `SIM113` when index variable is of type `int` ([#21395](https://github.com/astral-sh/ruff/pull/21395))
- \[`pydoclint`\] Fix false positive when Sphinx directives follow a "Raises" section (`DOC502`) ([#20535](https://github.com/astral-sh/ruff/pull/20535))
- \[`pydoclint`\] Support NumPy-style comma-separated parameters (`DOC102`) ([#20972](https://github.com/astral-sh/ruff/pull/20972))
- \[`refurb`\] Auto-fix annotated assignments (`FURB101`) ([#21278](https://github.com/astral-sh/ruff/pull/21278))
- \[`ruff`\] Ignore `str()` when not used for simple conversion (`RUF065`) ([#21330](https://github.com/astral-sh/ruff/pull/21330))

### Bug fixes

- Fix syntax error false positive on alternative `match` patterns ([#21362](https://github.com/astral-sh/ruff/pull/21362))
- \[`flake8-simplify`\] Fix false positive for iterable initializers with generator arguments (`SIM222`) ([#21187](https://github.com/astral-sh/ruff/pull/21187))
- \[`pyupgrade`\] Fix false positive on relative imports from local `.builtins` module (`UP029`) ([#21309](https://github.com/astral-sh/ruff/pull/21309))
- \[`pyupgrade`\] Consistently set the deprecated tag (`UP035`) ([#21396](https://github.com/astral-sh/ruff/pull/21396))

### Rule changes

- \[`refurb`\] Detect empty f-strings (`FURB105`) ([#21348](https://github.com/astral-sh/ruff/pull/21348))

### CLI

- Add option to provide a reason to `--add-noqa` ([#21294](https://github.com/astral-sh/ruff/pull/21294))
- Add upstream linter URL to `ruff linter --output-format=json` ([#21316](https://github.com/astral-sh/ruff/pull/21316))
- Add color to `--help` ([#21337](https://github.com/astral-sh/ruff/pull/21337))

### Documentation

- Add a new "Opening a PR" section to the contribution guide ([#21298](https://github.com/astral-sh/ruff/pull/21298))
- Added the PyScripter IDE to the list of "Who is using Ruff?" ([#21402](https://github.com/astral-sh/ruff/pull/21402))
- Update PyCharm setup instructions ([#21409](https://github.com/astral-sh/ruff/pull/21409))
- \[`flake8-annotations`\] Add link to `allow-star-arg-any` option (`ANN401`) ([#21326](https://github.com/astral-sh/ruff/pull/21326))

### Other changes

- \[`configuration`\] Improve error message when `line-length` exceeds `u16::MAX` ([#21329](https://github.com/astral-sh/ruff/pull/21329))

### Contributors

- [@njhearp](https://github.com/njhearp)
- [@11happy](https://github.com/11happy)
- [@hugovk](https://github.com/hugovk)
- [@Gankra](https://github.com/Gankra)
- [@ntBre](https://github.com/ntBre)
- [@pyscripter](https://github.com/pyscripter)
- [@danparizher](https://github.com/danparizher)
- [@MichaReiser](https://github.com/MichaReiser)
- [@henryiii](https://github.com/henryiii)
- [@charliecloudberry](https://github.com/charliecloudberry)

## 0.14.4

Released on 2025-11-06.

77  Cargo.lock  (generated)
@@ -442,9 +442,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.53"
|
||||
version = "4.5.51"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
|
||||
checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -452,9 +452,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.53"
|
||||
version = "4.5.51"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
|
||||
checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -642,7 +642,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -651,7 +651,7 @@ version = "3.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
|
||||
dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1016,7 +1016,7 @@ dependencies = [
|
||||
"libc",
|
||||
"option-ext",
|
||||
"redox_users",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1238,9 +1238,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "get-size-derive2"
|
||||
version = "0.7.2"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ff47daa61505c85af126e9dd64af6a342a33dc0cccfe1be74ceadc7d352e6efd"
|
||||
checksum = "46b134aa084df7c3a513a1035c52f623e4b3065dfaf3d905a4f28a2e79b5bb3f"
|
||||
dependencies = [
|
||||
"attribute-derive",
|
||||
"quote",
|
||||
@@ -1249,14 +1249,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "get-size2"
|
||||
version = "0.7.2"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac7bb8710e1f09672102be7ddf39f764d8440ae74a9f4e30aaa4820dcdffa4af"
|
||||
checksum = "c0d51c9f2e956a517619ad9e7eaebc7a573f9c49b38152e12eade750f89156f9"
|
||||
dependencies = [
|
||||
"compact_str",
|
||||
"get-size-derive2",
|
||||
"hashbrown 0.16.1",
|
||||
"indexmap",
|
||||
"hashbrown 0.16.0",
|
||||
"smallvec",
|
||||
]
|
||||
|
||||
@@ -1353,9 +1352,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.16.1"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
|
||||
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
]
|
||||
@@ -1564,21 +1563,21 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.12.1"
|
||||
version = "2.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
|
||||
checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.16.1",
|
||||
"hashbrown 0.16.0",
|
||||
"serde",
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indicatif"
|
||||
version = "0.18.3"
|
||||
version = "0.18.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9375e112e4b463ec1b1c6c011953545c65a30164fbab5b581df32b3abf0dcb88"
|
||||
checksum = "ade6dfcba0dfb62ad59e59e7241ec8912af34fd29e0e743e3db992bd278e8b65"
|
||||
dependencies = [
|
||||
"console 0.16.1",
|
||||
"portable-atomic",
|
||||
@@ -1699,7 +1698,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
|
||||
dependencies = [
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2607,9 +2606,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "quick-junit"
|
||||
version = "0.5.2"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ee9342d671fae8d66b3ae9fd7a9714dfd089c04d2a8b1ec0436ef77aee15e5f"
|
||||
checksum = "3ed1a693391a16317257103ad06a88c6529ac640846021da7c435a06fffdacd7"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"indexmap",
|
||||
@@ -2622,9 +2621,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "quick-xml"
|
||||
version = "0.38.4"
|
||||
version = "0.37.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c"
|
||||
checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
@@ -2859,7 +2858,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.14.6"
|
||||
version = "0.14.4"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -3005,7 +3004,6 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"similar",
|
||||
"supports-hyperlinks",
|
||||
"tempfile",
|
||||
"thiserror 2.0.17",
|
||||
"tracing",
|
||||
@@ -3117,7 +3115,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.14.6"
|
||||
version = "0.14.4"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"anyhow",
|
||||
@@ -3127,7 +3125,7 @@ dependencies = [
|
||||
"fern",
|
||||
"glob",
|
||||
"globset",
|
||||
"hashbrown 0.16.1",
|
||||
"hashbrown 0.16.0",
|
||||
"imperative",
|
||||
"insta",
|
||||
"is-macro",
|
||||
@@ -3472,7 +3470,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.14.6"
|
||||
version = "0.14.4"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -3588,7 +3586,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
||||
[[package]]
|
||||
name = "salsa"
|
||||
version = "0.24.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=17bc55d699565e5a1cb1bd42363b905af2f9f3e7#17bc55d699565e5a1cb1bd42363b905af2f9f3e7"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09"
|
||||
dependencies = [
|
||||
"boxcar",
|
||||
"compact_str",
|
||||
@@ -3612,12 +3610,12 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "salsa-macro-rules"
|
||||
version = "0.24.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=17bc55d699565e5a1cb1bd42363b905af2f9f3e7#17bc55d699565e5a1cb1bd42363b905af2f9f3e7"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09"
|
||||
|
||||
[[package]]
|
||||
name = "salsa-macros"
|
||||
version = "0.24.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=17bc55d699565e5a1cb1bd42363b905af2f9f3e7#17bc55d699565e5a1cb1bd42363b905af2f9f3e7"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3927,17 +3925,11 @@ dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "supports-hyperlinks"
|
||||
version = "3.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.111"
|
||||
version = "2.0.110"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
|
||||
checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -4462,7 +4454,7 @@ dependencies = [
|
||||
"drop_bomb",
|
||||
"get-size2",
|
||||
"glob",
|
||||
"hashbrown 0.16.1",
|
||||
"hashbrown 0.16.0",
|
||||
"indexmap",
|
||||
"indoc",
|
||||
"insta",
|
||||
@@ -4529,7 +4521,6 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"smallvec",
|
||||
"tempfile",
|
||||
"thiserror 2.0.17",
|
||||
"tracing",
|
||||
|
||||
@@ -146,7 +146,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "17bc55d699565e5a1cb1bd42363b905af2f9f3e7", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "05a9af7f554b64b8aadc2eeb6f2caf73d0408d09", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",
@@ -173,7 +173,6 @@ snapbox = { version = "0.6.0", features = [
static_assertions = "1.1.0"
strum = { version = "0.27.0", features = ["strum_macros"] }
strum_macros = { version = "0.27.0" }
supports-hyperlinks = { version = "3.1.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }

@@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.14.6/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.6/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.14.4/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.4/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.6
rev: v0.14.4
hooks:
# Run the linter.
- id: ruff-check
@@ -491,7 +491,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyScripter](https://github.com/pyscripter/pyscripter)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.14.6"
version = "0.14.4"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -167,7 +167,6 @@ pub enum AnalyzeCommand {
}

#[derive(Clone, Debug, clap::Parser)]
#[expect(clippy::struct_excessive_bools)]
pub struct AnalyzeGraphCommand {
/// List of files or directories to include.
#[clap(help = "List of files or directories to include [default: .]")]
@@ -194,12 +193,6 @@ pub struct AnalyzeGraphCommand {
/// Path to a virtual environment to use for resolving additional dependencies
#[arg(long)]
python: Option<PathBuf>,
/// Include imports that are only used for type checking (i.e., imports within `if TYPE_CHECKING:` blocks).
/// Use `--no-type-checking-imports` to exclude imports that are only used for type checking.
#[arg(long, overrides_with("no_type_checking_imports"))]
type_checking_imports: bool,
#[arg(long, overrides_with("type_checking_imports"), hide = true)]
no_type_checking_imports: bool,
}

// The `Parser` derive is for ruff_dev, for ruff `Args` would be sufficient
@@ -846,10 +839,6 @@ impl AnalyzeGraphCommand {
string_imports_min_dots: self.min_dots,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
target_version: self.target_version.map(ast::PythonVersion::from),
type_checking_imports: resolve_bool_arg(
self.type_checking_imports,
self.no_type_checking_imports,
),
..ExplicitConfigOverrides::default()
};

@@ -1346,7 +1335,6 @@ struct ExplicitConfigOverrides {
extension: Option<Vec<ExtensionPair>>,
detect_string_imports: Option<bool>,
string_imports_min_dots: Option<usize>,
type_checking_imports: Option<bool>,
}

impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1437,9 +1425,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
}
if let Some(type_checking_imports) = &self.type_checking_imports {
config.analyze.type_checking_imports = Some(*type_checking_imports);
}

config
}

@@ -105,7 +105,6 @@ pub(crate) fn analyze_graph(
let settings = resolver.resolve(path);
let string_imports = settings.analyze.string_imports;
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();
let type_checking_imports = settings.analyze.type_checking_imports;

// Skip excluded files.
if (settings.file_resolver.force_exclude || !resolved_file.is_root())
@@ -168,7 +167,6 @@ pub(crate) fn analyze_graph(
&path,
package.as_deref(),
string_imports,
type_checking_imports,
)
.unwrap_or_else(|err| {
warn!("Failed to generate import map for {path}: {err}");
|
||||
@@ -1,193 +0,0 @@
|
||||
use std::process::Command;
|
||||
|
||||
use insta_cmd::assert_cmd_snapshot;
|
||||
|
||||
use crate::CliTest;
|
||||
|
||||
#[test]
|
||||
fn type_checking_imports() -> anyhow::Result<()> {
|
||||
let test = AnalyzeTest::with_files([
|
||||
("ruff/__init__.py", ""),
|
||||
(
|
||||
"ruff/a.py",
|
||||
r#"
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import ruff.b
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import ruff.c
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"ruff/b.py",
|
||||
r#"
|
||||
if TYPE_CHECKING:
|
||||
from ruff import c
|
||||
"#,
|
||||
),
|
||||
("ruff/c.py", ""),
|
||||
])?;
|
||||
|
||||
assert_cmd_snapshot!(test.command(), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py",
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/b.py": [
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
test.command()
|
||||
.arg("--no-type-checking-imports"),
|
||||
@r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn type_checking_imports_from_config() -> anyhow::Result<()> {
|
||||
let test = AnalyzeTest::with_files([
|
||||
("ruff/__init__.py", ""),
|
||||
(
|
||||
"ruff/a.py",
|
||||
r#"
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import ruff.b
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import ruff.c
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"ruff/b.py",
|
||||
r#"
|
||||
if TYPE_CHECKING:
|
||||
from ruff import c
|
||||
"#,
|
||||
),
|
||||
("ruff/c.py", ""),
|
||||
(
|
||||
"ruff.toml",
|
||||
r#"
|
||||
[analyze]
|
||||
type-checking-imports = false
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_cmd_snapshot!(test.command(), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
|
||||
test.write_file(
|
||||
"ruff.toml",
|
||||
r#"
|
||||
[analyze]
|
||||
type-checking-imports = true
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(test.command(), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py",
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/b.py": [
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct AnalyzeTest {
|
||||
cli_test: CliTest,
|
||||
}
|
||||
|
||||
impl AnalyzeTest {
|
||||
pub(crate) fn new() -> anyhow::Result<Self> {
|
||||
Ok(Self {
|
||||
cli_test: CliTest::with_settings(|_, mut settings| {
|
||||
settings.add_filter(r#"\\\\"#, "/");
|
||||
settings
|
||||
})?,
|
||||
})
|
||||
}
|
||||
|
||||
fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
|
||||
let case = Self::new()?;
|
||||
case.write_files(files)?;
|
||||
Ok(case)
|
||||
}
|
||||
|
||||
#[expect(unused)]
|
||||
fn with_file(path: impl AsRef<std::path::Path>, content: &str) -> anyhow::Result<Self> {
|
||||
let fixture = Self::new()?;
|
||||
fixture.write_file(path, content)?;
|
||||
Ok(fixture)
|
||||
}
|
||||
|
||||
fn command(&self) -> Command {
|
||||
let mut command = self.cli_test.command();
|
||||
command.arg("analyze").arg("graph").arg("--preview");
|
||||
command
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for AnalyzeTest {
|
||||
type Target = CliTest;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.cli_test
|
||||
}
|
||||
}
|
||||
@@ -15,7 +15,6 @@ use std::{
|
||||
};
|
||||
use tempfile::TempDir;
|
||||
|
||||
mod analyze_graph;
|
||||
mod format;
|
||||
mod lint;
|
||||
|
||||
@@ -63,7 +62,9 @@ impl CliTest {
|
||||
files: impl IntoIterator<Item = (&'a str, &'a str)>,
|
||||
) -> anyhow::Result<Self> {
|
||||
let case = Self::new()?;
|
||||
case.write_files(files)?;
|
||||
for file in files {
|
||||
case.write_file(file.0, file.1)?;
|
||||
}
|
||||
Ok(case)
|
||||
}
|
||||
|
||||
@@ -152,16 +153,6 @@ impl CliTest {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn write_files<'a>(
|
||||
&self,
|
||||
files: impl IntoIterator<Item = (&'a str, &'a str)>,
|
||||
) -> Result<()> {
|
||||
for file in files {
|
||||
self.write_file(file.0, file.1)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the path to the test directory root.
|
||||
pub(crate) fn root(&self) -> &Path {
|
||||
&self.project_dir
|
||||
|
||||
@@ -9,6 +9,7 @@ info:
|
||||
- concise
|
||||
- "--show-settings"
|
||||
- test.py
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -283,6 +284,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -12,6 +12,7 @@ info:
|
||||
- UP007
|
||||
- test.py
|
||||
- "-"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -285,6 +286,5 @@ analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -13,6 +13,7 @@ info:
|
||||
- UP007
|
||||
- test.py
|
||||
- "-"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -287,6 +288,5 @@ analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -14,6 +14,7 @@ info:
|
||||
- py310
|
||||
- test.py
|
||||
- "-"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -287,6 +288,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -11,6 +11,7 @@ info:
|
||||
- "--select"
|
||||
- UP007
|
||||
- foo/test.py
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -284,6 +285,5 @@ analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -11,6 +11,7 @@ info:
|
||||
- "--select"
|
||||
- UP007
|
||||
- foo/test.py
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -284,6 +285,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -283,6 +283,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -283,6 +283,5 @@ analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -9,6 +9,7 @@ info:
|
||||
- concise
|
||||
- test.py
|
||||
- "--show-settings"
|
||||
snapshot_kind: text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
@@ -283,6 +284,5 @@ analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -396,6 +396,5 @@ analyze.target_version = 3.7
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
analyze.type_checking_imports = true
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
//! styling.
|
||||
//!
|
||||
//! The above snippet has been built out of the following structure:
|
||||
use crate::{Id, snippet};
|
||||
use crate::snippet;
|
||||
use std::cmp::{Reverse, max, min};
|
||||
use std::collections::HashMap;
|
||||
use std::fmt::Display;
|
||||
@@ -189,7 +189,6 @@ impl DisplaySet<'_> {
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn format_annotation(
|
||||
&self,
|
||||
line_offset: usize,
|
||||
@@ -200,13 +199,11 @@ impl DisplaySet<'_> {
|
||||
) -> fmt::Result {
|
||||
let hide_severity = annotation.annotation_type.is_none();
|
||||
let color = get_annotation_style(&annotation.annotation_type, stylesheet);
|
||||
|
||||
let formatted_len = if let Some(id) = &annotation.id {
|
||||
let id_len = id.id.len();
|
||||
if hide_severity {
|
||||
id_len
|
||||
id.len()
|
||||
} else {
|
||||
2 + id_len + annotation_type_len(&annotation.annotation_type)
|
||||
2 + id.len() + annotation_type_len(&annotation.annotation_type)
|
||||
}
|
||||
} else {
|
||||
annotation_type_len(&annotation.annotation_type)
|
||||
@@ -259,20 +256,9 @@ impl DisplaySet<'_> {
|
||||
let annotation_type = annotation_type_str(&annotation.annotation_type);
|
||||
if let Some(id) = annotation.id {
|
||||
if hide_severity {
|
||||
buffer.append(
|
||||
line_offset,
|
||||
&format!("{id} ", id = fmt_with_hyperlink(id.id, id.url, stylesheet)),
|
||||
*stylesheet.error(),
|
||||
);
|
||||
buffer.append(line_offset, &format!("{id} "), *stylesheet.error());
|
||||
} else {
|
||||
buffer.append(
|
||||
line_offset,
|
||||
&format!(
|
||||
"{annotation_type}[{id}]",
|
||||
id = fmt_with_hyperlink(id.id, id.url, stylesheet)
|
||||
),
|
||||
*color,
|
||||
);
|
||||
buffer.append(line_offset, &format!("{annotation_type}[{id}]"), *color);
|
||||
}
|
||||
} else {
|
||||
buffer.append(line_offset, annotation_type, *color);
|
||||
@@ -721,7 +707,7 @@ impl DisplaySet<'_> {
|
||||
let style =
|
||||
get_annotation_style(&annotation.annotation_type, stylesheet);
|
||||
let mut formatted_len = if let Some(id) = &annotation.annotation.id {
|
||||
2 + id.id.len()
|
||||
2 + id.len()
|
||||
+ annotation_type_len(&annotation.annotation.annotation_type)
|
||||
} else {
|
||||
annotation_type_len(&annotation.annotation.annotation_type)
|
||||
@@ -738,10 +724,7 @@ impl DisplaySet<'_> {
|
||||
} else if formatted_len != 0 {
|
||||
formatted_len += 2;
|
||||
let id = match &annotation.annotation.id {
|
||||
Some(id) => format!(
|
||||
"[{id}]",
|
||||
id = fmt_with_hyperlink(&id.id, id.url, stylesheet)
|
||||
),
|
||||
Some(id) => format!("[{id}]"),
|
||||
None => String::new(),
|
||||
};
|
||||
buffer.puts(
|
||||
@@ -844,7 +827,7 @@ impl DisplaySet<'_> {
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub(crate) struct Annotation<'a> {
|
||||
pub(crate) annotation_type: DisplayAnnotationType,
|
||||
pub(crate) id: Option<Id<'a>>,
|
||||
pub(crate) id: Option<&'a str>,
|
||||
pub(crate) label: Vec<DisplayTextFragment<'a>>,
|
||||
pub(crate) is_fixable: bool,
|
||||
}
|
||||
@@ -1157,7 +1140,7 @@ fn format_message<'m>(
|
||||
|
||||
fn format_title<'a>(
|
||||
level: crate::Level,
|
||||
id: Option<Id<'a>>,
|
||||
id: Option<&'a str>,
|
||||
label: &'a str,
|
||||
is_fixable: bool,
|
||||
) -> DisplayLine<'a> {
|
||||
@@ -1175,7 +1158,7 @@ fn format_title<'a>(
|
||||
|
||||
fn format_footer<'a>(
|
||||
level: crate::Level,
|
||||
id: Option<Id<'a>>,
|
||||
id: Option<&'a str>,
|
||||
label: &'a str,
|
||||
) -> Vec<DisplayLine<'a>> {
|
||||
let mut result = vec![];
|
||||
@@ -1723,7 +1706,6 @@ fn format_body<'m>(
|
||||
annotation: Annotation {
|
||||
annotation_type,
|
||||
id: None,
|
||||
|
||||
label: format_label(annotation.label, None),
|
||||
is_fixable: false,
|
||||
},
|
||||
@@ -1905,40 +1887,3 @@ fn char_width(c: char) -> Option<usize> {
|
||||
unicode_width::UnicodeWidthChar::width(c)
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn fmt_with_hyperlink<'a, T>(
|
||||
content: T,
|
||||
url: Option<&'a str>,
|
||||
stylesheet: &Stylesheet,
|
||||
) -> impl std::fmt::Display + 'a
|
||||
where
|
||||
T: std::fmt::Display + 'a,
|
||||
{
|
||||
struct FmtHyperlink<'a, T> {
|
||||
content: T,
|
||||
url: Option<&'a str>,
|
||||
}
|
||||
|
||||
impl<T> std::fmt::Display for FmtHyperlink<'_, T>
|
||||
where
|
||||
T: std::fmt::Display,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
if let Some(url) = self.url {
|
||||
write!(f, "\x1B]8;;{url}\x1B\\")?;
|
||||
}
|
||||
|
||||
self.content.fmt(f)?;
|
||||
|
||||
if self.url.is_some() {
|
||||
f.write_str("\x1B]8;;\x1B\\")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
let url = if stylesheet.hyperlink { url } else { None };
|
||||
|
||||
FmtHyperlink { content, url }
|
||||
}
|
||||
|
||||
@@ -76,7 +76,6 @@ impl Renderer {
|
||||
}
|
||||
.effects(Effects::BOLD),
|
||||
none: Style::new(),
|
||||
hyperlink: true,
|
||||
},
|
||||
..Self::plain()
|
||||
}
|
||||
@@ -155,11 +154,6 @@ impl Renderer {
|
||||
self
|
||||
}
|
||||
|
||||
pub const fn hyperlink(mut self, hyperlink: bool) -> Self {
|
||||
self.stylesheet.hyperlink = hyperlink;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the string used for when a long line is cut.
|
||||
///
|
||||
/// The default is `...` (three `U+002E` characters).
|
||||
|
||||
@@ -10,7 +10,6 @@ pub(crate) struct Stylesheet {
|
||||
pub(crate) line_no: Style,
|
||||
pub(crate) emphasis: Style,
|
||||
pub(crate) none: Style,
|
||||
pub(crate) hyperlink: bool,
|
||||
}
|
||||
|
||||
impl Default for Stylesheet {
|
||||
@@ -30,7 +29,6 @@ impl Stylesheet {
|
||||
line_no: Style::new(),
|
||||
emphasis: Style::new(),
|
||||
none: Style::new(),
|
||||
hyperlink: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,19 +12,13 @@
|
||||
|
||||
use std::ops::Range;
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, PartialEq)]
|
||||
pub(crate) struct Id<'a> {
|
||||
pub(crate) id: &'a str,
|
||||
pub(crate) url: Option<&'a str>,
|
||||
}
|
||||
|
||||
/// Primary structure provided for formatting
|
||||
///
|
||||
/// See [`Level::title`] to create a [`Message`]
|
||||
#[derive(Debug)]
|
||||
pub struct Message<'a> {
|
||||
pub(crate) level: Level,
|
||||
pub(crate) id: Option<Id<'a>>,
|
||||
pub(crate) id: Option<&'a str>,
|
||||
pub(crate) title: &'a str,
|
||||
pub(crate) snippets: Vec<Snippet<'a>>,
|
||||
pub(crate) footer: Vec<Message<'a>>,
|
||||
@@ -34,12 +28,7 @@ pub struct Message<'a> {
|
||||
|
||||
impl<'a> Message<'a> {
|
||||
pub fn id(mut self, id: &'a str) -> Self {
|
||||
self.id = Some(Id { id, url: None });
|
||||
self
|
||||
}
|
||||
|
||||
pub fn id_with_url(mut self, id: &'a str, url: Option<&'a str>) -> Self {
|
||||
self.id = Some(Id { id, url });
|
||||
self.id = Some(id);
|
||||
self
|
||||
}
|
||||
|
||||
|
||||
@@ -59,6 +59,8 @@ divan = { workspace = true, optional = true }
|
||||
anyhow = { workspace = true }
|
||||
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true }
|
||||
criterion = { workspace = true, default-features = false, optional = true }
|
||||
rayon = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
@@ -86,7 +88,3 @@ mimalloc = { workspace = true }
|
||||
|
||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dev-dependencies]
|
||||
tikv-jemallocator = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
rustc-hash = { workspace = true }
|
||||
rayon = { workspace = true }
|
||||
|
||||
@@ -667,7 +667,7 @@ fn attrs(criterion: &mut Criterion) {
|
||||
max_dep_date: "2025-06-17",
|
||||
python_version: PythonVersion::PY313,
|
||||
},
|
||||
120,
|
||||
110,
|
||||
);
|
||||
|
||||
bench_project(&benchmark, criterion);
|
||||
|
||||
@@ -71,13 +71,16 @@ impl Display for Benchmark<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usize) {
|
||||
fn check_project(db: &ProjectDatabase, max_diagnostics: usize) {
|
||||
let result = db.check();
|
||||
let diagnostics = result.len();
|
||||
|
||||
assert!(
|
||||
diagnostics > 1 && diagnostics <= max_diagnostics,
|
||||
"Expected between 1 and {max_diagnostics} diagnostics on project '{project_name}' but got {diagnostics}",
|
||||
"Expected between {} and {} diagnostics but got {}",
|
||||
1,
|
||||
max_diagnostics,
|
||||
diagnostics
|
||||
);
|
||||
}
|
||||
|
||||
@@ -143,7 +146,7 @@ static FREQTRADE: Benchmark = Benchmark::new(
|
||||
max_dep_date: "2025-06-17",
|
||||
python_version: PythonVersion::PY312,
|
||||
},
|
||||
600,
|
||||
525,
|
||||
);
|
||||
|
||||
static PANDAS: Benchmark = Benchmark::new(
|
||||
@@ -163,7 +166,7 @@ static PANDAS: Benchmark = Benchmark::new(
|
||||
max_dep_date: "2025-06-17",
|
||||
python_version: PythonVersion::PY312,
|
||||
},
|
||||
4000,
|
||||
3000,
|
||||
);
|
||||
|
||||
static PYDANTIC: Benchmark = Benchmark::new(
|
||||
@@ -181,7 +184,7 @@ static PYDANTIC: Benchmark = Benchmark::new(
|
||||
max_dep_date: "2025-06-17",
|
||||
python_version: PythonVersion::PY39,
|
||||
},
|
||||
7000,
|
||||
1000,
|
||||
);
|
||||
|
||||
static SYMPY: Benchmark = Benchmark::new(
|
||||
@@ -223,7 +226,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new(
|
||||
max_dep_date: "2025-08-09",
|
||||
python_version: PythonVersion::PY311,
|
||||
},
|
||||
900,
|
||||
800,
|
||||
);
|
||||
|
||||
#[track_caller]
@@ -231,11 +234,11 @@ fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
    bencher
        .with_inputs(|| benchmark.setup_iteration())
        .bench_local_refs(|db| {
            check_project(db, benchmark.project.name, benchmark.max_diagnostics);
            check_project(db, benchmark.max_diagnostics);
        });
}

#[bench(args=[&ALTAIR, &FREQTRADE, &TANJUN], sample_size=2, sample_count=3)]
#[bench(args=[&ALTAIR, &FREQTRADE, &PYDANTIC, &TANJUN], sample_size=2, sample_count=3)]
fn small(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}
@@ -245,12 +248,12 @@ fn medium(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}

#[bench(args=[&SYMPY, &PYDANTIC], sample_size=1, sample_count=2)]
#[bench(args=[&SYMPY], sample_size=1, sample_count=2)]
fn large(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}

#[bench(args=[&ALTAIR], sample_size=3, sample_count=8)]
#[bench(args=[&PYDANTIC], sample_size=3, sample_count=8)]
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
    let thread_pool = ThreadPoolBuilder::new().build().unwrap();

@@ -258,7 +261,7 @@ fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
        .with_inputs(|| benchmark.setup_iteration())
        .bench_local_values(|db| {
            thread_pool.install(|| {
                check_project(&db, benchmark.project.name, benchmark.max_diagnostics);
                check_project(&db, benchmark.max_diagnostics);
                db
            })
        });
@@ -282,7 +285,7 @@ fn main() {
    // branch when looking up the ingredient index.
    {
        let db = TANJUN.setup_iteration();
        check_project(&db, TANJUN.project.name, TANJUN.max_diagnostics);
        check_project(&db, TANJUN.max_diagnostics);
    }

    divan::main();

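
For readers unfamiliar with divan, the attributes changed above control how each benchmark samples: `sample_count` is the number of timing samples collected and `sample_size` the number of iterations per sample. A minimal, self-contained sketch assuming the standard divan API (the `fibonacci` function and the concrete settings here are hypothetical):

// Illustrative divan benchmark, not part of the diff.
fn fibonacci(n: u64) -> u64 {
    if n < 2 { n } else { fibonacci(n - 1) + fibonacci(n - 2) }
}

#[divan::bench(args = [10_u64, 20], sample_size = 2, sample_count = 3)]
fn fib(bencher: divan::Bencher, n: u64) {
    // Each sample runs the closure `sample_size` times; 3 samples are recorded.
    bencher.bench(|| fibonacci(divan::black_box(n)));
}

fn main() {
    divan::main();
}
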
@@ -42,7 +42,6 @@ schemars = { workspace = true, optional = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
serde_json = { workspace = true, optional = true }
|
||||
similar = { workspace = true }
|
||||
supports-hyperlinks = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true, optional = true }
|
||||
|
||||
@@ -64,8 +64,6 @@ impl Diagnostic {
|
||||
id,
|
||||
severity,
|
||||
message: message.into_diagnostic_message(),
|
||||
custom_concise_message: None,
|
||||
documentation_url: None,
|
||||
annotations: vec![],
|
||||
subs: vec![],
|
||||
fix: None,
|
||||
@@ -215,10 +213,6 @@ impl Diagnostic {
|
||||
/// cases, just converting it to a string (or printing it) will do what
|
||||
/// you want.
|
||||
pub fn concise_message(&self) -> ConciseMessage<'_> {
|
||||
if let Some(custom_message) = &self.inner.custom_concise_message {
|
||||
return ConciseMessage::Custom(custom_message.as_str());
|
||||
}
|
||||
|
||||
let main = self.inner.message.as_str();
|
||||
let annotation = self
|
||||
.primary_annotation()
|
||||
@@ -232,15 +226,6 @@ impl Diagnostic {
|
||||
}
|
||||
}
|
||||
|
||||
/// Set a custom message for the concise formatting of this diagnostic.
|
||||
///
|
||||
/// This overrides the default behavior of generating a concise message
|
||||
/// from the main diagnostic message and the primary annotation.
|
||||
pub fn set_concise_message(&mut self, message: impl IntoDiagnosticMessage) {
|
||||
Arc::make_mut(&mut self.inner).custom_concise_message =
|
||||
Some(message.into_diagnostic_message());
|
||||
}
|
||||
|
||||
/// Returns the severity of this diagnostic.
|
||||
///
|
||||
/// Note that this may be different than the severity of sub-diagnostics.
|
||||
@@ -371,14 +356,6 @@ impl Diagnostic {
|
||||
.is_some_and(|fix| fix.applies(config.fix_applicability))
|
||||
}
|
||||
|
||||
pub fn documentation_url(&self) -> Option<&str> {
|
||||
self.inner.documentation_url.as_deref()
|
||||
}
|
||||
|
||||
pub fn set_documentation_url(&mut self, url: Option<String>) {
|
||||
Arc::make_mut(&mut self.inner).documentation_url = url;
|
||||
}
|
||||
|
||||
/// Returns the offset of the parent statement for this diagnostic if it exists.
|
||||
///
|
||||
/// This is primarily used for checking noqa/secondary code suppressions.
|
||||
@@ -452,6 +429,28 @@ impl Diagnostic {
            .map(|sub| sub.inner.message.as_str())
    }

    /// Returns the URL for the rule documentation, if it exists.
    pub fn to_ruff_url(&self) -> Option<String> {
        match self.id() {
            DiagnosticId::Panic
            | DiagnosticId::Io
            | DiagnosticId::InvalidSyntax
            | DiagnosticId::RevealedType
            | DiagnosticId::UnknownRule
            | DiagnosticId::InvalidGlob
            | DiagnosticId::EmptyInclude
            | DiagnosticId::UnnecessaryOverridesSection
            | DiagnosticId::UselessOverridesSection
            | DiagnosticId::DeprecatedSetting
            | DiagnosticId::Unformatted
            | DiagnosticId::InvalidCliOption
            | DiagnosticId::InternalError => None,
            DiagnosticId::Lint(lint_name) => {
                Some(format!("{}/rules/{lint_name}", env!("CARGO_PKG_HOMEPAGE")))
            }
        }
    }

    /// Returns the filename for the message.
    ///
    /// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
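
Concretely, `to_ruff_url` only produces a URL for lint diagnostics by joining the crate homepage with the rule slug. A small illustration, assuming `CARGO_PKG_HOMEPAGE` resolves to the Ruff docs site used elsewhere in this diff:

// Illustrative only: mirrors the format! call above with a stand-in homepage value.
let homepage = "https://docs.astral.sh/ruff"; // stand-in for env!("CARGO_PKG_HOMEPAGE")
let lint_name = "unused-import";
let url = format!("{homepage}/rules/{lint_name}");
assert_eq!(url, "https://docs.astral.sh/ruff/rules/unused-import");
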
@@ -531,10 +530,8 @@ impl Diagnostic {
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||
struct DiagnosticInner {
|
||||
id: DiagnosticId,
|
||||
documentation_url: Option<String>,
|
||||
severity: Severity,
|
||||
message: DiagnosticMessage,
|
||||
custom_concise_message: Option<DiagnosticMessage>,
|
||||
annotations: Vec<Annotation>,
|
||||
subs: Vec<SubDiagnostic>,
|
||||
fix: Option<Fix>,
|
||||
@@ -1523,8 +1520,6 @@ pub enum ConciseMessage<'a> {
|
||||
/// This indicates that the diagnostic is probably using the old
|
||||
/// model.
|
||||
Empty,
|
||||
/// A custom concise message has been provided.
|
||||
Custom(&'a str),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ConciseMessage<'_> {
|
||||
@@ -1540,9 +1535,6 @@ impl std::fmt::Display for ConciseMessage<'_> {
|
||||
write!(f, "{main}: {annotation}")
|
||||
}
|
||||
ConciseMessage::Empty => Ok(()),
|
||||
ConciseMessage::Custom(message) => {
|
||||
write!(f, "{message}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -205,7 +205,6 @@ impl<'a> Resolved<'a> {
|
||||
struct ResolvedDiagnostic<'a> {
|
||||
level: AnnotateLevel,
|
||||
id: Option<String>,
|
||||
documentation_url: Option<String>,
|
||||
message: String,
|
||||
annotations: Vec<ResolvedAnnotation<'a>>,
|
||||
is_fixable: bool,
|
||||
@@ -241,12 +240,12 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
// `DisplaySet::format_annotation` for both cases, but this is a small hack to improve
|
||||
// the formatting of syntax errors for now. This should also be kept consistent with the
|
||||
// concise formatting.
|
||||
diag.secondary_code().map_or_else(
|
||||
Some(diag.secondary_code().map_or_else(
|
||||
|| format!("{id}:", id = diag.inner.id),
|
||||
|code| code.to_string(),
|
||||
)
|
||||
))
|
||||
} else {
|
||||
diag.inner.id.to_string()
|
||||
Some(diag.inner.id.to_string())
|
||||
};
|
||||
|
||||
let level = if config.hide_severity {
|
||||
@@ -257,8 +256,7 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
|
||||
ResolvedDiagnostic {
|
||||
level,
|
||||
id: Some(id),
|
||||
documentation_url: diag.documentation_url().map(ToString::to_string),
|
||||
id,
|
||||
message: diag.inner.message.as_str().to_string(),
|
||||
annotations,
|
||||
is_fixable: config.show_fix_status && diag.has_applicable_fix(config),
|
||||
@@ -289,7 +287,6 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
ResolvedDiagnostic {
|
||||
level: diag.inner.severity.to_annotate(),
|
||||
id: None,
|
||||
documentation_url: None,
|
||||
message: diag.inner.message.as_str().to_string(),
|
||||
annotations,
|
||||
is_fixable: false,
|
||||
@@ -388,7 +385,6 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
RenderableDiagnostic {
|
||||
level: self.level,
|
||||
id: self.id.as_deref(),
|
||||
documentation_url: self.documentation_url.as_deref(),
|
||||
message: &self.message,
|
||||
snippets_by_input,
|
||||
is_fixable: self.is_fixable,
|
||||
@@ -489,7 +485,6 @@ struct RenderableDiagnostic<'r> {
|
||||
/// An ID is always present for top-level diagnostics and always absent for
|
||||
/// sub-diagnostics.
|
||||
id: Option<&'r str>,
|
||||
documentation_url: Option<&'r str>,
|
||||
/// The message emitted with the diagnostic, before any snippets are
|
||||
/// rendered.
|
||||
message: &'r str,
|
||||
@@ -524,7 +519,7 @@ impl RenderableDiagnostic<'_> {
|
||||
.is_fixable(self.is_fixable)
|
||||
.lineno_offset(self.header_offset);
|
||||
if let Some(id) = self.id {
|
||||
message = message.id_with_url(id, self.documentation_url);
|
||||
message = message.id(id);
|
||||
}
|
||||
message.snippets(snippets)
|
||||
}
|
||||
@@ -2881,12 +2876,6 @@ watermelon
|
||||
self.diag.help(message);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the documentation URL for the diagnostic.
|
||||
pub(super) fn documentation_url(mut self, url: impl Into<String>) -> DiagnosticBuilder<'e> {
|
||||
self.diag.set_documentation_url(Some(url.into()));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// A helper builder for tersely populating a `SubDiagnostic`.
|
||||
@@ -3001,7 +2990,6 @@ def fibonacci(n):
|
||||
TextSize::from(10),
|
||||
))))
|
||||
.noqa_offset(TextSize::from(7))
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/unused-import")
|
||||
.build(),
|
||||
env.builder(
|
||||
"unused-variable",
|
||||
@@ -3016,13 +3004,11 @@ def fibonacci(n):
|
||||
TextSize::from(99),
|
||||
)))
|
||||
.noqa_offset(TextSize::from(94))
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/unused-variable")
|
||||
.build(),
|
||||
env.builder("undefined-name", Severity::Error, "Undefined name `a`")
|
||||
.primary("undef.py", "1:3", "1:4", "")
|
||||
.secondary_code("F821")
|
||||
.noqa_offset(TextSize::from(3))
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/undefined-name")
|
||||
.build(),
|
||||
];
|
||||
|
||||
@@ -3137,7 +3123,6 @@ if call(foo
|
||||
TextSize::from(19),
|
||||
))))
|
||||
.noqa_offset(TextSize::from(16))
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/unused-import")
|
||||
.build(),
|
||||
env.builder(
|
||||
"unused-import",
|
||||
@@ -3152,7 +3137,6 @@ if call(foo
|
||||
TextSize::from(40),
|
||||
))))
|
||||
.noqa_offset(TextSize::from(35))
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/unused-import")
|
||||
.build(),
|
||||
env.builder(
|
||||
"unused-variable",
|
||||
@@ -3167,7 +3151,6 @@ if call(foo
|
||||
TextSize::from(104),
|
||||
))))
|
||||
.noqa_offset(TextSize::from(98))
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/unused-variable")
|
||||
.build(),
|
||||
];
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::diagnostic::{
|
||||
Diagnostic, DisplayDiagnosticConfig, Severity,
|
||||
stylesheet::{DiagnosticStylesheet, fmt_styled, fmt_with_hyperlink},
|
||||
stylesheet::{DiagnosticStylesheet, fmt_styled},
|
||||
};
|
||||
|
||||
use super::FileResolver;
|
||||
@@ -62,29 +62,18 @@ impl<'a> ConciseRenderer<'a> {
|
||||
}
|
||||
write!(f, "{sep} ")?;
|
||||
}
|
||||
|
||||
if self.config.hide_severity {
|
||||
if let Some(code) = diag.secondary_code() {
|
||||
write!(
|
||||
f,
|
||||
"{code} ",
|
||||
code = fmt_styled(
|
||||
fmt_with_hyperlink(&code, diag.documentation_url(), &stylesheet),
|
||||
stylesheet.secondary_code
|
||||
)
|
||||
code = fmt_styled(code, stylesheet.secondary_code)
|
||||
)?;
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"{id}: ",
|
||||
id = fmt_styled(
|
||||
fmt_with_hyperlink(
|
||||
&diag.inner.id,
|
||||
diag.documentation_url(),
|
||||
&stylesheet
|
||||
),
|
||||
stylesheet.secondary_code
|
||||
)
|
||||
id = fmt_styled(diag.inner.id.as_str(), stylesheet.secondary_code)
|
||||
)?;
|
||||
}
|
||||
if self.config.show_fix_status {
|
||||
@@ -104,10 +93,7 @@ impl<'a> ConciseRenderer<'a> {
|
||||
f,
|
||||
"{severity}[{id}] ",
|
||||
severity = fmt_styled(severity, severity_style),
|
||||
id = fmt_styled(
|
||||
fmt_with_hyperlink(&diag.id(), diag.documentation_url(), &stylesheet),
|
||||
stylesheet.emphasis
|
||||
)
|
||||
id = fmt_styled(diag.id(), stylesheet.emphasis)
|
||||
)?;
|
||||
}
|
||||
|
||||
|
||||
@@ -49,8 +49,7 @@ impl<'a> FullRenderer<'a> {
|
||||
.help(stylesheet.help)
|
||||
.line_no(stylesheet.line_no)
|
||||
.emphasis(stylesheet.emphasis)
|
||||
.none(stylesheet.none)
|
||||
.hyperlink(stylesheet.hyperlink);
|
||||
.none(stylesheet.none);
|
||||
|
||||
for diag in diagnostics {
|
||||
let resolved = Resolved::new(self.resolver, diag, self.config);
|
||||
@@ -704,7 +703,52 @@ print()
|
||||
env.show_fix_status(true);
|
||||
env.fix_applicability(Applicability::DisplayOnly);
|
||||
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
error[unused-import][*]: `os` imported but unused
|
||||
--> notebook.ipynb:cell 1:2:8
|
||||
|
|
||||
1 | # cell 1
|
||||
2 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
::: cell 1
|
||||
1 | # cell 1
|
||||
- import os
|
||||
|
||||
error[unused-import][*]: `math` imported but unused
|
||||
--> notebook.ipynb:cell 2:2:8
|
||||
|
|
||||
1 | # cell 2
|
||||
2 | import math
|
||||
| ^^^^
|
||||
3 |
|
||||
4 | print('hello world')
|
||||
|
|
||||
help: Remove unused import: `math`
|
||||
::: cell 2
|
||||
1 | # cell 2
|
||||
- import math
|
||||
2 |
|
||||
3 | print('hello world')
|
||||
|
||||
error[unused-variable][*]: Local variable `x` is assigned to but never used
|
||||
--> notebook.ipynb:cell 3:4:5
|
||||
|
|
||||
2 | def foo():
|
||||
3 | print()
|
||||
4 | x = 1
|
||||
| ^
|
||||
|
|
||||
help: Remove assignment to unused variable `x`
|
||||
::: cell 3
|
||||
1 | # cell 3
|
||||
2 | def foo():
|
||||
3 | print()
|
||||
- x = 1
|
||||
4 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -724,7 +768,31 @@ print()
|
||||
}
|
||||
*fix = Fix::unsafe_edits(edits.remove(0), edits);
|
||||
|
||||
insta::assert_snapshot!(env.render(&diagnostic));
|
||||
insta::assert_snapshot!(env.render(&diagnostic), @r"
|
||||
error[unused-import][*]: `os` imported but unused
|
||||
--> notebook.ipynb:cell 1:2:8
|
||||
|
|
||||
1 | # cell 1
|
||||
2 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
::: cell 1
|
||||
1 | # cell 1
|
||||
- import os
|
||||
::: cell 2
|
||||
1 | # cell 2
|
||||
- import math
|
||||
2 |
|
||||
3 | print('hello world')
|
||||
::: cell 3
|
||||
1 | # cell 3
|
||||
2 | def foo():
|
||||
3 | print()
|
||||
- x = 1
|
||||
4 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
");
|
||||
}
|
||||
|
||||
/// Carriage return (`\r`) is a valid line-ending in Python, so we should normalize this to a
|
||||
|
||||
@@ -100,7 +100,7 @@ pub(super) fn diagnostic_to_json<'a>(
|
||||
if config.preview {
|
||||
JsonDiagnostic {
|
||||
code: diagnostic.secondary_code_or_id(),
|
||||
url: diagnostic.documentation_url(),
|
||||
url: diagnostic.to_ruff_url(),
|
||||
message: diagnostic.body(),
|
||||
fix,
|
||||
cell: notebook_cell_index,
|
||||
@@ -112,7 +112,7 @@ pub(super) fn diagnostic_to_json<'a>(
|
||||
} else {
|
||||
JsonDiagnostic {
|
||||
code: diagnostic.secondary_code_or_id(),
|
||||
url: diagnostic.documentation_url(),
|
||||
url: diagnostic.to_ruff_url(),
|
||||
message: diagnostic.body(),
|
||||
fix,
|
||||
cell: notebook_cell_index,
|
||||
@@ -228,7 +228,7 @@ pub(crate) struct JsonDiagnostic<'a> {
|
||||
location: Option<JsonLocation>,
|
||||
message: &'a str,
|
||||
noqa_row: Option<OneIndexed>,
|
||||
url: Option<&'a str>,
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
@@ -294,10 +294,7 @@ mod tests {
|
||||
env.format(DiagnosticFormat::Json);
|
||||
env.preview(false);
|
||||
|
||||
let diag = env
|
||||
.err()
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/test-diagnostic")
|
||||
.build();
|
||||
let diag = env.err().build();
|
||||
|
||||
insta::assert_snapshot!(
|
||||
env.render(&diag),
|
||||
@@ -331,10 +328,7 @@ mod tests {
|
||||
env.format(DiagnosticFormat::Json);
|
||||
env.preview(true);
|
||||
|
||||
let diag = env
|
||||
.err()
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/test-diagnostic")
|
||||
.build();
|
||||
let diag = env.err().build();
|
||||
|
||||
insta::assert_snapshot!(
|
||||
env.render(&diag),
|
||||
|
||||
@@ -82,7 +82,7 @@ fn diagnostic_to_rdjson<'a>(
|
||||
value: diagnostic
|
||||
.secondary_code()
|
||||
.map_or_else(|| diagnostic.name(), |code| code.as_str()),
|
||||
url: diagnostic.documentation_url(),
|
||||
url: diagnostic.to_ruff_url(),
|
||||
},
|
||||
suggestions: rdjson_suggestions(
|
||||
edits,
|
||||
@@ -182,7 +182,7 @@ impl RdjsonRange {
|
||||
#[derive(Serialize)]
|
||||
struct RdjsonCode<'a> {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
url: Option<&'a str>,
|
||||
url: Option<String>,
|
||||
value: &'a str,
|
||||
}
|
||||
|
||||
@@ -217,10 +217,7 @@ mod tests {
|
||||
env.format(DiagnosticFormat::Rdjson);
|
||||
env.preview(false);
|
||||
|
||||
let diag = env
|
||||
.err()
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/test-diagnostic")
|
||||
.build();
|
||||
let diag = env.err().build();
|
||||
|
||||
insta::assert_snapshot!(env.render(&diag));
|
||||
}
|
||||
@@ -231,10 +228,7 @@ mod tests {
|
||||
env.format(DiagnosticFormat::Rdjson);
|
||||
env.preview(true);
|
||||
|
||||
let diag = env
|
||||
.err()
|
||||
.documentation_url("https://docs.astral.sh/ruff/rules/test-diagnostic")
|
||||
.build();
|
||||
let diag = env.err().build();
|
||||
|
||||
insta::assert_snapshot!(env.render(&diag));
|
||||
}
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_db/src/diagnostic/render/full.rs
|
||||
expression: env.render_diagnostics(&diagnostics)
|
||||
---
|
||||
error[unused-import][*]: `os` imported but unused
|
||||
--> notebook.ipynb:cell 1:2:8
|
||||
|
|
||||
1 | # cell 1
|
||||
2 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
::: cell 1
|
||||
1 | # cell 1
|
||||
- import os
|
||||
|
||||
error[unused-import][*]: `math` imported but unused
|
||||
--> notebook.ipynb:cell 2:2:8
|
||||
|
|
||||
1 | # cell 2
|
||||
2 | import math
|
||||
| ^^^^
|
||||
3 |
|
||||
4 | print('hello world')
|
||||
|
|
||||
help: Remove unused import: `math`
|
||||
::: cell 2
|
||||
1 | # cell 2
|
||||
- import math
|
||||
2 |
|
||||
3 | print('hello world')
|
||||
|
||||
error[unused-variable][*]: Local variable `x` is assigned to but never used
|
||||
--> notebook.ipynb:cell 3:4:5
|
||||
|
|
||||
2 | def foo():
|
||||
3 | print()
|
||||
4 | x = 1
|
||||
| ^
|
||||
|
|
||||
help: Remove assignment to unused variable `x`
|
||||
::: cell 3
|
||||
1 | # cell 3
|
||||
2 | def foo():
|
||||
3 | print()
|
||||
- x = 1
|
||||
4 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
@@ -1,27 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_db/src/diagnostic/render/full.rs
|
||||
expression: env.render(&diagnostic)
|
||||
---
|
||||
error[unused-import][*]: `os` imported but unused
|
||||
--> notebook.ipynb:cell 1:2:8
|
||||
|
|
||||
1 | # cell 1
|
||||
2 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
::: cell 1
|
||||
1 | # cell 1
|
||||
- import os
|
||||
::: cell 2
|
||||
1 | # cell 2
|
||||
- import math
|
||||
2 |
|
||||
3 | print('hello world')
|
||||
::: cell 3
|
||||
1 | # cell 3
|
||||
2 | def foo():
|
||||
3 | print()
|
||||
- x = 1
|
||||
4 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
@@ -31,43 +31,6 @@ where
    FmtStyled { content, style }
}

pub(super) fn fmt_with_hyperlink<'a, T>(
    content: T,
    url: Option<&'a str>,
    stylesheet: &DiagnosticStylesheet,
) -> impl std::fmt::Display + 'a
where
    T: std::fmt::Display + 'a,
{
    struct FmtHyperlink<'a, T> {
        content: T,
        url: Option<&'a str>,
    }

    impl<T> std::fmt::Display for FmtHyperlink<'_, T>
    where
        T: std::fmt::Display,
    {
        fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
            if let Some(url) = self.url {
                write!(f, "\x1B]8;;{url}\x1B\\")?;
            }

            self.content.fmt(f)?;

            if self.url.is_some() {
                f.write_str("\x1B]8;;\x1B\\")?;
            }

            Ok(())
        }
    }

    let url = if stylesheet.hyperlink { url } else { None };

    FmtHyperlink { content, url }
}

#[derive(Clone, Debug)]
pub struct DiagnosticStylesheet {
    pub(crate) error: Style,
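
The escape sequences written by `FmtHyperlink` above are OSC 8 terminal hyperlinks: `ESC ] 8 ; ; <url> ESC \` opens a link and the same sequence with an empty URL closes it, so the wrapped text becomes clickable in supporting terminals. A minimal standalone sketch (illustrative, not part of the diff):

// Prints `text` as a clickable hyperlink in terminals that support OSC 8.
fn print_hyperlink(text: &str, url: &str) {
    print!("\x1B]8;;{url}\x1B\\{text}\x1B]8;;\x1B\\");
}

fn main() {
    print_hyperlink("docs", "https://docs.astral.sh/ruff");
    println!();
}
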
@@ -84,7 +47,6 @@ pub struct DiagnosticStylesheet {
|
||||
pub(crate) deletion: Style,
|
||||
pub(crate) insertion_line_no: Style,
|
||||
pub(crate) deletion_line_no: Style,
|
||||
pub(crate) hyperlink: bool,
|
||||
}
|
||||
|
||||
impl Default for DiagnosticStylesheet {
|
||||
@@ -97,8 +59,6 @@ impl DiagnosticStylesheet {
|
||||
/// Default terminal styling
|
||||
pub fn styled() -> Self {
|
||||
let bright_blue = AnsiColor::BrightBlue.on_default();
|
||||
|
||||
let hyperlink = supports_hyperlinks::supports_hyperlinks();
|
||||
Self {
|
||||
error: AnsiColor::BrightRed.on_default().effects(Effects::BOLD),
|
||||
warning: AnsiColor::Yellow.on_default().effects(Effects::BOLD),
|
||||
@@ -114,7 +74,6 @@ impl DiagnosticStylesheet {
|
||||
deletion: AnsiColor::Red.on_default(),
|
||||
insertion_line_no: AnsiColor::Green.on_default().effects(Effects::BOLD),
|
||||
deletion_line_no: AnsiColor::Red.on_default().effects(Effects::BOLD),
|
||||
hyperlink,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -134,7 +93,6 @@ impl DiagnosticStylesheet {
|
||||
deletion: Style::new(),
|
||||
insertion_line_no: Style::new(),
|
||||
deletion_line_no: Style::new(),
|
||||
hyperlink: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ use ruff_source_file::LineIndex;

use crate::Db;
use crate::files::{File, FilePath};
use crate::system::System;

/// Reads the source text of a python text file (must be valid UTF8) or notebook.
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
@@ -16,7 +15,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    let _span = tracing::trace_span!("source_text", file = %path).entered();
    let mut read_error = None;

    let kind = if is_notebook(db.system(), path) {
    let kind = if is_notebook(file.path(db)) {
        file.read_to_notebook(db)
            .unwrap_or_else(|error| {
                tracing::debug!("Failed to read notebook '{path}': {error}");
@@ -41,17 +40,18 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    }
}

fn is_notebook(system: &dyn System, path: &FilePath) -> bool {
    let source_type = match path {
        FilePath::System(path) => system.source_type(path),
        FilePath::SystemVirtual(system_virtual) => system.virtual_path_source_type(system_virtual),
        FilePath::Vendored(_) => return false,
    };

    let with_extension_fallback =
        source_type.or_else(|| PySourceType::try_from_extension(path.extension()?));

    with_extension_fallback == Some(PySourceType::Ipynb)
fn is_notebook(path: &FilePath) -> bool {
    match path {
        FilePath::System(system) => system.extension().is_some_and(|extension| {
            PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
        }),
        FilePath::SystemVirtual(system_virtual) => {
            system_virtual.extension().is_some_and(|extension| {
                PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
            })
        }
        FilePath::Vendored(_) => false,
    }
}

/// The source text of a file containing python code.

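
Both versions of `is_notebook` ultimately fall back to mapping the file extension to a source type; the change is whether the `System` can override that via an editor-provided language. A short sketch of just the extension check, under the assumption that `try_from_extension` recognizes the bare extension without a leading dot (as `path.extension()` returns it):

use ruff_python_ast::PySourceType;

// Illustrative: a bare extension check, equivalent to the fallback used above.
fn looks_like_notebook(extension: &str) -> bool {
    PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
}

// looks_like_notebook("ipynb") == true; looks_like_notebook("py") == false
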
@@ -9,7 +9,6 @@ pub use os::OsSystem;
|
||||
|
||||
use filetime::FileTime;
|
||||
use ruff_notebook::{Notebook, NotebookError};
|
||||
use ruff_python_ast::PySourceType;
|
||||
use std::error::Error;
|
||||
use std::fmt::{Debug, Formatter};
|
||||
use std::path::{Path, PathBuf};
|
||||
@@ -17,11 +16,12 @@ use std::{fmt, io};
|
||||
pub use test::{DbWithTestSystem, DbWithWritableSystem, InMemorySystem, TestSystem};
|
||||
use walk_directory::WalkDirectoryBuilder;
|
||||
|
||||
use crate::file_revision::FileRevision;
|
||||
|
||||
pub use self::path::{
|
||||
DeduplicatedNestedPathsIter, SystemPath, SystemPathBuf, SystemVirtualPath,
|
||||
SystemVirtualPathBuf, deduplicate_nested_paths,
|
||||
};
|
||||
use crate::file_revision::FileRevision;
|
||||
|
||||
mod memory_fs;
|
||||
#[cfg(feature = "os")]
|
||||
@@ -66,35 +66,6 @@ pub trait System: Debug + Sync + Send {
|
||||
/// See [dunce::canonicalize] for more information.
|
||||
fn canonicalize_path(&self, path: &SystemPath) -> Result<SystemPathBuf>;
|
||||
|
||||
/// Returns the source type for `path` if known or `None`.
|
||||
///
|
||||
/// The default is to always return `None`, assuming the system
|
||||
/// has no additional information and that the caller should
|
||||
/// rely on the file extension instead.
|
||||
///
|
||||
/// This is primarily used for the LSP integration to respect
|
||||
/// the chosen language (or the fact that it is a notebook) in
|
||||
/// the editor.
|
||||
fn source_type(&self, path: &SystemPath) -> Option<PySourceType> {
|
||||
let _ = path;
|
||||
None
|
||||
}
|
||||
|
||||
/// Returns the source type for `path` if known or `None`.
|
||||
///
|
||||
/// The default is to always return `None`, assuming the system
|
||||
/// has no additional information and that the caller should
|
||||
/// rely on the file extension instead.
|
||||
///
|
||||
/// This is primarily used for the LSP integration to respect
|
||||
/// the chosen language (or the fact that it is a notebook) in
|
||||
/// the editor.
|
||||
fn virtual_path_source_type(&self, path: &SystemVirtualPath) -> Option<PySourceType> {
|
||||
let _ = path;
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Reads the content of the file at `path` into a [`String`].
|
||||
fn read_to_string(&self, path: &SystemPath) -> Result<String>;
|
||||
|
||||
|
||||
@@ -14,21 +14,14 @@ pub(crate) struct Collector<'a> {
|
||||
string_imports: StringImports,
|
||||
/// The collected imports from the Python AST.
|
||||
imports: Vec<CollectedImport>,
|
||||
/// Whether to detect type checking imports
|
||||
type_checking_imports: bool,
|
||||
}
|
||||
|
||||
impl<'a> Collector<'a> {
|
||||
pub(crate) fn new(
|
||||
module_path: Option<&'a [String]>,
|
||||
string_imports: StringImports,
|
||||
type_checking_imports: bool,
|
||||
) -> Self {
|
||||
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
|
||||
Self {
|
||||
module_path,
|
||||
string_imports,
|
||||
imports: Vec::new(),
|
||||
type_checking_imports,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,25 +91,10 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
|
||||
}
|
||||
}
|
||||
}
|
||||
Stmt::If(ast::StmtIf {
|
||||
test,
|
||||
body,
|
||||
elif_else_clauses,
|
||||
range: _,
|
||||
node_index: _,
|
||||
}) => {
|
||||
// Skip TYPE_CHECKING blocks if not requested
|
||||
if self.type_checking_imports || !is_type_checking_condition(test) {
|
||||
self.visit_body(body);
|
||||
}
|
||||
|
||||
for clause in elif_else_clauses {
|
||||
self.visit_elif_else_clause(clause);
|
||||
}
|
||||
}
|
||||
Stmt::FunctionDef(_)
|
||||
| Stmt::ClassDef(_)
|
||||
| Stmt::While(_)
|
||||
| Stmt::If(_)
|
||||
| Stmt::With(_)
|
||||
| Stmt::Match(_)
|
||||
| Stmt::Try(_)
|
||||
@@ -174,30 +152,6 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if an expression is a `TYPE_CHECKING` condition.
|
||||
///
|
||||
/// Returns `true` for:
|
||||
/// - `TYPE_CHECKING`
|
||||
/// - `typing.TYPE_CHECKING`
|
||||
///
|
||||
/// NOTE: Aliased `TYPE_CHECKING`, i.e. `import typing.TYPE_CHECKING as TC; if TC: ...`
|
||||
/// will not be detected!
|
||||
fn is_type_checking_condition(expr: &Expr) -> bool {
|
||||
match expr {
|
||||
// `if TYPE_CHECKING:`
|
||||
Expr::Name(ast::ExprName { id, .. }) => id.as_str() == "TYPE_CHECKING",
|
||||
// `if typing.TYPE_CHECKING:`
|
||||
Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
|
||||
attr.as_str() == "TYPE_CHECKING"
|
||||
&& matches!(
|
||||
value.as_ref(),
|
||||
Expr::Name(ast::ExprName { id, .. }) if id.as_str() == "typing"
|
||||
)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum CollectedImport {
|
||||
/// The import was part of an `import` statement.
|
||||
|
||||
@@ -30,7 +30,6 @@ impl ModuleImports {
|
||||
path: &SystemPath,
|
||||
package: Option<&SystemPath>,
|
||||
string_imports: StringImports,
|
||||
type_checking_imports: bool,
|
||||
) -> Result<Self> {
|
||||
// Parse the source code.
|
||||
let parsed = parse(source, ParseOptions::from(source_type))?;
|
||||
@@ -39,12 +38,8 @@ impl ModuleImports {
|
||||
package.and_then(|package| to_module_path(package.as_std_path(), path.as_std_path()));
|
||||
|
||||
// Collect the imports.
|
||||
let imports = Collector::new(
|
||||
module_path.as_deref(),
|
||||
string_imports,
|
||||
type_checking_imports,
|
||||
)
|
||||
.collect(parsed.syntax());
|
||||
let imports =
|
||||
Collector::new(module_path.as_deref(), string_imports).collect(parsed.syntax());
|
||||
|
||||
// Resolve the imports.
|
||||
let mut resolved_imports = ModuleImports::default();
|
||||
|
||||
@@ -6,7 +6,7 @@ use std::collections::BTreeMap;
|
||||
use std::fmt;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Clone, CacheKey)]
|
||||
#[derive(Debug, Default, Clone, CacheKey)]
|
||||
pub struct AnalyzeSettings {
|
||||
pub exclude: FilePatternSet,
|
||||
pub preview: PreviewMode,
|
||||
@@ -14,21 +14,6 @@ pub struct AnalyzeSettings {
|
||||
pub string_imports: StringImports,
|
||||
pub include_dependencies: BTreeMap<PathBuf, (PathBuf, Vec<String>)>,
|
||||
pub extension: ExtensionMapping,
|
||||
pub type_checking_imports: bool,
|
||||
}
|
||||
|
||||
impl Default for AnalyzeSettings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
exclude: FilePatternSet::default(),
|
||||
preview: PreviewMode::default(),
|
||||
target_version: PythonVersion::default(),
|
||||
string_imports: StringImports::default(),
|
||||
include_dependencies: BTreeMap::default(),
|
||||
extension: ExtensionMapping::default(),
|
||||
type_checking_imports: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for AnalyzeSettings {
|
||||
@@ -44,7 +29,6 @@ impl fmt::Display for AnalyzeSettings {
|
||||
self.string_imports,
|
||||
self.extension | debug,
|
||||
self.include_dependencies | debug,
|
||||
self.type_checking_imports,
|
||||
]
|
||||
}
|
||||
Ok(())
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.14.6"
|
||||
version = "0.14.4"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -4,31 +4,3 @@ CommunityData("public", mpModel=0) # S508
|
||||
CommunityData("public", mpModel=1) # S508
|
||||
|
||||
CommunityData("public", mpModel=2) # OK
|
||||
|
||||
# New API paths
|
||||
import pysnmp.hlapi.asyncio
|
||||
import pysnmp.hlapi.v1arch
|
||||
import pysnmp.hlapi.v1arch.asyncio
|
||||
import pysnmp.hlapi.v1arch.asyncio.auth
|
||||
import pysnmp.hlapi.v3arch
|
||||
import pysnmp.hlapi.v3arch.asyncio
|
||||
import pysnmp.hlapi.v3arch.asyncio.auth
|
||||
import pysnmp.hlapi.auth
|
||||
|
||||
pysnmp.hlapi.asyncio.CommunityData("public", mpModel=0) # S508
|
||||
pysnmp.hlapi.v1arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
|
||||
pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=0) # S508
|
||||
pysnmp.hlapi.v1arch.CommunityData("public", mpModel=0) # S508
|
||||
pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
|
||||
pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=0) # S508
|
||||
pysnmp.hlapi.v3arch.CommunityData("public", mpModel=0) # S508
|
||||
pysnmp.hlapi.auth.CommunityData("public", mpModel=0) # S508
|
||||
|
||||
pysnmp.hlapi.asyncio.CommunityData("public", mpModel=2) # OK
|
||||
pysnmp.hlapi.v1arch.asyncio.auth.CommunityData("public", mpModel=2) # OK
|
||||
pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=2) # OK
|
||||
pysnmp.hlapi.v1arch.CommunityData("public", mpModel=2) # OK
|
||||
pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=2) # OK
|
||||
pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=2) # OK
|
||||
pysnmp.hlapi.v3arch.CommunityData("public", mpModel=2) # OK
|
||||
pysnmp.hlapi.auth.CommunityData("public", mpModel=2) # OK
|
||||
|
||||
@@ -5,19 +5,3 @@ insecure = UsmUserData("securityName") # S509
|
||||
auth_no_priv = UsmUserData("securityName", "authName") # S509
|
||||
|
||||
less_insecure = UsmUserData("securityName", "authName", "privName") # OK
|
||||
|
||||
# New API paths
|
||||
import pysnmp.hlapi.asyncio
|
||||
import pysnmp.hlapi.v3arch.asyncio
|
||||
import pysnmp.hlapi.v3arch.asyncio.auth
|
||||
import pysnmp.hlapi.auth
|
||||
|
||||
pysnmp.hlapi.asyncio.UsmUserData("user") # S509
|
||||
pysnmp.hlapi.v3arch.asyncio.UsmUserData("user") # S509
|
||||
pysnmp.hlapi.v3arch.asyncio.auth.UsmUserData("user") # S509
|
||||
pysnmp.hlapi.auth.UsmUserData("user") # S509
|
||||
|
||||
pysnmp.hlapi.asyncio.UsmUserData("user", "authkey", "privkey") # OK
|
||||
pysnmp.hlapi.v3arch.asyncio.UsmUserData("user", "authkey", "privkey") # OK
|
||||
pysnmp.hlapi.v3arch.asyncio.auth.UsmUserData("user", "authkey", "privkey") # OK
|
||||
pysnmp.hlapi.auth.UsmUserData("user", "authkey", "privkey") # OK
|
||||
|
||||
@@ -208,17 +208,3 @@ _ = t"b {f"c" f"d {t"e" t"f"} g"} h"
|
||||
_ = f"b {t"abc" \
|
||||
t"def"} g"
|
||||
|
||||
|
||||
# Explicit concatenation with either operand being
|
||||
# a string literal that wraps across multiple lines (in parentheses)
|
||||
# reports diagnostic - no autofix.
|
||||
# See https://github.com/astral-sh/ruff/issues/19757
|
||||
_ = "abc" + (
|
||||
"def"
|
||||
"ghi"
|
||||
)
|
||||
|
||||
_ = (
|
||||
"abc"
|
||||
"def"
|
||||
) + "ghi"
|
||||
|
||||
@@ -46,8 +46,7 @@ def func():
|
||||
|
||||
|
||||
def func():
|
||||
# SIM113
|
||||
# https://github.com/astral-sh/ruff/pull/21395
|
||||
# OK (index doesn't start at 0
|
||||
idx = 10
|
||||
for x in range(5):
|
||||
g(x, idx)
|
||||
|
||||
@@ -371,61 +371,6 @@ class Foo:
|
||||
"""
|
||||
return
|
||||
|
||||
# DOC102 - Test case from issue #20959: comma-separated parameters
|
||||
def leq(x: object, y: object) -> bool:
|
||||
"""Compare two objects for loose equality.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x1, x2 : object
|
||||
Objects.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
Whether the objects are identical or equal.
|
||||
"""
|
||||
return x is y or x == y
|
||||
|
||||
|
||||
# OK - comma-separated parameters that match function signature
|
||||
def compare_values(x1: int, x2: int) -> bool:
|
||||
"""Compare two integer values.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x1, x2 : int
|
||||
Values to compare.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
True if values are equal.
|
||||
"""
|
||||
return x1 == x2
|
||||
|
||||
|
||||
# DOC102 - mixed comma-separated and regular parameters
|
||||
def process_data(data, x1: str, x2: str) -> str:
|
||||
"""Process data with multiple string parameters.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
data : list
|
||||
Input data to process.
|
||||
x1, x2 : str
|
||||
String parameters for processing.
|
||||
extra_param : str
|
||||
Extra parameter not in signature.
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
Processed result.
|
||||
"""
|
||||
return f"{x1}{x2}{len(data)}"
|
||||
|
||||
|
||||
# OK
|
||||
def baz(x: int) -> int:
|
||||
"""
|
||||
@@ -444,21 +389,3 @@ def baz(x: int) -> int:
|
||||
int
|
||||
"""
|
||||
return x
|
||||
|
||||
|
||||
# OK - comma-separated parameters without type annotations
|
||||
def add_numbers(a, b):
|
||||
"""
|
||||
Adds two numbers and returns the result.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
a, b
|
||||
The numbers to add.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int
|
||||
The sum of the two numbers.
|
||||
"""
|
||||
return a + b
|
||||
|
||||
@@ -83,37 +83,6 @@ def calculate_speed(distance: float, time: float) -> float:
|
||||
raise
|
||||
|
||||
|
||||
# DOC502 regression for Sphinx directive after Raises (issue #18959)
|
||||
def foo():
|
||||
"""First line.
|
||||
|
||||
Raises:
|
||||
ValueError:
|
||||
some text
|
||||
|
||||
.. versionadded:: 0.7.0
|
||||
The ``init_kwargs`` argument.
|
||||
"""
|
||||
raise ValueError
|
||||
|
||||
|
||||
# DOC502 regression for following section with colons
|
||||
def example_with_following_section():
|
||||
"""Summary.
|
||||
|
||||
Returns:
|
||||
str: The resulting expression.
|
||||
|
||||
Raises:
|
||||
ValueError: If the unit is not valid.
|
||||
|
||||
Relation to `time_range_lookup`:
|
||||
- Handles the "start of" modifier.
|
||||
- Example: "start of month" → `DATETRUNC()`.
|
||||
"""
|
||||
raise ValueError
|
||||
|
||||
|
||||
# This should NOT trigger DOC502 because OSError is explicitly re-raised
|
||||
def f():
|
||||
"""Do nothing.
|
||||
|
||||
@@ -117,33 +117,3 @@ def calculate_speed(distance: float, time: float) -> float:
|
||||
except TypeError:
|
||||
print("Not a number? Shame on you!")
|
||||
raise
|
||||
|
||||
|
||||
# DOC502 regression for Sphinx directive after Raises (issue #18959)
|
||||
def foo():
|
||||
"""First line.
|
||||
|
||||
Raises
|
||||
------
|
||||
ValueError
|
||||
some text
|
||||
|
||||
.. versionadded:: 0.7.0
|
||||
The ``init_kwargs`` argument.
|
||||
"""
|
||||
raise ValueError
|
||||
|
||||
# Make sure we don't bail out on a Sphinx directive in the description of one
|
||||
# of the exceptions
|
||||
def foo():
|
||||
"""First line.
|
||||
|
||||
Raises
|
||||
------
|
||||
ValueError
|
||||
some text
|
||||
.. math:: e^{xception}
|
||||
ZeroDivisionError
|
||||
Will not be raised, DOC502
|
||||
"""
|
||||
raise ValueError
|
||||
|
||||
@@ -30,23 +30,3 @@ for a, b in d_tuple:
|
||||
pass
|
||||
for a, b in d_tuple_annotated:
|
||||
pass
|
||||
|
||||
# Empty dict cases
|
||||
empty_dict = {}
|
||||
empty_dict["x"] = 1
|
||||
for k, v in empty_dict:
|
||||
pass
|
||||
|
||||
empty_dict_annotated_tuple_keys: dict[tuple[int, str], bool] = {}
|
||||
for k, v in empty_dict_annotated_tuple_keys:
|
||||
pass
|
||||
|
||||
empty_dict_unannotated = {}
|
||||
empty_dict_unannotated[("x", "y")] = True
|
||||
for k, v in empty_dict_unannotated:
|
||||
pass
|
||||
|
||||
empty_dict_annotated_str_keys: dict[str, int] = {}
|
||||
empty_dict_annotated_str_keys["x"] = 1
|
||||
for k, v in empty_dict_annotated_str_keys:
|
||||
pass
|
||||
|
||||
@@ -129,26 +129,3 @@ def generator_with_lambda():
|
||||
yield 1
|
||||
func = lambda x: x # Just a regular lambda
|
||||
yield 2
|
||||
|
||||
# See: https://github.com/astral-sh/ruff/issues/21162
|
||||
def foo():
|
||||
def g():
|
||||
yield 1
|
||||
raise StopIteration # Should not trigger
|
||||
|
||||
|
||||
def foo():
|
||||
def g():
|
||||
raise StopIteration # Should not trigger
|
||||
yield 1
|
||||
|
||||
# https://github.com/astral-sh/ruff/pull/21177#pullrequestreview-3430209718
|
||||
def foo():
|
||||
yield 1
|
||||
class C:
|
||||
raise StopIteration # Should trigger
|
||||
yield C
|
||||
|
||||
# https://github.com/astral-sh/ruff/pull/21177#discussion_r2539702728
|
||||
def foo():
|
||||
raise StopIteration((yield 1)) # Should trigger
|
||||
@@ -152,13 +152,4 @@ import json
|
||||
data = {"price": 100}
|
||||
|
||||
with open("test.json", "wb") as f:
|
||||
f.write(json.dumps(data, indent=4).encode("utf-8"))
|
||||
|
||||
# See: https://github.com/astral-sh/ruff/issues/21381
|
||||
with open("tmp_path/pyproject.toml", "w") as f:
|
||||
f.write(dedent(
|
||||
"""
|
||||
[project]
|
||||
other = 1.234
|
||||
""",
|
||||
))
|
||||
f.write(json.dumps(data, indent=4).encode("utf-8"))
|
||||
@@ -132,9 +132,3 @@ class AWithQuotes:
|
||||
final_variable: 'Final[list[int]]' = []
|
||||
class_variable_without_subscript: 'ClassVar' = []
|
||||
final_variable_without_subscript: 'Final' = []
|
||||
|
||||
|
||||
# Reassignment of a ClassVar should not trigger RUF012
|
||||
class P:
|
||||
class_variable: ClassVar[list] = [10, 20, 30, 40, 50]
|
||||
class_variable = [*class_variable[0::1], *class_variable[2::3]]
|
||||
|
||||
@@ -1,109 +0,0 @@
|
||||
# Correct usage in loop and comprehension
|
||||
def process_data():
|
||||
return 42
|
||||
def test_correct_dummy_usage():
|
||||
my_list = [{"foo": 1}, {"foo": 2}]
|
||||
|
||||
# Should NOT detect - dummy variable is not used
|
||||
[process_data() for _ in my_list] # OK: `_` is ignored by rule
|
||||
|
||||
# Should NOT detect - dummy variable is not used
|
||||
[item["foo"] for item in my_list] # OK: not a dummy variable name
|
||||
|
||||
# Should NOT detect - dummy variable is not used
|
||||
[42 for _unused in my_list] # OK: `_unused` is not accessed
|
||||
|
||||
# Regular For Loops
|
||||
def test_for_loops():
|
||||
my_list = [{"foo": 1}, {"foo": 2}]
|
||||
|
||||
# Should detect used dummy variable
|
||||
for _item in my_list:
|
||||
print(_item["foo"]) # RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Should detect used dummy variable
|
||||
for _index, _value in enumerate(my_list):
|
||||
result = _index + _value["foo"] # RUF052: Both `_index` and `_value` are accessed
|
||||
|
||||
# List Comprehensions
|
||||
def test_list_comprehensions():
|
||||
my_list = [{"foo": 1}, {"foo": 2}]
|
||||
|
||||
# Should detect used dummy variable
|
||||
result = [_item["foo"] for _item in my_list] # RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Should detect used dummy variable in nested comprehension
|
||||
nested = [[_item["foo"] for _item in _sublist] for _sublist in [my_list, my_list]]
|
||||
# RUF052: Both `_item` and `_sublist` are accessed
|
||||
|
||||
# Should detect with conditions
|
||||
filtered = [_item["foo"] for _item in my_list if _item["foo"] > 0]
|
||||
# RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Dict Comprehensions
|
||||
def test_dict_comprehensions():
|
||||
my_list = [{"key": "a", "value": 1}, {"key": "b", "value": 2}]
|
||||
|
||||
# Should detect used dummy variable
|
||||
result = {_item["key"]: _item["value"] for _item in my_list}
|
||||
# RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Should detect with enumerate
|
||||
indexed = {_index: _item["value"] for _index, _item in enumerate(my_list)}
|
||||
# RUF052: Both `_index` and `_item` are accessed
|
||||
|
||||
# Should detect in nested dict comprehension
|
||||
nested = {_outer: {_inner["key"]: _inner["value"] for _inner in sublist}
|
||||
for _outer, sublist in enumerate([my_list])}
|
||||
# RUF052: `_outer`, `_inner` are accessed
|
||||
|
||||
# Set Comprehensions
|
||||
def test_set_comprehensions():
|
||||
my_list = [{"foo": 1}, {"foo": 2}, {"foo": 1}] # Note: duplicate values
|
||||
|
||||
# Should detect used dummy variable
|
||||
unique_values = {_item["foo"] for _item in my_list}
|
||||
# RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Should detect with conditions
|
||||
filtered_set = {_item["foo"] for _item in my_list if _item["foo"] > 0}
|
||||
# RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Should detect with complex expression
|
||||
processed = {_item["foo"] * 2 for _item in my_list}
|
||||
# RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Generator Expressions
|
||||
def test_generator_expressions():
|
||||
my_list = [{"foo": 1}, {"foo": 2}]
|
||||
|
||||
# Should detect used dummy variable
|
||||
gen = (_item["foo"] for _item in my_list)
|
||||
# RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Should detect when passed to function
|
||||
total = sum(_item["foo"] for _item in my_list)
|
||||
# RUF052: Local dummy variable `_item` is accessed
|
||||
|
||||
# Should detect with multiple generators
|
||||
pairs = ((_x, _y) for _x in range(3) for _y in range(3) if _x != _y)
|
||||
# RUF052: Both `_x` and `_y` are accessed
|
||||
|
||||
# Should detect in nested generator
|
||||
nested_gen = (sum(_inner["foo"] for _inner in sublist) for _sublist in [my_list] for sublist in _sublist)
|
||||
# RUF052: `_inner` and `_sublist` are accessed
|
||||
|
||||
# Complex Examples with Multiple Comprehension Types
|
||||
def test_mixed_comprehensions():
|
||||
data = [{"items": [1, 2, 3]}, {"items": [4, 5, 6]}]
|
||||
|
||||
# Should detect in mixed comprehensions
|
||||
result = [
|
||||
{_key: [_val * 2 for _val in _record["items"]] for _key in ["doubled"]}
|
||||
for _record in data
|
||||
]
|
||||
# RUF052: `_key`, `_val`, and `_record` are all accessed
|
||||
|
||||
# Should detect in generator passed to list constructor
|
||||
gen_list = list(_item["items"][0] for _item in data)
|
||||
# RUF052: Local dummy variable `_item` is accessed
|
||||
@@ -16,19 +16,3 @@ logging.warning("%s", str(**{"object": b"\xf0\x9f\x9a\xa8", "encoding": "utf-8"}
|
||||
# str() with single keyword argument - should be flagged (equivalent to str("!"))
|
||||
logging.warning("%s", str(object="!"))
|
||||
|
||||
|
||||
# Complex conversion specifiers that make oct() and hex() necessary
|
||||
# These should NOT be flagged because the behavior differs between %s and %#o/%#x
|
||||
# https://github.com/astral-sh/ruff/issues/21458
|
||||
|
||||
# %06s with oct() - zero-pad flag with width (should NOT be flagged)
|
||||
logging.warning("%06s", oct(123))
|
||||
|
||||
# % s with oct() - blank sign flag (should NOT be flagged)
|
||||
logging.warning("% s", oct(123))
|
||||
|
||||
# %+s with oct() - sign char flag (should NOT be flagged)
|
||||
logging.warning("%+s", oct(123))
|
||||
|
||||
# %.3s with hex() - precision (should NOT be flagged)
|
||||
logging.warning("%.3s", hex(123))
|
||||
|
||||
@@ -131,9 +131,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
if checker.is_rule_enabled(Rule::GeneratorReturnFromIterMethod) {
|
||||
flake8_pyi::rules::bad_generator_return_type(function_def, checker);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::StopIterationReturn) {
|
||||
pylint::rules::stop_iteration_return(checker, function_def);
|
||||
}
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.is_rule_enabled(Rule::StrOrReprDefinedInStub) {
|
||||
flake8_pyi::rules::str_or_repr_defined_in_stub(checker, stmt);
|
||||
@@ -953,6 +950,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
if checker.is_rule_enabled(Rule::MisplacedBareRaise) {
|
||||
pylint::rules::misplaced_bare_raise(checker, raise);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::StopIterationReturn) {
|
||||
pylint::rules::stop_iteration_return(checker, raise);
|
||||
}
|
||||
}
|
||||
Stmt::AugAssign(aug_assign @ ast::StmtAugAssign { target, .. }) => {
|
||||
if checker.is_rule_enabled(Rule::GlobalStatement) {
|
||||
|
||||
@@ -860,17 +860,23 @@ impl SemanticSyntaxContext for Checker<'_> {
    }

    fn is_bound_parameter(&self, name: &str) -> bool {
        match self.semantic.current_scope().kind {
            ScopeKind::Function(ast::StmtFunctionDef { parameters, .. }) => {
                parameters.includes(name)
        for scope in self.semantic.current_scopes() {
            match scope.kind {
                ScopeKind::Class(_) => return false,
                ScopeKind::Function(ast::StmtFunctionDef { parameters, .. })
                | ScopeKind::Lambda(ast::ExprLambda {
                    parameters: Some(parameters),
                    ..
                }) => return parameters.includes(name),
                ScopeKind::Lambda(_)
                | ScopeKind::Generator { .. }
                | ScopeKind::Module
                | ScopeKind::Type
                | ScopeKind::DunderClassCell => {}
            }
            ScopeKind::Class(_)
            | ScopeKind::Lambda(_)
            | ScopeKind::Generator { .. }
            | ScopeKind::Module
            | ScopeKind::Type
            | ScopeKind::DunderClassCell => false,
        }

        false
    }
}

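
The new `is_bound_parameter` walks every enclosing scope instead of inspecting only the innermost one: a class scope ends the search, a function or lambda scope answers it from its parameter list, and the remaining scope kinds are skipped. A generic sketch of that outward walk (the types here are simplified stand-ins, not the real `ScopeKind`):

// Simplified stand-ins for the real scope kinds.
enum Scope<'a> {
    Class,
    Function { params: &'a [&'a str] },
    Other,
}

// Walk from the innermost scope outwards (the slice is ordered innermost-first).
fn is_bound_parameter(scopes: &[Scope<'_>], name: &str) -> bool {
    for scope in scopes {
        match scope {
            Scope::Class => return false,
            Scope::Function { params } => return params.contains(&name),
            Scope::Other => {}
        }
    }
    false
}
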
@@ -83,7 +83,7 @@ impl<'a> Importer<'a> {
|
||||
.into_edit(&required_import)
|
||||
} else {
|
||||
// Insert at the start of the file.
|
||||
Insertion::start_of_file(self.python_ast, self.source, self.stylist, None)
|
||||
Insertion::start_of_file(self.python_ast, self.source, self.stylist)
|
||||
.into_edit(&required_import)
|
||||
}
|
||||
}
|
||||
@@ -113,7 +113,7 @@ impl<'a> Importer<'a> {
|
||||
Insertion::end_of_statement(stmt, self.source, self.stylist)
|
||||
} else {
|
||||
// Insert at the start of the file.
|
||||
Insertion::start_of_file(self.python_ast, self.source, self.stylist, None)
|
||||
Insertion::start_of_file(self.python_ast, self.source, self.stylist)
|
||||
};
|
||||
let add_import_edit = insertion.into_edit(&content);
|
||||
|
||||
@@ -498,7 +498,7 @@ impl<'a> Importer<'a> {
|
||||
Insertion::end_of_statement(stmt, self.source, self.stylist)
|
||||
} else {
|
||||
// Insert at the start of the file.
|
||||
Insertion::start_of_file(self.python_ast, self.source, self.stylist, None)
|
||||
Insertion::start_of_file(self.python_ast, self.source, self.stylist)
|
||||
};
|
||||
if insertion.is_inline() {
|
||||
Err(anyhow::anyhow!(
|
||||
|
||||
@@ -125,7 +125,6 @@ where
|
||||
}
|
||||
|
||||
diagnostic.set_secondary_code(SecondaryCode::new(rule.noqa_code().to_string()));
|
||||
diagnostic.set_documentation_url(rule.url());
|
||||
|
||||
diagnostic
|
||||
}
|
||||
|
||||
@@ -269,13 +269,3 @@ pub(crate) const fn is_typing_extensions_str_alias_enabled(settings: &LinterSett
|
||||
pub(crate) const fn is_extended_i18n_function_matching_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/21374
|
||||
pub(crate) const fn is_extended_snmp_api_path_detection_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/21395
|
||||
pub(crate) const fn is_enumerate_for_loop_int_index_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
@@ -104,8 +104,6 @@ mod tests {
|
||||
#[test_case(Rule::SuspiciousURLOpenUsage, Path::new("S310.py"))]
|
||||
#[test_case(Rule::SuspiciousNonCryptographicRandomUsage, Path::new("S311.py"))]
|
||||
#[test_case(Rule::SuspiciousTelnetUsage, Path::new("S312.py"))]
|
||||
#[test_case(Rule::SnmpInsecureVersion, Path::new("S508.py"))]
|
||||
#[test_case(Rule::SnmpWeakCryptography, Path::new("S509.py"))]
|
||||
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"preview__{}_{}",
|
||||
|
||||
@@ -4,7 +4,6 @@ use ruff_text_size::Ranged;

use crate::Violation;
use crate::checkers::ast::Checker;
use crate::preview::is_extended_snmp_api_path_detection_enabled;

/// ## What it does
/// Checks for uses of SNMPv1 or SNMPv2.
@@ -48,17 +47,10 @@ pub(crate) fn snmp_insecure_version(checker: &Checker, call: &ast::ExprCall) {
        .semantic()
        .resolve_qualified_name(&call.func)
        .is_some_and(|qualified_name| {
            if is_extended_snmp_api_path_detection_enabled(checker.settings()) {
                matches!(
                    qualified_name.segments(),
                    ["pysnmp", "hlapi", .., "CommunityData"]
                )
            } else {
                matches!(
                    qualified_name.segments(),
                    ["pysnmp", "hlapi", "CommunityData"]
                )
            }
            matches!(
                qualified_name.segments(),
                ["pysnmp", "hlapi", "CommunityData"]
            )
        })
    {
        if let Some(keyword) = call.arguments.find_keyword("mpModel") {

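
The preview branch above differs from the stable check only in the `..` rest pattern, which lets `matches!` accept any number of intermediate path segments between `hlapi` and the call name. A small standalone illustration (hypothetical segment slices, not the real resolver output):

// `..` matches zero or more segments between "hlapi" and "CommunityData".
fn is_community_data(segments: &[&str]) -> bool {
    matches!(segments, ["pysnmp", "hlapi", .., "CommunityData"])
}

// is_community_data(&["pysnmp", "hlapi", "CommunityData"]) == true
// is_community_data(&["pysnmp", "hlapi", "v1arch", "asyncio", "CommunityData"]) == true
// is_community_data(&["pysnmp", "hlapi"]) == false
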
@@ -4,7 +4,6 @@ use ruff_text_size::Ranged;
|
||||
|
||||
use crate::Violation;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_extended_snmp_api_path_detection_enabled;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of the SNMPv3 protocol without encryption.
|
||||
@@ -48,17 +47,10 @@ pub(crate) fn snmp_weak_cryptography(checker: &Checker, call: &ast::ExprCall) {
|
||||
.semantic()
|
||||
.resolve_qualified_name(&call.func)
|
||||
.is_some_and(|qualified_name| {
|
||||
if is_extended_snmp_api_path_detection_enabled(checker.settings()) {
|
||||
matches!(
|
||||
qualified_name.segments(),
|
||||
["pysnmp", "hlapi", .., "UsmUserData"]
|
||||
)
|
||||
} else {
|
||||
matches!(
|
||||
qualified_name.segments(),
|
||||
["pysnmp", "hlapi", "UsmUserData"]
|
||||
)
|
||||
}
|
||||
matches!(
|
||||
qualified_name.segments(),
|
||||
["pysnmp", "hlapi", "UsmUserData"]
|
||||
)
|
||||
})
|
||||
{
|
||||
checker.report_diagnostic(SnmpWeakCryptography, call.func.range());
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_bandit/mod.rs
|
||||
---
|
||||
S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
|
||||
--> S508.py:3:25
|
||||
|
|
||||
1 | from pysnmp.hlapi import CommunityData
|
||||
2 |
|
||||
3 | CommunityData("public", mpModel=0) # S508
|
||||
| ^^^^^^^^^
|
||||
4 | CommunityData("public", mpModel=1) # S508
|
||||
|
|
||||
|
||||
S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
|
||||
--> S508.py:4:25
|
||||
|
|
||||
3 | CommunityData("public", mpModel=0) # S508
|
||||
4 | CommunityData("public", mpModel=1) # S508
|
||||
| ^^^^^^^^^
|
||||
5 |
|
||||
6 | CommunityData("public", mpModel=2) # OK
|

S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
--> S508.py:18:46
|
16 | import pysnmp.hlapi.auth
17 |
18 | pysnmp.hlapi.asyncio.CommunityData("public", mpModel=0) # S508
| ^^^^^^^^^
19 | pysnmp.hlapi.v1arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
20 | pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=0) # S508
|

S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
--> S508.py:19:58
|
18 | pysnmp.hlapi.asyncio.CommunityData("public", mpModel=0) # S508
19 | pysnmp.hlapi.v1arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
| ^^^^^^^^^
20 | pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=0) # S508
21 | pysnmp.hlapi.v1arch.CommunityData("public", mpModel=0) # S508
|

S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
--> S508.py:20:53
|
18 | pysnmp.hlapi.asyncio.CommunityData("public", mpModel=0) # S508
19 | pysnmp.hlapi.v1arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
20 | pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=0) # S508
| ^^^^^^^^^
21 | pysnmp.hlapi.v1arch.CommunityData("public", mpModel=0) # S508
22 | pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
|

S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
--> S508.py:21:45
|
19 | pysnmp.hlapi.v1arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
20 | pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=0) # S508
21 | pysnmp.hlapi.v1arch.CommunityData("public", mpModel=0) # S508
| ^^^^^^^^^
22 | pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
23 | pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=0) # S508
|

S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
--> S508.py:22:58
|
20 | pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=0) # S508
21 | pysnmp.hlapi.v1arch.CommunityData("public", mpModel=0) # S508
22 | pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
| ^^^^^^^^^
23 | pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=0) # S508
24 | pysnmp.hlapi.v3arch.CommunityData("public", mpModel=0) # S508
|

S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
--> S508.py:23:53
|
21 | pysnmp.hlapi.v1arch.CommunityData("public", mpModel=0) # S508
22 | pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
23 | pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=0) # S508
| ^^^^^^^^^
24 | pysnmp.hlapi.v3arch.CommunityData("public", mpModel=0) # S508
25 | pysnmp.hlapi.auth.CommunityData("public", mpModel=0) # S508
|

S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
--> S508.py:24:45
|
22 | pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
23 | pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=0) # S508
24 | pysnmp.hlapi.v3arch.CommunityData("public", mpModel=0) # S508
| ^^^^^^^^^
25 | pysnmp.hlapi.auth.CommunityData("public", mpModel=0) # S508
|

S508 The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.
--> S508.py:25:43
|
23 | pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=0) # S508
24 | pysnmp.hlapi.v3arch.CommunityData("public", mpModel=0) # S508
25 | pysnmp.hlapi.auth.CommunityData("public", mpModel=0) # S508
| ^^^^^^^^^
26 |
27 | pysnmp.hlapi.asyncio.CommunityData("public", mpModel=2) # OK
|
@@ -1,62 +0,0 @@
---
source: crates/ruff_linter/src/rules/flake8_bandit/mod.rs
---
S509 You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure.
--> S509.py:4:12
|
4 | insecure = UsmUserData("securityName") # S509
| ^^^^^^^^^^^
5 | auth_no_priv = UsmUserData("securityName", "authName") # S509
|

S509 You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure.
--> S509.py:5:16
|
4 | insecure = UsmUserData("securityName") # S509
5 | auth_no_priv = UsmUserData("securityName", "authName") # S509
| ^^^^^^^^^^^
6 |
7 | less_insecure = UsmUserData("securityName", "authName", "privName") # OK
|

S509 You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure.
--> S509.py:15:1
|
13 | import pysnmp.hlapi.auth
14 |
15 | pysnmp.hlapi.asyncio.UsmUserData("user") # S509
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
16 | pysnmp.hlapi.v3arch.asyncio.UsmUserData("user") # S509
17 | pysnmp.hlapi.v3arch.asyncio.auth.UsmUserData("user") # S509
|

S509 You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure.
--> S509.py:16:1
|
15 | pysnmp.hlapi.asyncio.UsmUserData("user") # S509
16 | pysnmp.hlapi.v3arch.asyncio.UsmUserData("user") # S509
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
17 | pysnmp.hlapi.v3arch.asyncio.auth.UsmUserData("user") # S509
18 | pysnmp.hlapi.auth.UsmUserData("user") # S509
|

S509 You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure.
--> S509.py:17:1
|
15 | pysnmp.hlapi.asyncio.UsmUserData("user") # S509
16 | pysnmp.hlapi.v3arch.asyncio.UsmUserData("user") # S509
17 | pysnmp.hlapi.v3arch.asyncio.auth.UsmUserData("user") # S509
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
18 | pysnmp.hlapi.auth.UsmUserData("user") # S509
|

S509 You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure.
--> S509.py:18:1
|
16 | pysnmp.hlapi.v3arch.asyncio.UsmUserData("user") # S509
17 | pysnmp.hlapi.v3arch.asyncio.auth.UsmUserData("user") # S509
18 | pysnmp.hlapi.auth.UsmUserData("user") # S509
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
19 |
20 | pysnmp.hlapi.asyncio.UsmUserData("user", "authkey", "privkey") # OK
|
@@ -1,12 +1,12 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Expr, Operator};
use ruff_python_trivia::is_python_whitespace;
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

use crate::AlwaysFixableViolation;
use crate::checkers::ast::Checker;
use crate::{Edit, Fix, FixAvailability, Violation};
use crate::{Edit, Fix};

/// ## What it does
/// Checks for string literals that are explicitly concatenated (using the
@@ -36,16 +36,14 @@ use crate::{Edit, Fix, FixAvailability, Violation};
#[violation_metadata(stable_since = "v0.0.201")]
pub(crate) struct ExplicitStringConcatenation;

impl Violation for ExplicitStringConcatenation {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

impl AlwaysFixableViolation for ExplicitStringConcatenation {
#[derive_message_formats]
fn message(&self) -> String {
"Explicitly concatenated string should be implicitly concatenated".to_string()
}

fn fix_title(&self) -> Option<String> {
Some("Remove redundant '+' operator to implicitly concatenate".to_string())
fn fix_title(&self) -> String {
"Remove redundant '+' operator to implicitly concatenate".to_string()
}
}

@@ -84,27 +82,9 @@ pub(crate) fn explicit(checker: &Checker, expr: &Expr) {
.locator()
.contains_line_break(TextRange::new(left.end(), right.start()))
{
let mut diagnostic =
checker.report_diagnostic(ExplicitStringConcatenation, expr.range());

let is_parenthesized = |expr: &Expr| {
parenthesized_range(
expr.into(),
bin_op.into(),
checker.comment_ranges(),
checker.source(),
)
.is_some()
};
// If either `left` or `right` is parenthesized, generating
// a fix would be too involved. Just report the diagnostic.
// Currently, attempting `generate_fix` would result in
// an invalid code. See: #19757
if is_parenthesized(left) || is_parenthesized(right) {
return;
}

diagnostic.set_fix(generate_fix(checker, bin_op));
checker
.report_diagnostic(ExplicitStringConcatenation, expr.range())
.set_fix(generate_fix(checker, bin_op));
}
}
}

@@ -357,33 +357,3 @@ help: Remove redundant '+' operator to implicitly concatenate
203 | )
204 |
205 | # nested examples with both t and f-strings
|

ISC003 Explicitly concatenated string should be implicitly concatenated
--> ISC.py:216:5
|
214 | # reports diagnostic - no autofix.
215 | # See https://github.com/astral-sh/ruff/issues/19757
216 | _ = "abc" + (
| _____^
217 | | "def"
218 | | "ghi"
219 | | )
| |_^
220 |
221 | _ = (
|

help: Remove redundant '+' operator to implicitly concatenate

ISC003 Explicitly concatenated string should be implicitly concatenated
--> ISC.py:221:5
|
219 | )
220 |
221 | _ = (
| _____^
222 | | "abc"
223 | | "def"
224 | | ) + "ghi"
| |_________^
|

help: Remove redundant '+' operator to implicitly concatenate

@@ -89,24 +89,3 @@ ISC002 Implicitly concatenated string literals over multiple lines
209 | | t"def"} g"
| |__________^
|

ISC002 Implicitly concatenated string literals over multiple lines
--> ISC.py:217:5
|
215 | # See https://github.com/astral-sh/ruff/issues/19757
216 | _ = "abc" + (
217 | / "def"
218 | | "ghi"
| |_________^
219 | )
|

ISC002 Implicitly concatenated string literals over multiple lines
--> ISC.py:222:5
|
221 | _ = (
222 | / "abc"
223 | | "def"
| |_________^
224 | ) + "ghi"
|

@@ -61,7 +61,6 @@ mod tests {

#[test_case(Rule::SplitStaticString, Path::new("SIM905.py"))]
#[test_case(Rule::DictGetWithNoneDefault, Path::new("SIM910.py"))]
#[test_case(Rule::EnumerateForLoop, Path::new("SIM113.py"))]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",

@@ -1,8 +1,6 @@
use crate::preview::is_enumerate_for_loop_int_index_enabled;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt};
use ruff_python_ast::{self as ast, Expr, Int, Number, Operator, Stmt};
use ruff_python_semantic::analyze::type_inference::{NumberLike, PythonType, ResolvedPythonType};
use ruff_python_semantic::analyze::typing;
use ruff_text_size::Ranged;

@@ -13,9 +11,6 @@ use crate::checkers::ast::Checker;
/// Checks for `for` loops with explicit loop-index variables that can be replaced
/// with `enumerate()`.
///
/// In [preview], this rule checks for index variables initialized with any integer rather than only
/// a literal zero.
///
/// ## Why is this bad?
/// When iterating over a sequence, it's often desirable to keep track of the
/// index of each element alongside the element itself. Prefer the `enumerate`
@@ -40,8 +35,6 @@ use crate::checkers::ast::Checker;
///
/// ## References
/// - [Python documentation: `enumerate`](https://docs.python.org/3/library/functions.html#enumerate)
///
/// [preview]: https://docs.astral.sh/ruff/preview/
#[derive(ViolationMetadata)]
#[violation_metadata(stable_since = "v0.2.0")]
pub(crate) struct EnumerateForLoop {
@@ -89,21 +82,17 @@ pub(crate) fn enumerate_for_loop(checker: &Checker, for_stmt: &ast::StmtFor) {
continue;
}

// Ensure that the index variable was initialized to 0 (or instance of `int` if preview is enabled).
// Ensure that the index variable was initialized to 0.
let Some(value) = typing::find_binding_value(binding, checker.semantic()) else {
continue;
};
if !(matches!(
if !matches!(
value,
Expr::NumberLiteral(ast::ExprNumberLiteral {
value: Number::Int(Int::ZERO),
..
})
) || matches!(
ResolvedPythonType::from(value),
ResolvedPythonType::Atom(PythonType::Number(NumberLike::Integer))
) && is_enumerate_for_loop_int_index_enabled(checker.settings()))
{
) {
continue;
}

@@ -1,60 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_simplify/mod.rs
|
||||
---
|
||||
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
|
||||
--> SIM113.py:6:9
|
||||
|
|
||||
4 | for x in range(5):
|
||||
5 | g(x, idx)
|
||||
6 | idx += 1
|
||||
| ^^^^^^^^
|
||||
7 | h(x)
|
||||
|
|
||||
|
||||
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
|
||||
--> SIM113.py:17:9
|
||||
|
|
||||
15 | if g(x):
|
||||
16 | break
|
||||
17 | idx += 1
|
||||
| ^^^^^^^^
|
||||
18 | sum += h(x, idx)
|
||||
|
|
||||
|
||||
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
|
||||
--> SIM113.py:27:9
|
||||
|
|
||||
25 | g(x)
|
||||
26 | h(x, y)
|
||||
27 | idx += 1
|
||||
| ^^^^^^^^
|
||||
|
|
||||
|
||||
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
|
||||
--> SIM113.py:36:9
|
||||
|
|
||||
34 | for x in range(5):
|
||||
35 | sum += h(x, idx)
|
||||
36 | idx += 1
|
||||
| ^^^^^^^^
|
||||
|
|
||||
|
||||
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
|
||||
--> SIM113.py:44:9
|
||||
|
|
||||
42 | for x in range(5):
|
||||
43 | g(x, idx)
|
||||
44 | idx += 1
|
||||
| ^^^^^^^^
|
||||
45 | h(x)
|
||||
|
|
||||
|
||||
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
|
||||
--> SIM113.py:54:9
|
||||
|
|
||||
52 | for x in range(5):
|
||||
53 | g(x, idx)
|
||||
54 | idx += 1
|
||||
| ^^^^^^^^
|
||||
55 | h(x)
|
||||
|
|
||||
@@ -661,31 +661,19 @@ fn parse_parameters_numpy(content: &str, content_start: TextSize) -> Vec<Paramet
|
||||
.is_some_and(|first_char| !first_char.is_whitespace())
|
||||
{
|
||||
if let Some(before_colon) = entry.split(':').next() {
|
||||
let param_line = before_colon.trim_end();
|
||||
let param = before_colon.trim_end();
|
||||
let param_name = param.trim_start_matches('*');
|
||||
if is_identifier(param_name) {
|
||||
let param_start = line_start + indentation.text_len();
|
||||
let param_end = param_start + param.text_len();
|
||||
|
||||
// Split on commas to handle comma-separated parameters
|
||||
let mut current_offset = TextSize::from(0);
|
||||
for param_part in param_line.split(',') {
|
||||
let param_part_trimmed = param_part.trim();
|
||||
let param_name = param_part_trimmed.trim_start_matches('*');
|
||||
if is_identifier(param_name) {
|
||||
// Calculate the position of this specific parameter part within the line
|
||||
// Account for leading whitespace that gets trimmed
|
||||
let param_start_in_line = current_offset
|
||||
+ (param_part.text_len() - param_part_trimmed.text_len());
|
||||
let param_start =
|
||||
line_start + indentation.text_len() + param_start_in_line;
|
||||
|
||||
entries.push(ParameterEntry {
|
||||
name: param_name,
|
||||
range: TextRange::at(
|
||||
content_start + param_start,
|
||||
param_part_trimmed.text_len(),
|
||||
),
|
||||
});
|
||||
}
|
||||
// Update offset for next iteration: add the part length plus comma length
|
||||
current_offset = current_offset + param_part.text_len() + ','.text_len();
|
||||
entries.push(ParameterEntry {
|
||||
name: param_name,
|
||||
range: TextRange::new(
|
||||
content_start + param_start,
|
||||
content_start + param_end,
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -722,30 +710,12 @@ fn parse_raises(content: &str, style: Option<SectionStyle>) -> Vec<QualifiedName
|
||||
/// ```
|
||||
fn parse_raises_google(content: &str) -> Vec<QualifiedName<'_>> {
|
||||
let mut entries: Vec<QualifiedName> = Vec::new();
|
||||
let mut lines = content.lines().peekable();
|
||||
let Some(first) = lines.peek() else {
|
||||
return entries;
|
||||
};
|
||||
let indentation = &first[..first.len() - first.trim_start().len()];
|
||||
for potential in lines {
|
||||
if let Some(entry) = potential.strip_prefix(indentation) {
|
||||
if let Some(first_char) = entry.chars().next() {
|
||||
if !first_char.is_whitespace() {
|
||||
if let Some(colon_idx) = entry.find(':') {
|
||||
let entry = entry[..colon_idx].trim();
|
||||
if !entry.is_empty() {
|
||||
entries.push(QualifiedName::user_defined(entry));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If we can't strip the expected indentation, check if this is a dedented line
|
||||
// (not blank) - if so, break early as we've reached the end of this section
|
||||
if !potential.trim().is_empty() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
for potential in content.lines() {
|
||||
let Some(colon_idx) = potential.find(':') else {
|
||||
continue;
|
||||
};
|
||||
let entry = potential[..colon_idx].trim();
|
||||
entries.push(QualifiedName::user_defined(entry));
|
||||
}
|
||||
entries
|
||||
}
|
||||
@@ -769,12 +739,6 @@ fn parse_raises_numpy(content: &str) -> Vec<QualifiedName<'_>> {
|
||||
let indentation = &dashes[..dashes.len() - dashes.trim_start().len()];
|
||||
for potential in lines {
|
||||
if let Some(entry) = potential.strip_prefix(indentation) {
|
||||
// Check for Sphinx directives (lines starting with ..) - these indicate the end of the
|
||||
// section. In numpy-style, exceptions are dedented to the same level as sphinx
|
||||
// directives.
|
||||
if entry.starts_with("..") {
|
||||
break;
|
||||
}
|
||||
if let Some(first_char) = entry.chars().next() {
|
||||
if !first_char.is_whitespace() {
|
||||
entries.push(QualifiedName::user_defined(entry.trim_end()));
|
||||
|
||||
@@ -95,23 +95,3 @@ DOC502 Raised exception is not explicitly raised: `DivisionByZero`
|
||||
82 | return distance / time
|
||||
|
|
||||
help: Remove `DivisionByZero` from the docstring
|
||||
|
||||
DOC502 Raised exception is not explicitly raised: `ZeroDivisionError`
|
||||
--> DOC502_numpy.py:139:5
|
||||
|
|
||||
137 | # of the exceptions
|
||||
138 | def foo():
|
||||
139 | / """First line.
|
||||
140 | |
|
||||
141 | | Raises
|
||||
142 | | ------
|
||||
143 | | ValueError
|
||||
144 | | some text
|
||||
145 | | .. math:: e^{xception}
|
||||
146 | | ZeroDivisionError
|
||||
147 | | Will not be raised, DOC502
|
||||
148 | | """
|
||||
| |_______^
|
||||
149 | raise ValueError
|
||||
|
|
||||
help: Remove `ZeroDivisionError` from the docstring
|
||||
|
||||
@@ -187,36 +187,3 @@ DOC102 Documented parameter `a` is not in the function's signature
|
||||
302 | b
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `x1` is not in the function's signature
|
||||
--> DOC102_numpy.py:380:5
|
||||
|
|
||||
378 | Parameters
|
||||
379 | ----------
|
||||
380 | x1, x2 : object
|
||||
| ^^
|
||||
381 | Objects.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `x2` is not in the function's signature
|
||||
--> DOC102_numpy.py:380:9
|
||||
|
|
||||
378 | Parameters
|
||||
379 | ----------
|
||||
380 | x1, x2 : object
|
||||
| ^^
|
||||
381 | Objects.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `extra_param` is not in the function's signature
|
||||
--> DOC102_numpy.py:418:5
|
||||
|
|
||||
416 | x1, x2 : str
|
||||
417 | String parameters for processing.
|
||||
418 | extra_param : str
|
||||
| ^^^^^^^^^^^
|
||||
419 | Extra parameter not in signature.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use ruff_python_ast::{self as ast, Expr, Stmt};
|
||||
use ruff_python_ast::{Expr, Stmt};
|
||||
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_semantic::analyze::typing::is_dict;
|
||||
@@ -108,77 +108,15 @@ fn is_dict_key_tuple_with_two_elements(binding: &Binding, semantic: &SemanticMod
|
||||
return false;
|
||||
};
|
||||
|
||||
let (value, annotation) = match statement {
|
||||
Stmt::Assign(assign_stmt) => (assign_stmt.value.as_ref(), None),
|
||||
Stmt::AnnAssign(ast::StmtAnnAssign {
|
||||
value: Some(value),
|
||||
annotation,
|
||||
..
|
||||
}) => (value.as_ref(), Some(annotation.as_ref())),
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
let Expr::Dict(dict_expr) = value else {
|
||||
let Stmt::Assign(assign_stmt) = statement else {
|
||||
return false;
|
||||
};
|
||||
|
||||
// Check if dict is empty
|
||||
let is_empty = dict_expr.is_empty();
|
||||
let Expr::Dict(dict_expr) = &*assign_stmt.value else {
|
||||
return false;
|
||||
};
|
||||
|
||||
if is_empty {
|
||||
// For empty dicts, check type annotation
|
||||
return annotation
|
||||
.is_some_and(|annotation| is_annotation_dict_with_tuple_keys(annotation, semantic));
|
||||
}
|
||||
|
||||
// For non-empty dicts, check if all keys are 2-tuples
|
||||
dict_expr
|
||||
.iter_keys()
|
||||
.all(|key| matches!(key, Some(Expr::Tuple(tuple)) if tuple.len() == 2))
|
||||
}
|
||||
|
||||
/// Returns true if the annotation is `dict[tuple[T1, T2], ...]` where tuple has exactly 2 elements.
|
||||
fn is_annotation_dict_with_tuple_keys(annotation: &Expr, semantic: &SemanticModel) -> bool {
|
||||
// Check if it's a subscript: dict[...]
|
||||
let Expr::Subscript(subscript) = annotation else {
|
||||
return false;
|
||||
};
|
||||
|
||||
// Check if it's dict or typing.Dict
|
||||
if !semantic.match_builtin_expr(subscript.value.as_ref(), "dict")
|
||||
&& !semantic.match_typing_expr(subscript.value.as_ref(), "Dict")
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Extract the slice (should be a tuple: (key_type, value_type))
|
||||
let Expr::Tuple(tuple) = subscript.slice.as_ref() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
// dict[K, V] format - check if K is tuple with 2 elements
|
||||
if let [key, _value] = tuple.elts.as_slice() {
|
||||
return is_tuple_type_with_two_elements(key, semantic);
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// Returns true if the expression represents a tuple type with exactly 2 elements.
|
||||
fn is_tuple_type_with_two_elements(expr: &Expr, semantic: &SemanticModel) -> bool {
|
||||
// Handle tuple[...] subscript
|
||||
if let Expr::Subscript(subscript) = expr {
|
||||
// Check if it's tuple or typing.Tuple
|
||||
if semantic.match_builtin_expr(subscript.value.as_ref(), "tuple")
|
||||
|| semantic.match_typing_expr(subscript.value.as_ref(), "Tuple")
|
||||
{
|
||||
// Check the slice - tuple[T1, T2]
|
||||
if let Expr::Tuple(tuple_slice) = subscript.slice.as_ref() {
|
||||
return tuple_slice.elts.len() == 2;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::{
|
||||
self as ast,
|
||||
helpers::map_callable,
|
||||
visitor::{Visitor, walk_expr, walk_stmt},
|
||||
};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::visitor::{Visitor, walk_expr, walk_stmt};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::Violation;
|
||||
@@ -53,54 +50,65 @@ impl Violation for StopIterationReturn {
|
||||
}
|
||||
|
||||
/// PLR1708
|
||||
pub(crate) fn stop_iteration_return(checker: &Checker, function_def: &ast::StmtFunctionDef) {
|
||||
let mut analyzer = GeneratorAnalyzer {
|
||||
checker,
|
||||
has_yield: false,
|
||||
stop_iteration_raises: Vec::new(),
|
||||
pub(crate) fn stop_iteration_return(checker: &Checker, raise_stmt: &ast::StmtRaise) {
|
||||
// Fast-path: only continue if this is `raise StopIteration` (with or without args)
|
||||
let Some(exc) = &raise_stmt.exc else {
|
||||
return;
|
||||
};
|
||||
|
||||
analyzer.visit_body(&function_def.body);
|
||||
|
||||
if analyzer.has_yield {
|
||||
for raise_stmt in analyzer.stop_iteration_raises {
|
||||
checker.report_diagnostic(StopIterationReturn, raise_stmt.range());
|
||||
let is_stop_iteration = match exc.as_ref() {
|
||||
ast::Expr::Call(ast::ExprCall { func, .. }) => {
|
||||
checker.semantic().match_builtin_expr(func, "StopIteration")
|
||||
}
|
||||
expr => checker.semantic().match_builtin_expr(expr, "StopIteration"),
|
||||
};
|
||||
|
||||
if !is_stop_iteration {
|
||||
return;
|
||||
}
|
||||
|
||||
// Now check the (more expensive) generator context
|
||||
if !in_generator_context(checker) {
|
||||
return;
|
||||
}
|
||||
|
||||
checker.report_diagnostic(StopIterationReturn, raise_stmt.range());
|
||||
}
|
||||
|
||||
struct GeneratorAnalyzer<'a, 'b> {
|
||||
checker: &'a Checker<'b>,
|
||||
has_yield: bool,
|
||||
stop_iteration_raises: Vec<&'a ast::StmtRaise>,
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for GeneratorAnalyzer<'a, '_> {
|
||||
fn visit_stmt(&mut self, stmt: &'a ast::Stmt) {
|
||||
match stmt {
|
||||
ast::Stmt::FunctionDef(_) => {}
|
||||
ast::Stmt::Raise(raise @ ast::StmtRaise { exc: Some(exc), .. }) => {
|
||||
if self
|
||||
.checker
|
||||
.semantic()
|
||||
.match_builtin_expr(map_callable(exc), "StopIteration")
|
||||
{
|
||||
self.stop_iteration_raises.push(raise);
|
||||
}
|
||||
walk_stmt(self, stmt);
|
||||
/// Returns true if we're inside a function that contains any `yield`/`yield from`.
|
||||
fn in_generator_context(checker: &Checker) -> bool {
|
||||
for scope in checker.semantic().current_scopes() {
|
||||
if let ruff_python_semantic::ScopeKind::Function(function_def) = scope.kind {
|
||||
if contains_yield_statement(&function_def.body) {
|
||||
return true;
|
||||
}
|
||||
_ => walk_stmt(self, stmt),
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a ast::Expr) {
|
||||
match expr {
|
||||
ast::Expr::Lambda(_) => {}
|
||||
ast::Expr::Yield(_) | ast::Expr::YieldFrom(_) => {
|
||||
self.has_yield = true;
|
||||
/// Check if a statement list contains any yield statements
|
||||
fn contains_yield_statement(body: &[ast::Stmt]) -> bool {
|
||||
struct YieldFinder {
|
||||
found: bool,
|
||||
}
|
||||
|
||||
impl Visitor<'_> for YieldFinder {
|
||||
fn visit_expr(&mut self, expr: &ast::Expr) {
|
||||
if matches!(expr, ast::Expr::Yield(_) | ast::Expr::YieldFrom(_)) {
|
||||
self.found = true;
|
||||
} else {
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
_ => walk_expr(self, expr),
|
||||
}
|
||||
}
|
||||
|
||||
let mut finder = YieldFinder { found: false };
|
||||
for stmt in body {
|
||||
walk_stmt(&mut finder, stmt);
|
||||
if finder.found {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
@@ -39,61 +39,3 @@ help: Add a call to `.items()`
|
||||
18 |
|
||||
19 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
PLE1141 [*] Unpacking a dictionary in iteration without calling `.items()`
|
||||
--> dict_iter_missing_items.py:37:13
|
||||
|
|
||||
35 | empty_dict = {}
|
||||
36 | empty_dict["x"] = 1
|
||||
37 | for k, v in empty_dict:
|
||||
| ^^^^^^^^^^
|
||||
38 | pass
|
||||
|
|
||||
help: Add a call to `.items()`
|
||||
34 | # Empty dict cases
|
||||
35 | empty_dict = {}
|
||||
36 | empty_dict["x"] = 1
|
||||
- for k, v in empty_dict:
|
||||
37 + for k, v in empty_dict.items():
|
||||
38 | pass
|
||||
39 |
|
||||
40 | empty_dict_annotated_tuple_keys: dict[tuple[int, str], bool] = {}
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
PLE1141 [*] Unpacking a dictionary in iteration without calling `.items()`
|
||||
--> dict_iter_missing_items.py:46:13
|
||||
|
|
||||
44 | empty_dict_unannotated = {}
|
||||
45 | empty_dict_unannotated[("x", "y")] = True
|
||||
46 | for k, v in empty_dict_unannotated:
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^
|
||||
47 | pass
|
||||
|
|
||||
help: Add a call to `.items()`
|
||||
43 |
|
||||
44 | empty_dict_unannotated = {}
|
||||
45 | empty_dict_unannotated[("x", "y")] = True
|
||||
- for k, v in empty_dict_unannotated:
|
||||
46 + for k, v in empty_dict_unannotated.items():
|
||||
47 | pass
|
||||
48 |
|
||||
49 | empty_dict_annotated_str_keys: dict[str, int] = {}
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
PLE1141 [*] Unpacking a dictionary in iteration without calling `.items()`
|
||||
--> dict_iter_missing_items.py:51:13
|
||||
|
|
||||
49 | empty_dict_annotated_str_keys: dict[str, int] = {}
|
||||
50 | empty_dict_annotated_str_keys["x"] = 1
|
||||
51 | for k, v in empty_dict_annotated_str_keys:
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
52 | pass
|
||||
|
|
||||
help: Add a call to `.items()`
|
||||
48 |
|
||||
49 | empty_dict_annotated_str_keys: dict[str, int] = {}
|
||||
50 | empty_dict_annotated_str_keys["x"] = 1
|
||||
- for k, v in empty_dict_annotated_str_keys:
|
||||
51 + for k, v in empty_dict_annotated_str_keys.items():
|
||||
52 | pass
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
@@ -107,24 +107,3 @@ PLR1708 Explicit `raise StopIteration` in generator
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
help: Use `return` instead
|
||||
|
||||
PLR1708 Explicit `raise StopIteration` in generator
|
||||
--> stop_iteration_return.py:149:9
|
||||
|
|
||||
147 | yield 1
|
||||
148 | class C:
|
||||
149 | raise StopIteration # Should trigger
|
||||
| ^^^^^^^^^^^^^^^^^^^
|
||||
150 | yield C
|
||||
|
|
||||
help: Use `return` instead
|
||||
|
||||
PLR1708 Explicit `raise StopIteration` in generator
|
||||
--> stop_iteration_return.py:154:5
|
||||
|
|
||||
152 | # https://github.com/astral-sh/ruff/pull/21177#discussion_r2539702728
|
||||
153 | def foo():
|
||||
154 | raise StopIteration((yield 1)) # Should trigger
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
help: Use `return` instead
|
||||
|
||||
@@ -766,12 +766,11 @@ pub(crate) fn deprecated_import(checker: &Checker, import_from_stmt: &StmtImport
|
||||
}
|
||||
|
||||
for operation in fixer.with_renames() {
|
||||
let mut diagnostic = checker.report_diagnostic(
|
||||
checker.report_diagnostic(
|
||||
DeprecatedImport {
|
||||
deprecation: Deprecation::WithRename(operation),
|
||||
},
|
||||
import_from_stmt.range(),
|
||||
);
|
||||
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,15 +2,17 @@ use ruff_diagnostics::{Applicability, Edit, Fix};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::{
|
||||
self as ast, Expr, Stmt,
|
||||
relocate::relocate_expr,
|
||||
visitor::{self, Visitor},
|
||||
};
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_codegen::Generator;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::fix::snippet::SourceCodeSnippet;
|
||||
use crate::importer::ImportRequest;
|
||||
use crate::rules::refurb::helpers::{FileOpen, find_file_opens};
|
||||
use crate::{FixAvailability, Locator, Violation};
|
||||
use crate::{FixAvailability, Violation};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `open` and `write` that can be replaced by `pathlib`
|
||||
@@ -127,7 +129,7 @@ impl<'a> Visitor<'a> for WriteMatcher<'a, '_> {
|
||||
let open = self.candidates.remove(open);
|
||||
|
||||
if self.loop_counter == 0 {
|
||||
let suggestion = make_suggestion(&open, content, self.checker.locator());
|
||||
let suggestion = make_suggestion(&open, content, self.checker.generator());
|
||||
|
||||
let mut diagnostic = self.checker.report_diagnostic(
|
||||
WriteWholeFile {
|
||||
@@ -170,21 +172,27 @@ fn match_write_call(expr: &Expr) -> Option<(&Expr, &Expr)> {
|
||||
Some((&*attr.value, call.arguments.args.first()?))
|
||||
}
|
||||
|
||||
fn make_suggestion(open: &FileOpen<'_>, arg: &Expr, locator: &Locator) -> String {
|
||||
let method_name = open.mode.pathlib_method();
|
||||
let arg_code = locator.slice(arg.range());
|
||||
|
||||
if open.keywords.is_empty() {
|
||||
format!("{method_name}({arg_code})")
|
||||
} else {
|
||||
format!(
|
||||
"{method_name}({arg_code}, {})",
|
||||
itertools::join(
|
||||
open.keywords.iter().map(|kw| locator.slice(kw.range())),
|
||||
", "
|
||||
)
|
||||
)
|
||||
}
|
||||
fn make_suggestion(open: &FileOpen<'_>, arg: &Expr, generator: Generator) -> String {
|
||||
let name = ast::ExprName {
|
||||
id: open.mode.pathlib_method(),
|
||||
ctx: ast::ExprContext::Load,
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
};
|
||||
let mut arg = arg.clone();
|
||||
relocate_expr(&mut arg, TextRange::default());
|
||||
let call = ast::ExprCall {
|
||||
func: Box::new(name.into()),
|
||||
arguments: ast::Arguments {
|
||||
args: Box::new([arg]),
|
||||
keywords: open.keywords.iter().copied().cloned().collect(),
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
},
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
};
|
||||
generator.expr(&call.into())
|
||||
}
|
||||
|
||||
fn generate_fix(
|
||||
|
||||
@@ -279,34 +279,3 @@ help: Replace with `Path("test.json")....`
|
||||
- with open("test.json", "wb") as f:
|
||||
- f.write(json.dumps(data, indent=4).encode("utf-8"))
|
||||
155 + pathlib.Path("test.json").write_bytes(json.dumps(data, indent=4).encode("utf-8"))
|
||||
156 |
|
||||
157 | # See: https://github.com/astral-sh/ruff/issues/21381
|
||||
158 | with open("tmp_path/pyproject.toml", "w") as f:
|
||||
|
||||
FURB103 [*] `open` and `write` should be replaced by `Path("tmp_path/pyproject.toml")....`
|
||||
--> FURB103.py:158:6
|
||||
|
|
||||
157 | # See: https://github.com/astral-sh/ruff/issues/21381
|
||||
158 | with open("tmp_path/pyproject.toml", "w") as f:
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
159 | f.write(dedent(
|
||||
160 | """
|
||||
|
|
||||
help: Replace with `Path("tmp_path/pyproject.toml")....`
|
||||
148 |
|
||||
149 | # See: https://github.com/astral-sh/ruff/issues/20785
|
||||
150 | import json
|
||||
151 + import pathlib
|
||||
152 |
|
||||
153 | data = {"price": 100}
|
||||
154 |
|
||||
--------------------------------------------------------------------------------
|
||||
156 | f.write(json.dumps(data, indent=4).encode("utf-8"))
|
||||
157 |
|
||||
158 | # See: https://github.com/astral-sh/ruff/issues/21381
|
||||
- with open("tmp_path/pyproject.toml", "w") as f:
|
||||
- f.write(dedent(
|
||||
159 + pathlib.Path("tmp_path/pyproject.toml").write_text(dedent(
|
||||
160 | """
|
||||
161 | [project]
|
||||
162 | other = 1.234
|
||||
|
||||
@@ -209,34 +209,3 @@ help: Replace with `Path("test.json")....`
|
||||
- with open("test.json", "wb") as f:
|
||||
- f.write(json.dumps(data, indent=4).encode("utf-8"))
|
||||
155 + pathlib.Path("test.json").write_bytes(json.dumps(data, indent=4).encode("utf-8"))
|
||||
156 |
|
||||
157 | # See: https://github.com/astral-sh/ruff/issues/21381
|
||||
158 | with open("tmp_path/pyproject.toml", "w") as f:
|
||||
|
||||
FURB103 [*] `open` and `write` should be replaced by `Path("tmp_path/pyproject.toml")....`
|
||||
--> FURB103.py:158:6
|
||||
|
|
||||
157 | # See: https://github.com/astral-sh/ruff/issues/21381
|
||||
158 | with open("tmp_path/pyproject.toml", "w") as f:
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
159 | f.write(dedent(
|
||||
160 | """
|
||||
|
|
||||
help: Replace with `Path("tmp_path/pyproject.toml")....`
|
||||
148 |
|
||||
149 | # See: https://github.com/astral-sh/ruff/issues/20785
|
||||
150 | import json
|
||||
151 + import pathlib
|
||||
152 |
|
||||
153 | data = {"price": 100}
|
||||
154 |
|
||||
--------------------------------------------------------------------------------
|
||||
156 | f.write(json.dumps(data, indent=4).encode("utf-8"))
|
||||
157 |
|
||||
158 | # See: https://github.com/astral-sh/ruff/issues/21381
|
||||
- with open("tmp_path/pyproject.toml", "w") as f:
|
||||
- f.write(dedent(
|
||||
159 + pathlib.Path("tmp_path/pyproject.toml").write_text(dedent(
|
||||
160 | """
|
||||
161 | [project]
|
||||
162 | other = 1.234
|
||||
|
||||
@@ -97,8 +97,7 @@ mod tests {
|
||||
#[test_case(Rule::MapIntVersionParsing, Path::new("RUF048_1.py"))]
|
||||
#[test_case(Rule::DataclassEnum, Path::new("RUF049.py"))]
|
||||
#[test_case(Rule::IfKeyInDictDel, Path::new("RUF051.py"))]
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052_0.py"))]
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052_1.py"))]
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052.py"))]
|
||||
#[test_case(Rule::ClassWithMixedTypeVars, Path::new("RUF053.py"))]
|
||||
#[test_case(Rule::FalsyDictGetFallback, Path::new("RUF056.py"))]
|
||||
#[test_case(Rule::UnnecessaryRound, Path::new("RUF057.py"))]
|
||||
@@ -622,8 +621,8 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052_0.py"), r"^_+", 1)]
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052_0.py"), r"", 2)]
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052.py"), r"^_+", 1)]
|
||||
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052.py"), r"", 2)]
|
||||
fn custom_regexp_preset(
|
||||
rule_code: Rule,
|
||||
path: &Path,
|
||||
|
||||
@@ -2,9 +2,7 @@ use std::str::FromStr;
|
||||
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_literal::cformat::{
|
||||
CConversionFlags, CFormatPart, CFormatSpec, CFormatString, CFormatType,
|
||||
};
|
||||
use ruff_python_literal::cformat::{CFormatPart, CFormatString, CFormatType};
|
||||
use ruff_python_literal::format::FormatConversion;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
@@ -197,8 +195,7 @@ pub(crate) fn logging_eager_conversion(checker: &Checker, call: &ast::ExprCall)
|
||||
}
|
||||
// %s with oct() - suggest using %#o instead
|
||||
FormatConversion::Str
|
||||
if checker.semantic().match_builtin_expr(func.as_ref(), "oct")
|
||||
&& !has_complex_conversion_specifier(spec) =>
|
||||
if checker.semantic().match_builtin_expr(func.as_ref(), "oct") =>
|
||||
{
|
||||
checker.report_diagnostic(
|
||||
LoggingEagerConversion {
|
||||
@@ -210,8 +207,7 @@ pub(crate) fn logging_eager_conversion(checker: &Checker, call: &ast::ExprCall)
|
||||
}
|
||||
// %s with hex() - suggest using %#x instead
|
||||
FormatConversion::Str
|
||||
if checker.semantic().match_builtin_expr(func.as_ref(), "hex")
|
||||
&& !has_complex_conversion_specifier(spec) =>
|
||||
if checker.semantic().match_builtin_expr(func.as_ref(), "hex") =>
|
||||
{
|
||||
checker.report_diagnostic(
|
||||
LoggingEagerConversion {
|
||||
@@ -226,23 +222,3 @@ pub(crate) fn logging_eager_conversion(checker: &Checker, call: &ast::ExprCall)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a conversion specifier has complex flags or precision that make `oct()` or `hex()` necessary.
|
||||
///
|
||||
/// Returns `true` if any of these conditions are met:
|
||||
/// - Flag `0` (zero-pad) is used, flag `-` (left-adjust) is not used, and minimum width is specified
|
||||
/// - Flag ` ` (blank sign) is used
|
||||
/// - Flag `+` (sign char) is used
|
||||
/// - Precision is specified
|
||||
fn has_complex_conversion_specifier(spec: &CFormatSpec) -> bool {
|
||||
if spec.flags.intersects(CConversionFlags::ZERO_PAD)
|
||||
&& !spec.flags.intersects(CConversionFlags::LEFT_ADJUST)
|
||||
&& spec.min_field_width.is_some()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
spec.flags
|
||||
.intersects(CConversionFlags::BLANK_SIGN | CConversionFlags::SIGN_CHAR)
|
||||
|| spec.precision.is_some()
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use rustc_hash::FxHashSet;
|
||||
use ruff_python_ast::{self as ast, Stmt};
|
||||
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::{self as ast, Stmt};
|
||||
use ruff_python_semantic::analyze::typing::{is_immutable_annotation, is_mutable_expr};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
@@ -97,9 +96,6 @@ impl Violation for MutableClassDefault {
|
||||
|
||||
/// RUF012
|
||||
pub(crate) fn mutable_class_default(checker: &Checker, class_def: &ast::StmtClassDef) {
|
||||
// Collect any `ClassVar`s we find in case they get reassigned later.
|
||||
let mut class_var_targets = FxHashSet::default();
|
||||
|
||||
for statement in &class_def.body {
|
||||
match statement {
|
||||
Stmt::AnnAssign(ast::StmtAnnAssign {
|
||||
@@ -108,12 +104,6 @@ pub(crate) fn mutable_class_default(checker: &Checker, class_def: &ast::StmtClas
|
||||
value: Some(value),
|
||||
..
|
||||
}) => {
|
||||
if let ast::Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
|
||||
if is_class_var_annotation(annotation, checker.semantic()) {
|
||||
class_var_targets.insert(id);
|
||||
}
|
||||
}
|
||||
|
||||
if !is_special_attribute(target)
|
||||
&& is_mutable_expr(value, checker.semantic())
|
||||
&& !is_class_var_annotation(annotation, checker.semantic())
|
||||
@@ -133,12 +123,8 @@ pub(crate) fn mutable_class_default(checker: &Checker, class_def: &ast::StmtClas
|
||||
}
|
||||
}
|
||||
Stmt::Assign(ast::StmtAssign { value, targets, .. }) => {
|
||||
if !targets.iter().all(|target| {
|
||||
is_special_attribute(target)
|
||||
|| target
|
||||
.as_name_expr()
|
||||
.is_some_and(|name| class_var_targets.contains(&name.id))
|
||||
}) && is_mutable_expr(value, checker.semantic())
|
||||
if !targets.iter().all(is_special_attribute)
|
||||
&& is_mutable_expr(value, checker.semantic())
|
||||
{
|
||||
// Avoid, e.g., Pydantic and msgspec models, which end up copying defaults on instance creation.
|
||||
if has_default_copy_semantics(class_def, checker.semantic()) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::helpers::is_dunder;
|
||||
use ruff_python_semantic::{Binding, BindingId, BindingKind, ScopeKind};
|
||||
use ruff_python_semantic::{Binding, BindingId};
|
||||
use ruff_python_stdlib::identifiers::is_identifier;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
@@ -111,7 +111,7 @@ pub(crate) fn used_dummy_variable(checker: &Checker, binding: &Binding, binding_
|
||||
return;
|
||||
}
|
||||
|
||||
// We only emit the lint on local variables.
|
||||
// We only emit the lint on variables defined via assignments.
|
||||
//
|
||||
// ## Why not also emit the lint on function parameters?
|
||||
//
|
||||
@@ -127,30 +127,8 @@ pub(crate) fn used_dummy_variable(checker: &Checker, binding: &Binding, binding_
|
||||
// autofixing the diagnostic for assignments. See:
|
||||
// - <https://github.com/astral-sh/ruff/issues/14790>
|
||||
// - <https://github.com/astral-sh/ruff/issues/14799>
|
||||
match binding.kind {
|
||||
BindingKind::Annotation
|
||||
| BindingKind::Argument
|
||||
| BindingKind::NamedExprAssignment
|
||||
| BindingKind::Assignment
|
||||
| BindingKind::LoopVar
|
||||
| BindingKind::WithItemVar
|
||||
| BindingKind::BoundException
|
||||
| BindingKind::UnboundException(_) => {}
|
||||
|
||||
BindingKind::TypeParam
|
||||
| BindingKind::Global(_)
|
||||
| BindingKind::Nonlocal(_, _)
|
||||
| BindingKind::Builtin
|
||||
| BindingKind::ClassDefinition(_)
|
||||
| BindingKind::FunctionDefinition(_)
|
||||
| BindingKind::Export(_)
|
||||
| BindingKind::FutureImport
|
||||
| BindingKind::Import(_)
|
||||
| BindingKind::FromImport(_)
|
||||
| BindingKind::SubmoduleImport(_)
|
||||
| BindingKind::Deletion
|
||||
| BindingKind::ConditionalDeletion(_)
|
||||
| BindingKind::DunderClassCell => return,
|
||||
if !binding.kind.is_assignment() {
|
||||
return;
|
||||
}
|
||||
|
||||
// This excludes `global` and `nonlocal` variables.
|
||||
@@ -160,12 +138,9 @@ pub(crate) fn used_dummy_variable(checker: &Checker, binding: &Binding, binding_
|
||||
|
||||
let semantic = checker.semantic();
|
||||
|
||||
// Only variables defined in function and generator scopes
|
||||
// Only variables defined in function scopes
|
||||
let scope = &semantic.scopes[binding.scope];
|
||||
if !matches!(
|
||||
scope.kind,
|
||||
ScopeKind::Function(_) | ScopeKind::Generator { .. }
|
||||
) {
|
||||
if !scope.kind.is_function() {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
source: crates/ruff_linter/src/rules/ruff/mod.rs
|
||||
---
|
||||
RUF052 [*] Local dummy variable `_var` is accessed
|
||||
--> RUF052_0.py:92:9
|
||||
--> RUF052.py:92:9
|
||||
|
|
||||
90 | class Class_:
|
||||
91 | def fun(self):
|
||||
@@ -24,7 +24,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_list` is accessed
|
||||
--> RUF052_0.py:99:5
|
||||
--> RUF052.py:99:5
|
||||
|
|
||||
98 | def fun():
|
||||
99 | _list = "built-in" # [RUF052]
|
||||
@@ -45,7 +45,7 @@ help: Prefer using trailing underscores to avoid shadowing a built-in
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_x` is accessed
|
||||
--> RUF052_0.py:106:5
|
||||
--> RUF052.py:106:5
|
||||
|
|
||||
104 | def fun():
|
||||
105 | global x
|
||||
@@ -67,7 +67,7 @@ help: Prefer using trailing underscores to avoid shadowing a variable
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_x` is accessed
|
||||
--> RUF052_0.py:113:5
|
||||
--> RUF052.py:113:5
|
||||
|
|
||||
111 | def bar():
|
||||
112 | nonlocal x
|
||||
@@ -90,7 +90,7 @@ help: Prefer using trailing underscores to avoid shadowing a variable
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_x` is accessed
|
||||
--> RUF052_0.py:120:5
|
||||
--> RUF052.py:120:5
|
||||
|
|
||||
118 | def fun():
|
||||
119 | x = "local"
|
||||
@@ -112,7 +112,7 @@ help: Prefer using trailing underscores to avoid shadowing a variable
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 Local dummy variable `_GLOBAL_1` is accessed
|
||||
--> RUF052_0.py:128:5
|
||||
--> RUF052.py:128:5
|
||||
|
|
||||
127 | def unfixables():
|
||||
128 | _GLOBAL_1 = "foo"
|
||||
@@ -123,7 +123,7 @@ RUF052 Local dummy variable `_GLOBAL_1` is accessed
|
||||
help: Prefer using trailing underscores to avoid shadowing a variable
|
||||
|
||||
RUF052 Local dummy variable `_local` is accessed
|
||||
--> RUF052_0.py:136:5
|
||||
--> RUF052.py:136:5
|
||||
|
|
||||
135 | # unfixable because the rename would shadow a local variable
|
||||
136 | _local = "local3" # [RUF052]
|
||||
@@ -133,7 +133,7 @@ RUF052 Local dummy variable `_local` is accessed
|
||||
help: Prefer using trailing underscores to avoid shadowing a variable
|
||||
|
||||
RUF052 Local dummy variable `_GLOBAL_1` is accessed
|
||||
--> RUF052_0.py:140:9
|
||||
--> RUF052.py:140:9
|
||||
|
|
||||
139 | def nested():
|
||||
140 | _GLOBAL_1 = "foo"
|
||||
@@ -144,7 +144,7 @@ RUF052 Local dummy variable `_GLOBAL_1` is accessed
|
||||
help: Prefer using trailing underscores to avoid shadowing a variable
|
||||
|
||||
RUF052 Local dummy variable `_local` is accessed
|
||||
--> RUF052_0.py:145:9
|
||||
--> RUF052.py:145:9
|
||||
|
|
||||
144 | # unfixable because the rename would shadow a variable from the outer function
|
||||
145 | _local = "local4"
|
||||
@@ -154,7 +154,7 @@ RUF052 Local dummy variable `_local` is accessed
|
||||
help: Prefer using trailing underscores to avoid shadowing a variable
|
||||
|
||||
RUF052 [*] Local dummy variable `_P` is accessed
|
||||
--> RUF052_0.py:153:5
|
||||
--> RUF052.py:153:5
|
||||
|
|
||||
151 | from collections import namedtuple
|
||||
152 |
|
||||
@@ -184,7 +184,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_T` is accessed
|
||||
--> RUF052_0.py:154:5
|
||||
--> RUF052.py:154:5
|
||||
|
|
||||
153 | _P = ParamSpec("_P")
|
||||
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
|
||||
@@ -213,7 +213,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_NT` is accessed
|
||||
--> RUF052_0.py:155:5
|
||||
--> RUF052.py:155:5
|
||||
|
|
||||
153 | _P = ParamSpec("_P")
|
||||
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
|
||||
@@ -242,7 +242,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_E` is accessed
|
||||
--> RUF052_0.py:156:5
|
||||
--> RUF052.py:156:5
|
||||
|
|
||||
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
|
||||
155 | _NT = NamedTuple("_NT", [("foo", int)])
|
||||
@@ -270,7 +270,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_NT2` is accessed
|
||||
--> RUF052_0.py:157:5
|
||||
--> RUF052.py:157:5
|
||||
|
|
||||
155 | _NT = NamedTuple("_NT", [("foo", int)])
|
||||
156 | _E = Enum("_E", ["a", "b", "c"])
|
||||
@@ -297,7 +297,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_NT3` is accessed
|
||||
--> RUF052_0.py:158:5
|
||||
--> RUF052.py:158:5
|
||||
|
|
||||
156 | _E = Enum("_E", ["a", "b", "c"])
|
||||
157 | _NT2 = namedtuple("_NT2", ['x', 'y', 'z'])
|
||||
@@ -323,7 +323,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_DynamicClass` is accessed
|
||||
--> RUF052_0.py:159:5
|
||||
--> RUF052.py:159:5
|
||||
|
|
||||
157 | _NT2 = namedtuple("_NT2", ['x', 'y', 'z'])
|
||||
158 | _NT3 = namedtuple(typename="_NT3", field_names=['x', 'y', 'z'])
|
||||
@@ -347,7 +347,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_NotADynamicClass` is accessed
|
||||
--> RUF052_0.py:160:5
|
||||
--> RUF052.py:160:5
|
||||
|
|
||||
158 | _NT3 = namedtuple(typename="_NT3", field_names=['x', 'y', 'z'])
|
||||
159 | _DynamicClass = type("_DynamicClass", (), {})
|
||||
@@ -371,7 +371,7 @@ help: Remove leading underscores
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_dummy_var` is accessed
|
||||
--> RUF052_0.py:182:5
|
||||
--> RUF052.py:182:5
|
||||
|
|
||||
181 | def foo():
|
||||
182 | _dummy_var = 42
|
||||
@@ -396,7 +396,7 @@ help: Prefer using trailing underscores to avoid shadowing a variable
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 Local dummy variable `_dummy_var` is accessed
|
||||
--> RUF052_0.py:192:5
|
||||
--> RUF052.py:192:5
|
||||
|
|
||||
190 | # Unfixable because both possible candidates for the new name are shadowed
|
||||
191 | # in the scope of one of the references to the variable
|
||||
@@ -1,494 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/ruff/mod.rs
|
||||
---
|
||||
RUF052 [*] Local dummy variable `_item` is accessed
|
||||
--> RUF052_1.py:21:9
|
||||
|
|
||||
20 | # Should detect used dummy variable
|
||||
21 | for _item in my_list:
|
||||
| ^^^^^
|
||||
22 | print(_item["foo"]) # RUF052: Local dummy variable `_item` is accessed
|
||||
|
|
||||
help: Remove leading underscores
|
||||
18 | my_list = [{"foo": 1}, {"foo": 2}]
|
||||
19 |
|
||||
20 | # Should detect used dummy variable
|
||||
- for _item in my_list:
|
||||
- print(_item["foo"]) # RUF052: Local dummy variable `_item` is accessed
|
||||
21 + for item in my_list:
|
||||
22 + print(item["foo"]) # RUF052: Local dummy variable `_item` is accessed
|
||||
23 |
|
||||
24 | # Should detect used dummy variable
|
||||
25 | for _index, _value in enumerate(my_list):
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_index` is accessed
|
||||
--> RUF052_1.py:25:9
|
||||
|
|
||||
24 | # Should detect used dummy variable
|
||||
25 | for _index, _value in enumerate(my_list):
|
||||
| ^^^^^^
|
||||
26 | result = _index + _value["foo"] # RUF052: Both `_index` and `_value` are accessed
|
||||
|
|
||||
help: Remove leading underscores
|
||||
22 | print(_item["foo"]) # RUF052: Local dummy variable `_item` is accessed
|
||||
23 |
|
||||
24 | # Should detect used dummy variable
|
||||
- for _index, _value in enumerate(my_list):
|
||||
- result = _index + _value["foo"] # RUF052: Both `_index` and `_value` are accessed
|
||||
25 + for index, _value in enumerate(my_list):
|
||||
26 + result = index + _value["foo"] # RUF052: Both `_index` and `_value` are accessed
|
||||
27 |
|
||||
28 | # List Comprehensions
|
||||
29 | def test_list_comprehensions():
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_value` is accessed
|
||||
--> RUF052_1.py:25:17
|
||||
|
|
||||
24 | # Should detect used dummy variable
|
||||
25 | for _index, _value in enumerate(my_list):
|
||||
| ^^^^^^
|
||||
26 | result = _index + _value["foo"] # RUF052: Both `_index` and `_value` are accessed
|
||||
|
|
||||
help: Remove leading underscores
|
||||
22 | print(_item["foo"]) # RUF052: Local dummy variable `_item` is accessed
|
||||
23 |
|
||||
24 | # Should detect used dummy variable
|
||||
- for _index, _value in enumerate(my_list):
|
||||
- result = _index + _value["foo"] # RUF052: Both `_index` and `_value` are accessed
|
||||
25 + for _index, value in enumerate(my_list):
|
||||
26 + result = _index + value["foo"] # RUF052: Both `_index` and `_value` are accessed
|
||||
27 |
|
||||
28 | # List Comprehensions
|
||||
29 | def test_list_comprehensions():
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_item` is accessed
|
||||
--> RUF052_1.py:33:32
|
||||
|
|
||||
32 | # Should detect used dummy variable
|
||||
33 | result = [_item["foo"] for _item in my_list] # RUF052: Local dummy variable `_item` is accessed
|
||||
| ^^^^^
|
||||
34 |
|
||||
35 | # Should detect used dummy variable in nested comprehension
|
||||
|
|
||||
help: Remove leading underscores
|
||||
30 | my_list = [{"foo": 1}, {"foo": 2}]
|
||||
31 |
|
||||
32 | # Should detect used dummy variable
|
||||
- result = [_item["foo"] for _item in my_list] # RUF052: Local dummy variable `_item` is accessed
|
||||
33 + result = [item["foo"] for item in my_list] # RUF052: Local dummy variable `_item` is accessed
|
||||
34 |
|
||||
35 | # Should detect used dummy variable in nested comprehension
|
||||
36 | nested = [[_item["foo"] for _item in _sublist] for _sublist in [my_list, my_list]]
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
RUF052 [*] Local dummy variable `_item` is accessed
|
||||
--> RUF052_1.py:36:33
|
||||
|
|
||||
35 | # Should detect used dummy variable in nested comprehension
|
||||
36 | nested = [[_item["foo"] for _item in _sublist] for _sublist in [my_list, my_list]]
|
||||
| ^^^^^
|
||||
37 | # RUF052: Both `_item` and `_sublist` are accessed
|
||||
|
|
||||
help: Remove leading underscores
|
||||
33 | result = [_item["foo"] for _item in my_list] # RUF052: Local dummy variable `_item` is accessed
|
||||
34 |
|
||||
35 | # Should detect used dummy variable in nested comprehension
|
||||
- nested = [[_item["foo"] for _item in _sublist] for _sublist in [my_list, my_list]]
|
||||
36 + nested = [[item["foo"] for item in _sublist] for _sublist in [my_list, my_list]]
|
||||
37 | # RUF052: Both `_item` and `_sublist` are accessed
|
||||
38 |
|
||||
39 | # Should detect with conditions
|
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_sublist` is accessed
--> RUF052_1.py:36:56
|
35 | # Should detect used dummy variable in nested comprehension
36 | nested = [[_item["foo"] for _item in _sublist] for _sublist in [my_list, my_list]]
| ^^^^^^^^
37 | # RUF052: Both `_item` and `_sublist` are accessed
|
help: Remove leading underscores
33 | result = [_item["foo"] for _item in my_list] # RUF052: Local dummy variable `_item` is accessed
34 |
35 | # Should detect used dummy variable in nested comprehension
- nested = [[_item["foo"] for _item in _sublist] for _sublist in [my_list, my_list]]
36 + nested = [[_item["foo"] for _item in sublist] for sublist in [my_list, my_list]]
37 | # RUF052: Both `_item` and `_sublist` are accessed
38 |
39 | # Should detect with conditions
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:40:34
|
39 | # Should detect with conditions
40 | filtered = [_item["foo"] for _item in my_list if _item["foo"] > 0]
| ^^^^^
41 | # RUF052: Local dummy variable `_item` is accessed
|
help: Remove leading underscores
37 | # RUF052: Both `_item` and `_sublist` are accessed
38 |
39 | # Should detect with conditions
- filtered = [_item["foo"] for _item in my_list if _item["foo"] > 0]
40 + filtered = [item["foo"] for item in my_list if item["foo"] > 0]
41 | # RUF052: Local dummy variable `_item` is accessed
42 |
43 | # Dict Comprehensions
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:48:48
|
47 | # Should detect used dummy variable
48 | result = {_item["key"]: _item["value"] for _item in my_list}
| ^^^^^
49 | # RUF052: Local dummy variable `_item` is accessed
|
help: Remove leading underscores
45 | my_list = [{"key": "a", "value": 1}, {"key": "b", "value": 2}]
46 |
47 | # Should detect used dummy variable
- result = {_item["key"]: _item["value"] for _item in my_list}
48 + result = {item["key"]: item["value"] for item in my_list}
49 | # RUF052: Local dummy variable `_item` is accessed
50 |
51 | # Should detect with enumerate
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_index` is accessed
--> RUF052_1.py:52:43
|
51 | # Should detect with enumerate
52 | indexed = {_index: _item["value"] for _index, _item in enumerate(my_list)}
| ^^^^^^
53 | # RUF052: Both `_index` and `_item` are accessed
|
help: Remove leading underscores
49 | # RUF052: Local dummy variable `_item` is accessed
50 |
51 | # Should detect with enumerate
- indexed = {_index: _item["value"] for _index, _item in enumerate(my_list)}
52 + indexed = {index: _item["value"] for index, _item in enumerate(my_list)}
53 | # RUF052: Both `_index` and `_item` are accessed
54 |
55 | # Should detect in nested dict comprehension
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:52:51
|
51 | # Should detect with enumerate
52 | indexed = {_index: _item["value"] for _index, _item in enumerate(my_list)}
| ^^^^^
53 | # RUF052: Both `_index` and `_item` are accessed
|
help: Remove leading underscores
49 | # RUF052: Local dummy variable `_item` is accessed
50 |
51 | # Should detect with enumerate
- indexed = {_index: _item["value"] for _index, _item in enumerate(my_list)}
52 + indexed = {_index: item["value"] for _index, item in enumerate(my_list)}
53 | # RUF052: Both `_index` and `_item` are accessed
54 |
55 | # Should detect in nested dict comprehension
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_inner` is accessed
--> RUF052_1.py:56:59
|
55 | # Should detect in nested dict comprehension
56 | nested = {_outer: {_inner["key"]: _inner["value"] for _inner in sublist}
| ^^^^^^
57 | for _outer, sublist in enumerate([my_list])}
58 | # RUF052: `_outer`, `_inner` are accessed
|
help: Remove leading underscores
53 | # RUF052: Both `_index` and `_item` are accessed
54 |
55 | # Should detect in nested dict comprehension
- nested = {_outer: {_inner["key"]: _inner["value"] for _inner in sublist}
56 + nested = {_outer: {inner["key"]: inner["value"] for inner in sublist}
57 | for _outer, sublist in enumerate([my_list])}
58 | # RUF052: `_outer`, `_inner` are accessed
59 |
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_outer` is accessed
--> RUF052_1.py:57:19
|
55 | # Should detect in nested dict comprehension
56 | nested = {_outer: {_inner["key"]: _inner["value"] for _inner in sublist}
57 | for _outer, sublist in enumerate([my_list])}
| ^^^^^^
58 | # RUF052: `_outer`, `_inner` are accessed
|
help: Remove leading underscores
53 | # RUF052: Both `_index` and `_item` are accessed
54 |
55 | # Should detect in nested dict comprehension
- nested = {_outer: {_inner["key"]: _inner["value"] for _inner in sublist}
- for _outer, sublist in enumerate([my_list])}
56 + nested = {outer: {_inner["key"]: _inner["value"] for _inner in sublist}
57 + for outer, sublist in enumerate([my_list])}
58 | # RUF052: `_outer`, `_inner` are accessed
59 |
60 | # Set Comprehensions
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:65:39
|
64 | # Should detect used dummy variable
65 | unique_values = {_item["foo"] for _item in my_list}
| ^^^^^
66 | # RUF052: Local dummy variable `_item` is accessed
|
help: Remove leading underscores
62 | my_list = [{"foo": 1}, {"foo": 2}, {"foo": 1}] # Note: duplicate values
63 |
64 | # Should detect used dummy variable
- unique_values = {_item["foo"] for _item in my_list}
65 + unique_values = {item["foo"] for item in my_list}
66 | # RUF052: Local dummy variable `_item` is accessed
67 |
68 | # Should detect with conditions
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:69:38
|
68 | # Should detect with conditions
69 | filtered_set = {_item["foo"] for _item in my_list if _item["foo"] > 0}
| ^^^^^
70 | # RUF052: Local dummy variable `_item` is accessed
|
help: Remove leading underscores
66 | # RUF052: Local dummy variable `_item` is accessed
67 |
68 | # Should detect with conditions
- filtered_set = {_item["foo"] for _item in my_list if _item["foo"] > 0}
69 + filtered_set = {item["foo"] for item in my_list if item["foo"] > 0}
70 | # RUF052: Local dummy variable `_item` is accessed
71 |
72 | # Should detect with complex expression
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:73:39
|
72 | # Should detect with complex expression
73 | processed = {_item["foo"] * 2 for _item in my_list}
| ^^^^^
74 | # RUF052: Local dummy variable `_item` is accessed
|
help: Remove leading underscores
70 | # RUF052: Local dummy variable `_item` is accessed
71 |
72 | # Should detect with complex expression
- processed = {_item["foo"] * 2 for _item in my_list}
73 + processed = {item["foo"] * 2 for item in my_list}
74 | # RUF052: Local dummy variable `_item` is accessed
75 |
76 | # Generator Expressions
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:81:29
|
80 | # Should detect used dummy variable
81 | gen = (_item["foo"] for _item in my_list)
| ^^^^^
82 | # RUF052: Local dummy variable `_item` is accessed
|
help: Remove leading underscores
78 | my_list = [{"foo": 1}, {"foo": 2}]
79 |
80 | # Should detect used dummy variable
- gen = (_item["foo"] for _item in my_list)
81 + gen = (item["foo"] for item in my_list)
82 | # RUF052: Local dummy variable `_item` is accessed
83 |
84 | # Should detect when passed to function
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:85:34
|
84 | # Should detect when passed to function
85 | total = sum(_item["foo"] for _item in my_list)
| ^^^^^
86 | # RUF052: Local dummy variable `_item` is accessed
|
help: Remove leading underscores
82 | # RUF052: Local dummy variable `_item` is accessed
83 |
84 | # Should detect when passed to function
- total = sum(_item["foo"] for _item in my_list)
85 + total = sum(item["foo"] for item in my_list)
86 | # RUF052: Local dummy variable `_item` is accessed
87 |
88 | # Should detect with multiple generators
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_x` is accessed
--> RUF052_1.py:89:27
|
88 | # Should detect with multiple generators
89 | pairs = ((_x, _y) for _x in range(3) for _y in range(3) if _x != _y)
| ^^
90 | # RUF052: Both `_x` and `_y` are accessed
|
help: Remove leading underscores
86 | # RUF052: Local dummy variable `_item` is accessed
87 |
88 | # Should detect with multiple generators
- pairs = ((_x, _y) for _x in range(3) for _y in range(3) if _x != _y)
89 + pairs = ((x, _y) for x in range(3) for _y in range(3) if x != _y)
90 | # RUF052: Both `_x` and `_y` are accessed
91 |
92 | # Should detect in nested generator
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_y` is accessed
--> RUF052_1.py:89:46
|
88 | # Should detect with multiple generators
89 | pairs = ((_x, _y) for _x in range(3) for _y in range(3) if _x != _y)
| ^^
90 | # RUF052: Both `_x` and `_y` are accessed
|
help: Remove leading underscores
86 | # RUF052: Local dummy variable `_item` is accessed
87 |
88 | # Should detect with multiple generators
- pairs = ((_x, _y) for _x in range(3) for _y in range(3) if _x != _y)
89 + pairs = ((_x, y) for _x in range(3) for y in range(3) if _x != y)
90 | # RUF052: Both `_x` and `_y` are accessed
91 |
92 | # Should detect in nested generator
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_inner` is accessed
--> RUF052_1.py:93:41
|
92 | # Should detect in nested generator
93 | nested_gen = (sum(_inner["foo"] for _inner in sublist) for _sublist in [my_list] for sublist in _sublist)
| ^^^^^^
94 | # RUF052: `_inner` and `_sublist` are accessed
|
help: Remove leading underscores
90 | # RUF052: Both `_x` and `_y` are accessed
91 |
92 | # Should detect in nested generator
- nested_gen = (sum(_inner["foo"] for _inner in sublist) for _sublist in [my_list] for sublist in _sublist)
93 + nested_gen = (sum(inner["foo"] for inner in sublist) for _sublist in [my_list] for sublist in _sublist)
94 | # RUF052: `_inner` and `_sublist` are accessed
95 |
96 | # Complex Examples with Multiple Comprehension Types
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_sublist` is accessed
--> RUF052_1.py:93:64
|
92 | # Should detect in nested generator
93 | nested_gen = (sum(_inner["foo"] for _inner in sublist) for _sublist in [my_list] for sublist in _sublist)
| ^^^^^^^^
94 | # RUF052: `_inner` and `_sublist` are accessed
|
help: Prefer using trailing underscores to avoid shadowing a variable
90 | # RUF052: Both `_x` and `_y` are accessed
91 |
92 | # Should detect in nested generator
- nested_gen = (sum(_inner["foo"] for _inner in sublist) for _sublist in [my_list] for sublist in _sublist)
93 + nested_gen = (sum(_inner["foo"] for _inner in sublist) for sublist_ in [my_list] for sublist in sublist_)
94 | # RUF052: `_inner` and `_sublist` are accessed
95 |
96 | # Complex Examples with Multiple Comprehension Types
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_val` is accessed
--> RUF052_1.py:102:30
|
100 | # Should detect in mixed comprehensions
101 | result = [
102 | {_key: [_val * 2 for _val in _record["items"]] for _key in ["doubled"]}
| ^^^^
103 | for _record in data
104 | ]
|
help: Remove leading underscores
99 |
100 | # Should detect in mixed comprehensions
101 | result = [
- {_key: [_val * 2 for _val in _record["items"]] for _key in ["doubled"]}
102 + {_key: [val * 2 for val in _record["items"]] for _key in ["doubled"]}
103 | for _record in data
104 | ]
105 | # RUF052: `_key`, `_val`, and `_record` are all accessed
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_key` is accessed
--> RUF052_1.py:102:60
|
100 | # Should detect in mixed comprehensions
101 | result = [
102 | {_key: [_val * 2 for _val in _record["items"]] for _key in ["doubled"]}
| ^^^^
103 | for _record in data
104 | ]
|
help: Remove leading underscores
99 |
100 | # Should detect in mixed comprehensions
101 | result = [
- {_key: [_val * 2 for _val in _record["items"]] for _key in ["doubled"]}
102 + {key: [_val * 2 for _val in _record["items"]] for key in ["doubled"]}
103 | for _record in data
104 | ]
105 | # RUF052: `_key`, `_val`, and `_record` are all accessed
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_record` is accessed
--> RUF052_1.py:103:13
|
101 | result = [
102 | {_key: [_val * 2 for _val in _record["items"]] for _key in ["doubled"]}
103 | for _record in data
| ^^^^^^^
104 | ]
105 | # RUF052: `_key`, `_val`, and `_record` are all accessed
|
help: Remove leading underscores
99 |
100 | # Should detect in mixed comprehensions
101 | result = [
- {_key: [_val * 2 for _val in _record["items"]] for _key in ["doubled"]}
- for _record in data
102 + {_key: [_val * 2 for _val in record["items"]] for _key in ["doubled"]}
103 + for record in data
104 | ]
105 | # RUF052: `_key`, `_val`, and `_record` are all accessed
106 |
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_item` is accessed
--> RUF052_1.py:108:43
|
107 | # Should detect in generator passed to list constructor
108 | gen_list = list(_item["items"][0] for _item in data)
| ^^^^^
109 | # RUF052: Local dummy variable `_item` is accessed
|
help: Remove leading underscores
105 | # RUF052: `_key`, `_val`, and `_record` are all accessed
106 |
107 | # Should detect in generator passed to list constructor
- gen_list = list(_item["items"][0] for _item in data)
108 + gen_list = list(item["items"][0] for item in data)
109 | # RUF052: Local dummy variable `_item` is accessed
note: This is an unsafe fix and may change runtime behavior
@@ -2,7 +2,7 @@
source: crates/ruff_linter/src/rules/ruff/mod.rs
---
RUF052 [*] Local dummy variable `_var` is accessed
--> RUF052_0.py:92:9
--> RUF052.py:92:9
|
90 | class Class_:
91 | def fun(self):
@@ -24,7 +24,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_list` is accessed
--> RUF052_0.py:99:5
--> RUF052.py:99:5
|
98 | def fun():
99 | _list = "built-in" # [RUF052]
@@ -45,7 +45,7 @@ help: Prefer using trailing underscores to avoid shadowing a built-in
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_x` is accessed
--> RUF052_0.py:106:5
--> RUF052.py:106:5
|
104 | def fun():
105 | global x
@@ -67,7 +67,7 @@ help: Prefer using trailing underscores to avoid shadowing a variable
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_x` is accessed
--> RUF052_0.py:113:5
--> RUF052.py:113:5
|
111 | def bar():
112 | nonlocal x
@@ -90,7 +90,7 @@ help: Prefer using trailing underscores to avoid shadowing a variable
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_x` is accessed
--> RUF052_0.py:120:5
--> RUF052.py:120:5
|
118 | def fun():
119 | x = "local"
@@ -112,7 +112,7 @@ help: Prefer using trailing underscores to avoid shadowing a variable
note: This is an unsafe fix and may change runtime behavior

RUF052 Local dummy variable `_GLOBAL_1` is accessed
--> RUF052_0.py:128:5
--> RUF052.py:128:5
|
127 | def unfixables():
128 | _GLOBAL_1 = "foo"
@@ -123,7 +123,7 @@ RUF052 Local dummy variable `_GLOBAL_1` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_local` is accessed
--> RUF052_0.py:136:5
--> RUF052.py:136:5
|
135 | # unfixable because the rename would shadow a local variable
136 | _local = "local3" # [RUF052]
@@ -133,7 +133,7 @@ RUF052 Local dummy variable `_local` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_GLOBAL_1` is accessed
--> RUF052_0.py:140:9
--> RUF052.py:140:9
|
139 | def nested():
140 | _GLOBAL_1 = "foo"
@@ -144,7 +144,7 @@ RUF052 Local dummy variable `_GLOBAL_1` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_local` is accessed
--> RUF052_0.py:145:9
--> RUF052.py:145:9
|
144 | # unfixable because the rename would shadow a variable from the outer function
145 | _local = "local4"
@@ -154,7 +154,7 @@ RUF052 Local dummy variable `_local` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 [*] Local dummy variable `_P` is accessed
--> RUF052_0.py:153:5
--> RUF052.py:153:5
|
151 | from collections import namedtuple
152 |
@@ -184,7 +184,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_T` is accessed
--> RUF052_0.py:154:5
--> RUF052.py:154:5
|
153 | _P = ParamSpec("_P")
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
@@ -213,7 +213,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_NT` is accessed
--> RUF052_0.py:155:5
--> RUF052.py:155:5
|
153 | _P = ParamSpec("_P")
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
@@ -242,7 +242,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_E` is accessed
--> RUF052_0.py:156:5
--> RUF052.py:156:5
|
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
155 | _NT = NamedTuple("_NT", [("foo", int)])
@@ -270,7 +270,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_NT2` is accessed
--> RUF052_0.py:157:5
--> RUF052.py:157:5
|
155 | _NT = NamedTuple("_NT", [("foo", int)])
156 | _E = Enum("_E", ["a", "b", "c"])
@@ -297,7 +297,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_NT3` is accessed
--> RUF052_0.py:158:5
--> RUF052.py:158:5
|
156 | _E = Enum("_E", ["a", "b", "c"])
157 | _NT2 = namedtuple("_NT2", ['x', 'y', 'z'])
@@ -323,7 +323,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_DynamicClass` is accessed
--> RUF052_0.py:159:5
--> RUF052.py:159:5
|
157 | _NT2 = namedtuple("_NT2", ['x', 'y', 'z'])
158 | _NT3 = namedtuple(typename="_NT3", field_names=['x', 'y', 'z'])
@@ -347,7 +347,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_NotADynamicClass` is accessed
--> RUF052_0.py:160:5
--> RUF052.py:160:5
|
158 | _NT3 = namedtuple(typename="_NT3", field_names=['x', 'y', 'z'])
159 | _DynamicClass = type("_DynamicClass", (), {})
@@ -371,7 +371,7 @@ help: Remove leading underscores
note: This is an unsafe fix and may change runtime behavior

RUF052 [*] Local dummy variable `_dummy_var` is accessed
--> RUF052_0.py:182:5
--> RUF052.py:182:5
|
181 | def foo():
182 | _dummy_var = 42
@@ -396,7 +396,7 @@ help: Prefer using trailing underscores to avoid shadowing a variable
note: This is an unsafe fix and may change runtime behavior

RUF052 Local dummy variable `_dummy_var` is accessed
--> RUF052_0.py:192:5
--> RUF052.py:192:5
|
190 | # Unfixable because both possible candidates for the new name are shadowed
191 | # in the scope of one of the references to the variable
@@ -2,7 +2,7 @@
source: crates/ruff_linter/src/rules/ruff/mod.rs
---
RUF052 Local dummy variable `_var` is accessed
--> RUF052_0.py:92:9
--> RUF052.py:92:9
|
90 | class Class_:
91 | def fun(self):
@@ -13,7 +13,7 @@ RUF052 Local dummy variable `_var` is accessed
help: Remove leading underscores

RUF052 Local dummy variable `_list` is accessed
--> RUF052_0.py:99:5
--> RUF052.py:99:5
|
98 | def fun():
99 | _list = "built-in" # [RUF052]
@@ -23,7 +23,7 @@ RUF052 Local dummy variable `_list` is accessed
help: Prefer using trailing underscores to avoid shadowing a built-in

RUF052 Local dummy variable `_x` is accessed
--> RUF052_0.py:106:5
--> RUF052.py:106:5
|
104 | def fun():
105 | global x
@@ -34,7 +34,7 @@ RUF052 Local dummy variable `_x` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `x` is accessed
--> RUF052_0.py:110:3
--> RUF052.py:110:3
|
109 | def foo():
110 | x = "outer"
@@ -44,7 +44,7 @@ RUF052 Local dummy variable `x` is accessed
|

RUF052 Local dummy variable `_x` is accessed
--> RUF052_0.py:113:5
--> RUF052.py:113:5
|
111 | def bar():
112 | nonlocal x
@@ -56,7 +56,7 @@ RUF052 Local dummy variable `_x` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_x` is accessed
--> RUF052_0.py:120:5
--> RUF052.py:120:5
|
118 | def fun():
119 | x = "local"
@@ -67,7 +67,7 @@ RUF052 Local dummy variable `_x` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_GLOBAL_1` is accessed
--> RUF052_0.py:128:5
--> RUF052.py:128:5
|
127 | def unfixables():
128 | _GLOBAL_1 = "foo"
@@ -78,7 +78,7 @@ RUF052 Local dummy variable `_GLOBAL_1` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_local` is accessed
--> RUF052_0.py:136:5
--> RUF052.py:136:5
|
135 | # unfixable because the rename would shadow a local variable
136 | _local = "local3" # [RUF052]
@@ -88,7 +88,7 @@ RUF052 Local dummy variable `_local` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_GLOBAL_1` is accessed
--> RUF052_0.py:140:9
--> RUF052.py:140:9
|
139 | def nested():
140 | _GLOBAL_1 = "foo"
@@ -99,7 +99,7 @@ RUF052 Local dummy variable `_GLOBAL_1` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_local` is accessed
--> RUF052_0.py:145:9
--> RUF052.py:145:9
|
144 | # unfixable because the rename would shadow a variable from the outer function
145 | _local = "local4"
@@ -109,7 +109,7 @@ RUF052 Local dummy variable `_local` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_P` is accessed
--> RUF052_0.py:153:5
--> RUF052.py:153:5
|
151 | from collections import namedtuple
152 |
@@ -121,7 +121,7 @@ RUF052 Local dummy variable `_P` is accessed
help: Remove leading underscores

RUF052 Local dummy variable `_T` is accessed
--> RUF052_0.py:154:5
--> RUF052.py:154:5
|
153 | _P = ParamSpec("_P")
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
@@ -132,7 +132,7 @@ RUF052 Local dummy variable `_T` is accessed
help: Remove leading underscores

RUF052 Local dummy variable `_NT` is accessed
--> RUF052_0.py:155:5
--> RUF052.py:155:5
|
153 | _P = ParamSpec("_P")
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
@@ -144,7 +144,7 @@ RUF052 Local dummy variable `_NT` is accessed
help: Remove leading underscores

RUF052 Local dummy variable `_E` is accessed
--> RUF052_0.py:156:5
--> RUF052.py:156:5
|
154 | _T = TypeVar(name="_T", covariant=True, bound=int|str)
155 | _NT = NamedTuple("_NT", [("foo", int)])
@@ -156,7 +156,7 @@ RUF052 Local dummy variable `_E` is accessed
help: Remove leading underscores

RUF052 Local dummy variable `_NT2` is accessed
--> RUF052_0.py:157:5
--> RUF052.py:157:5
|
155 | _NT = NamedTuple("_NT", [("foo", int)])
156 | _E = Enum("_E", ["a", "b", "c"])
@@ -168,7 +168,7 @@ RUF052 Local dummy variable `_NT2` is accessed
help: Remove leading underscores

RUF052 Local dummy variable `_NT3` is accessed
--> RUF052_0.py:158:5
--> RUF052.py:158:5
|
156 | _E = Enum("_E", ["a", "b", "c"])
157 | _NT2 = namedtuple("_NT2", ['x', 'y', 'z'])
@@ -180,7 +180,7 @@ RUF052 Local dummy variable `_NT3` is accessed
help: Remove leading underscores

RUF052 Local dummy variable `_DynamicClass` is accessed
--> RUF052_0.py:159:5
--> RUF052.py:159:5
|
157 | _NT2 = namedtuple("_NT2", ['x', 'y', 'z'])
158 | _NT3 = namedtuple(typename="_NT3", field_names=['x', 'y', 'z'])
@@ -191,7 +191,7 @@ RUF052 Local dummy variable `_DynamicClass` is accessed
help: Remove leading underscores

RUF052 Local dummy variable `_NotADynamicClass` is accessed
--> RUF052_0.py:160:5
--> RUF052.py:160:5
|
158 | _NT3 = namedtuple(typename="_NT3", field_names=['x', 'y', 'z'])
159 | _DynamicClass = type("_DynamicClass", (), {})
@@ -202,18 +202,8 @@ RUF052 Local dummy variable `_NotADynamicClass` is accessed
|
help: Remove leading underscores

RUF052 Local dummy variable `other` is accessed
--> RUF052_0.py:177:13
|
175 | return
176 | _seen.add(self)
177 | for other in self.connected:
| ^^^^^
178 | other.recurse(_seen=_seen)
|

RUF052 Local dummy variable `_dummy_var` is accessed
--> RUF052_0.py:182:5
--> RUF052.py:182:5
|
181 | def foo():
182 | _dummy_var = 42
@@ -224,7 +214,7 @@ RUF052 Local dummy variable `_dummy_var` is accessed
help: Prefer using trailing underscores to avoid shadowing a variable

RUF052 Local dummy variable `_dummy_var` is accessed
--> RUF052_0.py:192:5
--> RUF052.py:192:5
|
190 | # Unfixable because both possible candidates for the new name are shadowed
191 | # in the scope of one of the references to the variable
Some files were not shown because too many files have changed in this diff.