Compare commits: brent/cach...david/enum (102 commits)

Commits (SHA1):
e935bc5578, 81867ea7ce, a54061e757, 19569bf838, e0f4f25d28, c6a123290d, d4f64cd474, e4f64480da, 4016aff057, 24134837f3,
130d4e1135, e63dfa3d18, 6d0f3ef3a5, 201b079084, 2680f2ed81, afdfa042f3, 8c0743df97, 13634ff433, 7a541f597f, 2deb50f4e3,
85e22645aa, d3f6de8b0e, 9eb8174209, 9c68616d91, 9280c7e945, e19145040f, ef3a195f28, 008bbfdf5a, df5eba7583, 469c50b0b7,
738246627f, e867830848, 72fdb7d439, fbf1dfc782, a0d8ff51dd, 165091a31c, 4a4dc38b5b, 3e366fdf13, 53e9e4421c, 859262bd49,
c0768dfd96, d4eb4277ad, b033fb6bfd, f722bfa9e6, b124e182ca, 57373a7e4d, ae9d450b5f, c8c80e054e, 4bc34b82ef, d9cab4d242,
d77b7312b0, f9091ea8bb, 1d2181623c, dc6be457b5, 1079975b35, 39eb0f6c6c, d13228ab85, 9461d3076f, 63d1d332b3, e0149cd9f3,
2a00eca66b, 3d17897c02, fa1df4cedc, 89258f1938, 1dcef1a011, ba629fe262, bb3a05f92b, 4daf59e5e7, 88bd82938f, 5a55bab3f3,
cc5885e564, 4573a0f6a0, 905b9d7f51, b605c3e232, c281891b5c, 53d795da67, 385d6fa608, ba070bb6d5, dc10ab81bd, 7673d46b71,
9d5ecacdc5, 9af8597608, 64e5780037, da8aa6a631, ee69d38000, fd335eb8b7, c82fa94e0a, 6d4687c9af, 9180cd094d, 9d98a66f65,
cb60ecef6b, 215a1c55d4, 5e29278aa2, af62d0368f, 30683e3a93, cbc8c08016, 897889d1ce, cb5a9ff8dc, fcdffe4ac9, 88de5727df,
b8dec79182, dc66019fbc
28  .github/workflows/ci.yaml  (vendored)
@@ -143,12 +143,12 @@ jobs:
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
':!**/*.md' \
':crates/ty_python_semantic/resources/mdtest/**/*.md' \
# NOTE: Do not exclude all Markdown files here, but rather use
# specific exclude patterns like 'docs/**'), because tests for
# 'ty' are written in Markdown.
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
':!docs/**' \
':!assets/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
@@ -429,7 +429,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"
@@ -451,7 +451,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test
id: download-cached-binary
@@ -652,7 +652,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Fuzz
env:
FORCE_COLOR: 1
@@ -682,7 +682,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
- uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear

@@ -722,7 +722,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
@@ -765,7 +765,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@@ -897,7 +897,7 @@ jobs:
persist-credentials: false

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

- name: "Install Rust toolchain"
run: rustup show
@@ -911,7 +911,7 @@ jobs:
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark

- name: "Run benchmarks"
uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
@@ -930,7 +930,7 @@ jobs:
persist-credentials: false

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

- name: "Install Rust toolchain"
run: rustup show
@@ -944,7 +944,7 @@ jobs:
run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark

- name: "Run benchmarks"
uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
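The first ci.yaml hunk above reworks the "did anything relevant change?" check: instead of excluding every Markdown file and then re-including the ty mdtest suite, the new pathspecs exclude only `docs/**` and `assets/**`, because ty's tests are themselves written in Markdown. A rough standalone sketch of the same idea (the `MERGE_BASE` computation and the echo targets are assumptions here, not part of the diff):

```bash
#!/usr/bin/env bash
# Hypothetical local version of the change-detection step shown above.
# MERGE_BASE would normally come from an earlier workflow step.
MERGE_BASE="$(git merge-base HEAD origin/main)"

# `git diff --quiet` exits 0 when nothing outside the excluded paths changed;
# the workflow records that result as changed=false in $GITHUB_OUTPUT.
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
    ':!docs/**' \
    ':!assets/**' \
    ; then
    echo "changed=false"
else
    echo "changed=true"
fi
```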
2  .github/workflows/daily_fuzz.yaml  (vendored)
@@ -34,7 +34,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
4  .github/workflows/mypy_primer.yaml  (vendored)
@@ -38,7 +38,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
@@ -81,7 +81,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
2  .github/workflows/publish-pypi.yml  (vendored)
@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*
6  .github/workflows/sync_typeshed.yaml  (vendored)
@@ -65,7 +65,7 @@ jobs:
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Sync typeshed stubs
run: |
rm -rf "ruff/${VENDORED_TYPESHED}"
@@ -117,7 +117,7 @@ jobs:
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git
run: |
git config --global user.name typeshedbot
@@ -155,7 +155,7 @@ jobs:
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git
run: |
git config --global user.name typeshedbot
4  .github/workflows/ty-ecosystem-analyzer.yaml  (vendored)
@@ -33,7 +33,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
@@ -64,7 +64,7 @@ jobs:

cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@f0eec0e549684d8e1d7b8bc3e351202124b63bda"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"

ecosystem-analyzer \
--repository ruff \
4  .github/workflows/ty-ecosystem-report.yaml  (vendored)
@@ -29,7 +29,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
@@ -49,7 +49,7 @@ jobs:

cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@f0eec0e549684d8e1d7b8bc3e351202124b63bda"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"

ecosystem-analyzer \
--verbose \
109  .github/workflows/typing_conformance.yaml  (vendored, new file)
@@ -0,0 +1,109 @@
|
||||
name: Run typing conformance
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "crates/ty*/**"
|
||||
- "crates/ruff_db"
|
||||
- "crates/ruff_python_ast"
|
||||
- "crates/ruff_python_parser"
|
||||
- ".github/workflows/typing_conformance.yaml"
|
||||
- ".github/workflows/typing_conformance_comment.yaml"
|
||||
- "Cargo.lock"
|
||||
- "!**.md"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
jobs:
|
||||
typing_conformance:
|
||||
name: Compute diagnostic diff
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
repository: python/typing
|
||||
ref: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
|
||||
path: typing
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
|
||||
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
|
||||
- name: Install Rust toolchain
|
||||
run: rustup show
|
||||
|
||||
- name: Compute diagnostic diff
|
||||
shell: bash
|
||||
run: |
|
||||
RUFF_DIR="$GITHUB_WORKSPACE/ruff"
|
||||
|
||||
# Build the executable for the old and new commit
|
||||
(
|
||||
cd ruff
|
||||
|
||||
echo "new commit"
|
||||
git checkout -b new_commit "${{ github.event.pull_request.head.sha }}"
|
||||
git rev-list --format=%s --max-count=1 new_commit
|
||||
cargo build --release --bin ty
|
||||
mv target/release/ty ty-new
|
||||
|
||||
echo "old commit (merge base)"
|
||||
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
|
||||
git checkout -b old_commit "$MERGE_BASE"
|
||||
git rev-list --format=%s --max-count=1 old_commit
|
||||
cargo build --release --bin ty
|
||||
mv target/release/ty ty-old
|
||||
)
|
||||
|
||||
(
|
||||
cd typing/conformance/tests
|
||||
|
||||
echo "Running ty on old commit (merge base)"
|
||||
"$RUFF_DIR/ty-old" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/old-output.txt" 2>&1 || true
|
||||
|
||||
echo "Running ty on new commit"
|
||||
"$RUFF_DIR/ty-new" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/new-output.txt" 2>&1 || true
|
||||
)
|
||||
|
||||
if ! diff -u old-output.txt new-output.txt > typing_conformance_diagnostics.diff; then
|
||||
echo "Differences found between base and PR"
|
||||
else
|
||||
echo "No differences found"
|
||||
touch typing_conformance_diagnostics.diff
|
||||
fi
|
||||
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- name: Upload diff
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: typing_conformance_diagnostics_diff
|
||||
path: typing_conformance_diagnostics.diff
|
||||
|
||||
- name: Upload pr-number
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
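The new typing_conformance workflow above builds `ty` from the PR head and from the merge base, runs both binaries over the python/typing conformance tests, and uploads a diff of the resulting diagnostics. A condensed local sketch of that core step follows; running `ty check` against the conformance directory from outside it is an assumption here, while the workflow itself `cd`s into `typing/conformance/tests` first:

```bash
#!/usr/bin/env bash
# Hypothetical local reproduction of the workflow's diagnostic-diff step.
# Assumes `ty-old` and `ty-new` were already built from the merge base and the
# PR head, and that ./typing is a checkout of python/typing.

./ty-old check --color=never --output-format=concise typing/conformance/tests > old-output.txt 2>&1 || true
./ty-new check --color=never --output-format=concise typing/conformance/tests > new-output.txt 2>&1 || true

# An empty diff means the PR does not change ty's diagnostics on the conformance suite.
diff -u old-output.txt new-output.txt > typing_conformance_diagnostics.diff || true
```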
97  .github/workflows/typing_conformance_comment.yaml  (vendored, new file)
@@ -0,0 +1,97 @@
|
||||
name: PR comment (typing_conformance)
|
||||
|
||||
on: # zizmor: ignore[dangerous-triggers]
|
||||
workflow_run:
|
||||
workflows: [Run typing conformance]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
workflow_run_id:
|
||||
description: The typing_conformance workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: Download PR number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: "Download typing_conformance results"
|
||||
id: download-typing_conformance_diff
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
with:
|
||||
name: typing_conformance_diagnostics_diff
|
||||
workflow: typing_conformance.yaml
|
||||
pr: ${{ steps.pr-number.outputs.pr-number }}
|
||||
path: pr/typing_conformance_diagnostics_diff
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
|
||||
run: |
|
||||
# Guard against malicious typing_conformance results that symlink to a secret
|
||||
# file on this runner
|
||||
if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
|
||||
then
|
||||
echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Note this identifier is used to find the comment to update on
|
||||
# subsequent runs
|
||||
echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt
|
||||
|
||||
echo '## Diagnostic diff on typing conformance tests' >> comment.txt
|
||||
if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
|
||||
echo '<details>' >> comment.txt
|
||||
echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
|
||||
echo '' >> comment.txt
|
||||
echo '```diff' >> comment.txt
|
||||
cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
|
||||
echo '```' >> comment.txt
|
||||
echo '</details>' >> comment.txt
|
||||
else
|
||||
echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
|
||||
fi
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.txt >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
body-path: comment.txt
|
||||
edit-mode: replace
|
||||
@@ -81,10 +81,10 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.4
rev: v0.12.5
hooks:
- id: ruff-format
- id: ruff
- id: ruff-check
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi]
require_serial: true
18  CHANGELOG.md
@@ -1,5 +1,23 @@
# Changelog

## 0.12.5

### Preview features

- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))

### Bug fixes

- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))

### Rule changes

- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))

## 0.12.4

### Preview features
84  Cargo.lock  (generated)
@@ -261,6 +261,18 @@ version = "2.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
|
||||
|
||||
[[package]]
|
||||
name = "bitvec"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
|
||||
dependencies = [
|
||||
"funty",
|
||||
"radium",
|
||||
"tap",
|
||||
"wyz",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "block-buffer"
|
||||
version = "0.10.4"
|
||||
@@ -518,7 +530,7 @@ dependencies = [
|
||||
"ciborium",
|
||||
"clap",
|
||||
"codspeed",
|
||||
"criterion-plot",
|
||||
"criterion-plot 0.5.0",
|
||||
"is-terminal",
|
||||
"itertools 0.10.5",
|
||||
"num-traits",
|
||||
@@ -701,15 +713,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "criterion"
|
||||
version = "0.6.0"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679"
|
||||
checksum = "e1c047a62b0cc3e145fa84415a3191f628e980b194c2755aa12300a4e6cbd928"
|
||||
dependencies = [
|
||||
"anes",
|
||||
"cast",
|
||||
"ciborium",
|
||||
"clap",
|
||||
"criterion-plot",
|
||||
"criterion-plot 0.6.0",
|
||||
"itertools 0.13.0",
|
||||
"num-traits",
|
||||
"oorandom",
|
||||
@@ -730,6 +742,16 @@ dependencies = [
|
||||
"itertools 0.10.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "criterion-plot"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b1bcc0dc7dfae599d84ad0b1a55f80cde8af3725da8313b528da95ef783e338"
|
||||
dependencies = [
|
||||
"cast",
|
||||
"itertools 0.13.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam"
|
||||
version = "0.8.4"
|
||||
@@ -1121,6 +1143,12 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "funty"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
|
||||
|
||||
[[package]]
|
||||
name = "generic-array"
|
||||
version = "0.14.7"
|
||||
@@ -1133,9 +1161,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "get-size-derive2"
|
||||
version = "0.5.2"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "028f3cfad7c3e3b1d8d04ef0a1c03576f2d62800803fe1301a4cd262849f2dea"
|
||||
checksum = "ca171f9f8ed2f416ac044de2dc4acde3e356662a14ac990345639653bdc7fc28"
|
||||
dependencies = [
|
||||
"attribute-derive",
|
||||
"quote",
|
||||
@@ -1144,9 +1172,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "get-size2"
|
||||
version = "0.5.2"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3a09c2043819a3def7bfbb4927e7df96aab0da4cfd8824484b22d0c94e84458e"
|
||||
checksum = "965bc5c1c5fe05c5bbd398bb9b3f0f14d750261ebdd1af959f2c8a603fedb5ad"
|
||||
dependencies = [
|
||||
"compact_str",
|
||||
"get-size-derive2",
|
||||
@@ -2548,6 +2576,12 @@ version = "5.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
|
||||
|
||||
[[package]]
|
||||
name = "radium"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.8.5"
|
||||
@@ -2710,7 +2744,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.12.4"
|
||||
version = "0.12.5"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2827,7 +2861,6 @@ dependencies = [
|
||||
"anstyle",
|
||||
"arc-swap",
|
||||
"camino",
|
||||
"countme",
|
||||
"dashmap",
|
||||
"dunce",
|
||||
"etcetera",
|
||||
@@ -2858,6 +2891,7 @@ dependencies = [
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"ty_static",
|
||||
"unicode-width 0.2.1",
|
||||
"web-time",
|
||||
"zip",
|
||||
]
|
||||
@@ -2937,6 +2971,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"memchr",
|
||||
"ruff_cache",
|
||||
"ruff_db",
|
||||
"ruff_linter",
|
||||
@@ -2962,7 +2997,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.12.4"
|
||||
version = "0.12.5"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"anyhow",
|
||||
@@ -3294,7 +3329,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.12.4"
|
||||
version = "0.12.5"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -3768,6 +3803,12 @@ dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tap"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.20.0"
|
||||
@@ -4182,6 +4223,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"insta",
|
||||
"itertools 0.14.0",
|
||||
"regex",
|
||||
"ruff_db",
|
||||
"ruff_python_ast",
|
||||
@@ -4190,11 +4232,10 @@ dependencies = [
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash",
|
||||
"salsa",
|
||||
"smallvec",
|
||||
"tracing",
|
||||
"ty_project",
|
||||
"ty_python_semantic",
|
||||
"ty_vendored",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4228,7 +4269,6 @@ dependencies = [
|
||||
"thiserror 2.0.12",
|
||||
"toml 0.9.2",
|
||||
"tracing",
|
||||
"ty_ide",
|
||||
"ty_python_semantic",
|
||||
"ty_vendored",
|
||||
]
|
||||
@@ -4239,6 +4279,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bitflags 2.9.1",
|
||||
"bitvec",
|
||||
"camino",
|
||||
"colored 3.0.0",
|
||||
"compact_str",
|
||||
@@ -4290,10 +4331,13 @@ dependencies = [
|
||||
"anyhow",
|
||||
"bitflags 2.9.1",
|
||||
"crossbeam",
|
||||
"dunce",
|
||||
"insta",
|
||||
"jod-thread",
|
||||
"libc",
|
||||
"lsp-server",
|
||||
"lsp-types",
|
||||
"regex",
|
||||
"ruff_db",
|
||||
"ruff_notebook",
|
||||
"ruff_python_ast",
|
||||
@@ -4304,6 +4348,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"tempfile",
|
||||
"thiserror 2.0.12",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
@@ -5092,6 +5137,15 @@ version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
|
||||
|
||||
[[package]]
|
||||
name = "wyz"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
|
||||
dependencies = [
|
||||
"tap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yansi"
|
||||
version = "1.0.1"
|
||||
|
||||
@@ -57,6 +57,9 @@ assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
"alloc",
] }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
@@ -70,7 +73,7 @@ console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.9.0"
criterion = { version = "0.6.0", default-features = false }
criterion = { version = "0.7.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
@@ -80,7 +83,7 @@ etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.5.0", features = [
get-size2 = { version = "0.6.0", features = [
"derive",
"smallvec",
"hashbrown",
@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.4/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.4/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.5/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.4
rev: v0.12.5
hooks:
# Run the linter.
- id: ruff-check
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.4"
version = "0.12.5"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -169,6 +169,9 @@ pub struct AnalyzeGraphCommand {
|
||||
/// Attempt to detect imports from string literals.
|
||||
#[clap(long)]
|
||||
detect_string_imports: bool,
|
||||
/// The minimum number of dots in a string import to consider it a valid import.
|
||||
#[clap(long)]
|
||||
min_dots: Option<usize>,
|
||||
/// Enable preview mode. Use `--no-preview` to disable.
|
||||
#[arg(long, overrides_with("no_preview"))]
|
||||
preview: bool,
|
||||
@@ -808,6 +811,7 @@ impl AnalyzeGraphCommand {
|
||||
} else {
|
||||
None
|
||||
},
|
||||
string_imports_min_dots: self.min_dots,
|
||||
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
|
||||
target_version: self.target_version.map(ast::PythonVersion::from),
|
||||
..ExplicitConfigOverrides::default()
|
||||
@@ -1305,6 +1309,7 @@ struct ExplicitConfigOverrides {
|
||||
show_fixes: Option<bool>,
|
||||
extension: Option<Vec<ExtensionPair>>,
|
||||
detect_string_imports: Option<bool>,
|
||||
string_imports_min_dots: Option<usize>,
|
||||
}
|
||||
|
||||
impl ConfigurationTransformer for ExplicitConfigOverrides {
|
||||
@@ -1392,6 +1397,9 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
|
||||
if let Some(detect_string_imports) = &self.detect_string_imports {
|
||||
config.analyze.detect_string_imports = Some(*detect_string_imports);
|
||||
}
|
||||
if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
|
||||
config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
|
||||
}
|
||||
|
||||
config
|
||||
}
|
||||
|
||||
@@ -454,7 +454,7 @@ impl LintCacheData {
|
||||
CacheMessage {
|
||||
rule,
|
||||
body: msg.body().to_string(),
|
||||
suggestion: msg.suggestion().map(ToString::to_string),
|
||||
suggestion: msg.first_help_text().map(ToString::to_string),
|
||||
range: msg.expect_range(),
|
||||
parent: msg.parent(),
|
||||
fix: msg.fix().cloned(),
|
||||
|
||||
@@ -102,7 +102,7 @@ pub(crate) fn analyze_graph(
|
||||
|
||||
// Resolve the per-file settings.
|
||||
let settings = resolver.resolve(path);
|
||||
let string_imports = settings.analyze.detect_string_imports;
|
||||
let string_imports = settings.analyze.string_imports;
|
||||
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();
|
||||
|
||||
// Skip excluded files.
|
||||
|
||||
@@ -279,6 +279,7 @@ mod test {
|
||||
|
||||
TextEmitter::default()
|
||||
.with_show_fix_status(true)
|
||||
.with_color(false)
|
||||
.emit(
|
||||
&mut output,
|
||||
&diagnostics.inner,
|
||||
|
||||
@@ -264,6 +264,7 @@ impl Printer {
|
||||
.with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
|
||||
.with_show_source(self.format == OutputFormat::Full)
|
||||
.with_unsafe_fixes(self.unsafe_fixes)
|
||||
.with_preview(preview)
|
||||
.emit(writer, &diagnostics.inner, &context)?;
|
||||
|
||||
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
|
||||
|
||||
@@ -57,33 +57,40 @@ fn dependencies() -> Result<()> {
|
||||
.write_str(indoc::indoc! {r#"
|
||||
def f(): pass
|
||||
"#})?;
|
||||
root.child("ruff")
|
||||
.child("e.pyi")
|
||||
.write_str(indoc::indoc! {r#"
|
||||
def f() -> None: ...
|
||||
"#})?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/c.py": [
|
||||
"ruff/d.py"
|
||||
],
|
||||
"ruff/d.py": [
|
||||
"ruff/e.py"
|
||||
],
|
||||
"ruff/e.py": []
|
||||
}
|
||||
assert_cmd_snapshot!(command().current_dir(&root), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/c.py": [
|
||||
"ruff/d.py"
|
||||
],
|
||||
"ruff/d.py": [
|
||||
"ruff/e.py",
|
||||
"ruff/e.pyi"
|
||||
],
|
||||
"ruff/e.py": [],
|
||||
"ruff/e.pyi": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
----- stderr -----
|
||||
"#);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -197,23 +204,43 @@ fn string_detection() -> Result<()> {
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
----- stderr -----
|
||||
"#);
|
||||
});
|
||||
|
||||
insta::with_settings!({
|
||||
filters => INSTA_FILTERS.to_vec(),
|
||||
}, {
|
||||
assert_cmd_snapshot!(command().arg("--detect-string-imports").arg("--min-dots").arg("1").current_dir(&root), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"ruff/__init__.py": [],
|
||||
"ruff/a.py": [
|
||||
"ruff/b.py"
|
||||
],
|
||||
"ruff/b.py": [
|
||||
"ruff/c.py"
|
||||
],
|
||||
"ruff/c.py": []
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -2422,7 +2422,7 @@ requires-python = ">= 3.11"
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.11
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -2734,7 +2734,7 @@ requires-python = ">= 3.11"
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.10
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -3098,7 +3098,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.11
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -3478,7 +3478,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.11
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -3806,7 +3806,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.10
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -4134,7 +4134,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.9
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -4419,7 +4419,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.9
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -4757,7 +4757,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.10
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
|
||||
@@ -392,7 +392,7 @@ formatter.docstring_code_line_width = dynamic
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.7
|
||||
analyze.detect_string_imports = false
|
||||
analyze.string_imports = disabled
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
|
||||
@@ -25,7 +25,6 @@ ty_static = { workspace = true }
|
||||
anstyle = { workspace = true }
|
||||
arc-swap = { workspace = true }
|
||||
camino = { workspace = true }
|
||||
countme = { workspace = true }
|
||||
dashmap = { workspace = true }
|
||||
dunce = { workspace = true }
|
||||
filetime = { workspace = true }
|
||||
@@ -43,6 +42,7 @@ serde_json = { workspace = true, optional = true }
|
||||
thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true, optional = true }
|
||||
unicode-width = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
|
||||
[target.'cfg(target_arch="wasm32")'.dependencies]
|
||||
@@ -59,6 +59,11 @@ tempfile = { workspace = true }
|
||||
cache = ["ruff_cache"]
|
||||
junit = ["dep:quick-junit"]
|
||||
os = ["ignore", "dep:etcetera"]
|
||||
serde = ["camino/serde1", "dep:serde", "dep:serde_json", "ruff_diagnostics/serde"]
|
||||
serde = [
|
||||
"camino/serde1",
|
||||
"dep:serde",
|
||||
"dep:serde_json",
|
||||
"ruff_diagnostics/serde",
|
||||
]
|
||||
# Exposes testing utilities.
|
||||
testing = ["tracing-subscriber"]
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
use std::{fmt::Formatter, path::Path, sync::Arc};
|
||||
|
||||
use ruff_diagnostics::Fix;
|
||||
use ruff_diagnostics::{Applicability, Fix};
|
||||
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
|
||||
|
||||
use ruff_annotate_snippets::Level as AnnotateLevel;
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
pub use self::render::{DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input};
|
||||
pub use self::render::{
|
||||
DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary,
|
||||
};
|
||||
use crate::{Db, files::File};
|
||||
|
||||
mod render;
|
||||
@@ -122,7 +124,14 @@ impl Diagnostic {
|
||||
/// directly. If callers want or need to avoid cloning the diagnostic
|
||||
/// message, then they can also pass a `DiagnosticMessage` directly.
|
||||
pub fn info<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
|
||||
self.sub(SubDiagnostic::new(Severity::Info, message));
|
||||
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Info, message));
|
||||
}
|
||||
|
||||
/// Adds a "help" sub-diagnostic with the given message.
|
||||
///
|
||||
/// See the closely related [`Diagnostic::info`] method for more details.
|
||||
pub fn help<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
|
||||
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Help, message));
|
||||
}
|
||||
|
||||
/// Adds a "sub" diagnostic to this diagnostic.
|
||||
@@ -377,9 +386,15 @@ impl Diagnostic {
|
||||
self.primary_message()
|
||||
}
|
||||
|
||||
/// Returns the fix suggestion for the violation.
|
||||
pub fn suggestion(&self) -> Option<&str> {
|
||||
self.primary_annotation()?.get_message()
|
||||
/// Returns the message of the first sub-diagnostic with a `Help` severity.
|
||||
///
|
||||
/// Note that this is used as the fix title/suggestion for some of Ruff's output formats, but in
|
||||
/// general this is not the guaranteed meaning of such a message.
|
||||
pub fn first_help_text(&self) -> Option<&str> {
|
||||
self.sub_diagnostics()
|
||||
.iter()
|
||||
.find(|sub| matches!(sub.inner.severity, SubDiagnosticSeverity::Help))
|
||||
.map(|sub| sub.inner.message.as_str())
|
||||
}
|
||||
|
||||
/// Returns the URL for the rule documentation, if it exists.
|
||||
@@ -565,7 +580,10 @@ impl SubDiagnostic {
|
||||
/// Callers can pass anything that implements `std::fmt::Display`
|
||||
/// directly. If callers want or need to avoid cloning the diagnostic
|
||||
/// message, then they can also pass a `DiagnosticMessage` directly.
|
||||
pub fn new<'a>(severity: Severity, message: impl IntoDiagnosticMessage + 'a) -> SubDiagnostic {
|
||||
pub fn new<'a>(
|
||||
severity: SubDiagnosticSeverity,
|
||||
message: impl IntoDiagnosticMessage + 'a,
|
||||
) -> SubDiagnostic {
|
||||
let inner = Box::new(SubDiagnosticInner {
|
||||
severity,
|
||||
message: message.into_diagnostic_message(),
|
||||
@@ -643,7 +661,7 @@ impl SubDiagnostic {
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
|
||||
struct SubDiagnosticInner {
|
||||
severity: Severity,
|
||||
severity: SubDiagnosticSeverity,
|
||||
message: DiagnosticMessage,
|
||||
annotations: Vec<Annotation>,
|
||||
}
|
||||
@@ -1170,6 +1188,32 @@ impl Severity {
|
||||
}
|
||||
}
|
||||
|
||||
/// Like [`Severity`] but exclusively for sub-diagnostics.
|
||||
///
|
||||
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
|
||||
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
|
||||
/// deleted and the two combined again.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
|
||||
pub enum SubDiagnosticSeverity {
|
||||
Help,
|
||||
Info,
|
||||
Warning,
|
||||
Error,
|
||||
Fatal,
|
||||
}
|
||||
|
||||
impl SubDiagnosticSeverity {
|
||||
fn to_annotate(self) -> AnnotateLevel {
|
||||
match self {
|
||||
SubDiagnosticSeverity::Help => AnnotateLevel::Help,
|
||||
SubDiagnosticSeverity::Info => AnnotateLevel::Info,
|
||||
SubDiagnosticSeverity::Warning => AnnotateLevel::Warning,
|
||||
SubDiagnosticSeverity::Error => AnnotateLevel::Error,
|
||||
SubDiagnosticSeverity::Fatal => AnnotateLevel::Error,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Configuration for rendering diagnostics.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct DisplayDiagnosticConfig {
|
||||
@@ -1196,6 +1240,15 @@ pub struct DisplayDiagnosticConfig {
|
||||
reason = "This is currently only used for JSON but will be needed soon for other formats"
|
||||
)]
|
||||
preview: bool,
|
||||
/// Whether to hide the real `Severity` of diagnostics.
|
||||
///
|
||||
/// This is intended for temporary use by Ruff, which only has a single `error` severity at the
|
||||
/// moment. We should be able to remove this option when Ruff gets more severities.
|
||||
hide_severity: bool,
|
||||
/// Whether to show the availability of a fix in a diagnostic.
|
||||
show_fix_status: bool,
|
||||
/// The lowest applicability that should be shown when reporting diagnostics.
|
||||
fix_applicability: Applicability,
|
||||
}
|
||||
|
||||
impl DisplayDiagnosticConfig {
|
||||
@@ -1224,6 +1277,35 @@ impl DisplayDiagnosticConfig {
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether to hide a diagnostic's severity or not.
|
||||
pub fn hide_severity(self, yes: bool) -> DisplayDiagnosticConfig {
|
||||
DisplayDiagnosticConfig {
|
||||
hide_severity: yes,
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether to show a fix's availability or not.
|
||||
pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
|
||||
DisplayDiagnosticConfig {
|
||||
show_fix_status: yes,
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the lowest fix applicability that should be shown.
|
||||
///
|
||||
/// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
|
||||
/// availability for unsafe or display-only fixes.
|
||||
///
|
||||
/// Note that this option is currently ignored when `hide_severity` is false.
|
||||
pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
|
||||
DisplayDiagnosticConfig {
|
||||
fix_applicability: applicability,
|
||||
..self
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DisplayDiagnosticConfig {
|
||||
@@ -1233,6 +1315,9 @@ impl Default for DisplayDiagnosticConfig {
|
||||
color: false,
|
||||
context: 2,
|
||||
preview: false,
|
||||
hide_severity: false,
|
||||
show_fix_status: false,
|
||||
fix_applicability: Applicability::Safe,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::Path;
|
||||
|
||||
@@ -7,9 +8,9 @@ use ruff_annotate_snippets::{
|
||||
};
|
||||
use ruff_notebook::{Notebook, NotebookIndex};
|
||||
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::diagnostic::stylesheet::{DiagnosticStylesheet, fmt_styled};
|
||||
use crate::diagnostic::stylesheet::DiagnosticStylesheet;
|
||||
use crate::{
|
||||
Db,
|
||||
files::File,
|
||||
@@ -18,14 +19,17 @@ use crate::{
|
||||
};
|
||||
|
||||
use super::{
|
||||
Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig, Severity,
|
||||
Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig,
|
||||
SubDiagnostic, UnifiedFile,
|
||||
};
|
||||
|
||||
use azure::AzureRenderer;
|
||||
use concise::ConciseRenderer;
|
||||
use pylint::PylintRenderer;
|
||||
|
||||
mod azure;
|
||||
mod concise;
|
||||
mod full;
|
||||
#[cfg(feature = "serde")]
|
||||
mod json;
|
||||
#[cfg(feature = "serde")]
|
||||
@@ -104,48 +108,7 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self.config.format {
|
||||
DiagnosticFormat::Concise => {
|
||||
let stylesheet = if self.config.color {
|
||||
DiagnosticStylesheet::styled()
|
||||
} else {
|
||||
DiagnosticStylesheet::plain()
|
||||
};
|
||||
|
||||
for diag in self.diagnostics {
|
||||
let (severity, severity_style) = match diag.severity() {
|
||||
Severity::Info => ("info", stylesheet.info),
|
||||
Severity::Warning => ("warning", stylesheet.warning),
|
||||
Severity::Error => ("error", stylesheet.error),
|
||||
Severity::Fatal => ("fatal", stylesheet.error),
|
||||
};
|
||||
write!(
|
||||
f,
|
||||
"{severity}[{id}]",
|
||||
severity = fmt_styled(severity, severity_style),
|
||||
id = fmt_styled(diag.id(), stylesheet.emphasis)
|
||||
)?;
|
||||
if let Some(span) = diag.primary_span() {
|
||||
write!(
|
||||
f,
|
||||
" {path}",
|
||||
path = fmt_styled(span.file().path(self.resolver), stylesheet.emphasis)
|
||||
)?;
|
||||
if let Some(range) = span.range() {
|
||||
let diagnostic_source = span.file().diagnostic_source(self.resolver);
|
||||
let start = diagnostic_source
|
||||
.as_source_code()
|
||||
.line_column(range.start());
|
||||
|
||||
write!(
|
||||
f,
|
||||
":{line}:{col}",
|
||||
line = fmt_styled(start.line, stylesheet.emphasis),
|
||||
col = fmt_styled(start.column, stylesheet.emphasis),
|
||||
)?;
|
||||
}
|
||||
write!(f, ":")?;
|
||||
}
|
||||
writeln!(f, " {message}", message = diag.concise_message())?;
|
||||
}
|
||||
ConciseRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?;
|
||||
}
|
||||
DiagnosticFormat::Full => {
|
||||
let stylesheet = if self.config.color {
|
||||
@@ -256,7 +219,7 @@ impl<'a> Resolved<'a> {
|
||||
/// both.)
|
||||
#[derive(Debug)]
|
||||
struct ResolvedDiagnostic<'a> {
|
||||
severity: Severity,
|
||||
level: AnnotateLevel,
|
||||
id: Option<String>,
|
||||
message: String,
|
||||
annotations: Vec<ResolvedAnnotation<'a>>,
|
||||
@@ -281,7 +244,7 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
let id = Some(diag.inner.id.to_string());
|
||||
let message = diag.inner.message.as_str().to_string();
|
||||
ResolvedDiagnostic {
|
||||
severity: diag.inner.severity,
|
||||
level: diag.inner.severity.to_annotate(),
|
||||
id,
|
||||
message,
|
||||
annotations,
|
||||
@@ -304,7 +267,7 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
})
|
||||
.collect();
|
||||
ResolvedDiagnostic {
|
||||
severity: diag.inner.severity,
|
||||
level: diag.inner.severity.to_annotate(),
|
||||
id: None,
|
||||
message: diag.inner.message.as_str().to_string(),
|
||||
annotations,
|
||||
@@ -371,7 +334,7 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
snippets_by_input
|
||||
.sort_by(|snips1, snips2| snips1.has_primary.cmp(&snips2.has_primary).reverse());
|
||||
RenderableDiagnostic {
|
||||
severity: self.severity,
|
||||
level: self.level,
|
||||
id: self.id.as_deref(),
|
||||
message: &self.message,
|
||||
snippets_by_input,
|
||||
@@ -459,7 +422,7 @@ struct Renderable<'r> {
|
||||
#[derive(Debug)]
|
||||
struct RenderableDiagnostic<'r> {
|
||||
/// The severity of the diagnostic.
|
||||
severity: Severity,
|
||||
level: AnnotateLevel,
|
||||
/// The ID of the diagnostic. The ID can usually be used on the CLI or in a
|
||||
/// config file to change the severity of a lint.
|
||||
///
|
||||
@@ -478,7 +441,6 @@ struct RenderableDiagnostic<'r> {
|
||||
impl RenderableDiagnostic<'_> {
|
||||
/// Convert this to an "annotate" snippet.
|
||||
fn to_annotate(&self) -> AnnotateMessage<'_> {
|
||||
let level = self.severity.to_annotate();
|
||||
let snippets = self.snippets_by_input.iter().flat_map(|snippets| {
|
||||
let path = snippets.path;
|
||||
snippets
|
||||
@@ -486,7 +448,7 @@ impl RenderableDiagnostic<'_> {
|
||||
.iter()
|
||||
.map(|snippet| snippet.to_annotate(path))
|
||||
});
|
||||
let mut message = level.title(self.message);
|
||||
let mut message = self.level.title(self.message);
|
||||
if let Some(id) = self.id {
|
||||
message = message.id(id);
|
||||
}
|
||||
@@ -559,7 +521,7 @@ impl<'r> RenderableSnippets<'r> {
|
||||
#[derive(Debug)]
|
||||
struct RenderableSnippet<'r> {
|
||||
/// The actual snippet text.
|
||||
snippet: &'r str,
|
||||
snippet: Cow<'r, str>,
|
||||
/// The absolute line number corresponding to where this
|
||||
/// snippet begins.
|
||||
line_start: OneIndexed,
|
||||
@@ -619,6 +581,13 @@ impl<'r> RenderableSnippet<'r> {
|
||||
.iter()
|
||||
.map(|ann| RenderableAnnotation::new(snippet_start, ann))
|
||||
.collect();
|
||||
|
||||
let EscapedSourceCode {
|
||||
text: snippet,
|
||||
annotations,
|
||||
} = replace_whitespace_and_unprintable(snippet, annotations)
|
||||
.fix_up_empty_spans_after_line_terminator();
|
||||
|
||||
RenderableSnippet {
|
||||
snippet,
|
||||
line_start,
|
||||
@@ -629,7 +598,7 @@ impl<'r> RenderableSnippet<'r> {
|
||||
|
||||
/// Convert this to an "annotate" snippet.
|
||||
fn to_annotate<'a>(&'a self, path: &'a str) -> AnnotateSnippet<'a> {
|
||||
AnnotateSnippet::source(self.snippet)
|
||||
AnnotateSnippet::source(&self.snippet)
|
||||
.origin(path)
|
||||
.line_start(self.line_start.get())
|
||||
.annotations(
|
||||
@@ -859,12 +828,239 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
    path
}

/// Given some source code and annotation ranges, this routine replaces tabs
/// with ASCII whitespace, and unprintable characters with printable
/// representations of them.
///
/// The source code and annotations returned are updated to reflect changes made
/// to the source code (if any).
fn replace_whitespace_and_unprintable<'r>(
    source: &'r str,
    mut annotations: Vec<RenderableAnnotation<'r>>,
) -> EscapedSourceCode<'r> {
    // Updates the annotation ranges given by the caller whenever a single byte (at `index` in
    // `source`) is replaced with `len` bytes.
    //
    // When the index occurs before the start of the range, the range is
    // offset by `len`. When the range occurs after or at the start but before
    // the end, then the end of the range only is offset by `len`.
    let mut update_ranges = |index: usize, len: u32| {
        for ann in &mut annotations {
            if index < usize::from(ann.range.start()) {
                ann.range += TextSize::new(len - 1);
            } else if index < usize::from(ann.range.end()) {
                ann.range = ann.range.add_end(TextSize::new(len - 1));
            }
        }
    };

    // If `c` is an unprintable character, then this returns a printable
    // representation of it (using a fancier Unicode codepoint).
    let unprintable_replacement = |c: char| -> Option<char> {
        match c {
            '\x07' => Some('␇'),
            '\x08' => Some('␈'),
            '\x1b' => Some('␛'),
            '\x7f' => Some('␡'),
            _ => None,
        }
    };

    const TAB_SIZE: usize = 4;
    let mut width = 0;
    let mut column = 0;
    let mut last_end = 0;
    let mut result = String::new();
    for (index, c) in source.char_indices() {
        let old_width = width;
        match c {
            '\n' | '\r' => {
                width = 0;
                column = 0;
            }
            '\t' => {
                let tab_offset = TAB_SIZE - (column % TAB_SIZE);
                width += tab_offset;
                column += tab_offset;

                let tab_width =
                    u32::try_from(width - old_width).expect("small width because of tab size");
                result.push_str(&source[last_end..index]);

                update_ranges(result.text_len().to_usize(), tab_width);

                for _ in 0..tab_width {
                    result.push(' ');
                }
                last_end = index + 1;
            }
            _ => {
                width += unicode_width::UnicodeWidthChar::width(c).unwrap_or(0);
                column += 1;

                if let Some(printable) = unprintable_replacement(c) {
                    result.push_str(&source[last_end..index]);

                    let len = printable.text_len().to_u32();
                    update_ranges(result.text_len().to_usize(), len);

                    result.push(printable);
                    last_end = index + 1;
                }
            }
        }
    }

    // No tabs or unprintable chars
    if result.is_empty() {
        EscapedSourceCode {
            annotations,
            text: Cow::Borrowed(source),
        }
    } else {
        result.push_str(&source[last_end..]);
        EscapedSourceCode {
            annotations,
            text: Cow::Owned(result),
        }
    }
}

struct EscapedSourceCode<'r> {
    text: Cow<'r, str>,
    annotations: Vec<RenderableAnnotation<'r>>,
}

impl<'r> EscapedSourceCode<'r> {
    // This attempts to "fix up" the spans on each annotation in the case where
    // it's an empty span immediately following a line terminator.
    //
    // At present, `annotate-snippets` (both upstream and our vendored copy)
    // will render annotations of such spans to point to the space immediately
    // following the previous line. But ideally, this should point to the space
    // immediately preceding the next line.
    //
    // After attempting to fix `annotate-snippets` and giving up after a couple
    // hours, this routine takes a different tact: it adjusts the span to be
    // non-empty and it will cover the first codepoint of the following line.
    // This forces `annotate-snippets` to point to the right place.
    //
    // See also: <https://github.com/astral-sh/ruff/issues/15509> and
    // `ruff_linter::message::text::SourceCode::fix_up_empty_spans_after_line_terminator`,
    // from which this was adapted.
    fn fix_up_empty_spans_after_line_terminator(mut self) -> EscapedSourceCode<'r> {
        for ann in &mut self.annotations {
            let range = ann.range;
            if !range.is_empty()
                || range.start() == TextSize::from(0)
                || range.start() >= self.text.text_len()
            {
                continue;
            }
            if !matches!(
                self.text.as_bytes()[range.start().to_usize() - 1],
                b'\n' | b'\r'
            ) {
                continue;
            }
            let start = range.start();
            let end = ceil_char_boundary(&self.text, start + TextSize::from(1));
            ann.range = TextRange::new(start, end);
        }

        self
    }
}

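// Aside (illustration only, not part of the diff hunks above): a minimal sketch of
// the range arithmetic that `update_ranges` performs when a single byte at `index`
// is replaced by `len` bytes. The standalone function and its name are made up for
// this example; it relies only on operations the code above already uses.
use ruff_text_size::{TextRange, TextSize};

fn shift_for_replacement(mut range: TextRange, index: usize, len: u32) -> TextRange {
    // One byte became `len` bytes, so everything at or past `index` grows by `len - 1`.
    let growth = TextSize::new(len - 1);
    if index < usize::from(range.start()) {
        // Replacement sits entirely before the annotation: shift the whole range.
        range += growth;
    } else if index < usize::from(range.end()) {
        // Replacement sits inside the annotation: only the end moves.
        range = range.add_end(growth);
    }
    range
}

// For example, a tab at byte 2 expanded to four spaces (`len = 4`, growth 3) moves
// an annotation at 5..7 to 8..10 and stretches an annotation at 0..4 to 0..7.
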
/// Finds the closest [`TextSize`] not less than the offset given for which
/// `is_char_boundary` is `true`. Unless the offset given is greater than
/// the length of the underlying contents, in which case, the length of the
/// contents is returned.
///
/// Can be replaced with `str::ceil_char_boundary` once it's stable.
///
/// # Examples
///
/// From `std`:
///
/// ```
/// use ruff_db::diagnostic::ceil_char_boundary;
/// use ruff_text_size::{Ranged, TextLen, TextSize};
///
/// let source = "❤️🧡💛💚💙💜";
/// assert_eq!(source.text_len(), TextSize::from(26));
/// assert!(!source.is_char_boundary(13));
///
/// let closest = ceil_char_boundary(source, TextSize::from(13));
/// assert_eq!(closest, TextSize::from(14));
/// assert_eq!(&source[..closest.to_usize()], "❤️🧡💛");
/// ```
///
/// Additional examples:
///
/// ```
/// use ruff_db::diagnostic::ceil_char_boundary;
/// use ruff_text_size::{Ranged, TextRange, TextSize};
///
/// let source = "Hello";
///
/// assert_eq!(
///     ceil_char_boundary(source, TextSize::from(0)),
///     TextSize::from(0)
/// );
///
/// assert_eq!(
///     ceil_char_boundary(source, TextSize::from(5)),
///     TextSize::from(5)
/// );
///
/// assert_eq!(
///     ceil_char_boundary(source, TextSize::from(6)),
///     TextSize::from(5)
/// );
///
/// let source = "α";
///
/// assert_eq!(
///     ceil_char_boundary(source, TextSize::from(0)),
///     TextSize::from(0)
/// );
///
/// assert_eq!(
///     ceil_char_boundary(source, TextSize::from(1)),
///     TextSize::from(2)
/// );
///
/// assert_eq!(
///     ceil_char_boundary(source, TextSize::from(2)),
///     TextSize::from(2)
/// );
///
/// assert_eq!(
///     ceil_char_boundary(source, TextSize::from(3)),
///     TextSize::from(2)
/// );
/// ```
pub fn ceil_char_boundary(text: &str, offset: TextSize) -> TextSize {
    let upper_bound = offset
        .to_u32()
        .saturating_add(4)
        .min(text.text_len().to_u32());
    (offset.to_u32()..upper_bound)
        .map(TextSize::from)
        .find(|offset| text.is_char_boundary(offset.to_usize()))
        .unwrap_or_else(|| TextSize::from(upper_bound))
}

#[cfg(test)]
mod tests {

    use ruff_diagnostics::{Edit, Fix};
    use ruff_diagnostics::{Applicability, Edit, Fix};

    use crate::diagnostic::{Annotation, DiagnosticId, SecondaryCode, Severity, Span};
    use crate::diagnostic::{
        Annotation, DiagnosticId, IntoDiagnosticMessage, SecondaryCode, Severity, Span,
        SubDiagnosticSeverity,
    };
    use crate::files::system_path_to_file;
    use crate::system::{DbWithWritableSystem, SystemPath};
    use crate::tests::TestDb;
@@ -1548,7 +1744,7 @@ watermelon
|
||||
|
||||
let mut diag = env.err().primary("animals", "3", "3", "").build();
|
||||
diag.sub(
|
||||
env.sub_builder(Severity::Info, "this is a helpful note")
|
||||
env.sub_builder(SubDiagnosticSeverity::Info, "this is a helpful note")
|
||||
.build(),
|
||||
);
|
||||
insta::assert_snapshot!(
|
||||
@@ -1577,15 +1773,15 @@ watermelon
|
||||
|
||||
let mut diag = env.err().primary("animals", "3", "3", "").build();
|
||||
diag.sub(
|
||||
env.sub_builder(Severity::Info, "this is a helpful note")
|
||||
env.sub_builder(SubDiagnosticSeverity::Info, "this is a helpful note")
|
||||
.build(),
|
||||
);
|
||||
diag.sub(
|
||||
env.sub_builder(Severity::Info, "another helpful note")
|
||||
env.sub_builder(SubDiagnosticSeverity::Info, "another helpful note")
|
||||
.build(),
|
||||
);
|
||||
diag.sub(
|
||||
env.sub_builder(Severity::Info, "and another helpful note")
|
||||
env.sub_builder(SubDiagnosticSeverity::Info, "and another helpful note")
|
||||
.build(),
|
||||
);
|
||||
insta::assert_snapshot!(
|
||||
@@ -2307,6 +2503,27 @@ watermelon
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Hide diagnostic severity when rendering.
|
||||
pub(super) fn hide_severity(&mut self, yes: bool) {
|
||||
let mut config = std::mem::take(&mut self.config);
|
||||
config = config.hide_severity(yes);
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Show fix availability when rendering.
|
||||
pub(super) fn show_fix_status(&mut self, yes: bool) {
|
||||
let mut config = std::mem::take(&mut self.config);
|
||||
config = config.show_fix_status(yes);
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// The lowest fix applicability to show when rendering.
|
||||
pub(super) fn fix_applicability(&mut self, applicability: Applicability) {
|
||||
let mut config = std::mem::take(&mut self.config);
|
||||
config = config.fix_applicability(applicability);
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Add a file with the given path and contents to this environment.
|
||||
pub(super) fn add(&mut self, path: &str, contents: &str) {
|
||||
let path = SystemPath::new(path);
|
||||
@@ -2370,11 +2587,11 @@ watermelon
|
||||
/// sub-diagnostic with "error" severity and canned values for
|
||||
/// its identifier and message.
|
||||
fn sub_warn(&mut self) -> SubDiagnosticBuilder<'_> {
|
||||
self.sub_builder(Severity::Warning, "sub-diagnostic message")
|
||||
self.sub_builder(SubDiagnosticSeverity::Warning, "sub-diagnostic message")
|
||||
}
|
||||
|
||||
/// Returns a builder for tersely constructing diagnostics.
|
||||
fn builder(
|
||||
pub(super) fn builder(
|
||||
&mut self,
|
||||
identifier: &'static str,
|
||||
severity: Severity,
|
||||
@@ -2391,7 +2608,11 @@ watermelon
|
||||
}
|
||||
|
||||
/// Returns a builder for tersely constructing sub-diagnostics.
|
||||
fn sub_builder(&mut self, severity: Severity, message: &str) -> SubDiagnosticBuilder<'_> {
|
||||
fn sub_builder(
|
||||
&mut self,
|
||||
severity: SubDiagnosticSeverity,
|
||||
message: &str,
|
||||
) -> SubDiagnosticBuilder<'_> {
|
||||
let subdiag = SubDiagnostic::new(severity, message);
|
||||
SubDiagnosticBuilder { env: self, subdiag }
|
||||
}
|
||||
@@ -2437,7 +2658,7 @@ watermelon
|
||||
///
|
||||
/// See the docs on `TestEnvironment::span` for the meaning of
|
||||
/// `path`, `line_offset_start` and `line_offset_end`.
|
||||
fn primary(
|
||||
pub(super) fn primary(
|
||||
mut self,
|
||||
path: &str,
|
||||
line_offset_start: &str,
|
||||
@@ -2494,6 +2715,12 @@ watermelon
|
||||
self.diag.set_noqa_offset(noqa_offset);
|
||||
self
|
||||
}
|
||||
|
||||
/// Adds a "help" sub-diagnostic with the given message.
|
||||
fn help(mut self, message: impl IntoDiagnosticMessage) -> DiagnosticBuilder<'e> {
|
||||
self.diag.help(message);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// A helper builder for tersely populating a `SubDiagnostic`.
|
||||
@@ -2600,7 +2827,8 @@ def fibonacci(n):
|
||||
|
||||
let diagnostics = vec![
|
||||
env.builder("unused-import", Severity::Error, "`os` imported but unused")
|
||||
.primary("fib.py", "1:7", "1:9", "Remove unused import: `os`")
|
||||
.primary("fib.py", "1:7", "1:9", "")
|
||||
.help("Remove unused import: `os`")
|
||||
.secondary_code("F401")
|
||||
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(0),
|
||||
@@ -2613,12 +2841,8 @@ def fibonacci(n):
|
||||
Severity::Error,
|
||||
"Local variable `x` is assigned to but never used",
|
||||
)
|
||||
.primary(
|
||||
"fib.py",
|
||||
"6:4",
|
||||
"6:5",
|
||||
"Remove assignment to unused variable `x`",
|
||||
)
|
||||
.primary("fib.py", "6:4", "6:5", "")
|
||||
.help("Remove assignment to unused variable `x`")
|
||||
.secondary_code("F841")
|
||||
.fix(Fix::unsafe_edit(Edit::deletion(
|
||||
TextSize::from(94),
|
||||
@@ -2665,6 +2889,25 @@ if call(foo
|
||||
}
|
||||
|
||||
/// Create Ruff-style diagnostics for testing the various output formats for a notebook.
|
||||
///
|
||||
/// The concatenated cells look like this:
|
||||
///
|
||||
/// ```python
|
||||
/// # cell 1
|
||||
/// import os
|
||||
/// # cell 2
|
||||
/// import math
|
||||
///
|
||||
/// print('hello world')
|
||||
/// # cell 3
|
||||
/// def foo():
|
||||
/// print()
|
||||
/// x = 1
|
||||
/// ```
|
||||
///
|
||||
/// The first diagnostic is on the unused `os` import with location cell 1, row 2, column 8
|
||||
/// (`cell 1:2:8`). The second diagnostic is the unused `math` import at `cell 2:2:8`, and the
|
||||
/// third diagnostic is an unfixable unused variable at `cell 3:4:5`.
|
||||
#[allow(
|
||||
dead_code,
|
||||
reason = "This is currently only used for JSON but will be needed soon for other formats"
|
||||
@@ -2720,7 +2963,8 @@ if call(foo
|
||||
|
||||
let diagnostics = vec![
|
||||
env.builder("unused-import", Severity::Error, "`os` imported but unused")
|
||||
.primary("notebook.ipynb", "2:7", "2:9", "Remove unused import: `os`")
|
||||
.primary("notebook.ipynb", "2:7", "2:9", "")
|
||||
.help("Remove unused import: `os`")
|
||||
.secondary_code("F401")
|
||||
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(9),
|
||||
@@ -2733,12 +2977,8 @@ if call(foo
|
||||
Severity::Error,
|
||||
"`math` imported but unused",
|
||||
)
|
||||
.primary(
|
||||
"notebook.ipynb",
|
||||
"4:7",
|
||||
"4:11",
|
||||
"Remove unused import: `math`",
|
||||
)
|
||||
.primary("notebook.ipynb", "4:7", "4:11", "")
|
||||
.help("Remove unused import: `math`")
|
||||
.secondary_code("F401")
|
||||
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(28),
|
||||
@@ -2751,12 +2991,8 @@ if call(foo
|
||||
Severity::Error,
|
||||
"Local variable `x` is assigned to but never used",
|
||||
)
|
||||
.primary(
|
||||
"notebook.ipynb",
|
||||
"10:4",
|
||||
"10:5",
|
||||
"Remove assignment to unused variable `x`",
|
||||
)
|
||||
.primary("notebook.ipynb", "10:4", "10:5", "")
|
||||
.help("Remove assignment to unused variable `x`")
|
||||
.secondary_code("F841")
|
||||
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(94),
|
||||
|
||||
195  crates/ruff_db/src/diagnostic/render/concise.rs  Normal file
@@ -0,0 +1,195 @@
use crate::diagnostic::{
    Diagnostic, DisplayDiagnosticConfig, Severity,
    stylesheet::{DiagnosticStylesheet, fmt_styled},
};

use super::FileResolver;

pub(super) struct ConciseRenderer<'a> {
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
}

impl<'a> ConciseRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
        Self { resolver, config }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        let stylesheet = if self.config.color {
            DiagnosticStylesheet::styled()
        } else {
            DiagnosticStylesheet::plain()
        };

        let sep = fmt_styled(":", stylesheet.separator);
        for diag in diagnostics {
            if let Some(span) = diag.primary_span() {
                write!(
                    f,
                    "{path}",
                    path = fmt_styled(
                        span.file().relative_path(self.resolver).to_string_lossy(),
                        stylesheet.emphasis
                    )
                )?;
                if let Some(range) = span.range() {
                    let diagnostic_source = span.file().diagnostic_source(self.resolver);
                    let start = diagnostic_source
                        .as_source_code()
                        .line_column(range.start());

                    if let Some(notebook_index) = self.resolver.notebook_index(span.file()) {
                        write!(
                            f,
                            "{sep}cell {cell}{sep}{line}{sep}{col}",
                            cell = notebook_index.cell(start.line).unwrap_or_default(),
                            line = notebook_index.cell_row(start.line).unwrap_or_default(),
                            col = start.column,
                        )?;
                    } else {
                        write!(
                            f,
                            "{sep}{line}{sep}{col}",
                            line = start.line,
                            col = start.column,
                        )?;
                    }
                }
                write!(f, "{sep} ")?;
            }
            if self.config.hide_severity {
                if let Some(code) = diag.secondary_code() {
                    write!(
                        f,
                        "{code} ",
                        code = fmt_styled(code, stylesheet.secondary_code)
                    )?;
                }
                if self.config.show_fix_status {
                    if let Some(fix) = diag.fix() {
                        // Do not display an indicator for inapplicable fixes
                        if fix.applies(self.config.fix_applicability) {
                            write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
                        }
                    }
                }
            } else {
                let (severity, severity_style) = match diag.severity() {
                    Severity::Info => ("info", stylesheet.info),
                    Severity::Warning => ("warning", stylesheet.warning),
                    Severity::Error => ("error", stylesheet.error),
                    Severity::Fatal => ("fatal", stylesheet.error),
                };
                write!(
                    f,
                    "{severity}[{id}] ",
                    severity = fmt_styled(severity, severity_style),
                    id = fmt_styled(diag.id(), stylesheet.emphasis)
                )?;
            }

            writeln!(f, "{message}", message = diag.concise_message())?;
        }

        Ok(())
    }
}

#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_diagnostics::Applicability;
|
||||
|
||||
use crate::diagnostic::{
|
||||
DiagnosticFormat,
|
||||
render::tests::{
|
||||
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
|
||||
create_syntax_error_diagnostics,
|
||||
},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
fib.py:1:8: error[unused-import] `os` imported but unused
|
||||
fib.py:6:5: error[unused-variable] Local variable `x` is assigned to but never used
|
||||
undef.py:1:4: error[undefined-name] Undefined name `a`
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn show_fixes() {
|
||||
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
|
||||
env.hide_severity(true);
|
||||
env.show_fix_status(true);
|
||||
env.fix_applicability(Applicability::DisplayOnly);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
fib.py:1:8: F401 [*] `os` imported but unused
|
||||
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
|
||||
undef.py:1:4: F821 Undefined name `a`
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn show_fixes_preview() {
|
||||
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
|
||||
env.hide_severity(true);
|
||||
env.show_fix_status(true);
|
||||
env.fix_applicability(Applicability::DisplayOnly);
|
||||
env.preview(true);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
fib.py:1:8: F401 [*] `os` imported but unused
|
||||
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
|
||||
undef.py:1:4: F821 Undefined name `a`
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn show_fixes_syntax_errors() {
|
||||
let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
|
||||
env.hide_severity(true);
|
||||
env.show_fix_status(true);
|
||||
env.fix_applicability(Applicability::DisplayOnly);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
|
||||
syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
syntax_errors.py:1:15: error[invalid-syntax] SyntaxError: Expected one or more symbol names after import
|
||||
syntax_errors.py:3:12: error[invalid-syntax] SyntaxError: Expected ')', found newline
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn notebook_output() {
|
||||
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Concise);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
notebook.ipynb:cell 1:2:8: error[unused-import] `os` imported but unused
|
||||
notebook.ipynb:cell 2:2:8: error[unused-import] `math` imported but unused
|
||||
notebook.ipynb:cell 3:4:5: error[unused-variable] Local variable `x` is assigned to but never used
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_file() {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.format(DiagnosticFormat::Concise);
|
||||
|
||||
let diag = env.err().build();
|
||||
|
||||
insta::assert_snapshot!(
|
||||
env.render(&diag),
|
||||
@"error[test-diagnostic] main diagnostic message",
|
||||
);
|
||||
}
|
||||
}
|
||||
180  crates/ruff_db/src/diagnostic/render/full.rs  Normal file
@@ -0,0 +1,180 @@
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::diagnostic::{
|
||||
DiagnosticFormat, Severity,
|
||||
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn output() {
|
||||
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
|
||||
error[unused-import]: `os` imported but unused
|
||||
--> fib.py:1:8
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
|
||||
error[unused-variable]: Local variable `x` is assigned to but never used
|
||||
--> fib.py:6:5
|
||||
|
|
||||
4 | def fibonacci(n):
|
||||
5 | """Compute the nth number in the Fibonacci sequence."""
|
||||
6 | x = 1
|
||||
| ^
|
||||
7 | if n == 0:
|
||||
8 | return 0
|
||||
|
|
||||
help: Remove assignment to unused variable `x`
|
||||
|
||||
error[undefined-name]: Undefined name `a`
|
||||
--> undef.py:1:4
|
||||
|
|
||||
1 | if a == 1: pass
|
||||
| ^
|
||||
|
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
error[invalid-syntax]: SyntaxError: Expected one or more symbol names after import
|
||||
--> syntax_errors.py:1:15
|
||||
|
|
||||
1 | from os import
|
||||
| ^
|
||||
2 |
|
||||
3 | if call(foo
|
||||
|
|
||||
|
||||
error[invalid-syntax]: SyntaxError: Expected ')', found newline
|
||||
--> syntax_errors.py:3:12
|
||||
|
|
||||
1 | from os import
|
||||
2 |
|
||||
3 | if call(foo
|
||||
| ^
|
||||
4 | def bar():
|
||||
5 | pass
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
/// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit c9b99e4.
|
||||
///
|
||||
/// For example, without the fix, we get diagnostics like this:
|
||||
///
|
||||
/// ```
|
||||
/// error[no-indented-block]: Expected an indented block
|
||||
/// --> example.py:3:1
|
||||
/// |
|
||||
/// 2 | if False:
|
||||
/// | ^
|
||||
/// 3 | print()
|
||||
/// |
|
||||
/// ```
|
||||
///
|
||||
/// where the caret points to the end of the previous line instead of the start of the next.
|
||||
#[test]
|
||||
fn empty_span_after_line_terminator() {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.add(
|
||||
"example.py",
|
||||
r#"
|
||||
if False:
|
||||
print()
|
||||
"#,
|
||||
);
|
||||
env.format(DiagnosticFormat::Full);
|
||||
|
||||
let diagnostic = env
|
||||
.builder(
|
||||
"no-indented-block",
|
||||
Severity::Error,
|
||||
"Expected an indented block",
|
||||
)
|
||||
.primary("example.py", "3:0", "3:0", "")
|
||||
.build();
|
||||
|
||||
insta::assert_snapshot!(env.render(&diagnostic), @r"
|
||||
error[no-indented-block]: Expected an indented block
|
||||
--> example.py:3:1
|
||||
|
|
||||
2 | if False:
|
||||
3 | print()
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
/// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit 2922490.
|
||||
///
|
||||
/// For example, without the fix, we get diagnostics like this:
|
||||
///
|
||||
/// ```
|
||||
/// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
|
||||
/// --> example.py:1:25
|
||||
/// |
|
||||
/// 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
|
||||
/// | ^
|
||||
/// |
|
||||
/// ```
|
||||
///
|
||||
/// where the caret points to the `f` in the f-string instead of the start of the invalid
|
||||
/// character (`^Z`).
|
||||
#[test]
|
||||
fn unprintable_characters() {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.add("example.py", "nested_fstrings = f'{f'{f''}'}'");
|
||||
env.format(DiagnosticFormat::Full);
|
||||
|
||||
let diagnostic = env
|
||||
.builder(
|
||||
"invalid-character-sub",
|
||||
Severity::Error,
|
||||
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
|
||||
)
|
||||
.primary("example.py", "1:24", "1:24", "")
|
||||
.build();
|
||||
|
||||
insta::assert_snapshot!(env.render(&diagnostic), @r#"
|
||||
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
|
||||
--> example.py:1:25
|
||||
|
|
||||
1 | nested_fstrings = f'␈{f'{f'␛'}'}'
|
||||
| ^
|
||||
|
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_unprintable_characters() -> std::io::Result<()> {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.add("example.py", "");
|
||||
env.format(DiagnosticFormat::Full);
|
||||
|
||||
let diagnostic = env
|
||||
.builder(
|
||||
"invalid-character-sub",
|
||||
Severity::Error,
|
||||
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
|
||||
)
|
||||
.primary("example.py", "1:1", "1:1", "")
|
||||
.build();
|
||||
|
||||
insta::assert_snapshot!(env.render(&diagnostic), @r#"
|
||||
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
|
||||
--> example.py:1:2
|
||||
|
|
||||
1 | ␈␛
|
||||
| ^
|
||||
|
|
||||
"#);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -87,7 +87,7 @@ pub(super) fn diagnostic_to_json<'a>(
|
||||
|
||||
let fix = diagnostic.fix().map(|fix| JsonFix {
|
||||
applicability: fix.applicability(),
|
||||
message: diagnostic.suggestion(),
|
||||
message: diagnostic.first_help_text(),
|
||||
edits: ExpandedEdits {
|
||||
edits: fix.edits(),
|
||||
notebook_index,
|
||||
|
||||
@@ -41,6 +41,8 @@ pub struct DiagnosticStylesheet {
|
||||
pub(crate) line_no: Style,
|
||||
pub(crate) emphasis: Style,
|
||||
pub(crate) none: Style,
|
||||
pub(crate) separator: Style,
|
||||
pub(crate) secondary_code: Style,
|
||||
}
|
||||
|
||||
impl Default for DiagnosticStylesheet {
|
||||
@@ -62,6 +64,8 @@ impl DiagnosticStylesheet {
|
||||
line_no: bright_blue.effects(Effects::BOLD),
|
||||
emphasis: Style::new().effects(Effects::BOLD),
|
||||
none: Style::new(),
|
||||
separator: AnsiColor::Cyan.on_default(),
|
||||
secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,6 +79,8 @@ impl DiagnosticStylesheet {
|
||||
line_no: Style::new(),
|
||||
emphasis: Style::new(),
|
||||
none: Style::new(),
|
||||
separator: Style::new(),
|
||||
secondary_code: Style::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use std::fmt;
|
||||
use std::sync::Arc;
|
||||
|
||||
use countme::Count;
|
||||
use dashmap::mapref::entry::Entry;
|
||||
pub use file_root::{FileRoot, FileRootKind};
|
||||
pub use path::FilePath;
|
||||
@@ -312,11 +311,6 @@ pub struct File {
|
||||
/// the file has been deleted is to change the status to `Deleted`.
|
||||
#[default]
|
||||
status: FileStatus,
|
||||
|
||||
/// Counter that counts the number of created file instances and active file instances.
|
||||
/// Only enabled in debug builds.
|
||||
#[default]
|
||||
count: Count<File>,
|
||||
}
|
||||
|
||||
// The Salsa heap is tracked separately.
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
use std::ops::Deref;
|
||||
use std::sync::Arc;
|
||||
|
||||
use countme::Count;
|
||||
|
||||
use ruff_notebook::Notebook;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_source_file::LineIndex;
|
||||
@@ -38,11 +36,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
|
||||
};
|
||||
|
||||
SourceText {
|
||||
inner: Arc::new(SourceTextInner {
|
||||
kind,
|
||||
read_error,
|
||||
count: Count::new(),
|
||||
}),
|
||||
inner: Arc::new(SourceTextInner { kind, read_error }),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -125,8 +119,6 @@ impl std::fmt::Debug for SourceText {
|
||||
|
||||
#[derive(Eq, PartialEq, get_size2::GetSize)]
|
||||
struct SourceTextInner {
|
||||
#[get_size(ignore)]
|
||||
count: Count<SourceText>,
|
||||
kind: SourceTextKind,
|
||||
read_error: Option<SourceTextError>,
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ ty_python_semantic = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true, optional = true }
|
||||
memchr = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
schemars = { workspace = true, optional = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::StringImports;
|
||||
use ruff_python_ast::visitor::source_order::{
|
||||
SourceOrderVisitor, walk_expr, walk_module, walk_stmt,
|
||||
};
|
||||
@@ -10,13 +11,13 @@ pub(crate) struct Collector<'a> {
|
||||
/// The path to the current module.
|
||||
module_path: Option<&'a [String]>,
|
||||
/// Whether to detect imports from string literals.
|
||||
string_imports: bool,
|
||||
string_imports: StringImports,
|
||||
/// The collected imports from the Python AST.
|
||||
imports: Vec<CollectedImport>,
|
||||
}
|
||||
|
||||
impl<'a> Collector<'a> {
|
||||
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: bool) -> Self {
|
||||
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
|
||||
Self {
|
||||
module_path,
|
||||
string_imports,
|
||||
@@ -118,7 +119,7 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
|
||||
| Stmt::Continue(_)
|
||||
| Stmt::IpyEscapeCommand(_) => {
|
||||
// Only traverse simple statements when string imports is enabled.
|
||||
if self.string_imports {
|
||||
if self.string_imports.enabled {
|
||||
walk_stmt(self, stmt);
|
||||
}
|
||||
}
|
||||
@@ -126,20 +127,26 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'ast Expr) {
|
||||
if self.string_imports {
|
||||
if self.string_imports.enabled {
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral {
|
||||
value,
|
||||
range: _,
|
||||
node_index: _,
|
||||
}) = expr
|
||||
{
|
||||
// Determine whether the string literal "looks like" an import statement: contains
|
||||
// a dot, and consists solely of valid Python identifiers.
|
||||
let value = value.to_str();
|
||||
if let Some(module_name) = ModuleName::new(value) {
|
||||
self.imports.push(CollectedImport::Import(module_name));
|
||||
// Determine whether the string literal "looks like" an import statement: contains
|
||||
// the requisite number of dots, and consists solely of valid Python identifiers.
|
||||
if self.string_imports.min_dots == 0
|
||||
|| memchr::memchr_iter(b'.', value.as_bytes()).count()
|
||||
>= self.string_imports.min_dots
|
||||
{
|
||||
if let Some(module_name) = ModuleName::new(value) {
|
||||
self.imports.push(CollectedImport::Import(module_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ use ruff_python_parser::{Mode, ParseOptions, parse};
|
||||
use crate::collector::Collector;
|
||||
pub use crate::db::ModuleDb;
|
||||
use crate::resolver::Resolver;
|
||||
pub use crate::settings::{AnalyzeSettings, Direction};
|
||||
pub use crate::settings::{AnalyzeSettings, Direction, StringImports};
|
||||
|
||||
mod collector;
|
||||
mod db;
|
||||
@@ -26,7 +26,7 @@ impl ModuleImports {
|
||||
db: &ModuleDb,
|
||||
path: &SystemPath,
|
||||
package: Option<&SystemPath>,
|
||||
string_imports: bool,
|
||||
string_imports: StringImports,
|
||||
) -> Result<Self> {
|
||||
// Read and parse the source code.
|
||||
let source = std::fs::read_to_string(path)?;
|
||||
@@ -42,13 +42,11 @@ impl ModuleImports {
|
||||
// Resolve the imports.
|
||||
let mut resolved_imports = ModuleImports::default();
|
||||
for import in imports {
|
||||
let Some(resolved) = Resolver::new(db).resolve(import) else {
|
||||
continue;
|
||||
};
|
||||
let Some(path) = resolved.as_system_path() else {
|
||||
continue;
|
||||
};
|
||||
resolved_imports.insert(path.to_path_buf());
|
||||
for resolved in Resolver::new(db).resolve(import) {
|
||||
if let Some(path) = resolved.as_system_path() {
|
||||
resolved_imports.insert(path.to_path_buf());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(resolved_imports)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use ruff_db::files::FilePath;
|
||||
use ty_python_semantic::resolve_module;
|
||||
use ty_python_semantic::{ModuleName, resolve_module, resolve_real_module};
|
||||
|
||||
use crate::ModuleDb;
|
||||
use crate::collector::CollectedImport;
|
||||
@@ -16,24 +16,67 @@ impl<'a> Resolver<'a> {
|
||||
}
|
||||
|
||||
/// Resolve the [`CollectedImport`] into a [`FilePath`].
|
||||
pub(crate) fn resolve(&self, import: CollectedImport) -> Option<&'a FilePath> {
|
||||
pub(crate) fn resolve(&self, import: CollectedImport) -> impl Iterator<Item = &'a FilePath> {
|
||||
match import {
|
||||
CollectedImport::Import(import) => {
|
||||
let module = resolve_module(self.db, &import)?;
|
||||
Some(module.file()?.path(self.db))
|
||||
// Attempt to resolve the module (e.g., given `import foo`, look for `foo`).
|
||||
let file = self.resolve_module(&import);
|
||||
|
||||
// If the file is a stub, look for the corresponding source file.
|
||||
let source_file = file
|
||||
.is_some_and(|file| file.extension() == Some("pyi"))
|
||||
.then(|| self.resolve_real_module(&import))
|
||||
.flatten();
|
||||
|
||||
std::iter::once(file)
|
||||
.chain(std::iter::once(source_file))
|
||||
.flatten()
|
||||
}
|
||||
CollectedImport::ImportFrom(import) => {
|
||||
// Attempt to resolve the member (e.g., given `from foo import bar`, look for `foo.bar`).
|
||||
if let Some(file) = self.resolve_module(&import) {
|
||||
// If the file is a stub, look for the corresponding source file.
|
||||
let source_file = (file.extension() == Some("pyi"))
|
||||
.then(|| self.resolve_real_module(&import))
|
||||
.flatten();
|
||||
|
||||
return std::iter::once(Some(file))
|
||||
.chain(std::iter::once(source_file))
|
||||
.flatten();
|
||||
}
|
||||
|
||||
// Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
|
||||
let parent = import.parent();
|
||||
let file = parent
|
||||
.as_ref()
|
||||
.and_then(|parent| self.resolve_module(parent));
|
||||
|
||||
let module = resolve_module(self.db, &import).or_else(|| {
|
||||
// Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
|
||||
// If the file is a stub, look for the corresponding source file.
|
||||
let source_file = file
|
||||
.is_some_and(|file| file.extension() == Some("pyi"))
|
||||
.then(|| {
|
||||
parent
|
||||
.as_ref()
|
||||
.and_then(|parent| self.resolve_real_module(parent))
|
||||
})
|
||||
.flatten();
|
||||
|
||||
resolve_module(self.db, &parent?)
|
||||
})?;
|
||||
|
||||
Some(module.file()?.path(self.db))
|
||||
std::iter::once(file)
|
||||
.chain(std::iter::once(source_file))
|
||||
.flatten()
|
||||
}
|
||||
}
|
||||
}

    /// Resolves a module name to a module.
    fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
        let module = resolve_module(self.db, module_name)?;
        Some(module.file(self.db)?.path(self.db))
    }

    /// Resolves a module name to a module (stubs not allowed).
    fn resolve_real_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
        let module = resolve_real_module(self.db, module_name)?;
        Some(module.file(self.db)?.path(self.db))
    }
}

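// Aside (illustration only, not part of the change above): the shape of the iterator
// `resolve` now returns. Each match arm yields at most two paths: the resolved file
// and, when that file is a `.pyi` stub, the corresponding non-stub source file. The
// helper below uses plain `&str` instead of `FilePath` purely for illustration.
fn stub_then_source<'a>(
    file: Option<&'a str>,
    source_file: Option<&'a str>,
) -> impl Iterator<Item = &'a str> {
    std::iter::once(file)
        .chain(std::iter::once(source_file))
        .flatten()
}
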
@@ -11,7 +11,7 @@ pub struct AnalyzeSettings {
|
||||
pub exclude: FilePatternSet,
|
||||
pub preview: PreviewMode,
|
||||
pub target_version: PythonVersion,
|
||||
pub detect_string_imports: bool,
|
||||
pub string_imports: StringImports,
|
||||
pub include_dependencies: BTreeMap<PathBuf, (PathBuf, Vec<String>)>,
|
||||
pub extension: ExtensionMapping,
|
||||
}
|
||||
@@ -26,7 +26,7 @@ impl fmt::Display for AnalyzeSettings {
|
||||
self.exclude,
|
||||
self.preview,
|
||||
self.target_version,
|
||||
self.detect_string_imports,
|
||||
self.string_imports,
|
||||
self.extension | debug,
|
||||
self.include_dependencies | debug,
|
||||
]
|
||||
@@ -35,6 +35,31 @@ impl fmt::Display for AnalyzeSettings {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, CacheKey)]
pub struct StringImports {
    pub enabled: bool,
    pub min_dots: usize,
}

impl Default for StringImports {
    fn default() -> Self {
        Self {
            enabled: false,
            min_dots: 2,
        }
    }
}

impl fmt::Display for StringImports {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.enabled {
            write!(f, "enabled (min_dots: {})", self.min_dots)
        } else {
            write!(f, "disabled")
        }
    }
}

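// Aside (illustration only, not part of the change above): how `min_dots` gates the
// string-literal import heuristic used by the collector. The helper name and its
// standalone form are made up for this example; the real code additionally requires
// the candidate string to parse as a valid `ModuleName`.
fn looks_like_import(candidate: &str, min_dots: usize) -> bool {
    // With the default `min_dots: 2`, "os.path" (one dot) is skipped while
    // "airflow.providers.common" (two dots) is still considered.
    min_dots == 0 || candidate.bytes().filter(|&b| b == b'.').count() >= min_dots
}
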
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, CacheKey)]
|
||||
#[cfg_attr(
|
||||
feature = "serde",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.12.4"
|
||||
version = "0.12.5"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -25,5 +25,5 @@ def my_func():
|
||||
|
||||
# t-strings - all ok
|
||||
t"0.0.0.0"
|
||||
"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
|
||||
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
|
||||
t"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
|
||||
t"0.0.0.0" t"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
|
||||
|
||||
@@ -94,7 +94,7 @@ except Exception:
|
||||
logging.error("...", exc_info=True)
|
||||
|
||||
|
||||
from logging import error, exception
|
||||
from logging import critical, error, exception
|
||||
|
||||
try:
|
||||
pass
|
||||
@@ -114,6 +114,23 @@ except Exception:
|
||||
error("...", exc_info=None)
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
critical("...")
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
critical("...", exc_info=False)
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
critical("...", exc_info=None)
|
||||
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
@@ -125,6 +142,13 @@ try:
|
||||
except Exception:
|
||||
error("...", exc_info=True)
|
||||
|
||||
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
critical("...", exc_info=True)
|
||||
|
||||
|
||||
try:
|
||||
...
|
||||
except Exception as e:
|
||||
|
||||
@@ -650,3 +650,17 @@ f"""This is a test. {
|
||||
if True else
|
||||
"Don't add a trailing comma here ->"
|
||||
}"""
|
||||
|
||||
type X[
|
||||
T
|
||||
] = T
|
||||
def f[
|
||||
T
|
||||
](): pass
|
||||
class C[
|
||||
T
|
||||
]: pass
|
||||
|
||||
type X[T,] = T
|
||||
def f[T,](): pass
|
||||
class C[T,]: pass
|
||||
@@ -142,3 +142,7 @@ field47: typing.Optional[int] | typing.Optional[dict]
|
||||
# avoid reporting twice
|
||||
field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
field49: typing.Optional[complex | complex] | complex
|
||||
|
||||
# Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
# Should throw duplicate union member but not fix
|
||||
isinstance(None, typing.Union[None, None])
|
||||
@@ -47,3 +47,19 @@ def _():
|
||||
from builtin import open
|
||||
|
||||
with open(p) as _: ... # No error
|
||||
|
||||
file = "file_1.py"
|
||||
|
||||
rename(file, "file_2.py")
|
||||
|
||||
rename(
|
||||
# commment 1
|
||||
file, # comment 2
|
||||
"file_2.py"
|
||||
,
|
||||
# comment 3
|
||||
)
|
||||
|
||||
rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
|
||||
|
||||
rename(file, "file_2.py", src_dir_fd=1)
|
||||
@@ -84,3 +84,25 @@ class MyRequestHandler(BaseHTTPRequestHandler):
|
||||
def dont_GET(self):
|
||||
pass
|
||||
|
||||
|
||||
from http.server import CGIHTTPRequestHandler
|
||||
|
||||
|
||||
class MyCGIRequestHandler(CGIHTTPRequestHandler):
|
||||
def do_OPTIONS(self):
|
||||
pass
|
||||
|
||||
def dont_OPTIONS(self):
|
||||
pass
|
||||
|
||||
|
||||
from http.server import SimpleHTTPRequestHandler
|
||||
|
||||
|
||||
class MySimpleRequestHandler(SimpleHTTPRequestHandler):
|
||||
def do_OPTIONS(self):
|
||||
pass
|
||||
|
||||
def dont_OPTIONS(self):
|
||||
pass
|
||||
|
||||
|
||||
@@ -278,3 +278,30 @@ def f():
|
||||
for i in src:
|
||||
if lambda: 0:
|
||||
dst.append(i)
|
||||
|
||||
def f():
|
||||
i = "xyz"
|
||||
result = []
|
||||
for i in range(3):
|
||||
result.append(x for x in [i])
|
||||
|
||||
def f():
|
||||
i = "xyz"
|
||||
result = []
|
||||
for i in range(3):
|
||||
result.append((x for x in [i]))
|
||||
|
||||
G_INDEX = None
|
||||
def f():
|
||||
global G_INDEX
|
||||
result = []
|
||||
for G_INDEX in range(3):
|
||||
result.append(G_INDEX)
|
||||
|
||||
def f():
|
||||
NL_INDEX = None
|
||||
def x():
|
||||
nonlocal NL_INDEX
|
||||
result = []
|
||||
for NL_INDEX in range(3):
|
||||
result.append(NL_INDEX)
|
||||
5  crates/ruff_linter/resources/test/fixtures/pylint/empty_comment_line_continuation.py  vendored  Normal file
@@ -0,0 +1,5 @@
|
||||
#
|
||||
x = 0 \
|
||||
#
|
||||
+1
|
||||
print(x)
|
||||
@@ -143,3 +143,23 @@ class NotAMethodButHardToDetect:
|
||||
# without risking false positives elsewhere or introducing complex heuristics
|
||||
# that users would find surprising and confusing
|
||||
FOO = sorted([x for x in BAR], key=lambda x: x.baz)
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/19305
|
||||
import pytest
|
||||
|
||||
@pytest.fixture
|
||||
def my_fixture_with_param(request):
|
||||
return request.param
|
||||
|
||||
@pytest.fixture()
|
||||
def my_fixture_with_param2(request):
|
||||
return request.param
|
||||
|
||||
|
||||
# Decorated function (should be ignored)
|
||||
def custom_decorator(func):
|
||||
return func
|
||||
|
||||
@custom_decorator
|
||||
def add(x, y):
|
||||
return x + y
|
||||
|
||||
@@ -55,3 +55,12 @@ _ = Decimal(0.1)
|
||||
_ = Decimal(-0.5)
|
||||
_ = Decimal(5.0)
|
||||
_ = decimal.Decimal(4.2)
|
||||
|
||||
# Cases with int and bool - should produce safe fixes
|
||||
_ = Decimal.from_float(1)
|
||||
_ = Decimal.from_float(True)
|
||||
|
||||
# Cases with non-finite floats - should produce safe fixes
|
||||
_ = Decimal.from_float(float("-nan"))
|
||||
_ = Decimal.from_float(float("\x2dnan"))
|
||||
_ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))
|
||||
|
||||
@@ -65,3 +65,62 @@ class Foo:
|
||||
bar = "should've used attrs"
|
||||
|
||||
def __post_init__(self, bar: str = "ahhh", baz: str = "hmm") -> None: ...
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/18950
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(self, bar: int = (x := 1)) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(
|
||||
self,
|
||||
bar: int = (x := 1) # comment
|
||||
,
|
||||
baz: int = (y := 2), # comment
|
||||
foo = (a := 1) # comment
|
||||
,
|
||||
faz = (b := 2), # comment
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(
|
||||
self,
|
||||
bar: int = 1, # comment
|
||||
baz: int = 2, # comment
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(
|
||||
self,
|
||||
arg1: int = (1) # comment
|
||||
,
|
||||
arg2: int = ((1)) # comment
|
||||
,
|
||||
arg2: int = (i for i in range(10)) # comment
|
||||
,
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
|
||||
# makes little sense, but is valid syntax
|
||||
def fun_with_python_syntax():
|
||||
@dataclass
|
||||
class Foo:
|
||||
def __post_init__(
|
||||
self,
|
||||
bar: (int) = (yield from range(5)) # comment
|
||||
,
|
||||
) -> None:
|
||||
...
|
||||
|
||||
return Foo
|
||||
|
||||
@@ -53,3 +53,16 @@ regex.subn(br"""eak your machine with rm -""", rf"""/""")
|
||||
regex.splititer(both, non_literal)
|
||||
regex.subf(f, lambda _: r'means', '"format"')
|
||||
regex.subfn(fn, f'''a$1n't''', lambda: "'function'")
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/16713
|
||||
re.compile("\a\f\n\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
|
||||
re.compile("\b") # without fix
|
||||
re.compile("\"") # without fix
|
||||
re.compile("\'") # without fix
|
||||
re.compile('\"') # without fix
|
||||
re.compile('\'') # without fix
|
||||
re.compile("\\") # without fix
|
||||
re.compile("\101") # without fix
|
||||
re.compile("a\
|
||||
b") # without fix
|
||||
|
||||
@@ -91,3 +91,20 @@ regex.subf(
|
||||
br''br""br''
|
||||
)
|
||||
regex.subfn(br'I\s\nee*d\s[O0o]me\x20\Qoffe\E, ' br'b')
|
||||
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/16713
|
||||
re.compile(
|
||||
"["
|
||||
"\U0001F600-\U0001F64F" # emoticons
|
||||
"\U0001F300-\U0001F5FF" # symbols & pictographs
|
||||
"\U0001F680-\U0001F6FF" # transport & map symbols
|
||||
"\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||||
"\U00002702-\U000027B0"
|
||||
"\U000024C2-\U0001F251"
|
||||
"\u200d" # zero width joiner
|
||||
"\u200c" # zero width non-joiner
|
||||
"\\u200c" # must not be escaped in a raw string
|
||||
"]+",
|
||||
flags=re.UNICODE,
|
||||
)
|
||||
|
||||
3  crates/ruff_linter/resources/test/fixtures/ruff/RUF039_py_version_sensitive.py  vendored  Normal file
@@ -0,0 +1,3 @@
|
||||
import re
|
||||
|
||||
re.compile("\N{Partial Differential}") # with unsafe fix if python target is 3.8 or higher, else without fix
|
||||
@@ -1039,14 +1039,10 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
flake8_simplify::rules::zip_dict_keys_and_values(checker, call);
|
||||
}
|
||||
if checker.any_rule_enabled(&[
|
||||
Rule::OsChmod,
|
||||
Rule::OsMkdir,
|
||||
Rule::OsMakedirs,
|
||||
Rule::OsRename,
|
||||
Rule::OsReplace,
|
||||
Rule::OsStat,
|
||||
Rule::OsPathJoin,
|
||||
Rule::OsPathSamefile,
|
||||
Rule::OsPathSplitext,
|
||||
Rule::BuiltinOpen,
|
||||
Rule::PyPath,
|
||||
@@ -1112,6 +1108,18 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
if checker.is_rule_enabled(Rule::OsGetcwd) {
|
||||
flake8_use_pathlib::rules::os_getcwd(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsChmod) {
|
||||
flake8_use_pathlib::rules::os_chmod(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsRename) {
|
||||
flake8_use_pathlib::rules::os_rename(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsReplace) {
|
||||
flake8_use_pathlib::rules::os_replace(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsPathSamefile) {
|
||||
flake8_use_pathlib::rules::os_path_samefile(checker, call, segments);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::PathConstructorCurrentDirectory) {
|
||||
flake8_use_pathlib::rules::path_constructor_current_directory(
|
||||
checker, call, segments,
|
||||
|
||||
@@ -3216,6 +3216,11 @@ impl<'a> LintContext<'a> {
|
||||
pub(crate) fn iter(&mut self) -> impl Iterator<Item = &Diagnostic> {
|
||||
self.diagnostics.get_mut().iter()
|
||||
}
|
||||
|
||||
/// The [`LinterSettings`] for the current analysis, including the enabled rules.
|
||||
pub(crate) const fn settings(&self) -> &LinterSettings {
|
||||
self.settings
|
||||
}
|
||||
}
|
||||
|
||||
/// An abstraction for mutating a diagnostic.
|
||||
|
||||
@@ -16,7 +16,6 @@ use crate::rules::{
|
||||
eradicate, flake8_commas, flake8_executable, flake8_fixme, flake8_implicit_str_concat,
|
||||
flake8_pyi, flake8_todos, pycodestyle, pygrep_hooks, pylint, pyupgrade, ruff,
|
||||
};
|
||||
use crate::settings::LinterSettings;
|
||||
|
||||
use super::ast::LintContext;
|
||||
|
||||
@@ -27,7 +26,6 @@ pub(crate) fn check_tokens(
|
||||
locator: &Locator,
|
||||
indexer: &Indexer,
|
||||
stylist: &Stylist,
|
||||
settings: &LinterSettings,
|
||||
source_type: PySourceType,
|
||||
cell_offsets: Option<&CellOffsets>,
|
||||
context: &mut LintContext,
|
||||
@@ -42,15 +40,8 @@ pub(crate) fn check_tokens(
|
||||
Rule::BlankLinesAfterFunctionOrClass,
|
||||
Rule::BlankLinesBeforeNestedDefinition,
|
||||
]) {
|
||||
BlankLinesChecker::new(
|
||||
locator,
|
||||
stylist,
|
||||
settings,
|
||||
source_type,
|
||||
cell_offsets,
|
||||
context,
|
||||
)
|
||||
.check_lines(tokens);
|
||||
BlankLinesChecker::new(locator, stylist, source_type, cell_offsets, context)
|
||||
.check_lines(tokens);
|
||||
}
|
||||
|
||||
if context.is_rule_enabled(Rule::BlanketTypeIgnore) {
|
||||
@@ -58,17 +49,17 @@ pub(crate) fn check_tokens(
|
||||
}
|
||||
|
||||
if context.is_rule_enabled(Rule::EmptyComment) {
|
||||
pylint::rules::empty_comments(context, comment_ranges, locator);
|
||||
pylint::rules::empty_comments(context, comment_ranges, locator, indexer);
|
||||
}
|
||||
|
||||
if context.is_rule_enabled(Rule::AmbiguousUnicodeCharacterComment) {
|
||||
for range in comment_ranges {
|
||||
ruff::rules::ambiguous_unicode_character_comment(context, locator, range, settings);
|
||||
ruff::rules::ambiguous_unicode_character_comment(context, locator, range);
|
||||
}
|
||||
}
|
||||
|
||||
if context.is_rule_enabled(Rule::CommentedOutCode) {
|
||||
eradicate::rules::commented_out_code(context, locator, comment_ranges, settings);
|
||||
eradicate::rules::commented_out_code(context, locator, comment_ranges);
|
||||
}
|
||||
|
||||
if context.is_rule_enabled(Rule::UTF8EncodingDeclaration) {
|
||||
@@ -110,7 +101,7 @@ pub(crate) fn check_tokens(
|
||||
Rule::SingleLineImplicitStringConcatenation,
|
||||
Rule::MultiLineImplicitStringConcatenation,
|
||||
]) {
|
||||
flake8_implicit_str_concat::rules::implicit(context, tokens, locator, indexer, settings);
|
||||
flake8_implicit_str_concat::rules::implicit(context, tokens, locator, indexer);
|
||||
}
|
||||
|
||||
if context.any_rule_enabled(&[
|
||||
|
||||
@@ -920,11 +920,11 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
|
||||
// flake8-use-pathlib
|
||||
(Flake8UsePathlib, "100") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathAbspath),
|
||||
(Flake8UsePathlib, "101") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsChmod),
|
||||
(Flake8UsePathlib, "101") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsChmod),
|
||||
(Flake8UsePathlib, "102") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsMkdir),
|
||||
(Flake8UsePathlib, "103") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsMakedirs),
|
||||
(Flake8UsePathlib, "104") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsRename),
|
||||
(Flake8UsePathlib, "105") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsReplace),
|
||||
(Flake8UsePathlib, "104") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRename),
|
||||
(Flake8UsePathlib, "105") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsReplace),
|
||||
(Flake8UsePathlib, "106") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRmdir),
|
||||
(Flake8UsePathlib, "107") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRemove),
|
||||
(Flake8UsePathlib, "108") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsUnlink),
|
||||
@@ -940,7 +940,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8UsePathlib, "118") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathJoin),
|
||||
(Flake8UsePathlib, "119") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathBasename),
|
||||
(Flake8UsePathlib, "120") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathDirname),
|
||||
(Flake8UsePathlib, "121") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathSamefile),
|
||||
(Flake8UsePathlib, "121") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathSamefile),
|
||||
(Flake8UsePathlib, "122") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathSplitext),
|
||||
(Flake8UsePathlib, "123") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::BuiltinOpen),
|
||||
(Flake8UsePathlib, "124") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::PyPath),
|
||||
|
||||
@@ -188,7 +188,6 @@ pub fn check_path(
|
||||
locator,
|
||||
indexer,
|
||||
stylist,
|
||||
settings,
|
||||
source_type,
|
||||
source_kind.as_ipy_notebook().map(Notebook::cell_offsets),
|
||||
&mut context,
|
||||
@@ -473,7 +472,7 @@ pub fn lint_only(
|
||||
&& !is_py314_support_enabled(settings)
|
||||
{
|
||||
warn_user_once!(
|
||||
"Support for Python 3.14 is under development and may be unstable. Enable `preview` to remove this warning."
|
||||
"Support for Python 3.14 is in preview and may undergo breaking changes. Enable `preview` to remove this warning."
|
||||
);
|
||||
}
|
||||
|
||||
@@ -584,7 +583,7 @@ pub fn lint_fix<'a>(
|
||||
&& !is_py314_support_enabled(settings)
|
||||
{
|
||||
warn_user_once!(
|
||||
"Support for Python 3.14 is under development and may be unstable. Enable `preview` to remove this warning."
|
||||
"Support for Python 3.14 is in preview and may undergo breaking changes. Enable `preview` to remove this warning."
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -118,86 +118,6 @@ impl<'a> Locator<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Finds the closest [`TextSize`] not less than the offset given for which
|
||||
/// `is_char_boundary` is `true`. Unless the offset given is greater than
|
||||
/// the length of the underlying contents, in which case, the length of the
|
||||
/// contents is returned.
|
||||
///
|
||||
/// Can be replaced with `str::ceil_char_boundary` once it's stable.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// From `std`:
|
||||
///
|
||||
/// ```
|
||||
/// use ruff_text_size::{Ranged, TextSize};
|
||||
/// use ruff_linter::Locator;
|
||||
///
|
||||
/// let locator = Locator::new("❤️🧡💛💚💙💜");
|
||||
/// assert_eq!(locator.text_len(), TextSize::from(26));
|
||||
/// assert!(!locator.contents().is_char_boundary(13));
|
||||
///
|
||||
/// let closest = locator.ceil_char_boundary(TextSize::from(13));
|
||||
/// assert_eq!(closest, TextSize::from(14));
|
||||
/// assert_eq!(&locator.contents()[..closest.to_usize()], "❤️🧡💛");
|
||||
/// ```
|
||||
///
|
||||
/// Additional examples:
|
||||
///
|
||||
/// ```
|
||||
/// use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
/// use ruff_linter::Locator;
|
||||
///
|
||||
/// let locator = Locator::new("Hello");
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// locator.ceil_char_boundary(TextSize::from(0)),
|
||||
/// TextSize::from(0)
|
||||
/// );
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// locator.ceil_char_boundary(TextSize::from(5)),
|
||||
/// TextSize::from(5)
|
||||
/// );
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// locator.ceil_char_boundary(TextSize::from(6)),
|
||||
/// TextSize::from(5)
|
||||
/// );
|
||||
///
|
||||
/// let locator = Locator::new("α");
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// locator.ceil_char_boundary(TextSize::from(0)),
|
||||
/// TextSize::from(0)
|
||||
/// );
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// locator.ceil_char_boundary(TextSize::from(1)),
|
||||
/// TextSize::from(2)
|
||||
/// );
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// locator.ceil_char_boundary(TextSize::from(2)),
|
||||
/// TextSize::from(2)
|
||||
/// );
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// locator.ceil_char_boundary(TextSize::from(3)),
|
||||
/// TextSize::from(2)
|
||||
/// );
|
||||
/// ```
|
||||
pub fn ceil_char_boundary(&self, offset: TextSize) -> TextSize {
|
||||
let upper_bound = offset
|
||||
.to_u32()
|
||||
.saturating_add(4)
|
||||
.min(self.text_len().to_u32());
|
||||
(offset.to_u32()..upper_bound)
|
||||
.map(TextSize::from)
|
||||
.find(|offset| self.contents.is_char_boundary(offset.to_usize()))
|
||||
.unwrap_or_else(|| TextSize::from(upper_bound))
|
||||
}
|
||||
|
||||
/// Take the source code between the given [`TextRange`].
|
||||
#[inline]
|
||||
pub fn slice<T: Ranged>(&self, ranged: T) -> &'a str {
|
||||
|
||||
@@ -75,12 +75,13 @@ where
|
||||
);
|
||||
|
||||
let span = Span::from(file).with_range(range);
|
||||
let mut annotation = Annotation::primary(span);
|
||||
if let Some(suggestion) = suggestion {
|
||||
annotation = annotation.message(suggestion);
|
||||
}
|
||||
let annotation = Annotation::primary(span);
|
||||
diagnostic.annotate(annotation);
|
||||
|
||||
if let Some(suggestion) = suggestion {
|
||||
diagnostic.help(suggestion);
|
||||
}
|
||||
|
||||
if let Some(fix) = fix {
|
||||
diagnostic.set_fix(fix);
|
||||
}
|
||||
|
||||
@@ -6,13 +6,13 @@ use bitflags::bitflags;
|
||||
use colored::Colorize;
|
||||
use ruff_annotate_snippets::{Level, Renderer, Snippet};
|
||||
|
||||
use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
|
||||
use ruff_db::diagnostic::{
|
||||
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, SecondaryCode, ceil_char_boundary,
|
||||
};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed};
|
||||
use ruff_source_file::OneIndexed;
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::Locator;
|
||||
use crate::fs::relativize_path;
|
||||
use crate::line_width::{IndentWidth, LineWidthBuilder};
|
||||
use crate::message::diff::Diff;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
@@ -21,8 +21,6 @@ use crate::settings::types::UnsafeFixes;
|
||||
bitflags! {
|
||||
#[derive(Default)]
|
||||
struct EmitterFlags: u8 {
|
||||
/// Whether to show the fix status of a diagnostic.
|
||||
const SHOW_FIX_STATUS = 1 << 0;
|
||||
/// Whether to show the diff of a fix, for diagnostics that have a fix.
|
||||
const SHOW_FIX_DIFF = 1 << 1;
|
||||
/// Whether to show the source code of a diagnostic.
|
||||
@@ -30,17 +28,27 @@ bitflags! {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct TextEmitter {
|
||||
flags: EmitterFlags,
|
||||
unsafe_fixes: UnsafeFixes,
|
||||
config: DisplayDiagnosticConfig,
|
||||
}
|
||||
|
||||
impl Default for TextEmitter {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
flags: EmitterFlags::default(),
|
||||
config: DisplayDiagnosticConfig::default()
|
||||
.format(DiagnosticFormat::Concise)
|
||||
.hide_severity(true)
|
||||
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TextEmitter {
|
||||
#[must_use]
|
||||
pub fn with_show_fix_status(mut self, show_fix_status: bool) -> Self {
|
||||
self.flags
|
||||
.set(EmitterFlags::SHOW_FIX_STATUS, show_fix_status);
|
||||
self.config = self.config.show_fix_status(show_fix_status);
|
||||
self
|
||||
}
|
||||
|
||||
@@ -58,7 +66,21 @@ impl TextEmitter {
|
||||
|
||||
#[must_use]
|
||||
pub fn with_unsafe_fixes(mut self, unsafe_fixes: UnsafeFixes) -> Self {
|
||||
self.unsafe_fixes = unsafe_fixes;
|
||||
self.config = self
|
||||
.config
|
||||
.fix_applicability(unsafe_fixes.required_applicability());
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_preview(mut self, preview: bool) -> Self {
|
||||
self.config = self.config.preview(preview);
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_color(mut self, color: bool) -> Self {
|
||||
self.config = self.config.color(color);
|
||||
self
|
||||
}
|
||||
}
|
||||
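Taken together, the builder methods above now just forward their arguments into the `DisplayDiagnosticConfig` held by the emitter. A hedged usage sketch of wiring them up (module paths and option values are illustrative, not taken from this diff):

```rust
use crate::message::TextEmitter;
use crate::settings::types::UnsafeFixes;

/// Sketch only: build a concise text emitter from the builders shown above.
fn build_emitter(unsafe_fixes: UnsafeFixes) -> TextEmitter {
    TextEmitter::default()
        .with_show_fix_status(true) // annotate diagnostics that have an applicable fix
        .with_unsafe_fixes(unsafe_fixes) // minimum applicability required to count as fixable
        .with_preview(false) // preview-only behavior disabled
        .with_color(false) // plain output, e.g. when writing to a file
}
```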
@@ -71,51 +93,10 @@ impl Emitter for TextEmitter {
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for message in diagnostics {
|
||||
write!(writer, "{}", message.display(context, &self.config))?;
|
||||
|
||||
let filename = message.expect_ruff_filename();
|
||||
write!(
|
||||
writer,
|
||||
"{path}{sep}",
|
||||
path = relativize_path(&filename).bold(),
|
||||
sep = ":".cyan(),
|
||||
)?;
|
||||
|
||||
let start_location = message.expect_ruff_start_location();
|
||||
let notebook_index = context.notebook_index(&filename);
|
||||
|
||||
// Check if we're working on a Jupyter notebook and, if so, translate positions relative to the containing cell.
|
||||
let diagnostic_location = if let Some(notebook_index) = notebook_index {
|
||||
write!(
|
||||
writer,
|
||||
"cell {cell}{sep}",
|
||||
cell = notebook_index
|
||||
.cell(start_location.line)
|
||||
.unwrap_or(OneIndexed::MIN),
|
||||
sep = ":".cyan(),
|
||||
)?;
|
||||
|
||||
LineColumn {
|
||||
line: notebook_index
|
||||
.cell_row(start_location.line)
|
||||
.unwrap_or(OneIndexed::MIN),
|
||||
column: start_location.column,
|
||||
}
|
||||
} else {
|
||||
start_location
|
||||
};
|
||||
|
||||
writeln!(
|
||||
writer,
|
||||
"{row}{sep}{col}{sep} {code_and_body}",
|
||||
row = diagnostic_location.line,
|
||||
col = diagnostic_location.column,
|
||||
sep = ":".cyan(),
|
||||
code_and_body = RuleCodeAndBody {
|
||||
message,
|
||||
show_fix_status: self.flags.intersects(EmitterFlags::SHOW_FIX_STATUS),
|
||||
unsafe_fixes: self.unsafe_fixes,
|
||||
}
|
||||
)?;
|
||||
|
||||
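// For reference, the line assembled above matches the concise snapshot format
// seen later in this comparison, e.g.:
//     full_name.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`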
if self.flags.intersects(EmitterFlags::SHOW_SOURCE) {
|
||||
// The `0..0` range is used to highlight file-level diagnostics.
|
||||
if message.expect_range() != TextRange::default() {
|
||||
@@ -186,7 +167,7 @@ pub(super) struct MessageCodeFrame<'a> {
|
||||
|
||||
impl Display for MessageCodeFrame<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let suggestion = self.message.suggestion();
|
||||
let suggestion = self.message.first_help_text();
|
||||
let footers = if let Some(suggestion) = suggestion {
|
||||
vec![Level::Help.title(suggestion)]
|
||||
} else {
|
||||
@@ -396,9 +377,8 @@ impl<'a> SourceCode<'a> {
    if self.text.as_bytes()[self.annotation_range.start().to_usize() - 1] != b'\n' {
        return self;
    }
    let locator = Locator::new(&self.text);
    let start = self.annotation_range.start();
    let end = locator.ceil_char_boundary(start + TextSize::from(1));
    let end = ceil_char_boundary(&self.text, start + TextSize::from(1));
    SourceCode {
        annotation_range: TextRange::new(start, end),
        ..self

@@ -134,6 +134,26 @@ pub(crate) const fn is_fix_os_path_dirname_enabled(settings: &LinterSettings) ->
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19404
pub(crate) const fn is_fix_os_chmod_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19404
pub(crate) const fn is_fix_os_rename_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19404
pub(crate) const fn is_fix_os_replace_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19404
pub(crate) const fn is_fix_os_path_samefile_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19245
pub(crate) const fn is_fix_os_getcwd_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
@@ -205,3 +225,8 @@ pub(crate) const fn is_assert_raises_exception_call_enabled(settings: &LinterSet
pub(crate) const fn is_add_future_annotations_imports_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19390
pub(crate) const fn is_trailing_comma_type_params_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

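These gates are consulted at the rule call sites later in this change set; for instance, the new `os_chmod` rule passes the gate's result straight into the shared helper, so the autofix is only constructed when preview mode is enabled. Excerpt of that call (shown in full further below):

```rust
check_os_pathlib_two_arg_calls(
    checker,
    call,
    "chmod",
    "path",
    "mode",
    is_fix_os_chmod_enabled(checker.settings()),
    OsChmod,
);
```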
@@ -5,7 +5,6 @@ use ruff_text_size::TextRange;
|
||||
|
||||
use crate::Locator;
|
||||
use crate::checkers::ast::LintContext;
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::{Edit, Fix, FixAvailability, Violation};
|
||||
|
||||
use crate::rules::eradicate::detection::comment_contains_code;
|
||||
@@ -51,7 +50,6 @@ pub(crate) fn commented_out_code(
|
||||
context: &LintContext,
|
||||
locator: &Locator,
|
||||
comment_ranges: &CommentRanges,
|
||||
settings: &LinterSettings,
|
||||
) {
|
||||
let mut comments = comment_ranges.into_iter().peekable();
|
||||
// Iterate over all comments in the document.
|
||||
@@ -65,7 +63,9 @@ pub(crate) fn commented_out_code(
|
||||
}
|
||||
|
||||
// Verify that the comment is on its own line, and that it contains code.
|
||||
if is_own_line_comment(line) && comment_contains_code(line, &settings.task_tags[..]) {
|
||||
if is_own_line_comment(line)
|
||||
&& comment_contains_code(line, &context.settings().task_tags[..])
|
||||
{
|
||||
if let Some(mut diagnostic) =
|
||||
context.report_diagnostic_if_enabled(CommentedOutCode, range)
|
||||
{
|
||||
|
||||
@@ -47,9 +47,10 @@ use crate::checkers::ast::Checker;
|
||||
/// raise
|
||||
/// ```
|
||||
///
|
||||
/// Exceptions that are logged via `logging.exception()` or `logging.error()`
|
||||
/// with `exc_info` enabled will _not_ be flagged, as this is a common pattern
|
||||
/// for propagating exception traces:
|
||||
/// Exceptions that are logged via `logging.exception()` or are logged via
|
||||
/// `logging.error()` or `logging.critical()` with `exc_info` enabled will
|
||||
/// _not_ be flagged, as this is a common pattern for propagating exception
|
||||
/// traces:
|
||||
/// ```python
|
||||
/// try:
|
||||
/// foo()
|
||||
@@ -201,7 +202,7 @@ impl<'a> StatementVisitor<'a> for LogExceptionVisitor<'a> {
|
||||
) {
|
||||
if match attr.as_str() {
|
||||
"exception" => true,
|
||||
"error" => arguments
|
||||
"error" | "critical" => arguments
|
||||
.find_keyword("exc_info")
|
||||
.is_some_and(|keyword| is_const_true(&keyword.value)),
|
||||
_ => false,
|
||||
@@ -214,7 +215,7 @@ impl<'a> StatementVisitor<'a> for LogExceptionVisitor<'a> {
|
||||
if self.semantic.resolve_qualified_name(func).is_some_and(
|
||||
|qualified_name| match qualified_name.segments() {
|
||||
["logging", "exception"] => true,
|
||||
["logging", "error"] => arguments
|
||||
["logging", "error" | "critical"] => arguments
|
||||
.find_keyword("exc_info")
|
||||
.is_some_and(|keyword| is_const_true(&keyword.value)),
|
||||
_ => false,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_blind_except/mod.rs
|
||||
snapshot_kind: text
|
||||
---
|
||||
BLE.py:25:8: BLE001 Do not catch blind exception: `BaseException`
|
||||
|
|
||||
@@ -121,3 +120,30 @@ BLE.py:113:8: BLE001 Do not catch blind exception: `Exception`
|
||||
| ^^^^^^^^^ BLE001
|
||||
114 | error("...", exc_info=None)
|
||||
|
|
||||
|
||||
BLE.py:119:8: BLE001 Do not catch blind exception: `Exception`
|
||||
|
|
||||
117 | try:
|
||||
118 | pass
|
||||
119 | except Exception:
|
||||
| ^^^^^^^^^ BLE001
|
||||
120 | critical("...")
|
||||
|
|
||||
|
||||
BLE.py:125:8: BLE001 Do not catch blind exception: `Exception`
|
||||
|
|
||||
123 | try:
|
||||
124 | pass
|
||||
125 | except Exception:
|
||||
| ^^^^^^^^^ BLE001
|
||||
126 | critical("...", exc_info=False)
|
||||
|
|
||||
|
||||
BLE.py:131:8: BLE001 Do not catch blind exception: `Exception`
|
||||
|
|
||||
129 | try:
|
||||
130 | pass
|
||||
131 | except Exception:
|
||||
| ^^^^^^^^^ BLE001
|
||||
132 | critical("...", exc_info=None)
|
||||
|
|
||||
|
||||
@@ -27,4 +27,23 @@ mod tests {
|
||||
assert_diagnostics!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("COM81.py"))]
|
||||
#[test_case(Path::new("COM81_syntax_error.py"))]
|
||||
fn preview_rules(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("preview__{}", path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
Path::new("flake8_commas").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
preview: crate::settings::types::PreviewMode::Enabled,
|
||||
..settings::LinterSettings::for_rules(vec![
|
||||
Rule::MissingTrailingComma,
|
||||
Rule::TrailingCommaOnBareTuple,
|
||||
Rule::ProhibitedTrailingComma,
|
||||
])
|
||||
},
|
||||
)?;
|
||||
assert_diagnostics!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,8 @@ use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::Locator;
|
||||
use crate::checkers::ast::LintContext;
|
||||
use crate::preview::is_trailing_comma_type_params_enabled;
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::{AlwaysFixableViolation, Violation};
|
||||
use crate::{Edit, Fix};
|
||||
|
||||
@@ -24,6 +26,8 @@ enum TokenType {
|
||||
Def,
|
||||
For,
|
||||
Lambda,
|
||||
Class,
|
||||
Type,
|
||||
Irrelevant,
|
||||
}
|
||||
|
||||
@@ -69,6 +73,8 @@ impl From<(TokenKind, TextRange)> for SimpleToken {
|
||||
TokenKind::Lbrace => TokenType::OpeningCurlyBracket,
|
||||
TokenKind::Rbrace => TokenType::ClosingBracket,
|
||||
TokenKind::Def => TokenType::Def,
|
||||
TokenKind::Class => TokenType::Class,
|
||||
TokenKind::Type => TokenType::Type,
|
||||
TokenKind::For => TokenType::For,
|
||||
TokenKind::Lambda => TokenType::Lambda,
|
||||
// Import treated like a function.
|
||||
@@ -98,6 +104,8 @@ enum ContextType {
|
||||
Dict,
|
||||
/// Lambda parameter list, e.g. `lambda a, b`.
|
||||
LambdaParameters,
|
||||
/// Type parameter list, e.g. `def foo[T, U](): ...`
|
||||
TypeParameters,
|
||||
}
|
||||
|
||||
/// Comma context - describes a comma-delimited "situation".
|
||||
@@ -290,7 +298,7 @@ pub(crate) fn trailing_commas(
|
||||
}
|
||||
|
||||
// Update the comma context stack.
|
||||
let context = update_context(token, prev, prev_prev, &mut stack);
|
||||
let context = update_context(token, prev, prev_prev, &mut stack, lint_context.settings());
|
||||
|
||||
check_token(token, prev, prev_prev, context, locator, lint_context);
|
||||
|
||||
@@ -326,6 +334,7 @@ fn check_token(
|
||||
ContextType::No => false,
|
||||
ContextType::FunctionParameters => true,
|
||||
ContextType::CallArguments => true,
|
||||
ContextType::TypeParameters => true,
|
||||
// `(1)` is not equivalent to `(1,)`.
|
||||
ContextType::Tuple => context.num_commas != 0,
|
||||
// `x[1]` is not equivalent to `x[1,]`.
|
||||
@@ -408,6 +417,7 @@ fn update_context(
|
||||
prev: SimpleToken,
|
||||
prev_prev: SimpleToken,
|
||||
stack: &mut Vec<Context>,
|
||||
settings: &LinterSettings,
|
||||
) -> Context {
|
||||
let new_context = match token.ty {
|
||||
TokenType::OpeningBracket => match (prev.ty, prev_prev.ty) {
|
||||
@@ -417,6 +427,17 @@ fn update_context(
|
||||
}
|
||||
_ => Context::new(ContextType::Tuple),
|
||||
},
|
||||
TokenType::OpeningSquareBracket if is_trailing_comma_type_params_enabled(settings) => {
|
||||
match (prev.ty, prev_prev.ty) {
|
||||
(TokenType::Named, TokenType::Def | TokenType::Class | TokenType::Type) => {
|
||||
Context::new(ContextType::TypeParameters)
|
||||
}
|
||||
(TokenType::ClosingBracket | TokenType::Named | TokenType::String, _) => {
|
||||
Context::new(ContextType::Subscript)
|
||||
}
|
||||
_ => Context::new(ContextType::List),
|
||||
}
|
||||
}
|
||||
TokenType::OpeningSquareBracket => match prev.ty {
|
||||
TokenType::ClosingBracket | TokenType::Named | TokenType::String => {
|
||||
Context::new(ContextType::Subscript)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,30 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_commas/mod.rs
|
||||
---
|
||||
COM81_syntax_error.py:3:5: SyntaxError: Starred expression cannot be used here
|
||||
|
|
||||
1 | # Check for `flake8-commas` violation for a file containing syntax errors.
|
||||
2 | (
|
||||
3 | *args
|
||||
| ^^^^^
|
||||
4 | )
|
||||
|
|
||||
|
||||
COM81_syntax_error.py:6:9: SyntaxError: Type parameter list cannot be empty
|
||||
|
|
||||
4 | )
|
||||
5 |
|
||||
6 | def foo[(param1='test', param2='test',):
|
||||
| ^
|
||||
7 | pass
|
||||
|
|
||||
|
||||
COM81_syntax_error.py:6:38: COM819 Trailing comma prohibited
|
||||
|
|
||||
4 | )
|
||||
5 |
|
||||
6 | def foo[(param1='test', param2='test',):
|
||||
| ^ COM819
|
||||
7 | pass
|
||||
|
|
||||
= help: Remove trailing comma
|
||||
@@ -11,7 +11,6 @@ use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::Locator;
|
||||
use crate::checkers::ast::LintContext;
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::{Edit, Fix, FixAvailability, Violation};
|
||||
|
||||
/// ## What it does
|
||||
@@ -108,13 +107,15 @@ pub(crate) fn implicit(
|
||||
tokens: &Tokens,
|
||||
locator: &Locator,
|
||||
indexer: &Indexer,
|
||||
settings: &LinterSettings,
|
||||
) {
|
||||
for (a_token, b_token) in tokens
|
||||
.iter()
|
||||
.filter(|token| {
|
||||
token.kind() != TokenKind::Comment
|
||||
&& (settings.flake8_implicit_str_concat.allow_multiline
|
||||
&& (context
|
||||
.settings()
|
||||
.flake8_implicit_str_concat
|
||||
.allow_multiline
|
||||
|| token.kind() != TokenKind::NonLogicalNewline)
|
||||
})
|
||||
.tuple_windows()
|
||||
|
||||
@@ -64,6 +64,7 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
let mut union_type = UnionKind::TypingUnion;
|
||||
let mut optional_present = false;
|
||||
// Adds a member to `literal_exprs` if it is a `Literal` annotation
|
||||
let mut check_for_duplicate_members = |expr: &'a Expr, parent: &'a Expr| {
|
||||
if matches!(parent, Expr::BinOp(_)) {
|
||||
@@ -74,6 +75,7 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
|
||||
&& is_optional_type(checker, expr)
|
||||
{
|
||||
// If the union member is an `Optional`, add a virtual `None` literal.
|
||||
optional_present = true;
|
||||
&VIRTUAL_NONE_LITERAL
|
||||
} else {
|
||||
expr
|
||||
@@ -87,7 +89,7 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
|
||||
DuplicateUnionMember {
|
||||
duplicate_name: checker.generator().expr(virtual_expr),
|
||||
},
|
||||
// Use the real expression's range for diagnostics,
|
||||
// Use the real expression's range for diagnostics.
|
||||
expr.range(),
|
||||
));
|
||||
}
|
||||
@@ -104,6 +106,13 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Do not reduce `Union[None, ..., None]` to avoid unintentionally introducing a `TypeError`:
// e.g., if `isinstance(None, Union[None, None])` were reduced to `isinstance(None, None)`, it
// would raise `TypeError: isinstance() arg 2 must be a type, a tuple of types, or a union`.
|
||||
if unique_nodes.iter().all(|expr| expr.is_none_literal_expr()) && !optional_present {
|
||||
return;
|
||||
}
|
||||
|
||||
// Mark [`Fix`] as unsafe when comments are in range.
|
||||
let applicability = if checker.comment_ranges().intersects(expr.range()) {
|
||||
Applicability::Unsafe
|
||||
|
||||
@@ -974,6 +974,8 @@ PYI016.py:143:61: PYI016 [*] Duplicate union member `complex`
|
||||
143 |-field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
143 |+field48: typing.Union[typing.Optional[complex], complex]
|
||||
144 144 | field49: typing.Optional[complex | complex] | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
||||
PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
|
|
||||
@@ -981,6 +983,8 @@ PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 | field49: typing.Optional[complex | complex] | complex
|
||||
| ^^^^^^^ PYI016
|
||||
145 |
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
|
||||
= help: Remove duplicate union member `complex`
|
||||
|
||||
@@ -990,3 +994,15 @@ PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
143 143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 |-field49: typing.Optional[complex | complex] | complex
|
||||
144 |+field49: typing.Optional[complex] | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 147 | # Should throw duplicate union member but not fix
|
||||
|
||||
PYI016.py:148:37: PYI016 Duplicate union member `None`
|
||||
|
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 | # Should throw duplicate union member but not fix
|
||||
148 | isinstance(None, typing.Union[None, None])
|
||||
| ^^^^ PYI016
|
||||
|
|
||||
= help: Remove duplicate union member `None`
|
||||
|
||||
@@ -1162,6 +1162,8 @@ PYI016.py:143:61: PYI016 [*] Duplicate union member `complex`
|
||||
143 |-field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
143 |+field48: typing.Union[None, complex]
|
||||
144 144 | field49: typing.Optional[complex | complex] | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
||||
PYI016.py:143:72: PYI016 [*] Duplicate union member `complex`
|
||||
|
|
||||
@@ -1179,6 +1181,8 @@ PYI016.py:143:72: PYI016 [*] Duplicate union member `complex`
|
||||
143 |-field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
143 |+field48: typing.Union[None, complex]
|
||||
144 144 | field49: typing.Optional[complex | complex] | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
||||
PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
|
|
||||
@@ -1186,6 +1190,8 @@ PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 | field49: typing.Optional[complex | complex] | complex
|
||||
| ^^^^^^^ PYI016
|
||||
145 |
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
|
||||
= help: Remove duplicate union member `complex`
|
||||
|
||||
@@ -1195,6 +1201,9 @@ PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
||||
143 143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 |-field49: typing.Optional[complex | complex] | complex
|
||||
144 |+field49: None | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 147 | # Should throw duplicate union member but not fix
|
||||
|
||||
PYI016.py:144:47: PYI016 [*] Duplicate union member `complex`
|
||||
|
|
||||
@@ -1202,6 +1211,8 @@ PYI016.py:144:47: PYI016 [*] Duplicate union member `complex`
|
||||
143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 | field49: typing.Optional[complex | complex] | complex
|
||||
| ^^^^^^^ PYI016
|
||||
145 |
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
|
|
||||
= help: Remove duplicate union member `complex`
|
||||
|
||||
@@ -1211,3 +1222,15 @@ PYI016.py:144:47: PYI016 [*] Duplicate union member `complex`
|
||||
143 143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
|
||||
144 |-field49: typing.Optional[complex | complex] | complex
|
||||
144 |+field49: None | complex
|
||||
145 145 |
|
||||
146 146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 147 | # Should throw duplicate union member but not fix
|
||||
|
||||
PYI016.py:148:37: PYI016 Duplicate union member `None`
|
||||
|
|
||||
146 | # Regression test for https://github.com/astral-sh/ruff/issues/19403
|
||||
147 | # Should throw duplicate union member but not fix
|
||||
148 | isinstance(None, typing.Union[None, None])
|
||||
| ^^^^ PYI016
|
||||
|
|
||||
= help: Remove duplicate union member `None`
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::importer::ImportRequest;
|
||||
use crate::{Applicability, Edit, Fix, Violation};
|
||||
use ruff_python_ast::{self as ast};
|
||||
use ruff_python_ast::{Expr, ExprCall};
|
||||
use ruff_python_ast::{self as ast, Expr, ExprCall};
|
||||
use ruff_python_semantic::{SemanticModel, analyze::typing};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
pub(crate) fn is_keyword_only_argument_non_default(arguments: &ast::Arguments, name: &str) -> bool {
|
||||
@@ -72,3 +72,85 @@ pub(crate) fn check_os_pathlib_single_arg_calls(
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_name_expr(expr: &Expr) -> Option<&ast::ExprName> {
|
||||
match expr {
|
||||
Expr::Name(name) => Some(name),
|
||||
Expr::Call(ExprCall { func, .. }) => get_name_expr(func),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the given expression looks like a file descriptor, i.e., if it is an integer.
|
||||
pub(crate) fn is_file_descriptor(expr: &Expr, semantic: &SemanticModel) -> bool {
|
||||
if matches!(
|
||||
expr,
|
||||
Expr::NumberLiteral(ast::ExprNumberLiteral {
|
||||
value: ast::Number::Int(_),
|
||||
..
|
||||
})
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let Some(name) = get_name_expr(expr) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let Some(binding) = semantic.only_binding(name).map(|id| semantic.binding(id)) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
typing::is_int(binding, semantic)
|
||||
}
|
||||
|
||||
pub(crate) fn check_os_pathlib_two_arg_calls(
|
||||
checker: &Checker,
|
||||
call: &ExprCall,
|
||||
attr: &str,
|
||||
path_arg: &str,
|
||||
second_arg: &str,
|
||||
fix_enabled: bool,
|
||||
violation: impl Violation,
|
||||
) {
|
||||
let range = call.range();
|
||||
let mut diagnostic = checker.report_diagnostic(violation, call.func.range());
|
||||
|
||||
let (Some(path_expr), Some(second_expr)) = (
|
||||
call.arguments.find_argument_value(path_arg, 0),
|
||||
call.arguments.find_argument_value(second_arg, 1),
|
||||
) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let path_code = checker.locator().slice(path_expr.range());
|
||||
let second_code = checker.locator().slice(second_expr.range());
|
||||
|
||||
if fix_enabled {
|
||||
diagnostic.try_set_fix(|| {
|
||||
let (import_edit, binding) = checker.importer().get_or_import_symbol(
|
||||
&ImportRequest::import("pathlib", "Path"),
|
||||
call.start(),
|
||||
checker.semantic(),
|
||||
)?;
|
||||
|
||||
let replacement = if is_pathlib_path_call(checker, path_expr) {
|
||||
format!("{path_code}.{attr}({second_code})")
|
||||
} else {
|
||||
format!("{binding}({path_code}).{attr}({second_code})")
|
||||
};
|
||||
|
||||
let applicability = if checker.comment_ranges().intersects(range) {
|
||||
Applicability::Unsafe
|
||||
} else {
|
||||
Applicability::Safe
|
||||
};
|
||||
|
||||
Ok(Fix::applicable_edits(
|
||||
Edit::range_replacement(replacement, range),
|
||||
[import_edit],
|
||||
applicability,
|
||||
))
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
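For reference, the `format!` call inside the helper above assembles the replacement text that the fix writes back into the source. A self-contained illustration with example values (the literals are illustrative only; the format string itself is the one used above):

```rust
fn main() {
    // What the helper builds when the path argument is not already a `Path(...)` call.
    let binding = "Path"; // symbol returned by the importer
    let attr = "rename"; // `Path` method corresponding to the `os` function
    let path_code = r#""old.py""#; // source text of the first argument
    let second_code = r#""new.py""#; // source text of the second argument
    let replacement = format!("{binding}({path_code}).{attr}({second_code})");
    assert_eq!(replacement, r#"Path("old.py").rename("new.py")"#);
}
```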
@@ -1,5 +1,6 @@
|
||||
pub(crate) use glob_rule::*;
|
||||
pub(crate) use invalid_pathlib_with_suffix::*;
|
||||
pub(crate) use os_chmod::*;
|
||||
pub(crate) use os_getcwd::*;
|
||||
pub(crate) use os_path_abspath::*;
|
||||
pub(crate) use os_path_basename::*;
|
||||
@@ -14,8 +15,11 @@ pub(crate) use os_path_isabs::*;
|
||||
pub(crate) use os_path_isdir::*;
|
||||
pub(crate) use os_path_isfile::*;
|
||||
pub(crate) use os_path_islink::*;
|
||||
pub(crate) use os_path_samefile::*;
|
||||
pub(crate) use os_readlink::*;
|
||||
pub(crate) use os_remove::*;
|
||||
pub(crate) use os_rename::*;
|
||||
pub(crate) use os_replace::*;
|
||||
pub(crate) use os_rmdir::*;
|
||||
pub(crate) use os_sep_split::*;
|
||||
pub(crate) use os_unlink::*;
|
||||
@@ -24,6 +28,7 @@ pub(crate) use replaceable_by_pathlib::*;
|
||||
|
||||
mod glob_rule;
|
||||
mod invalid_pathlib_with_suffix;
|
||||
mod os_chmod;
|
||||
mod os_getcwd;
|
||||
mod os_path_abspath;
|
||||
mod os_path_basename;
|
||||
@@ -38,8 +43,11 @@ mod os_path_isabs;
|
||||
mod os_path_isdir;
|
||||
mod os_path_isfile;
|
||||
mod os_path_islink;
|
||||
mod os_path_samefile;
|
||||
mod os_readlink;
|
||||
mod os_remove;
|
||||
mod os_rename;
|
||||
mod os_replace;
|
||||
mod os_rmdir;
|
||||
mod os_sep_split;
|
||||
mod os_unlink;
|
||||
|
||||
@@ -0,0 +1,94 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_chmod_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::{
|
||||
check_os_pathlib_two_arg_calls, is_file_descriptor, is_keyword_only_argument_non_default,
|
||||
};
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.chmod`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os`. When possible, using `Path` object
|
||||
/// methods such as `Path.chmod()` can improve readability over the `os`
|
||||
/// module's counterparts (e.g., `os.chmod()`).
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.chmod("file.py", 0o444)
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("file.py").chmod(0o444)
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.chmod`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.chmod)
|
||||
/// - [Python documentation: `os.chmod`](https://docs.python.org/3/library/os.html#os.chmod)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsChmod;
|
||||
|
||||
impl Violation for OsChmod {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.chmod()` should be replaced by `Path.chmod()`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path(...).chmod(...)`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH101
|
||||
pub(crate) fn os_chmod(checker: &Checker, call: &ExprCall, segments: &[&str]) {
|
||||
if segments != ["os", "chmod"] {
|
||||
return;
|
||||
}
|
||||
|
||||
// `dir_fd` is not supported by pathlib, so check if it's set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.chmod)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.chmod(path, mode, *, dir_fd=None, follow_symlinks=True)
|
||||
// ```
|
||||
if call
|
||||
.arguments
|
||||
.find_argument_value("path", 0)
|
||||
.is_some_and(|expr| is_file_descriptor(expr, checker.semantic()))
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
check_os_pathlib_two_arg_calls(
|
||||
checker,
|
||||
call,
|
||||
"chmod",
|
||||
"path",
|
||||
"mode",
|
||||
is_fix_os_chmod_enabled(checker.settings()),
|
||||
OsChmod,
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,77 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_path_samefile_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::check_os_pathlib_two_arg_calls;
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.path.samefile`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os.path`. When possible, using `Path` object
|
||||
/// methods such as `Path.samefile()` can improve readability over the `os.path`
|
||||
/// module's counterparts (e.g., `os.path.samefile()`).
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.path.samefile("f1.py", "f2.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("f1.py").samefile("f2.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.samefile`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.samefile)
|
||||
/// - [Python documentation: `os.path.samefile`](https://docs.python.org/3/library/os.path.html#os.path.samefile)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsPathSamefile;
|
||||
|
||||
impl Violation for OsPathSamefile {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.path.samefile()` should be replaced by `Path.samefile()`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path(...).samefile()`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH121
|
||||
pub(crate) fn os_path_samefile(checker: &Checker, call: &ExprCall, segments: &[&str]) {
|
||||
if segments != ["os", "path", "samefile"] {
|
||||
return;
|
||||
}
|
||||
|
||||
check_os_pathlib_two_arg_calls(
|
||||
checker,
|
||||
call,
|
||||
"samefile",
|
||||
"f1",
|
||||
"f2",
|
||||
is_fix_os_path_samefile_enabled(checker.settings()),
|
||||
OsPathSamefile,
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,91 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_rename_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::{
|
||||
check_os_pathlib_two_arg_calls, is_keyword_only_argument_non_default,
|
||||
};
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.rename`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os`. When possible, using `Path` object
|
||||
/// methods such as `Path.rename()` can improve readability over the `os`
|
||||
/// module's counterparts (e.g., `os.rename()`).
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.rename("old.py", "new.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("old.py").rename("new.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.rename`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.rename)
|
||||
/// - [Python documentation: `os.rename`](https://docs.python.org/3/library/os.html#os.rename)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsRename;
|
||||
|
||||
impl Violation for OsRename {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.rename()` should be replaced by `Path.rename()`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path(...).rename(...)`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH104
|
||||
pub(crate) fn os_rename(checker: &Checker, call: &ExprCall, segments: &[&str]) {
|
||||
if segments != ["os", "rename"] {
|
||||
return;
|
||||
}
|
||||
// `src_dir_fd` and `dst_dir_fd` are not supported by pathlib, so check if they are
|
||||
// set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.rename)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.rename(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
|
||||
// ```
|
||||
if is_keyword_only_argument_non_default(&call.arguments, "src_dir_fd")
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dst_dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
check_os_pathlib_two_arg_calls(
|
||||
checker,
|
||||
call,
|
||||
"rename",
|
||||
"src",
|
||||
"dst",
|
||||
is_fix_os_rename_enabled(checker.settings()),
|
||||
OsRename,
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,94 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_replace_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::{
|
||||
check_os_pathlib_two_arg_calls, is_keyword_only_argument_non_default,
|
||||
};
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.replace`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `pathlib` offers a high-level API for path manipulation, as compared to
|
||||
/// the lower-level API offered by `os`. When possible, using `Path` object
|
||||
/// methods such as `Path.replace()` can improve readability over the `os`
|
||||
/// module's counterparts (e.g., `os.replace()`).
|
||||
///
|
||||
/// Note that `os` functions may be preferable if performance is a concern,
|
||||
/// e.g., in hot loops.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
/// import os
|
||||
///
|
||||
/// os.replace("old.py", "new.py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// Path("old.py").replace("new.py")
|
||||
/// ```
|
||||
///
|
||||
/// ## Known issues
|
||||
/// While using `pathlib` can improve the readability and type safety of your code,
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.replace`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.replace)
|
||||
/// - [Python documentation: `os.replace`](https://docs.python.org/3/library/os.html#os.replace)
|
||||
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
|
||||
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
|
||||
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
|
||||
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct OsReplace;
|
||||
|
||||
impl Violation for OsReplace {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.replace()` should be replaced by `Path.replace()`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path(...).replace(...)`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH105
|
||||
pub(crate) fn os_replace(checker: &Checker, call: &ExprCall, segments: &[&str]) {
|
||||
if segments != ["os", "replace"] {
|
||||
return;
|
||||
}
|
||||
// `src_dir_fd` and `dst_dir_fd` are not supported by pathlib, so check if they are
|
||||
// set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.replace)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.replace(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
|
||||
// ```
|
||||
if is_keyword_only_argument_non_default(&call.arguments, "src_dir_fd")
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dst_dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
check_os_pathlib_two_arg_calls(
|
||||
checker,
|
||||
call,
|
||||
"replace",
|
||||
"src",
|
||||
"dst",
|
||||
is_fix_os_replace_enabled(checker.settings()),
|
||||
OsReplace,
|
||||
);
|
||||
}
|
||||
@@ -1,14 +1,16 @@
|
||||
use ruff_python_ast::{self as ast, Expr, ExprBooleanLiteral, ExprCall};
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_python_semantic::analyze::typing;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::flake8_use_pathlib::helpers::is_keyword_only_argument_non_default;
|
||||
use crate::rules::flake8_use_pathlib::rules::Glob;
|
||||
use crate::rules::flake8_use_pathlib::violations::{
|
||||
BuiltinOpen, Joiner, OsChmod, OsListdir, OsMakedirs, OsMkdir, OsPathJoin, OsPathSamefile,
|
||||
OsPathSplitext, OsRename, OsReplace, OsStat, OsSymlink, PyPath,
|
||||
use crate::rules::flake8_use_pathlib::helpers::{
|
||||
is_file_descriptor, is_keyword_only_argument_non_default,
|
||||
};
|
||||
use crate::rules::flake8_use_pathlib::{
|
||||
rules::Glob,
|
||||
violations::{
|
||||
BuiltinOpen, Joiner, OsListdir, OsMakedirs, OsMkdir, OsPathJoin, OsPathSplitext, OsStat,
|
||||
OsSymlink, PyPath,
|
||||
},
|
||||
};
|
||||
|
||||
pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
|
||||
@@ -18,24 +20,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
|
||||
|
||||
let range = call.func.range();
|
||||
match qualified_name.segments() {
|
||||
// PTH101
|
||||
["os", "chmod"] => {
|
||||
// `dir_fd` is not supported by pathlib, so check if it's set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.chmod)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.chmod(path, mode, *, dir_fd=None, follow_symlinks=True)
|
||||
// ```
|
||||
if call
|
||||
.arguments
|
||||
.find_argument_value("path", 0)
|
||||
.is_some_and(|expr| is_file_descriptor(expr, checker.semantic()))
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
checker.report_diagnostic_if_enabled(OsChmod, range)
|
||||
}
|
||||
// PTH102
|
||||
["os", "makedirs"] => checker.report_diagnostic_if_enabled(OsMakedirs, range),
|
||||
// PTH103
|
||||
@@ -51,38 +35,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
|
||||
}
|
||||
checker.report_diagnostic_if_enabled(OsMkdir, range)
|
||||
}
|
||||
// PTH104
|
||||
["os", "rename"] => {
|
||||
// `src_dir_fd` and `dst_dir_fd` are not supported by pathlib, so check if they are
|
||||
// set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.rename)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.rename(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
|
||||
// ```
|
||||
if is_keyword_only_argument_non_default(&call.arguments, "src_dir_fd")
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dst_dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
checker.report_diagnostic_if_enabled(OsRename, range)
|
||||
}
|
||||
// PTH105
|
||||
["os", "replace"] => {
|
||||
// `src_dir_fd` and `dst_dir_fd` are not supported by pathlib, so check if they are
|
||||
// set to non-default values.
|
||||
// Signature as of Python 3.13 (https://docs.python.org/3/library/os.html#os.replace)
|
||||
// ```text
|
||||
// 0 1 2 3
|
||||
// os.replace(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
|
||||
// ```
|
||||
if is_keyword_only_argument_non_default(&call.arguments, "src_dir_fd")
|
||||
|| is_keyword_only_argument_non_default(&call.arguments, "dst_dir_fd")
|
||||
{
|
||||
return;
|
||||
}
|
||||
checker.report_diagnostic_if_enabled(OsReplace, range)
|
||||
}
|
||||
// PTH116
|
||||
["os", "stat"] => {
|
||||
// `dir_fd` is not supported by pathlib, so check if it's set to non-default values.
|
||||
@@ -124,8 +76,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
|
||||
},
|
||||
range,
|
||||
),
|
||||
// PTH121
|
||||
["os", "path", "samefile"] => checker.report_diagnostic_if_enabled(OsPathSamefile, range),
|
||||
// PTH122
|
||||
["os", "path", "splitext"] => checker.report_diagnostic_if_enabled(OsPathSplitext, range),
|
||||
// PTH211
|
||||
@@ -234,37 +184,6 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
|
||||
};
|
||||
}
|
||||
|
||||
/// Returns `true` if the given expression looks like a file descriptor, i.e., if it is an integer.
|
||||
fn is_file_descriptor(expr: &Expr, semantic: &SemanticModel) -> bool {
|
||||
if matches!(
|
||||
expr,
|
||||
Expr::NumberLiteral(ast::ExprNumberLiteral {
|
||||
value: ast::Number::Int(_),
|
||||
..
|
||||
})
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let Some(name) = get_name_expr(expr) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let Some(binding) = semantic.only_binding(name).map(|id| semantic.binding(id)) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
typing::is_int(binding, semantic)
|
||||
}
|
||||
|
||||
fn get_name_expr(expr: &Expr) -> Option<&ast::ExprName> {
|
||||
match expr {
|
||||
Expr::Name(name) => Some(name),
|
||||
Expr::Call(ExprCall { func, .. }) => get_name_expr(func),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if argument `name` is set to a non-default `None` value.
|
||||
fn is_argument_non_default(arguments: &ast::Arguments, name: &str, position: usize) -> bool {
|
||||
arguments
|
||||
|
||||
@@ -20,6 +20,7 @@ full_name.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
9 | aaa = os.mkdir(p)
|
||||
10 | os.makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
full_name.py:9:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -50,6 +51,7 @@ full_name.py:11:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
12 | os.replace(p)
|
||||
13 | os.rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
full_name.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -60,6 +62,7 @@ full_name.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
13 | os.rmdir(p)
|
||||
14 | os.remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
full_name.py:13:1: PTH106 `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -253,6 +256,7 @@ full_name.py:30:1: PTH121 `os.path.samefile()` should be replaced by `Path.samef
|
||||
31 | os.path.splitext(p)
|
||||
32 | with open(p) as fp:
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
full_name.py:31:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -20,6 +20,7 @@ import_as.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
9 | aaa = foo.mkdir(p)
|
||||
10 | foo.makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_as.py:9:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -50,6 +51,7 @@ import_as.py:11:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
12 | foo.replace(p)
|
||||
13 | foo.rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_as.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -60,6 +62,7 @@ import_as.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
13 | foo.rmdir(p)
|
||||
14 | foo.remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_as.py:13:1: PTH106 `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -252,6 +255,7 @@ import_as.py:30:1: PTH121 `os.path.samefile()` should be replaced by `Path.samef
|
||||
| ^^^^^^^^^^^^^^ PTH121
|
||||
31 | foo_p.splitext(p)
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_as.py:31:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -20,6 +20,7 @@ import_from.py:10:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
11 | aaa = mkdir(p)
|
||||
12 | makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_from.py:11:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -50,6 +51,7 @@ import_from.py:13:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
14 | replace(p)
|
||||
15 | rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from.py:14:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -60,6 +62,7 @@ import_from.py:14:1: PTH105 `os.replace()` should be replaced by `Path.replace()
|
||||
15 | rmdir(p)
|
||||
16 | remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_from.py:15:1: PTH106 `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -253,6 +256,7 @@ import_from.py:32:1: PTH121 `os.path.samefile()` should be replaced by `Path.sam
|
||||
33 | splitext(p)
|
||||
34 | with open(p) as fp:
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_from.py:33:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
@@ -289,3 +293,36 @@ import_from.py:43:10: PTH123 `open()` should be replaced by `Path.open()`
|
||||
43 | with open(p) as _: ... # Error
|
||||
| ^^^^ PTH123
|
||||
|
|
||||
|
||||
import_from.py:53:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
51 | file = "file_1.py"
|
||||
52 |
|
||||
53 | rename(file, "file_2.py")
|
||||
| ^^^^^^ PTH104
|
||||
54 |
|
||||
55 | rename(
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from.py:55:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
53 | rename(file, "file_2.py")
|
||||
54 |
|
||||
55 | rename(
|
||||
| ^^^^^^ PTH104
|
||||
56 | # commment 1
|
||||
57 | file, # comment 2
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from.py:63:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
|
|
||||
61 | )
|
||||
62 |
|
||||
63 | rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
|
||||
| ^^^^^^ PTH104
|
||||
64 |
|
||||
65 | rename(file, "file_2.py", src_dir_fd=1)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
@@ -20,6 +20,7 @@ import_from_as.py:15:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
16 | aaa = xmkdir(p)
|
||||
17 | xmakedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_from_as.py:16:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -50,6 +51,7 @@ import_from_as.py:18:1: PTH104 `os.rename()` should be replaced by `Path.rename(
|
||||
19 | xreplace(p)
|
||||
20 | xrmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from_as.py:19:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -60,6 +62,7 @@ import_from_as.py:19:1: PTH105 `os.replace()` should be replaced by `Path.replac
|
||||
20 | xrmdir(p)
|
||||
21 | xremove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_from_as.py:20:1: PTH106 `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -252,6 +255,7 @@ import_from_as.py:37:1: PTH121 `os.path.samefile()` should be replaced by `Path.
|
||||
| ^^^^^^^^^ PTH121
|
||||
38 | xsplitext(p)
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_from_as.py:38:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -34,6 +34,7 @@ full_name.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
9 | aaa = os.mkdir(p)
|
||||
10 | os.makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
full_name.py:9:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -64,6 +65,7 @@ full_name.py:11:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
12 | os.replace(p)
|
||||
13 | os.rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
full_name.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -74,6 +76,7 @@ full_name.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
13 | os.rmdir(p)
|
||||
14 | os.remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
full_name.py:13:1: PTH106 [*] `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -471,6 +474,7 @@ full_name.py:30:1: PTH121 `os.path.samefile()` should be replaced by `Path.samef
|
||||
31 | os.path.splitext(p)
|
||||
32 | with open(p) as fp:
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
full_name.py:31:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -34,6 +34,7 @@ import_as.py:8:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
9 | aaa = foo.mkdir(p)
|
||||
10 | foo.makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_as.py:9:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -64,6 +65,7 @@ import_as.py:11:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
12 | foo.replace(p)
|
||||
13 | foo.rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_as.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -74,6 +76,7 @@ import_as.py:12:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
13 | foo.rmdir(p)
|
||||
14 | foo.remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_as.py:13:1: PTH106 [*] `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -469,6 +472,7 @@ import_as.py:30:1: PTH121 `os.path.samefile()` should be replaced by `Path.samef
|
||||
| ^^^^^^^^^^^^^^ PTH121
|
||||
31 | foo_p.splitext(p)
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_as.py:31:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
|
||||
@@ -35,6 +35,7 @@ import_from.py:10:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
|
||||
11 | aaa = mkdir(p)
|
||||
12 | makedirs(p)
|
||||
|
|
||||
= help: Replace with `Path(...).chmod(...)`
|
||||
|
||||
import_from.py:11:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
|
||||
|
|
||||
@@ -65,6 +66,7 @@ import_from.py:13:1: PTH104 `os.rename()` should be replaced by `Path.rename()`
|
||||
14 | replace(p)
|
||||
15 | rmdir(p)
|
||||
|
|
||||
= help: Replace with `Path(...).rename(...)`
|
||||
|
||||
import_from.py:14:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
|
||||
|
|
||||
@@ -75,6 +77,7 @@ import_from.py:14:1: PTH105 `os.replace()` should be replaced by `Path.replace()
|
||||
15 | rmdir(p)
|
||||
16 | remove(p)
|
||||
|
|
||||
= help: Replace with `Path(...).replace(...)`
|
||||
|
||||
import_from.py:15:1: PTH106 [*] `os.rmdir()` should be replaced by `Path.rmdir()`
|
||||
|
|
||||
@@ -484,6 +487,7 @@ import_from.py:32:1: PTH121 `os.path.samefile()` should be replaced by `Path.sam
|
||||
33 | splitext(p)
|
||||
34 | with open(p) as fp:
|
||||
|
|
||||
= help: Replace with `Path(...).samefile()`
|
||||
|
||||
import_from.py:33:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
|
||||
|
|
||||
@@ -520,3 +524,95 @@ import_from.py:43:10: PTH123 `open()` should be replaced by `Path.open()`
43 | with open(p) as _: ... # Error
   | ^^^^ PTH123
   |

import_from.py:53:1: PTH104 [*] `os.rename()` should be replaced by `Path.rename()`
   |
51 | file = "file_1.py"
52 |
53 | rename(file, "file_2.py")
   | ^^^^^^ PTH104
54 |
55 | rename(
   |
   = help: Replace with `Path(...).rename(...)`

ℹ Safe fix
2 2 | from os import remove, unlink, getcwd, readlink, stat
3 3 | from os.path import abspath, exists, expanduser, isdir, isfile, islink
4 4 | from os.path import isabs, join, basename, dirname, samefile, splitext
5 |+import pathlib
5 6 |
6 7 | p = "/foo"
7 8 | q = "bar"
--------------------------------------------------------------------------------
50 51 |
51 52 | file = "file_1.py"
52 53 |
53 |-rename(file, "file_2.py")
54 |+pathlib.Path(file).rename("file_2.py")
54 55 |
55 56 | rename(
56 57 | # commment 1

import_from.py:55:1: PTH104 [*] `os.rename()` should be replaced by `Path.rename()`
   |
53 | rename(file, "file_2.py")
54 |
55 | rename(
   | ^^^^^^ PTH104
56 | # commment 1
57 | file, # comment 2
   |
   = help: Replace with `Path(...).rename(...)`

ℹ Unsafe fix
2 2 | from os import remove, unlink, getcwd, readlink, stat
3 3 | from os.path import abspath, exists, expanduser, isdir, isfile, islink
4 4 | from os.path import isabs, join, basename, dirname, samefile, splitext
5 |+import pathlib
5 6 |
6 7 | p = "/foo"
7 8 | q = "bar"
--------------------------------------------------------------------------------
52 53 |
53 54 | rename(file, "file_2.py")
54 55 |
55 |-rename(
56 |- # commment 1
57 |- file, # comment 2
58 |- "file_2.py"
59 |- ,
60 |- # comment 3
61 |-)
56 |+pathlib.Path(file).rename("file_2.py")
62 57 |
63 58 | rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
64 59 |

import_from.py:63:1: PTH104 [*] `os.rename()` should be replaced by `Path.rename()`
   |
61 | )
62 |
63 | rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
   | ^^^^^^ PTH104
64 |
65 | rename(file, "file_2.py", src_dir_fd=1)
   |
   = help: Replace with `Path(...).rename(...)`

ℹ Safe fix
2 2 | from os import remove, unlink, getcwd, readlink, stat
3 3 | from os.path import abspath, exists, expanduser, isdir, isfile, islink
4 4 | from os.path import isabs, join, basename, dirname, samefile, splitext
5 |+import pathlib
5 6 |
6 7 | p = "/foo"
7 8 | q = "bar"
--------------------------------------------------------------------------------
60 61 | # comment 3
61 62 | )
62 63 |
63 |-rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
64 |+pathlib.Path(file).rename("file_2.py")
64 65 |
65 66 | rename(file, "file_2.py", src_dir_fd=1)
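The new `import_from.py` sections above exercise the PTH104 autofix: the offending call is rewritten to `pathlib.Path(...).rename(...)`, an `import pathlib` line is inserted at the top of the module, and the fix is downgraded to unsafe when applying it would delete comments inside the original call (the multi-line `rename(` case). A minimal sketch of the same transformation outside the fixtures (file names are illustrative only):

```python
import pathlib

file = "file_1.py"
pathlib.Path(file).touch()  # create the file so the rename below can succeed

# Before the fix (flagged by PTH104):  rename(file, "file_2.py")
# After the fix:
pathlib.Path(file).rename("file_2.py")
```

Per the last snapshot block, `src_dir_fd=None, dst_dir_fd=None` arguments are simply dropped by the fix, since they restate the defaults.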
@@ -35,6 +35,7 @@ import_from_as.py:15:6: PTH101 `os.chmod()` should be replaced by `Path.chmod()`
16 | aaa = xmkdir(p)
17 | xmakedirs(p)
   |
   = help: Replace with `Path(...).chmod(...)`

import_from_as.py:16:7: PTH102 `os.mkdir()` should be replaced by `Path.mkdir()`
   |
@@ -65,6 +66,7 @@ import_from_as.py:18:1: PTH104 `os.rename()` should be replaced by `Path.rename(
19 | xreplace(p)
20 | xrmdir(p)
   |
   = help: Replace with `Path(...).rename(...)`

import_from_as.py:19:1: PTH105 `os.replace()` should be replaced by `Path.replace()`
   |
@@ -75,6 +77,7 @@ import_from_as.py:19:1: PTH105 `os.replace()` should be replaced by `Path.replac
20 | xrmdir(p)
21 | xremove(p)
   |
   = help: Replace with `Path(...).replace(...)`

import_from_as.py:20:1: PTH106 [*] `os.rmdir()` should be replaced by `Path.rmdir()`
   |
@@ -482,6 +485,7 @@ import_from_as.py:37:1: PTH121 `os.path.samefile()` should be replaced by `Path.
   | ^^^^^^^^^ PTH121
38 | xsplitext(p)
   |
   = help: Replace with `Path(...).samefile()`

import_from_as.py:38:1: PTH122 `os.path.splitext()` should be replaced by `Path.suffix`, `Path.stem`, and `Path.parent`
   |
@@ -2,51 +2,6 @@ use ruff_macros::{ViolationMetadata, derive_message_formats};

use crate::Violation;

/// ## What it does
/// Checks for uses of `os.chmod`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os`. When possible, using `Path` object
/// methods such as `Path.chmod()` can improve readability over the `os`
/// module's counterparts (e.g., `os.chmod()`).
///
/// ## Examples
/// ```python
/// import os
///
/// os.chmod("file.py", 0o444)
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// Path("file.py").chmod(0o444)
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## References
/// - [Python documentation: `Path.chmod`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.chmod)
/// - [Python documentation: `os.chmod`](https://docs.python.org/3/library/os.html#os.chmod)
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsChmod;

impl Violation for OsChmod {
    #[derive_message_formats]
    fn message(&self) -> String {
        "`os.chmod()` should be replaced by `Path.chmod()`".to_string()
    }
}

/// ## What it does
/// Checks for uses of `os.makedirs`.
///
@@ -137,99 +92,6 @@ impl Violation for OsMkdir {
    }
}

/// ## What it does
/// Checks for uses of `os.rename`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os`. When possible, using `Path` object
/// methods such as `Path.rename()` can improve readability over the `os`
/// module's counterparts (e.g., `os.rename()`).
///
/// ## Examples
/// ```python
/// import os
///
/// os.rename("old.py", "new.py")
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// Path("old.py").rename("new.py")
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## References
/// - [Python documentation: `Path.rename`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.rename)
/// - [Python documentation: `os.rename`](https://docs.python.org/3/library/os.html#os.rename)
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsRename;

impl Violation for OsRename {
    #[derive_message_formats]
    fn message(&self) -> String {
        "`os.rename()` should be replaced by `Path.rename()`".to_string()
    }
}

/// ## What it does
/// Checks for uses of `os.replace`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os`. When possible, using `Path` object
/// methods such as `Path.replace()` can improve readability over the `os`
/// module's counterparts (e.g., `os.replace()`).
///
/// Note that `os` functions may be preferable if performance is a concern,
/// e.g., in hot loops.
///
/// ## Examples
/// ```python
/// import os
///
/// os.replace("old.py", "new.py")
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// Path("old.py").replace("new.py")
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## References
/// - [Python documentation: `Path.replace`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.replace)
/// - [Python documentation: `os.replace`](https://docs.python.org/3/library/os.html#os.replace)
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsReplace;

impl Violation for OsReplace {
    #[derive_message_formats]
    fn message(&self) -> String {
        "`os.replace()` should be replaced by `Path.replace()`".to_string()
    }
}

/// ## What it does
/// Checks for uses of `os.stat`.
///
@@ -347,51 +209,6 @@ pub(crate) enum Joiner {
    Joinpath,
}

/// ## What it does
/// Checks for uses of `os.path.samefile`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os.path`. When possible, using `Path` object
/// methods such as `Path.samefile()` can improve readability over the `os.path`
/// module's counterparts (e.g., `os.path.samefile()`).
///
/// ## Examples
/// ```python
/// import os
///
/// os.path.samefile("f1.py", "f2.py")
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// Path("f1.py").samefile("f2.py")
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## References
/// - [Python documentation: `Path.samefile`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.samefile)
/// - [Python documentation: `os.path.samefile`](https://docs.python.org/3/library/os.path.html#os.path.samefile)
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsPathSamefile;

impl Violation for OsPathSamefile {
    #[derive_message_formats]
    fn message(&self) -> String {
        "`os.path.samefile()` should be replaced by `Path.samefile()`".to_string()
    }
}

/// ## What it does
/// Checks for uses of `os.path.splitext`.
///
@@ -100,7 +100,7 @@ pub(crate) fn invalid_function_name(
        return;
    }

    // Ignore the do_* methods of the http.server.BaseHTTPRequestHandler class
    // Ignore the do_* methods of the http.server.BaseHTTPRequestHandler class and its subclasses
    if name.starts_with("do_")
        && parent_class.is_some_and(|class| {
            any_base_class(class, semantic, &mut |superclass| {
@@ -108,7 +108,13 @@ pub(crate) fn invalid_function_name(
                qualified.is_some_and(|name| {
                    matches!(
                        name.segments(),
                        ["http", "server", "BaseHTTPRequestHandler"]
                        [
                            "http",
                            "server",
                            "BaseHTTPRequestHandler"
                                | "CGIHTTPRequestHandler"
                                | "SimpleHTTPRequestHandler"
                        ]
                    )
                })
            })
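The change above widens the N802 carve-out for `do_*` methods from `http.server.BaseHTTPRequestHandler` itself to its standard-library subclasses (`SimpleHTTPRequestHandler`, `CGIHTTPRequestHandler`), discovered by walking base classes. A minimal sketch of code affected by the carve-out (the `dont_*` spellings in the fixture remain errors, as the snapshot below shows):

```python
from http.server import SimpleHTTPRequestHandler


class Handler(SimpleHTTPRequestHandler):
    def do_OPTIONS(self):  # exempt: do_* handler method on an HTTP request handler subclass
        self.send_response(204)
        self.end_headers()

    def dont_OPTIONS(self):  # still flagged by N802: not a do_* handler method
        pass
```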
@@ -55,3 +55,21 @@ N802.py:84:9: N802 Function name `dont_GET` should be lowercase
   | ^^^^^^^^ N802
85 | pass
   |

N802.py:95:9: N802 Function name `dont_OPTIONS` should be lowercase
   |
93 | pass
94 |
95 | def dont_OPTIONS(self):
   | ^^^^^^^^^^^^ N802
96 | pass
   |

N802.py:106:9: N802 Function name `dont_OPTIONS` should be lowercase
    |
104 | pass
105 |
106 | def dont_OPTIONS(self):
    | ^^^^^^^^^^^^ N802
107 | pass
    |
@@ -249,6 +249,11 @@ pub(crate) fn manual_list_comprehension(checker: &Checker, for_stmt: &ast::StmtF
        .iter()
        .find(|binding| for_stmt.target.range() == binding.range)
        .unwrap();
    // If the target variable is global (e.g., `global INDEX`) or nonlocal (e.g., `nonlocal INDEX`),
    // then it is intended to be used elsewhere outside the for loop.
    if target_binding.is_global() || target_binding.is_nonlocal() {
        return;
    }
    // If any references to the loop target variable are after the loop,
    // then converting it into a comprehension would cause a NameError
    if target_binding
@@ -406,7 +411,14 @@ fn convert_to_list_extend(
    };
    let target_str = locator.slice(for_stmt.target.range());
    let elt_str = locator.slice(to_append);
    let generator_str = format!("{elt_str} {for_type} {target_str} in {for_iter_str}{if_str}");
    let generator_str = if to_append
        .as_generator_expr()
        .is_some_and(|generator| !generator.parenthesized)
    {
        format!("({elt_str}) {for_type} {target_str} in {for_iter_str}{if_str}")
    } else {
        format!("{elt_str} {for_type} {target_str} in {for_iter_str}{if_str}")
    };

    let variable_name = locator.slice(binding);
    let for_loop_inline_comments = comment_strings_in_range(
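The `convert_to_list_extend` branch added above exists because `result.append(x for x in [i])` passes a bare generator expression as the single call argument; if that element were spliced into a comprehension unparenthesized, `[x for x in [i] for i in range(3)]` would parse as a different, nested comprehension instead of a list of generators. A minimal sketch of the before/after the new branch produces:

```python
def before() -> list:
    result = []
    for i in range(3):
        result.append(x for x in [i])  # PERF401: element is a bare generator expression
    return result


def after() -> list:
    # The fix parenthesizes the element so each item stays a generator object.
    return [(x for x in [i]) for i in range(3)]
```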
@@ -241,5 +241,29 @@ PERF401.py:280:13: PERF401 Use `list.extend` to create a transformed list
279 | if lambda: 0:
280 | dst.append(i)
    | ^^^^^^^^^^^^^ PERF401
281 |
282 | def f():
    |
    = help: Replace for loop with list.extend

PERF401.py:286:9: PERF401 Use a list comprehension to create a transformed list
    |
284 | result = []
285 | for i in range(3):
286 | result.append(x for x in [i])
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PERF401
287 |
288 | def f():
    |
    = help: Replace for loop with list comprehension

PERF401.py:292:9: PERF401 Use a list comprehension to create a transformed list
    |
290 | result = []
291 | for i in range(3):
292 | result.append((x for x in [i]))
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PERF401
293 |
294 | G_INDEX = None
    |
    = help: Replace for loop with list comprehension
@@ -566,6 +566,8 @@ PERF401.py:280:13: PERF401 [*] Use `list.extend` to create a transformed list
279 | if lambda: 0:
280 | dst.append(i)
    | ^^^^^^^^^^^^^ PERF401
281 |
282 | def f():
    |
    = help: Replace for loop with list.extend

@@ -577,3 +579,52 @@ PERF401.py:280:13: PERF401 [*] Use `list.extend` to create a transformed list
279 |- if lambda: 0:
280 |- dst.append(i)
278 |+ dst.extend(i for i in src if (lambda: 0))
281 279 |
282 280 | def f():
283 281 | i = "xyz"

PERF401.py:286:9: PERF401 [*] Use a list comprehension to create a transformed list
    |
284 | result = []
285 | for i in range(3):
286 | result.append(x for x in [i])
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PERF401
287 |
288 | def f():
    |
    = help: Replace for loop with list comprehension

ℹ Unsafe fix
281 281 |
282 282 | def f():
283 283 | i = "xyz"
284 |- result = []
285 |- for i in range(3):
286 |- result.append(x for x in [i])
284 |+ result = [(x for x in [i]) for i in range(3)]
287 285 |
288 286 | def f():
289 287 | i = "xyz"

PERF401.py:292:9: PERF401 [*] Use a list comprehension to create a transformed list
    |
290 | result = []
291 | for i in range(3):
292 | result.append((x for x in [i]))
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PERF401
293 |
294 | G_INDEX = None
    |
    = help: Replace for loop with list comprehension

ℹ Unsafe fix
287 287 |
288 288 | def f():
289 289 | i = "xyz"
290 |- result = []
291 |- for i in range(3):
292 |- result.append((x for x in [i]))
290 |+ result = [(x for x in [i]) for i in range(3)]
293 291 |
294 292 | G_INDEX = None
295 293 | def f():
@@ -21,7 +21,6 @@ use crate::checkers::ast::{DiagnosticGuard, LintContext};
use crate::checkers::logical_lines::expand_indent;
use crate::line_width::IndentWidth;
use crate::rules::pycodestyle::helpers::is_non_logical_token;
use crate::settings::LinterSettings;
use crate::{AlwaysFixableViolation, Edit, Fix, Locator, Violation};

/// Number of blank lines around top level classes and functions.
@@ -694,14 +693,12 @@ pub(crate) struct BlankLinesChecker<'a, 'b> {
    source_type: PySourceType,
    cell_offsets: Option<&'a CellOffsets>,
    context: &'a LintContext<'b>,
    settings: &'a LinterSettings,
}

impl<'a, 'b> BlankLinesChecker<'a, 'b> {
    pub(crate) fn new(
        locator: &'a Locator<'a>,
        stylist: &'a Stylist<'a>,
        settings: &'a LinterSettings,
        source_type: PySourceType,
        cell_offsets: Option<&'a CellOffsets>,
        context: &'a LintContext<'b>,
@@ -712,7 +709,6 @@ impl<'a, 'b> BlankLinesChecker<'a, 'b> {
            source_type,
            cell_offsets,
            context,
            settings,
        }
    }

@@ -733,7 +729,7 @@ impl<'a, 'b> BlankLinesChecker<'a, 'b> {
        let line_preprocessor = LinePreprocessor::new(
            tokens,
            self.locator,
            self.settings.tab_size,
            self.context.settings().tab_size,
            self.cell_offsets,
        );

@@ -879,7 +875,8 @@ impl<'a, 'b> BlankLinesChecker<'a, 'b> {
            // `isort` defaults to 2 if before a class or function definition (except in stubs where it is one) and 1 otherwise.
            // Defaulting to 2 (or 1 in stubs) here is correct because the variable is only used when testing the
            // blank lines before a class or function definition.
            u32::try_from(self.settings.isort.lines_after_imports).unwrap_or(max_lines_level)
            u32::try_from(self.context.settings().isort.lines_after_imports)
                .unwrap_or(max_lines_level)
        } else {
            max_lines_level
        }
@@ -941,8 +938,10 @@ impl<'a, 'b> BlankLinesChecker<'a, 'b> {
            (LogicalLineKind::Import, Follows::FromImport)
                | (LogicalLineKind::FromImport, Follows::Import)
        ) {
            max_lines_level
                .max(u32::try_from(self.settings.isort.lines_between_types).unwrap_or(u32::MAX))
            max_lines_level.max(
                u32::try_from(self.context.settings().isort.lines_between_types)
                    .unwrap_or(u32::MAX),
            )
        } else {
            expected_blank_lines_before_definition
        };
@@ -48,6 +48,7 @@ mod tests {
    #[test_case(Rule::ComparisonWithItself, Path::new("comparison_with_itself.py"))]
    #[test_case(Rule::EqWithoutHash, Path::new("eq_without_hash.py"))]
    #[test_case(Rule::EmptyComment, Path::new("empty_comment.py"))]
    #[test_case(Rule::EmptyComment, Path::new("empty_comment_line_continuation.py"))]
    #[test_case(Rule::ManualFromImport, Path::new("import_aliasing.py"))]
    #[test_case(Rule::IfStmtMinMax, Path::new("if_stmt_min_max.py"))]
    #[test_case(Rule::SingleStringSlots, Path::new("single_string_slots.py"))]
@@ -1,4 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_index::Indexer;
use ruff_python_trivia::{CommentRanges, is_python_whitespace};
use ruff_source_file::LineRanges;
use ruff_text_size::{TextRange, TextSize};
@@ -49,6 +50,7 @@ pub(crate) fn empty_comments(
    context: &LintContext,
    comment_ranges: &CommentRanges,
    locator: &Locator,
    indexer: &Indexer,
) {
    let block_comments = comment_ranges.block_comments(locator.contents());

@@ -59,12 +61,12 @@ pub(crate) fn empty_comments(
        }

        // If the line contains an empty comment, add a diagnostic.
        empty_comment(context, range, locator);
        empty_comment(context, range, locator, indexer);
    }
}

/// Return a [`Diagnostic`] if the comment at the given [`TextRange`] is empty.
fn empty_comment(context: &LintContext, range: TextRange, locator: &Locator) {
fn empty_comment(context: &LintContext, range: TextRange, locator: &Locator, indexer: &Indexer) {
    // Check: is the comment empty?
    if !locator
        .slice(range)
@@ -95,12 +97,20 @@ fn empty_comment(context: &LintContext, range: TextRange, locator: &Locator) {
        }
    });

    // If there is no character preceding the comment, this comment must be on its own physical line.
    // If there is a line preceding the empty comment's line, check if it ends in a line continuation character. (`\`)
    let is_on_same_logical_line = indexer
        .preceded_by_continuations(first_hash_col, locator.contents())
        .is_some();

    if let Some(mut diagnostic) = context
        .report_diagnostic_if_enabled(EmptyComment, TextRange::new(first_hash_col, line.end()))
    {
        diagnostic.set_fix(Fix::safe_edit(
            if let Some(deletion_start_col) = deletion_start_col {
                Edit::deletion(line.start() + deletion_start_col, line.end())
            } else if is_on_same_logical_line {
                Edit::deletion(first_hash_col, line.end())
            } else {
                Edit::range_deletion(locator.full_line_range(first_hash_col))
            },
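The `Indexer` is threaded through so the empty-comment (PLR2044) fix can tell when the line holding an empty comment is really the continuation of the previous logical line (joined by a trailing `\`). In that case only the comment itself is deleted; removing the whole physical line, as the old fix did, would eat part of the statement. A minimal sketch of the shape the new `empty_comment_line_continuation.py` fixture presumably covers (assumed, not copied from the fixture):

```python
# An empty comment at the end of a continued logical line:
total = 1 + \
    2  #
# The fix strips just the trailing "#", keeping "    2" so the continued
# statement still parses.
```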
@@ -6,8 +6,9 @@ use ruff_python_ast::{
use ruff_python_semantic::{SemanticModel, analyze::typing};
use ruff_text_size::Ranged;

use crate::Violation;
use crate::checkers::ast::Checker;
use crate::fix;
use crate::{AlwaysFixableViolation, Applicability, Edit, Fix};

/// ## What it does
/// Checks for access to the first or last element of `str.split()` or `str.rsplit()` without
@@ -35,10 +36,14 @@ use crate::checkers::ast::Checker;
/// url = "www.example.com"
/// suffix = url.rsplit(".", maxsplit=1)[-1]
/// ```
///
/// ## Fix Safety
/// This rule's fix is marked as unsafe for `split()`/`rsplit()` calls that contain `**kwargs`, as
/// adding a `maxsplit` keyword to such a call may lead to a duplicate keyword argument error.
#[derive(ViolationMetadata)]
pub(crate) struct MissingMaxsplitArg {
    index: SliceBoundary,
    actual_split_type: String,
    suggested_split_type: String,
}

/// Represents the index of the slice used for this rule (which can only be 0 or -1)
@@ -47,25 +52,27 @@ enum SliceBoundary {
    Last,
}

impl Violation for MissingMaxsplitArg {
impl AlwaysFixableViolation for MissingMaxsplitArg {
    #[derive_message_formats]
    fn message(&self) -> String {
        let MissingMaxsplitArg {
            index,
            actual_split_type,
            actual_split_type: _,
            suggested_split_type,
        } = self;

        let suggested_split_type = match index {
            SliceBoundary::First => "split",
            SliceBoundary::Last => "rsplit",
        };
        format!("Replace with `{suggested_split_type}(..., maxsplit=1)`.")
    }

    fn fix_title(&self) -> String {
        let MissingMaxsplitArg {
            actual_split_type,
            suggested_split_type,
        } = self;

        if actual_split_type == suggested_split_type {
            format!("Pass `maxsplit=1` into `str.{actual_split_type}()`")
        } else {
            format!(
                "Instead of `str.{actual_split_type}()`, call `str.{suggested_split_type}()` and pass `maxsplit=1`",
            )
            format!("Use `str.{suggested_split_type}()` and pass `maxsplit=1`")
        }
    }
}
@@ -123,8 +130,8 @@ pub(crate) fn missing_maxsplit_arg(checker: &Checker, value: &Expr, slice: &Expr
    };

    // Check the function is "split" or "rsplit"
    let attr = attr.as_str();
    if !matches!(attr, "split" | "rsplit") {
    let actual_split_type = attr.as_str();
    if !matches!(actual_split_type, "split" | "rsplit") {
        return;
    }

@@ -161,11 +168,48 @@ pub(crate) fn missing_maxsplit_arg(checker: &Checker, value: &Expr, slice: &Expr
        }
    }

    checker.report_diagnostic(
    let suggested_split_type = match slice_boundary {
        SliceBoundary::First => "split",
        SliceBoundary::Last => "rsplit",
    };

    let maxsplit_argument_edit = fix::edits::add_argument(
        "maxsplit=1",
        arguments,
        checker.comment_ranges(),
        checker.locator().contents(),
    );

    // Only change `actual_split_type` if it doesn't match `suggested_split_type`
    let split_type_edit: Option<Edit> = if actual_split_type == suggested_split_type {
        None
    } else {
        Some(Edit::range_replacement(
            suggested_split_type.to_string(),
            attr.range(),
        ))
    };

    let mut diagnostic = checker.report_diagnostic(
        MissingMaxsplitArg {
            index: slice_boundary,
            actual_split_type: attr.to_string(),
            actual_split_type: actual_split_type.to_string(),
            suggested_split_type: suggested_split_type.to_string(),
        },
        expr.range(),
    );

    diagnostic.set_fix(Fix::applicable_edits(
        maxsplit_argument_edit,
        split_type_edit,
        // If keyword.arg is `None` (i.e. if the function call contains `**kwargs`), mark the fix as unsafe
        if arguments
            .keywords
            .iter()
            .any(|keyword| keyword.arg.is_none())
        {
            Applicability::Unsafe
        } else {
            Applicability::Safe
        },
    ));
}
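With the switch to `AlwaysFixableViolation`, PLC0207 now rewrites the call instead of only reporting it: `add_argument` appends `maxsplit=1`, and when the indexed element and the method disagree (`[-1]` on `split()`, or `[0]` on `rsplit()`) the method name is swapped as well, so only one split is performed. Calls that forward `**kwargs` keep the fix but mark it unsafe, since `maxsplit` may already be supplied through the dict. A minimal sketch of the resulting rewrites:

```python
url = "www.example.com"

# Fixed by adding maxsplit:      url.split(".")[0]   ->  url.split(".", maxsplit=1)[0]
prefix = url.split(".", maxsplit=1)[0]

# Fixed by also switching sides: url.split(".")[-1]  ->  url.rsplit(".", maxsplit=1)[-1]
suffix = url.rsplit(".", maxsplit=1)[-1]

# A call forwarding **kwargs still gets the fix, but it is marked unsafe.
kwargs = {"sep": "."}
maybe_duplicate = url.split(**kwargs)[0]
```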
@@ -1,7 +1,7 @@
---
source: crates/ruff_linter/src/rules/pylint/mod.rs
---
missing_maxsplit_arg.py:14:1: PLC0207 Pass `maxsplit=1` into `str.split()`
missing_maxsplit_arg.py:14:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
   |
12 | # Errors
13 | ## Test split called directly on string literal
@@ -10,8 +10,19 @@ missing_maxsplit_arg.py:14:1: PLC0207 Pass `maxsplit=1` into `str.split()`
15 | "1,2,3".split(",")[-1] # [missing-maxsplit-arg]
16 | "1,2,3".rsplit(",")[0] # [missing-maxsplit-arg]
   |
   = help: Pass `maxsplit=1` into `str.split()`

missing_maxsplit_arg.py:15:1: PLC0207 Instead of `str.split()`, call `str.rsplit()` and pass `maxsplit=1`
ℹ Safe fix
11 11 |
12 12 | # Errors
13 13 | ## Test split called directly on string literal
14 |-"1,2,3".split(",")[0] # [missing-maxsplit-arg]
14 |+"1,2,3".split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
15 15 | "1,2,3".split(",")[-1] # [missing-maxsplit-arg]
16 16 | "1,2,3".rsplit(",")[0] # [missing-maxsplit-arg]
17 17 | "1,2,3".rsplit(",")[-1] # [missing-maxsplit-arg]

missing_maxsplit_arg.py:15:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
   |
13 | ## Test split called directly on string literal
14 | "1,2,3".split(",")[0] # [missing-maxsplit-arg]
@@ -20,8 +31,19 @@ missing_maxsplit_arg.py:15:1: PLC0207 Instead of `str.split()`, call `str.rsplit
16 | "1,2,3".rsplit(",")[0] # [missing-maxsplit-arg]
17 | "1,2,3".rsplit(",")[-1] # [missing-maxsplit-arg]
   |
   = help: Use `str.rsplit()` and pass `maxsplit=1`

missing_maxsplit_arg.py:16:1: PLC0207 Instead of `str.rsplit()`, call `str.split()` and pass `maxsplit=1`
ℹ Safe fix
12 12 | # Errors
13 13 | ## Test split called directly on string literal
14 14 | "1,2,3".split(",")[0] # [missing-maxsplit-arg]
15 |-"1,2,3".split(",")[-1] # [missing-maxsplit-arg]
15 |+"1,2,3".rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
16 16 | "1,2,3".rsplit(",")[0] # [missing-maxsplit-arg]
17 17 | "1,2,3".rsplit(",")[-1] # [missing-maxsplit-arg]
18 18 |

missing_maxsplit_arg.py:16:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
   |
14 | "1,2,3".split(",")[0] # [missing-maxsplit-arg]
15 | "1,2,3".split(",")[-1] # [missing-maxsplit-arg]
@@ -29,8 +51,19 @@ missing_maxsplit_arg.py:16:1: PLC0207 Instead of `str.rsplit()`, call `str.split
   | ^^^^^^^^^^^^^^^^^^^^^^ PLC0207
17 | "1,2,3".rsplit(",")[-1] # [missing-maxsplit-arg]
   |
   = help: Use `str.split()` and pass `maxsplit=1`

missing_maxsplit_arg.py:17:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
ℹ Safe fix
13 13 | ## Test split called directly on string literal
14 14 | "1,2,3".split(",")[0] # [missing-maxsplit-arg]
15 15 | "1,2,3".split(",")[-1] # [missing-maxsplit-arg]
16 |-"1,2,3".rsplit(",")[0] # [missing-maxsplit-arg]
16 |+"1,2,3".split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
17 17 | "1,2,3".rsplit(",")[-1] # [missing-maxsplit-arg]
18 18 |
19 19 | ## Test split called on string variable
missing_maxsplit_arg.py:17:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
15 | "1,2,3".split(",")[-1] # [missing-maxsplit-arg]
|
||||
16 | "1,2,3".rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -39,8 +72,19 @@ missing_maxsplit_arg.py:17:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
18 |
|
||||
19 | ## Test split called on string variable
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.rsplit()`
|
||||
|
||||
missing_maxsplit_arg.py:20:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
ℹ Safe fix
|
||||
14 14 | "1,2,3".split(",")[0] # [missing-maxsplit-arg]
|
||||
15 15 | "1,2,3".split(",")[-1] # [missing-maxsplit-arg]
|
||||
16 16 | "1,2,3".rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
17 |-"1,2,3".rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
17 |+"1,2,3".rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
|
||||
18 18 |
|
||||
19 19 | ## Test split called on string variable
|
||||
20 20 | SEQ.split(",")[0] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:20:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
19 | ## Test split called on string variable
|
||||
20 | SEQ.split(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -48,8 +92,19 @@ missing_maxsplit_arg.py:20:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
21 | SEQ.split(",")[-1] # [missing-maxsplit-arg]
|
||||
22 | SEQ.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.split()`
|
||||
|
||||
missing_maxsplit_arg.py:21:1: PLC0207 Instead of `str.split()`, call `str.rsplit()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
17 17 | "1,2,3".rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
18 18 |
|
||||
19 19 | ## Test split called on string variable
|
||||
20 |-SEQ.split(",")[0] # [missing-maxsplit-arg]
|
||||
20 |+SEQ.split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
21 21 | SEQ.split(",")[-1] # [missing-maxsplit-arg]
|
||||
22 22 | SEQ.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
23 23 | SEQ.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:21:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
19 | ## Test split called on string variable
|
||||
20 | SEQ.split(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -58,8 +113,19 @@ missing_maxsplit_arg.py:21:1: PLC0207 Instead of `str.split()`, call `str.rsplit
|
||||
22 | SEQ.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
23 | SEQ.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.rsplit()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:22:1: PLC0207 Instead of `str.rsplit()`, call `str.split()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
18 18 |
|
||||
19 19 | ## Test split called on string variable
|
||||
20 20 | SEQ.split(",")[0] # [missing-maxsplit-arg]
|
||||
21 |-SEQ.split(",")[-1] # [missing-maxsplit-arg]
|
||||
21 |+SEQ.rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
|
||||
22 22 | SEQ.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
23 23 | SEQ.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
24 24 |
|
||||
|
||||
missing_maxsplit_arg.py:22:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
20 | SEQ.split(",")[0] # [missing-maxsplit-arg]
|
||||
21 | SEQ.split(",")[-1] # [missing-maxsplit-arg]
|
||||
@@ -67,8 +133,19 @@ missing_maxsplit_arg.py:22:1: PLC0207 Instead of `str.rsplit()`, call `str.split
|
||||
| ^^^^^^^^^^^^^^^^^^ PLC0207
|
||||
23 | SEQ.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.split()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:23:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
ℹ Safe fix
|
||||
19 19 | ## Test split called on string variable
|
||||
20 20 | SEQ.split(",")[0] # [missing-maxsplit-arg]
|
||||
21 21 | SEQ.split(",")[-1] # [missing-maxsplit-arg]
|
||||
22 |-SEQ.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
22 |+SEQ.split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
23 23 | SEQ.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
24 24 |
|
||||
25 25 | ## Test split called on class attribute
|
||||
|
||||
missing_maxsplit_arg.py:23:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
21 | SEQ.split(",")[-1] # [missing-maxsplit-arg]
|
||||
22 | SEQ.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -77,8 +154,19 @@ missing_maxsplit_arg.py:23:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
24 |
|
||||
25 | ## Test split called on class attribute
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.rsplit()`
|
||||
|
||||
missing_maxsplit_arg.py:26:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
ℹ Safe fix
|
||||
20 20 | SEQ.split(",")[0] # [missing-maxsplit-arg]
|
||||
21 21 | SEQ.split(",")[-1] # [missing-maxsplit-arg]
|
||||
22 22 | SEQ.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
23 |-SEQ.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
23 |+SEQ.rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
|
||||
24 24 |
|
||||
25 25 | ## Test split called on class attribute
|
||||
26 26 | Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:26:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
25 | ## Test split called on class attribute
|
||||
26 | Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -86,8 +174,19 @@ missing_maxsplit_arg.py:26:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
27 | Foo.class_str.split(",")[-1] # [missing-maxsplit-arg]
|
||||
28 | Foo.class_str.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.split()`
|
||||
|
||||
missing_maxsplit_arg.py:27:1: PLC0207 Instead of `str.split()`, call `str.rsplit()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
23 23 | SEQ.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
24 24 |
|
||||
25 25 | ## Test split called on class attribute
|
||||
26 |-Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
|
||||
26 |+Foo.class_str.split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
27 27 | Foo.class_str.split(",")[-1] # [missing-maxsplit-arg]
|
||||
28 28 | Foo.class_str.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
29 29 | Foo.class_str.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:27:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
25 | ## Test split called on class attribute
|
||||
26 | Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -96,8 +195,19 @@ missing_maxsplit_arg.py:27:1: PLC0207 Instead of `str.split()`, call `str.rsplit
|
||||
28 | Foo.class_str.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
29 | Foo.class_str.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.rsplit()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:28:1: PLC0207 Instead of `str.rsplit()`, call `str.split()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
24 24 |
|
||||
25 25 | ## Test split called on class attribute
|
||||
26 26 | Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
|
||||
27 |-Foo.class_str.split(",")[-1] # [missing-maxsplit-arg]
|
||||
27 |+Foo.class_str.rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
|
||||
28 28 | Foo.class_str.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
29 29 | Foo.class_str.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
30 30 |
|
||||
|
||||
missing_maxsplit_arg.py:28:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
26 | Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
|
||||
27 | Foo.class_str.split(",")[-1] # [missing-maxsplit-arg]
|
||||
@@ -105,8 +215,19 @@ missing_maxsplit_arg.py:28:1: PLC0207 Instead of `str.rsplit()`, call `str.split
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PLC0207
|
||||
29 | Foo.class_str.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.split()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:29:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
ℹ Safe fix
|
||||
25 25 | ## Test split called on class attribute
|
||||
26 26 | Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
|
||||
27 27 | Foo.class_str.split(",")[-1] # [missing-maxsplit-arg]
|
||||
28 |-Foo.class_str.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
28 |+Foo.class_str.split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
29 29 | Foo.class_str.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
30 30 |
|
||||
31 31 | ## Test split called on sliced string
|
||||
|
||||
missing_maxsplit_arg.py:29:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
27 | Foo.class_str.split(",")[-1] # [missing-maxsplit-arg]
|
||||
28 | Foo.class_str.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -115,8 +236,19 @@ missing_maxsplit_arg.py:29:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
30 |
|
||||
31 | ## Test split called on sliced string
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.rsplit()`
|
||||
|
||||
missing_maxsplit_arg.py:32:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
ℹ Safe fix
|
||||
26 26 | Foo.class_str.split(",")[0] # [missing-maxsplit-arg]
|
||||
27 27 | Foo.class_str.split(",")[-1] # [missing-maxsplit-arg]
|
||||
28 28 | Foo.class_str.rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
29 |-Foo.class_str.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
29 |+Foo.class_str.rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
|
||||
30 30 |
|
||||
31 31 | ## Test split called on sliced string
|
||||
32 32 | "1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:32:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
31 | ## Test split called on sliced string
|
||||
32 | "1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -124,8 +256,19 @@ missing_maxsplit_arg.py:32:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
33 | "1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.split()`
|
||||
|
||||
missing_maxsplit_arg.py:33:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
ℹ Safe fix
|
||||
29 29 | Foo.class_str.rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
30 30 |
|
||||
31 31 | ## Test split called on sliced string
|
||||
32 |-"1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
32 |+"1,2,3"[::-1].split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
33 33 | "1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
34 34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
35 35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:33:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
31 | ## Test split called on sliced string
|
||||
32 | "1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -134,8 +277,19 @@ missing_maxsplit_arg.py:33:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.split()`
|
||||
|
||||
missing_maxsplit_arg.py:34:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
ℹ Safe fix
|
||||
30 30 |
|
||||
31 31 | ## Test split called on sliced string
|
||||
32 32 | "1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
33 |-"1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
33 |+"1,2,3"[::-1][::-1].split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
34 34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
35 35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
36 36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:34:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
32 | "1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
33 | "1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -144,8 +298,19 @@ missing_maxsplit_arg.py:34:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.split()`
|
||||
|
||||
missing_maxsplit_arg.py:35:1: PLC0207 Instead of `str.split()`, call `str.rsplit()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
31 31 | ## Test split called on sliced string
|
||||
32 32 | "1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
33 33 | "1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
34 |-SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
34 |+SEQ[:3].split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
35 35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
36 36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
37 37 | SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:35:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
33 | "1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -154,8 +319,19 @@ missing_maxsplit_arg.py:35:1: PLC0207 Instead of `str.split()`, call `str.rsplit
|
||||
36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
37 | SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.rsplit()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:36:1: PLC0207 Instead of `str.rsplit()`, call `str.split()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
32 32 | "1,2,3"[::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
33 33 | "1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
34 34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
35 |-Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
35 |+Foo.class_str[1:3].rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
|
||||
36 36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
37 37 | SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
38 38 | Foo.class_str[1:3].rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:36:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
@@ -164,8 +340,19 @@ missing_maxsplit_arg.py:36:1: PLC0207 Instead of `str.rsplit()`, call `str.split
|
||||
37 | SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
38 | Foo.class_str[1:3].rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.split()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:37:1: PLC0207 Instead of `str.rsplit()`, call `str.split()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
33 33 | "1,2,3"[::-1][::-1].split(",")[0] # [missing-maxsplit-arg]
|
||||
34 34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
35 35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
36 |-"1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
36 |+"1,2,3"[::-1].split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
37 37 | SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
38 38 | Foo.class_str[1:3].rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
39 39 |
|
||||
|
||||
missing_maxsplit_arg.py:37:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -173,8 +360,19 @@ missing_maxsplit_arg.py:37:1: PLC0207 Instead of `str.rsplit()`, call `str.split
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^ PLC0207
|
||||
38 | Foo.class_str[1:3].rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.split()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:38:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
ℹ Safe fix
|
||||
34 34 | SEQ[:3].split(",")[0] # [missing-maxsplit-arg]
|
||||
35 35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
36 36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
37 |-SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
37 |+SEQ[:3].split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
38 38 | Foo.class_str[1:3].rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
39 39 |
|
||||
40 40 | ## Test sep given as named argument
|
||||
|
||||
missing_maxsplit_arg.py:38:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
37 | SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
@@ -183,8 +381,19 @@ missing_maxsplit_arg.py:38:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
39 |
|
||||
40 | ## Test sep given as named argument
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.rsplit()`
|
||||
|
||||
missing_maxsplit_arg.py:41:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
ℹ Safe fix
|
||||
35 35 | Foo.class_str[1:3].split(",")[-1] # [missing-maxsplit-arg]
|
||||
36 36 | "1,2,3"[::-1].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
37 37 | SEQ[:3].rsplit(",")[0] # [missing-maxsplit-arg]
|
||||
38 |-Foo.class_str[1:3].rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
38 |+Foo.class_str[1:3].rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
|
||||
39 39 |
|
||||
40 40 | ## Test sep given as named argument
|
||||
41 41 | "1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:41:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
40 | ## Test sep given as named argument
|
||||
41 | "1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
|
||||
@@ -192,8 +401,19 @@ missing_maxsplit_arg.py:41:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
42 | "1,2,3".split(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
43 | "1,2,3".rsplit(sep=",")[0] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.split()`
|
||||
|
||||
missing_maxsplit_arg.py:42:1: PLC0207 Instead of `str.split()`, call `str.rsplit()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
38 38 | Foo.class_str[1:3].rsplit(",")[-1] # [missing-maxsplit-arg]
|
||||
39 39 |
|
||||
40 40 | ## Test sep given as named argument
|
||||
41 |-"1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
|
||||
41 |+"1,2,3".split(maxsplit=1, sep=",")[0] # [missing-maxsplit-arg]
|
||||
42 42 | "1,2,3".split(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
43 43 | "1,2,3".rsplit(sep=",")[0] # [missing-maxsplit-arg]
|
||||
44 44 | "1,2,3".rsplit(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:42:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
40 | ## Test sep given as named argument
|
||||
41 | "1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
|
||||
@@ -202,8 +422,19 @@ missing_maxsplit_arg.py:42:1: PLC0207 Instead of `str.split()`, call `str.rsplit
|
||||
43 | "1,2,3".rsplit(sep=",")[0] # [missing-maxsplit-arg]
|
||||
44 | "1,2,3".rsplit(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.rsplit()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:43:1: PLC0207 Instead of `str.rsplit()`, call `str.split()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
39 39 |
|
||||
40 40 | ## Test sep given as named argument
|
||||
41 41 | "1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
|
||||
42 |-"1,2,3".split(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
42 |+"1,2,3".rsplit(maxsplit=1, sep=",")[-1] # [missing-maxsplit-arg]
|
||||
43 43 | "1,2,3".rsplit(sep=",")[0] # [missing-maxsplit-arg]
|
||||
44 44 | "1,2,3".rsplit(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
45 45 |
|
||||
|
||||
missing_maxsplit_arg.py:43:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
41 | "1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
|
||||
42 | "1,2,3".split(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
@@ -211,8 +442,19 @@ missing_maxsplit_arg.py:43:1: PLC0207 Instead of `str.rsplit()`, call `str.split
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PLC0207
|
||||
44 | "1,2,3".rsplit(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Use `str.split()` and pass `maxsplit=1`
|
||||
|
||||
missing_maxsplit_arg.py:44:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
ℹ Safe fix
|
||||
40 40 | ## Test sep given as named argument
|
||||
41 41 | "1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
|
||||
42 42 | "1,2,3".split(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
43 |-"1,2,3".rsplit(sep=",")[0] # [missing-maxsplit-arg]
|
||||
43 |+"1,2,3".split(maxsplit=1, sep=",")[0] # [missing-maxsplit-arg]
|
||||
44 44 | "1,2,3".rsplit(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
45 45 |
|
||||
46 46 | ## Special cases
|
||||
|
||||
missing_maxsplit_arg.py:44:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
||||
|
|
||||
42 | "1,2,3".split(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
43 | "1,2,3".rsplit(sep=",")[0] # [missing-maxsplit-arg]
|
||||
@@ -221,8 +463,19 @@ missing_maxsplit_arg.py:44:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
|
||||
45 |
|
||||
46 | ## Special cases
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.rsplit()`
|
||||
|
||||
missing_maxsplit_arg.py:47:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
ℹ Safe fix
|
||||
41 41 | "1,2,3".split(sep=",")[0] # [missing-maxsplit-arg]
|
||||
42 42 | "1,2,3".split(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
43 43 | "1,2,3".rsplit(sep=",")[0] # [missing-maxsplit-arg]
|
||||
44 |-"1,2,3".rsplit(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
44 |+"1,2,3".rsplit(maxsplit=1, sep=",")[-1] # [missing-maxsplit-arg]
|
||||
45 45 |
|
||||
46 46 | ## Special cases
|
||||
47 47 | "1,2,3".split("\n")[0] # [missing-maxsplit-arg]
|
||||
|
||||
missing_maxsplit_arg.py:47:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
||||
|
|
||||
46 | ## Special cases
|
||||
47 | "1,2,3".split("\n")[0] # [missing-maxsplit-arg]
|
||||
@@ -230,8 +483,19 @@ missing_maxsplit_arg.py:47:1: PLC0207 Pass `maxsplit=1` into `str.split()`
|
||||
48 | "1,2,3".split("split")[-1] # [missing-maxsplit-arg]
|
||||
49 | "1,2,3".rsplit("rsplit")[0] # [missing-maxsplit-arg]
|
||||
|
|
||||
= help: Pass `maxsplit=1` into `str.split()`
|
||||
|
||||
missing_maxsplit_arg.py:48:1: PLC0207 Instead of `str.split()`, call `str.rsplit()` and pass `maxsplit=1`
|
||||
ℹ Safe fix
|
||||
44 44 | "1,2,3".rsplit(sep=",")[-1] # [missing-maxsplit-arg]
|
||||
45 45 |
|
||||
46 46 | ## Special cases
|
||||
47 |-"1,2,3".split("\n")[0] # [missing-maxsplit-arg]
|
||||
47 |+"1,2,3".split("\n", maxsplit=1)[0] # [missing-maxsplit-arg]
|
||||
48 48 | "1,2,3".split("split")[-1] # [missing-maxsplit-arg]
|
||||
49 49 | "1,2,3".rsplit("rsplit")[0] # [missing-maxsplit-arg]
|
||||
50 50 |
|
||||
|
||||
missing_maxsplit_arg.py:48:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
46 | ## Special cases
47 | "1,2,3".split("\n")[0] # [missing-maxsplit-arg]
@@ -239,8 +503,19 @@ missing_maxsplit_arg.py:48:1: PLC0207 Instead of `str.split()`, call `str.rsplit
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PLC0207
49 | "1,2,3".rsplit("rsplit")[0] # [missing-maxsplit-arg]
|
= help: Use `str.rsplit()` and pass `maxsplit=1`

missing_maxsplit_arg.py:49:1: PLC0207 Instead of `str.rsplit()`, call `str.split()` and pass `maxsplit=1`
ℹ Safe fix
45 45 |
46 46 | ## Special cases
47 47 | "1,2,3".split("\n")[0] # [missing-maxsplit-arg]
48 |-"1,2,3".split("split")[-1] # [missing-maxsplit-arg]
48 |+"1,2,3".rsplit("split", maxsplit=1)[-1] # [missing-maxsplit-arg]
49 49 | "1,2,3".rsplit("rsplit")[0] # [missing-maxsplit-arg]
50 50 |
51 51 | ## Test class attribute named split

missing_maxsplit_arg.py:49:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
47 | "1,2,3".split("\n")[0] # [missing-maxsplit-arg]
48 | "1,2,3".split("split")[-1] # [missing-maxsplit-arg]
@@ -249,8 +524,19 @@ missing_maxsplit_arg.py:49:1: PLC0207 Instead of `str.rsplit()`, call `str.split
50 |
51 | ## Test class attribute named split
|
= help: Use `str.split()` and pass `maxsplit=1`

missing_maxsplit_arg.py:52:1: PLC0207 Pass `maxsplit=1` into `str.split()`
ℹ Safe fix
46 46 | ## Special cases
47 47 | "1,2,3".split("\n")[0] # [missing-maxsplit-arg]
48 48 | "1,2,3".split("split")[-1] # [missing-maxsplit-arg]
49 |-"1,2,3".rsplit("rsplit")[0] # [missing-maxsplit-arg]
49 |+"1,2,3".split("rsplit", maxsplit=1)[0] # [missing-maxsplit-arg]
50 50 |
51 51 | ## Test class attribute named split
52 52 | Bar.split.split(",")[0] # [missing-maxsplit-arg]

missing_maxsplit_arg.py:52:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
51 | ## Test class attribute named split
52 | Bar.split.split(",")[0] # [missing-maxsplit-arg]
@@ -258,8 +544,19 @@ missing_maxsplit_arg.py:52:1: PLC0207 Pass `maxsplit=1` into `str.split()`
53 | Bar.split.split(",")[-1] # [missing-maxsplit-arg]
54 | Bar.split.rsplit(",")[0] # [missing-maxsplit-arg]
|
= help: Pass `maxsplit=1` into `str.split()`

missing_maxsplit_arg.py:53:1: PLC0207 Instead of `str.split()`, call `str.rsplit()` and pass `maxsplit=1`
ℹ Safe fix
49 49 | "1,2,3".rsplit("rsplit")[0] # [missing-maxsplit-arg]
50 50 |
51 51 | ## Test class attribute named split
52 |-Bar.split.split(",")[0] # [missing-maxsplit-arg]
52 |+Bar.split.split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
53 53 | Bar.split.split(",")[-1] # [missing-maxsplit-arg]
54 54 | Bar.split.rsplit(",")[0] # [missing-maxsplit-arg]
55 55 | Bar.split.rsplit(",")[-1] # [missing-maxsplit-arg]

missing_maxsplit_arg.py:53:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
51 | ## Test class attribute named split
52 | Bar.split.split(",")[0] # [missing-maxsplit-arg]
@@ -268,8 +565,19 @@ missing_maxsplit_arg.py:53:1: PLC0207 Instead of `str.split()`, call `str.rsplit
54 | Bar.split.rsplit(",")[0] # [missing-maxsplit-arg]
55 | Bar.split.rsplit(",")[-1] # [missing-maxsplit-arg]
|
= help: Use `str.rsplit()` and pass `maxsplit=1`

missing_maxsplit_arg.py:54:1: PLC0207 Instead of `str.rsplit()`, call `str.split()` and pass `maxsplit=1`
ℹ Safe fix
50 50 |
51 51 | ## Test class attribute named split
52 52 | Bar.split.split(",")[0] # [missing-maxsplit-arg]
53 |-Bar.split.split(",")[-1] # [missing-maxsplit-arg]
53 |+Bar.split.rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
54 54 | Bar.split.rsplit(",")[0] # [missing-maxsplit-arg]
55 55 | Bar.split.rsplit(",")[-1] # [missing-maxsplit-arg]
56 56 |

missing_maxsplit_arg.py:54:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
52 | Bar.split.split(",")[0] # [missing-maxsplit-arg]
53 | Bar.split.split(",")[-1] # [missing-maxsplit-arg]
@@ -277,8 +585,19 @@ missing_maxsplit_arg.py:54:1: PLC0207 Instead of `str.rsplit()`, call `str.split
| ^^^^^^^^^^^^^^^^^^^^^^^^ PLC0207
55 | Bar.split.rsplit(",")[-1] # [missing-maxsplit-arg]
|
= help: Use `str.split()` and pass `maxsplit=1`

missing_maxsplit_arg.py:55:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
ℹ Safe fix
51 51 | ## Test class attribute named split
52 52 | Bar.split.split(",")[0] # [missing-maxsplit-arg]
53 53 | Bar.split.split(",")[-1] # [missing-maxsplit-arg]
54 |-Bar.split.rsplit(",")[0] # [missing-maxsplit-arg]
54 |+Bar.split.split(",", maxsplit=1)[0] # [missing-maxsplit-arg]
55 55 | Bar.split.rsplit(",")[-1] # [missing-maxsplit-arg]
56 56 |
57 57 | ## Test unpacked dict literal kwargs

missing_maxsplit_arg.py:55:1: PLC0207 [*] Replace with `rsplit(..., maxsplit=1)`.
|
53 | Bar.split.split(",")[-1] # [missing-maxsplit-arg]
54 | Bar.split.rsplit(",")[0] # [missing-maxsplit-arg]
@@ -287,15 +606,37 @@ missing_maxsplit_arg.py:55:1: PLC0207 Pass `maxsplit=1` into `str.rsplit()`
56 |
57 | ## Test unpacked dict literal kwargs
|
= help: Pass `maxsplit=1` into `str.rsplit()`

missing_maxsplit_arg.py:58:1: PLC0207 Pass `maxsplit=1` into `str.split()`
ℹ Safe fix
52 52 | Bar.split.split(",")[0] # [missing-maxsplit-arg]
53 53 | Bar.split.split(",")[-1] # [missing-maxsplit-arg]
54 54 | Bar.split.rsplit(",")[0] # [missing-maxsplit-arg]
55 |-Bar.split.rsplit(",")[-1] # [missing-maxsplit-arg]
55 |+Bar.split.rsplit(",", maxsplit=1)[-1] # [missing-maxsplit-arg]
56 56 |
57 57 | ## Test unpacked dict literal kwargs
58 58 | "1,2,3".split(**{"sep": ","})[0] # [missing-maxsplit-arg]

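The `Bar.split.split(...)` hunks above come from the fixture's "class attribute named split" section: the receiver is itself an attribute that happens to be called `split`, and the diagnostics and fixes still apply to the outer `.split()`/`.rsplit()` call. The real `Bar` definition is not part of this diff, so the following is only a hypothetical stand-in:

class Bar:
    # Hypothetical stand-in for the fixture's attribute; the actual definition
    # lives outside the lines shown in this diff.
    split = "1,2,3"

first = Bar.split.split(",")[0]              # flagged by PLC0207
first = Bar.split.split(",", maxsplit=1)[0]  # fixed form shown in the hunk above
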
missing_maxsplit_arg.py:58:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
57 | ## Test unpacked dict literal kwargs
58 | "1,2,3".split(**{"sep": ","})[0] # [missing-maxsplit-arg]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PLC0207
|
= help: Pass `maxsplit=1` into `str.split()`

missing_maxsplit_arg.py:179:1: PLC0207 Pass `maxsplit=1` into `str.split()`
ℹ Unsafe fix
55 55 | Bar.split.rsplit(",")[-1] # [missing-maxsplit-arg]
56 56 |
57 57 | ## Test unpacked dict literal kwargs
58 |-"1,2,3".split(**{"sep": ","})[0] # [missing-maxsplit-arg]
58 |+"1,2,3".split(maxsplit=1, **{"sep": ","})[0] # [missing-maxsplit-arg]
59 59 |
60 60 |
61 61 | # OK

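Note that the fixes involving `**` unpacking are emitted as "Unsafe fix" rather than "Safe fix". A sketch of the likely hazard, under the assumption that this is what motivates the downgrade (the diff itself does not say): if the unpacked mapping already supplies `maxsplit`, the keyword inserted by the fix collides with it at call time.

kwargs_with_maxsplit = {"sep": ",", "maxsplit": 1}

# The unsafe fix would produce a call shaped like this; with the dict above it
# raises TypeError because 'maxsplit' is passed twice.
try:
    "1,2,3".split(maxsplit=1, **kwargs_with_maxsplit)
except TypeError as exc:
    print(exc)
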
missing_maxsplit_arg.py:179:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
177 | # Errors
178 | kwargs_without_maxsplit = {"seq": ","}
@@ -304,8 +645,19 @@ missing_maxsplit_arg.py:179:1: PLC0207 Pass `maxsplit=1` into `str.split()`
180 | # OK
181 | kwargs_with_maxsplit = {"maxsplit": 1}
|
= help: Pass `maxsplit=1` into `str.split()`

missing_maxsplit_arg.py:182:1: PLC0207 Pass `maxsplit=1` into `str.split()`
ℹ Unsafe fix
176 176 | ## TODO: These require the ability to resolve a dict variable name to a value
177 177 | # Errors
178 178 | kwargs_without_maxsplit = {"seq": ","}
179 |-"1,2,3".split(**kwargs_without_maxsplit)[0] # TODO: [missing-maxsplit-arg]
179 |+"1,2,3".split(maxsplit=1, **kwargs_without_maxsplit)[0] # TODO: [missing-maxsplit-arg]
180 180 | # OK
181 181 | kwargs_with_maxsplit = {"maxsplit": 1}
182 182 | "1,2,3".split(",", **kwargs_with_maxsplit)[0] # TODO: false positive

missing_maxsplit_arg.py:182:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
180 | # OK
181 | kwargs_with_maxsplit = {"maxsplit": 1}
@@ -314,11 +666,29 @@ missing_maxsplit_arg.py:182:1: PLC0207 Pass `maxsplit=1` into `str.split()`
183 | kwargs_with_maxsplit = {"sep": ",", "maxsplit": 1}
184 | "1,2,3".split(**kwargs_with_maxsplit)[0] # TODO: false positive
|
= help: Pass `maxsplit=1` into `str.split()`

missing_maxsplit_arg.py:184:1: PLC0207 Pass `maxsplit=1` into `str.split()`
ℹ Unsafe fix
179 179 | "1,2,3".split(**kwargs_without_maxsplit)[0] # TODO: [missing-maxsplit-arg]
180 180 | # OK
181 181 | kwargs_with_maxsplit = {"maxsplit": 1}
182 |-"1,2,3".split(",", **kwargs_with_maxsplit)[0] # TODO: false positive
182 |+"1,2,3".split(",", maxsplit=1, **kwargs_with_maxsplit)[0] # TODO: false positive
183 183 | kwargs_with_maxsplit = {"sep": ",", "maxsplit": 1}
184 184 | "1,2,3".split(**kwargs_with_maxsplit)[0] # TODO: false positive

missing_maxsplit_arg.py:184:1: PLC0207 [*] Replace with `split(..., maxsplit=1)`.
|
182 | "1,2,3".split(",", **kwargs_with_maxsplit)[0] # TODO: false positive
183 | kwargs_with_maxsplit = {"sep": ",", "maxsplit": 1}
184 | "1,2,3".split(**kwargs_with_maxsplit)[0] # TODO: false positive
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PLC0207
|
= help: Pass `maxsplit=1` into `str.split()`

ℹ Unsafe fix
181 181 | kwargs_with_maxsplit = {"maxsplit": 1}
182 182 | "1,2,3".split(",", **kwargs_with_maxsplit)[0] # TODO: false positive
183 183 | kwargs_with_maxsplit = {"sep": ",", "maxsplit": 1}
184 |-"1,2,3".split(**kwargs_with_maxsplit)[0] # TODO: false positive
184 |+"1,2,3".split(maxsplit=1, **kwargs_with_maxsplit)[0] # TODO: false positive

@@ -0,0 +1,36 @@
---
source: crates/ruff_linter/src/rules/pylint/mod.rs
---
empty_comment_line_continuation.py:1:1: PLR2044 [*] Line with empty comment
|
1 | #
| ^ PLR2044
2 | x = 0 \
3 | #
|
= help: Delete the empty comment

ℹ Safe fix
1 |-#
2 1 | x = 0 \
3 2 | #
4 3 | +1

empty_comment_line_continuation.py:3:1: PLR2044 [*] Line with empty comment
|
1 | #
2 | x = 0 \
3 | #
| ^ PLR2044
4 | +1
5 | print(x)
|
= help: Delete the empty comment

ℹ Safe fix
1 1 | #
2 2 | x = 0 \
3 |-#
3 |+
4 4 | +1
5 5 | print(x)

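This new snapshot exercises PLR2044 (empty comment) around a backslash continuation. Note the asymmetry in the two safe fixes: the standalone empty comment on line 1 is deleted outright, while the empty comment on line 3 is only blanked. Presumably that is because line 3 is the continuation target of line 2; removing the line entirely would let the backslash join onto `+1` and change the program. A small illustration of that hazard (reconstructed from the frame above, so treat spacing as approximate):

x = 0 \
#
+1
print(x)  # prints 0: the continuation ends at the (empty) comment line

# If the fix removed that comment line instead of blanking it, the source
# would effectively become the following, where the continuation absorbs +1:
x = 0 \
+1
print(x)  # prints 1
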
@@ -104,6 +104,13 @@ pub(crate) fn reimplemented_operator(checker: &Checker, target: &FunctionLike) {
        return;
    }

    // Skip decorated functions
    if let FunctionLike::Function(func) = target {
        if !func.decorator_list.is_empty() {
            return;
        }
    }

    let Some(params) = target.parameters() else {
        return;
    };

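The new early return above makes the rule bail out on decorated function definitions. Assuming this is Ruff's reimplemented-operator rule (FURB118), the motivation would be that rewriting a decorated function into a bare `operator.*` reference would silently drop whatever behavior the decorator adds. A hypothetical case the check now skips:

import functools

@functools.lru_cache(maxsize=None)
def add(x, y):
    # Structurally this reimplements operator.add, but replacing the function
    # with `operator.add` would discard the caching added by the decorator,
    # so a decorated definition is now left alone.
    return x + y
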
Some files were not shown because too many files have changed in this diff