Compare commits
10 commits: david/data...zb/dev-dri
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 40b4aa28f9 |  |
|  | ea4bf00c23 |  |
|  | 7f4aa4b3fb |  |
|  | 34c98361ae |  |
|  | 38bb96a6c2 |  |
|  | a014d55455 |  |
|  | 306f6f17a9 |  |
|  | b233888f00 |  |
|  | 540cbd9085 |  |
|  | 0112f7f0e4 |  |
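The same comparison can be reproduced locally with plain git, assuming the two branch names shown above (as displayed, possibly truncated) resolve to fetched refs:

```bash
# List the commits that are on zb/dev-dri but not on david/data
git log --oneline david/data..zb/dev-dri

# Show the combined diff against the merge base, as GitHub's three-dot compare does
git diff --stat david/data...zb/dev-dri
```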
.github/workflows/ci.yaml (vendored, 66 changed lines)
@@ -214,7 +214,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: |
rustup component add clippy
@@ -234,17 +234,17 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-insta
- name: ty mdtests (GitHub annotations)
@@ -292,17 +292,17 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-insta
- name: "Run tests"
@@ -321,14 +321,30 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: Setup Dev Drive
run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1

# actions/checkout does not let us clone into anywhere outside `github.workspace`, so we have to copy the clone
- name: Copy Git Repo to Dev Drive
env:
RUFF_WORKSPACE: ${{ env.RUFF_WORKSPACE }}
run: |
Copy-Item -Path "${{ github.workspace }}" -Destination "${env:RUFF_WORKSPACE}" -Recurse

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: ${{ env.RUFF_WORKSPACE }}

- name: "Install Rust toolchain"
working-directory: ${{ env.RUFF_WORKSPACE }}
run: rustup show

- name: "Install cargo nextest"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-nextest
- name: "Run tests"
working-directory: ${{ env.RUFF_WORKSPACE }}
shell: bash
env:
NEXTEST_PROFILE: "ci"
@@ -348,7 +364,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
@@ -377,7 +393,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -400,7 +416,7 @@ jobs:
with:
file: "Cargo.toml"
field: "workspace.package.rust-version"
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
env:
MSRV: ${{ steps.msrv.outputs.value }}
@@ -408,11 +424,11 @@ jobs:
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-insta
- name: "Run tests"
@@ -432,7 +448,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: "fuzz -> target"
- name: "Install Rust toolchain"
@@ -494,7 +510,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup component add rustfmt
# Run all code generation scripts, and verify that the current output is
@@ -708,7 +724,7 @@ jobs:
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
@@ -732,7 +748,7 @@ jobs:
with:
persist-credentials: false
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22
@@ -765,7 +781,7 @@ jobs:
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "3.13"
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
@@ -804,7 +820,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Run checks"
@@ -874,7 +890,7 @@ jobs:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22
@@ -905,14 +921,14 @@ jobs:
with:
persist-credentials: false

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

- name: "Install Rust toolchain"
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-codspeed

@@ -938,14 +954,14 @@ jobs:
with:
persist-credentials: false

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

- name: "Install Rust toolchain"
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
with:
tool: cargo-codspeed
.github/workflows/daily_fuzz.yaml (vendored, 2 changed lines)
@@ -39,7 +39,7 @@ jobs:
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Build ruff
# A debug build means the script runs slower once it gets started,
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
.github/workflows/mypy_primer.yaml (vendored, 82 changed lines)
@@ -39,7 +39,7 @@ jobs:
- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: "ruff"

@@ -49,12 +49,46 @@ jobs:
- name: Run mypy_primer
shell: bash
env:
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
DIFF_FILE: mypy_primer.diff
TY_MEMORY_REPORT: mypy_primer
run: |
cd ruff
scripts/mypy_primer.sh
echo ${{ github.event.number }} > ../pr-number

echo "Enabling mypy primer specific configuration overloads (see .github/mypy-primer-ty.toml)"
mkdir -p ~/.config/ty
cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml

PRIMER_SELECTOR="$(paste -s -d'|' crates/ty_python_semantic/resources/primer/good.txt)"

echo "new commit"
git rev-list --format=%s --max-count=1 "$GITHUB_SHA"

MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
git checkout -b base_commit "$MERGE_BASE"
echo "base commit"
git rev-list --format=%s --max-count=1 base_commit

cd ..

echo "Project selector: $PRIMER_SELECTOR"
# Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
uvx \
--from="git+https://github.com/hauntsaninja/mypy_primer@e5f55447969d33ae3c7ccdb183e2a37101867270" \
mypy_primer \
--repo ruff \
--type-checker ty \
--old base_commit \
--new "$GITHUB_SHA" \
--project-selector "/($PRIMER_SELECTOR)\$" \
--output concise \
--debug > mypy_primer.diff || [ $? -eq 1 ]

# Output diff with ANSI color codes
cat mypy_primer.diff

# Remove ANSI color codes before uploading
sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff

echo ${{ github.event.number }} > pr-number

- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -67,41 +101,3 @@ jobs:
with:
name: pr-number
path: pr-number

memory_usage:
name: Run memory statistics
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: "ruff"

- name: Install Rust toolchain
run: rustup show

- name: Run mypy_primer
shell: bash
env:
TY_MAX_PARALLELISM: 1 # for deterministic memory numbers
TY_MEMORY_REPORT: mypy_primer
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/memory.txt
DIFF_FILE: mypy_primer_memory.diff
run: |
cd ruff
scripts/mypy_primer.sh

- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: mypy_primer_memory_diff
path: mypy_primer_memory.diff
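The inline "Run mypy_primer" script in this file builds a single regex alternation out of the project list before handing it to mypy_primer via `--project-selector`. A standalone sketch of that selector construction, with an illustrative file name and project names:

```bash
# Hypothetical contents of good.txt: one project name per line.
printf 'project-a\nproject-b\nproject-c\n' > good.txt

# `paste -s -d'|'` joins the lines into "project-a|project-b|project-c" ...
PRIMER_SELECTOR="$(paste -s -d'|' good.txt)"

# ... which is then wrapped into the anchored regex passed on the command line.
echo "--project-selector \"/(${PRIMER_SELECTOR})\$\""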
.github/workflows/mypy_primer_comment.yaml (vendored, 31 changed lines)
@@ -45,28 +45,15 @@ jobs:
if_no_artifact_found: ignore
allow_forks: true

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download mypy_primer memory results"
id: download-mypy_primer_memory_diff
if: steps.pr-number.outputs.pr-number
with:
name: mypy_primer_memory_diff
workflow: mypy_primer.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/mypy_primer_memory_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- name: Generate comment content
id: generate-comment
if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
run: |
# Guard against malicious mypy_primer results that symlink to a secret
# file on this runner
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
then
echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
echo "Error: mypy_primer.diff cannot be a symlink"
exit 1
fi

@@ -87,18 +74,6 @@ jobs:
echo 'No ecosystem changes detected ✅' >> comment.txt
fi

if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No memory usage changes detected ✅' >> comment.txt
fi

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
.github/workflows/publish-docs.yml (vendored, 2 changed lines)
@@ -68,7 +68,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
.github/workflows/setup-dev-drive.ps1 (vendored, new file, 93 lines)
@@ -0,0 +1,93 @@
# Configures a drive for testing in CI.
#
# When using standard GitHub Actions runners, a `D:` drive is present and has
# similar or better performance characteristics than a ReFS dev drive. Sometimes
# using a larger runner is still more performant (e.g., when running the test
# suite) and we need to create a dev drive. This script automatically configures
# the appropriate drive.
#
# When using GitHub Actions' "larger runners", the `D:` drive is not present and
# we create a DevDrive mount on `C:`. This is purported to be more performant
# than an ReFS drive, though we did not see a change when we switched over.
#
# When using Depot runners, the underling infrastructure is EC2, which does not
# support Hyper-V. The `New-VHD` commandlet only works with Hyper-V, but we can
# create a ReFS drive using `diskpart` and `format` directory. We cannot use a
# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already,
# so we must check if it's a Depot runner first, and we use `V:` as the target
# instead.

if ($env:DEPOT_RUNNER -eq "1") {
    Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..."

    # Create VHD and configure drive using diskpart
    $vhdPath = "C:\ruff_dev_drive.vhdx"
    @"
create vdisk file="$vhdPath" maximum=20480 type=expandable
attach vdisk
create partition primary
active
assign letter=V
"@ | diskpart

    # Format the drive as ReFS
    format V: /fs:ReFS /q /y
    $Drive = "V:"

    Write-Output "Custom dev drive created at $Drive"
} elseif (Test-Path "D:\") {
    # Note `Get-PSDrive` is not sufficient because the drive letter is assigned.
    Write-Output "Using existing drive at D:"
    $Drive = "D:"
} else {
    # The size (20 GB) is chosen empirically to be large enough for our
    # workflows; larger drives can take longer to set up.
    $Volume = New-VHD -Path C:/ruff_dev_drive.vhdx -SizeBytes 20GB |
        Mount-VHD -Passthru |
        Initialize-Disk -Passthru |
        New-Partition -AssignDriveLetter -UseMaximumSize |
        Format-Volume -DevDrive -Confirm:$false -Force

    $Drive = "$($Volume.DriveLetter):"

    # Set the drive as trusted
    # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-designate-a-dev-drive-as-trusted
    fsutil devdrv trust $Drive

    # Disable antivirus filtering on dev drives
    # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-configure-additional-filters-on-dev-drive
    fsutil devdrv enable /disallowAv

    # Remount so the changes take effect
    Dismount-VHD -Path C:/ruff_dev_drive.vhdx
    Mount-VHD -Path C:/ruff_dev_drive.vhdx

    # Show some debug information
    Write-Output $Volume
    fsutil devdrv query $Drive

    Write-Output "Using Dev Drive at $Volume"
}

$Tmp = "$($Drive)\ruff-tmp"

# Create the directory ahead of time in an attempt to avoid race-conditions
New-Item $Tmp -ItemType Directory

# Move Cargo to the dev drive
New-Item -Path "$($Drive)/.cargo/bin" -ItemType Directory -Force
if (Test-Path "C:/Users/runneradmin/.cargo") {
    Copy-Item -Path "C:/Users/runneradmin/.cargo/*" -Destination "$($Drive)/.cargo/" -Recurse -Force
}

Write-Output `
    "DEV_DRIVE=$($Drive)" `
    "TMP=$($Tmp)" `
    "TEMP=$($Tmp)" `
    "UV_INTERNAL__TEST_DIR=$($Tmp)" `
    "RUSTUP_HOME=$($Drive)/.rustup" `
    "CARGO_HOME=$($Drive)/.cargo" `
    "RUFF_WORKSPACE=$($Drive)/ruff" `
    "PATH=$($Drive)/.cargo/bin;$env:PATH" `
    >> $env:GITHUB_ENV
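The script's final Write-Output block appends KEY=value lines to the file named by GITHUB_ENV, which is how the dev-drive paths (RUFF_WORKSPACE, CARGO_HOME, and so on) become visible to later steps in the same job. A minimal sketch of the equivalent pattern in a bash step, with an illustrative value:

```bash
# Appending KEY=value lines to the file named by $GITHUB_ENV exports those
# variables to every subsequent step of the job.
echo "RUFF_WORKSPACE=$RUNNER_TEMP/ruff" >> "$GITHUB_ENV"
```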
.github/workflows/ty-ecosystem-analyzer.yaml (vendored, 2 changed lines)
@@ -34,7 +34,7 @@ jobs:
- name: Install the latest version of uv
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: "ruff"
@@ -67,7 +67,7 @@ repos:
- black==25.1.0

- repo: https://github.com/crate-ci/typos
rev: v1.34.0
rev: v1.33.1
hooks:
- id: typos

@@ -81,7 +81,7 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.2
rev: v0.12.1
hooks:
- id: ruff-format
- id: ruff
@@ -99,12 +99,12 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.11.0
rev: v1.10.0
hooks:
- id: zizmor

- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.2
rev: 0.33.1
hooks:
- id: check-github-workflows
CHANGELOG.md (56 changed lines)
@@ -1,61 +1,5 @@
# Changelog

## 0.12.2

### Preview features

- \[`flake8-pyi`\] Expand `Optional[A]` to `A | None` (`PYI016`) ([#18572](https://github.com/astral-sh/ruff/pull/18572))
- \[`pyupgrade`\] Mark `UP008` fix safe if no comments are in range ([#18683](https://github.com/astral-sh/ruff/pull/18683))

### Bug fixes

- \[`flake8-comprehensions`\] Fix `C420` to prepend whitespace when needed ([#18616](https://github.com/astral-sh/ruff/pull/18616))
- \[`perflint`\] Fix `PERF403` panic on attribute or subscription loop variable ([#19042](https://github.com/astral-sh/ruff/pull/19042))
- \[`pydocstyle`\] Fix `D413` infinite loop for parenthesized docstring ([#18930](https://github.com/astral-sh/ruff/pull/18930))
- \[`pylint`\] Fix `PLW0108` autofix introducing a syntax error when the lambda's body contains an assignment expression ([#18678](https://github.com/astral-sh/ruff/pull/18678))
- \[`refurb`\] Fix false positive on empty tuples (`FURB168`) ([#19058](https://github.com/astral-sh/ruff/pull/19058))
- \[`ruff`\] Allow more `field` calls from `attrs` (`RUF009`) ([#19021](https://github.com/astral-sh/ruff/pull/19021))
- \[`ruff`\] Fix syntax error introduced for an empty string followed by a u-prefixed string (`UP025`) ([#18899](https://github.com/astral-sh/ruff/pull/18899))

### Rule changes

- \[`flake8-executable`\] Allow `uvx` in shebang line (`EXE003`) ([#18967](https://github.com/astral-sh/ruff/pull/18967))
- \[`pandas`\] Avoid flagging `PD002` if `pandas` is not imported ([#18963](https://github.com/astral-sh/ruff/pull/18963))
- \[`pyupgrade`\] Avoid PEP-604 unions with `typing.NamedTuple` (`UP007`, `UP045`) ([#18682](https://github.com/astral-sh/ruff/pull/18682))

### Documentation

- Document link between `import-outside-top-level (PLC0415)` and `lint.flake8-tidy-imports.banned-module-level-imports` ([#18733](https://github.com/astral-sh/ruff/pull/18733))
- Fix description of the `format.skip-magic-trailing-comma` example ([#19095](https://github.com/astral-sh/ruff/pull/19095))
- \[`airflow`\] Make `AIR302` example error out-of-the-box ([#18988](https://github.com/astral-sh/ruff/pull/18988))
- \[`airflow`\] Make `AIR312` example error out-of-the-box ([#18989](https://github.com/astral-sh/ruff/pull/18989))
- \[`flake8-annotations`\] Make `ANN401` example error out-of-the-box ([#18974](https://github.com/astral-sh/ruff/pull/18974))
- \[`flake8-async`\] Make `ASYNC100` example error out-of-the-box ([#18993](https://github.com/astral-sh/ruff/pull/18993))
- \[`flake8-async`\] Make `ASYNC105` example error out-of-the-box ([#19002](https://github.com/astral-sh/ruff/pull/19002))
- \[`flake8-async`\] Make `ASYNC110` example error out-of-the-box ([#18975](https://github.com/astral-sh/ruff/pull/18975))
- \[`flake8-async`\] Make `ASYNC210` example error out-of-the-box ([#18977](https://github.com/astral-sh/ruff/pull/18977))
- \[`flake8-async`\] Make `ASYNC220`, `ASYNC221`, and `ASYNC222` examples error out-of-the-box ([#18978](https://github.com/astral-sh/ruff/pull/18978))
- \[`flake8-async`\] Make `ASYNC251` example error out-of-the-box ([#18990](https://github.com/astral-sh/ruff/pull/18990))
- \[`flake8-bandit`\] Make `S201` example error out-of-the-box ([#19017](https://github.com/astral-sh/ruff/pull/19017))
- \[`flake8-bandit`\] Make `S604` and `S609` examples error out-of-the-box ([#19049](https://github.com/astral-sh/ruff/pull/19049))
- \[`flake8-bugbear`\] Make `B028` example error out-of-the-box ([#19054](https://github.com/astral-sh/ruff/pull/19054))
- \[`flake8-bugbear`\] Make `B911` example error out-of-the-box ([#19051](https://github.com/astral-sh/ruff/pull/19051))
- \[`flake8-datetimez`\] Make `DTZ011` example error out-of-the-box ([#19055](https://github.com/astral-sh/ruff/pull/19055))
- \[`flake8-datetimez`\] Make `DTZ901` example error out-of-the-box ([#19056](https://github.com/astral-sh/ruff/pull/19056))
- \[`flake8-pyi`\] Make `PYI032` example error out-of-the-box ([#19061](https://github.com/astral-sh/ruff/pull/19061))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI014`, `PYI015`) ([#19097](https://github.com/astral-sh/ruff/pull/19097))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI042`) ([#19101](https://github.com/astral-sh/ruff/pull/19101))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI059`) ([#19080](https://github.com/astral-sh/ruff/pull/19080))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI062`) ([#19079](https://github.com/astral-sh/ruff/pull/19079))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT023`) ([#19104](https://github.com/astral-sh/ruff/pull/19104))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT030`) ([#19105](https://github.com/astral-sh/ruff/pull/19105))
- \[`flake8-quotes`\] Make example error out-of-the-box (`Q003`) ([#19106](https://github.com/astral-sh/ruff/pull/19106))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM110`) ([#19113](https://github.com/astral-sh/ruff/pull/19113))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM113`) ([#19109](https://github.com/astral-sh/ruff/pull/19109))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM401`) ([#19110](https://github.com/astral-sh/ruff/pull/19110))
- \[`pyflakes`\] Fix backslash in docs (`F621`) ([#19098](https://github.com/astral-sh/ruff/pull/19098))
- \[`pylint`\] Fix `PLC0415` example ([#18970](https://github.com/astral-sh/ruff/pull/18970))

## 0.12.1

### Preview features
Cargo.lock (generated, 180 changed lines)
@@ -591,7 +591,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -600,7 +600,7 @@ version = "3.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -629,24 +629,12 @@ name = "console"
|
||||
version = "0.15.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
|
||||
dependencies = [
|
||||
"encode_unicode",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "console"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e09ced7ebbccb63b4c65413d821f2e00ce54c5ca4514ddc6b3c892fdbcbc69d"
|
||||
dependencies = [
|
||||
"encode_unicode",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"unicode-width 0.2.1",
|
||||
"windows-sys 0.60.2",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1031,7 +1019,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1493,14 +1481,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indicatif"
|
||||
version = "0.18.0"
|
||||
version = "0.17.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd"
|
||||
checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
|
||||
dependencies = [
|
||||
"console 0.16.0",
|
||||
"console",
|
||||
"number_prefix",
|
||||
"portable-atomic",
|
||||
"unicode-width 0.2.1",
|
||||
"unit-prefix",
|
||||
"vt100",
|
||||
"web-time",
|
||||
]
|
||||
@@ -1537,7 +1525,7 @@ version = "1.43.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371"
|
||||
dependencies = [
|
||||
"console 0.15.11",
|
||||
"console",
|
||||
"globset",
|
||||
"once_cell",
|
||||
"pest",
|
||||
@@ -1604,7 +1592,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
|
||||
dependencies = [
|
||||
"hermit-abi 0.5.1",
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1668,7 +1656,7 @@ dependencies = [
|
||||
"portable-atomic",
|
||||
"portable-atomic-util",
|
||||
"serde",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2037,11 +2025,12 @@ checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
|
||||
|
||||
[[package]]
|
||||
name = "notify"
|
||||
version = "8.1.0"
|
||||
version = "8.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3163f59cd3fa0e9ef8c32f242966a7b9994fd7378366099593e0e73077cd8c97"
|
||||
checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"filetime",
|
||||
"fsevent-sys",
|
||||
"inotify",
|
||||
"kqueue",
|
||||
@@ -2050,7 +2039,7 @@ dependencies = [
|
||||
"mio",
|
||||
"notify-types",
|
||||
"walkdir",
|
||||
"windows-sys 0.60.2",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2088,6 +2077,12 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "number_prefix"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.21.3"
|
||||
@@ -2171,7 +2166,7 @@ dependencies = [
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"smallvec",
|
||||
"windows-targets 0.52.6",
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2729,7 +2724,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.12.2"
|
||||
version = "0.12.1"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2859,7 +2854,6 @@ dependencies = [
|
||||
"path-slash",
|
||||
"ruff_annotate_snippets",
|
||||
"ruff_cache",
|
||||
"ruff_diagnostics",
|
||||
"ruff_notebook",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_parser",
|
||||
@@ -2924,7 +2918,6 @@ dependencies = [
|
||||
name = "ruff_diagnostics"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"get-size2",
|
||||
"is-macro",
|
||||
"ruff_text_size",
|
||||
"serde",
|
||||
@@ -2977,7 +2970,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.12.2"
|
||||
version = "0.12.1"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"anyhow",
|
||||
@@ -3263,7 +3256,6 @@ dependencies = [
|
||||
"lsp-server",
|
||||
"lsp-types",
|
||||
"regex",
|
||||
"ruff_db",
|
||||
"ruff_diagnostics",
|
||||
"ruff_formatter",
|
||||
"ruff_linter",
|
||||
@@ -3310,7 +3302,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.12.2"
|
||||
version = "0.12.1"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -3412,7 +3404,7 @@ dependencies = [
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3608,9 +3600,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_with"
|
||||
version = "3.14.0"
|
||||
version = "3.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5"
|
||||
checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_derive",
|
||||
@@ -3619,9 +3611,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_with_macros"
|
||||
version = "3.14.0"
|
||||
version = "3.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
|
||||
checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"proc-macro2",
|
||||
@@ -3802,7 +3794,7 @@ dependencies = [
|
||||
"getrandom 0.3.3",
|
||||
"once_cell",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4061,9 +4053,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-core"
|
||||
version = "0.1.34"
|
||||
version = "0.1.33"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
|
||||
checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"valuable",
|
||||
@@ -4082,9 +4074,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-indicatif"
|
||||
version = "0.3.11"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8c714cc8fc46db04fcfddbd274c6ef59bebb1b435155984e7c6e89c3ce66f200"
|
||||
checksum = "8201ca430e0cd893ef978226fd3516c06d9c494181c8bf4e5b32e30ed4b40aa1"
|
||||
dependencies = [
|
||||
"indicatif",
|
||||
"tracing",
|
||||
@@ -4172,7 +4164,6 @@ dependencies = [
|
||||
name = "ty_ide"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"insta",
|
||||
"ruff_db",
|
||||
"ruff_python_ast",
|
||||
@@ -4479,12 +4470,6 @@ dependencies = [
|
||||
"rand 0.8.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unit-prefix"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "323402cff2dd658f39ca17c789b502021b3f18707c91cdf22e3838e1b4023817"
|
||||
|
||||
[[package]]
|
||||
name = "unscanny"
|
||||
version = "0.1.0"
|
||||
@@ -4817,7 +4802,7 @@ version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4891,7 +4876,7 @@ version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4900,16 +4885,7 @@ version = "0.59.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.60.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
|
||||
dependencies = [
|
||||
"windows-targets 0.53.2",
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4918,30 +4894,14 @@ version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm 0.52.6",
|
||||
"windows_aarch64_msvc 0.52.6",
|
||||
"windows_i686_gnu 0.52.6",
|
||||
"windows_i686_gnullvm 0.52.6",
|
||||
"windows_i686_msvc 0.52.6",
|
||||
"windows_x86_64_gnu 0.52.6",
|
||||
"windows_x86_64_gnullvm 0.52.6",
|
||||
"windows_x86_64_msvc 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.53.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm 0.53.0",
|
||||
"windows_aarch64_msvc 0.53.0",
|
||||
"windows_i686_gnu 0.53.0",
|
||||
"windows_i686_gnullvm 0.53.0",
|
||||
"windows_i686_msvc 0.53.0",
|
||||
"windows_x86_64_gnu 0.53.0",
|
||||
"windows_x86_64_gnullvm 0.53.0",
|
||||
"windows_x86_64_msvc 0.53.0",
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_gnullvm",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4950,96 +4910,48 @@ version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
|
||||
|
||||
[[package]]
|
||||
name = "winnow"
|
||||
version = "0.7.10"
|
||||
|
||||
@@ -98,7 +98,7 @@ ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.18.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta-cmd = { version = "0.6.0" }
@@ -167,7 +167,7 @@ tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.11" }
tracing-indicatif = { version = "0.3.6" }
tracing-log = { version = "0.2.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
"env-filter",
@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.2/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.1/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.2
rev: v0.12.1
hooks:
# Run the linter.
- id: ruff-check
@@ -506,7 +506,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Streamlit](https://github.com/streamlit/streamlit)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- [Weblate](https://weblate.org/)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
- [ZenML](https://github.com/zenml-io/zenml)
- [Zulip](https://github.com/zulip/zulip)
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.2"
version = "0.12.1"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -18,15 +18,14 @@ use rustc_hash::FxHashMap;
use tempfile::NamedTempFile;

use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_db::diagnostic::Diagnostic;
use ruff_diagnostics::Fix;
use ruff_linter::message::create_lint_diagnostic;
use ruff_linter::message::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::{VERSION, warn_user};
use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_workspace::Settings;
use ruff_workspace::resolver::Resolver;

@@ -349,7 +348,7 @@ impl FileCache {
lint.messages
.iter()
.map(|msg| {
create_lint_diagnostic(
OldDiagnostic::lint(
&msg.body,
msg.suggestion.as_ref(),
msg.range,
@@ -429,11 +428,11 @@ pub(crate) struct LintCacheData {

impl LintCacheData {
pub(crate) fn from_diagnostics(
diagnostics: &[Diagnostic],
diagnostics: &[OldDiagnostic],
notebook_index: Option<NotebookIndex>,
) -> Self {
let source = if let Some(msg) = diagnostics.first() {
msg.expect_ruff_source_file().source_text().to_owned()
msg.source_file().source_text().to_owned()
} else {
String::new() // No messages, no need to keep the source!
};
@@ -447,16 +446,16 @@ impl LintCacheData {
.map(|(rule, msg)| {
// Make sure that all message use the same source file.
assert_eq!(
msg.expect_ruff_source_file(),
diagnostics.first().unwrap().expect_ruff_source_file(),
msg.source_file(),
diagnostics.first().unwrap().source_file(),
"message uses a different source file"
);
CacheMessage {
rule,
body: msg.body().to_string(),
suggestion: msg.suggestion().map(ToString::to_string),
range: msg.expect_range(),
parent: msg.parent(),
range: msg.range(),
parent: msg.parent,
fix: msg.fix().cloned(),
noqa_offset: msg.noqa_offset(),
}
@@ -609,12 +608,12 @@ mod tests {
use anyhow::Result;
use filetime::{FileTime, set_file_mtime};
use itertools::Itertools;
use ruff_linter::settings::LinterSettings;
use test_case::test_case;

use ruff_cache::CACHE_DIR_NAME;
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::message::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::settings::LinterSettings;
use ruff_linter::settings::flags;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_python_ast::{PySourceType, PythonVersion};
@@ -681,7 +680,7 @@ mod tests {
UnsafeFixes::Enabled,
)
.unwrap();
if diagnostics.inner.iter().any(Diagnostic::is_syntax_error) {
if diagnostics.inner.iter().any(OldDiagnostic::is_syntax_error) {
parse_errors.push(path.clone());
}
paths.push(path);
@@ -9,10 +9,10 @@ use ignore::Error;
use log::{debug, error, warn};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
use ruff_linter::message::diagnostic_from_violation;
use rustc_hash::FxHashMap;

use ruff_db::panic::catch_unwind;
use ruff_linter::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
@@ -129,7 +129,7 @@ pub(crate) fn check(
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();

Diagnostics::new(
vec![diagnostic_from_violation(
vec![OldDiagnostic::new(
IOError { message },
TextRange::default(),
&dummy,

@@ -10,10 +10,11 @@ use std::path::Path;
use anyhow::{Context, Result};
use colored::Colorize;
use log::{debug, warn};
use ruff_db::diagnostic::Diagnostic;
use rustc_hash::FxHashMap;

use ruff_linter::OldDiagnostic;
use ruff_linter::codes::Rule;
use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
use ruff_linter::message::{create_syntax_error_diagnostic, diagnostic_from_violation};
use ruff_linter::package::PackageRoot;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::settings::types::UnsafeFixes;
@@ -25,20 +26,19 @@ use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::Settings;
use rustc_hash::FxHashMap;

use crate::cache::{Cache, FileCacheKey, LintCacheData};

#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
pub(crate) inner: Vec<Diagnostic>,
pub(crate) inner: Vec<OldDiagnostic>,
pub(crate) fixed: FixMap,
pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
}

impl Diagnostics {
pub(crate) fn new(
diagnostics: Vec<Diagnostic>,
diagnostics: Vec<OldDiagnostic>,
notebook_indexes: FxHashMap<String, NotebookIndex>,
) -> Self {
Self {
@@ -62,7 +62,7 @@ impl Diagnostics {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let source_file = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![diagnostic_from_violation(
vec![OldDiagnostic::new(
IOError {
message: err.to_string(),
},
@@ -98,10 +98,10 @@ impl Diagnostics {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let dummy = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![create_syntax_error_diagnostic(
dummy,
vec![OldDiagnostic::syntax_error(
err,
TextRange::default(),
dummy,
)],
FxHashMap::default(),
)
@@ -9,13 +9,12 @@ use itertools::{Itertools, iterate};
use ruff_linter::linter::FixTable;
use serde::Serialize;

use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, RdjsonEmitter, SarifEmitter,
TextEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter,
SarifEmitter, SecondaryCode, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@@ -307,7 +306,8 @@ impl Printer {
.sorted_by_key(|(code, message)| (*code, message.fixable()))
.fold(
vec![],
|mut acc: Vec<((Option<&SecondaryCode>, &Diagnostic), usize)>, (code, message)| {
|mut acc: Vec<((Option<&SecondaryCode>, &OldDiagnostic), usize)>,
(code, message)| {
if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
if *prev_code == code {
*count += 1;
@@ -1067,7 +1067,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -1080,7 +1080,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -1093,7 +1093,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -498,8 +498,11 @@ fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
let diagnostics = result.len();

assert!(
diagnostics <= max_diagnostics,
"Expected <={max_diagnostics} diagnostics but got {diagnostics}"
diagnostics > 1 && diagnostics <= max_diagnostics,
"Expected between {} and {} diagnostics but got {}",
1,
max_diagnostics,
diagnostics
);
}

@@ -567,23 +570,6 @@ fn anyio(criterion: &mut Criterion) {
bench_project(&benchmark, criterion);
}

fn datetype(criterion: &mut Criterion) {
let benchmark = ProjectBenchmark::new(
RealWorldProject {
name: "DateType",
repository: "https://github.com/glyph/DateType",
commit: "57c9c93cf2468069f72945fc04bf27b64100dad8",
paths: vec![SystemPath::new("src")],
dependencies: vec![],
max_dep_date: "2025-07-04",
python_version: PythonVersion::PY313,
},
0,
);

bench_project(&benchmark, criterion);
}

criterion_group!(check_file, benchmark_cold, benchmark_incremental);
criterion_group!(
micro,
@@ -592,5 +578,5 @@ criterion_group!(
benchmark_complex_constrained_attributes_1,
benchmark_complex_constrained_attributes_2,
);
criterion_group!(project, anyio, attrs, hydra, datetype);
criterion_group!(project, anyio, attrs, hydra);
criterion_main!(check_file, micro, project);

@@ -242,7 +242,7 @@ fn large(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark);
}

#[bench(args=[&*PYDANTIC], sample_size=3, sample_count=8)]
#[bench(args=[&*PYDANTIC], sample_size=3, sample_count=3)]
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
let thread_pool = ThreadPoolBuilder::new().build().unwrap();
@@ -13,7 +13,6 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true, optional = true }
ruff_diagnostics = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true, features = ["get-size"] }
ruff_python_parser = { workspace = true }

@@ -1,11 +1,10 @@
use std::{fmt::Formatter, sync::Arc};

use render::{FileResolver, Input};
use ruff_diagnostics::Fix;
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
use ruff_source_file::{SourceCode, SourceFile};

use ruff_annotate_snippets::Level as AnnotateLevel;
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_text_size::{Ranged, TextRange};

pub use self::render::DisplayDiagnostic;
use crate::{Db, files::File};
@@ -63,37 +62,10 @@ impl Diagnostic {
|
||||
message: message.into_diagnostic_message(),
|
||||
annotations: vec![],
|
||||
subs: vec![],
|
||||
fix: None,
|
||||
parent: None,
|
||||
noqa_offset: None,
|
||||
secondary_code: None,
|
||||
});
|
||||
Diagnostic { inner }
|
||||
}
|
||||
|
||||
/// Creates a `Diagnostic` for a syntax error.
|
||||
///
|
||||
/// Unlike the more general [`Diagnostic::new`], this requires a [`Span`] and a [`TextRange`]
|
||||
/// attached to it.
|
||||
///
|
||||
/// This should _probably_ be a method on the syntax errors, but
|
||||
/// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
|
||||
/// the other way around. And since we want to do this conversion in a couple
|
||||
/// places, it makes sense to centralize it _somewhere_. So it's here for now.
|
||||
///
|
||||
/// Note that `message` is stored in the primary annotation, _not_ in the primary diagnostic
|
||||
/// message.
|
||||
pub fn syntax_error(
|
||||
span: impl Into<Span>,
|
||||
message: impl IntoDiagnosticMessage,
|
||||
range: impl Ranged,
|
||||
) -> Diagnostic {
|
||||
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
|
||||
let span = span.into().with_range(range.range());
|
||||
diag.annotate(Annotation::primary(span).message(message));
|
||||
diag
|
||||
}
|
||||
|
||||
/// Add an annotation to this diagnostic.
|
||||
///
|
||||
/// Annotations for a diagnostic are optional, but if any are added,
|
||||
@@ -254,11 +226,6 @@ impl Diagnostic {
|
||||
self.primary_annotation().map(|ann| ann.span.clone())
|
||||
}
|
||||
|
||||
/// Returns a reference to the primary span of this diagnostic.
|
||||
pub fn primary_span_ref(&self) -> Option<&Span> {
|
||||
self.primary_annotation().map(|ann| &ann.span)
|
||||
}
|
||||
|
||||
/// Returns the tags from the primary annotation of this diagnostic if it exists.
|
||||
pub fn primary_tags(&self) -> Option<&[DiagnosticTag]> {
|
||||
self.primary_annotation().map(|ann| ann.tags.as_slice())
|
||||
@@ -301,167 +268,6 @@ impl Diagnostic {
|
||||
pub fn sub_diagnostics(&self) -> &[SubDiagnostic] {
|
||||
&self.inner.subs
|
||||
}
|
||||
|
||||
/// Returns the fix for this diagnostic if it exists.
|
||||
pub fn fix(&self) -> Option<&Fix> {
|
||||
self.inner.fix.as_ref()
|
||||
}
|
||||
|
||||
/// Set the fix for this diagnostic.
|
||||
pub fn set_fix(&mut self, fix: Fix) {
|
||||
Arc::make_mut(&mut self.inner).fix = Some(fix);
|
||||
}
|
||||
|
||||
/// Remove the fix for this diagnostic.
|
||||
pub fn remove_fix(&mut self) {
|
||||
Arc::make_mut(&mut self.inner).fix = None;
|
||||
}
|
||||
|
||||
/// Returns `true` if the diagnostic contains a [`Fix`].
|
||||
pub fn fixable(&self) -> bool {
|
||||
self.fix().is_some()
|
||||
}
|
||||
|
||||
/// Returns the offset of the parent statement for this diagnostic if it exists.
|
||||
///
|
||||
/// This is primarily used for checking noqa/secondary code suppressions.
|
||||
pub fn parent(&self) -> Option<TextSize> {
|
||||
self.inner.parent
|
||||
}
|
||||
|
||||
/// Set the offset of the diagnostic's parent statement.
|
||||
pub fn set_parent(&mut self, parent: TextSize) {
|
||||
Arc::make_mut(&mut self.inner).parent = Some(parent);
|
||||
}
|
||||
|
||||
/// Returns the remapped offset for a suppression comment if it exists.
|
||||
///
|
||||
/// Like [`Diagnostic::parent`], this is used for noqa code suppression comments in Ruff.
|
||||
pub fn noqa_offset(&self) -> Option<TextSize> {
|
||||
self.inner.noqa_offset
|
||||
}
|
||||
|
||||
/// Set the remapped offset for a suppression comment.
|
||||
pub fn set_noqa_offset(&mut self, noqa_offset: TextSize) {
|
||||
Arc::make_mut(&mut self.inner).noqa_offset = Some(noqa_offset);
|
||||
}
|
||||
|
||||
/// Returns the secondary code for the diagnostic if it exists.
|
||||
///
|
||||
/// The "primary" code for the diagnostic is its lint name. Diagnostics in ty don't have
|
||||
/// secondary codes (yet), but in Ruff the noqa code is used.
|
||||
pub fn secondary_code(&self) -> Option<&SecondaryCode> {
|
||||
self.inner.secondary_code.as_ref()
|
||||
}
|
||||
|
||||
/// Set the secondary code for this diagnostic.
|
||||
pub fn set_secondary_code(&mut self, code: SecondaryCode) {
|
||||
Arc::make_mut(&mut self.inner).secondary_code = Some(code);
|
||||
}
|
||||
|
||||
/// Returns the name used to represent the diagnostic.
|
||||
pub fn name(&self) -> &'static str {
|
||||
self.id().as_str()
|
||||
}
|
||||
|
||||
/// Returns `true` if `self` is a syntax error message.
|
||||
pub fn is_syntax_error(&self) -> bool {
|
||||
self.id().is_invalid_syntax()
|
||||
}
|
||||
|
||||
/// Returns the message body to display to the user.
|
||||
pub fn body(&self) -> &str {
|
||||
self.primary_message()
|
||||
}
|
||||
|
||||
/// Returns the fix suggestion for the violation.
|
||||
pub fn suggestion(&self) -> Option<&str> {
|
||||
self.primary_annotation()?.get_message()
|
||||
}
|
||||
|
||||
/// Returns the URL for the rule documentation, if it exists.
|
||||
pub fn to_url(&self) -> Option<String> {
|
||||
if self.is_syntax_error() {
|
||||
None
|
||||
} else {
|
||||
Some(format!(
|
||||
"{}/rules/{}",
|
||||
env!("CARGO_PKG_HOMEPAGE"),
|
||||
self.name()
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the filename for the message.
|
||||
///
|
||||
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
|
||||
pub fn expect_ruff_filename(&self) -> String {
|
||||
self.expect_primary_span()
|
||||
.expect_ruff_file()
|
||||
.name()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
/// Computes the start source location for the message.
|
||||
///
|
||||
/// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
|
||||
/// span has no range.
|
||||
pub fn expect_ruff_start_location(&self) -> LineColumn {
|
||||
self.expect_primary_span()
|
||||
.expect_ruff_file()
|
||||
.to_source_code()
|
||||
.line_column(self.expect_range().start())
|
||||
}
|
||||
|
||||
/// Computes the end source location for the message.
|
||||
///
|
||||
/// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
|
||||
/// span has no range.
|
||||
pub fn expect_ruff_end_location(&self) -> LineColumn {
|
||||
self.expect_primary_span()
|
||||
.expect_ruff_file()
|
||||
.to_source_code()
|
||||
.line_column(self.expect_range().end())
|
||||
}
|
||||
|
||||
/// Returns the [`SourceFile`] which the message belongs to.
|
||||
pub fn ruff_source_file(&self) -> Option<&SourceFile> {
|
||||
self.primary_span_ref()?.as_ruff_file()
|
||||
}
|
||||
|
||||
/// Returns the [`SourceFile`] which the message belongs to.
|
||||
///
|
||||
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
|
||||
pub fn expect_ruff_source_file(&self) -> SourceFile {
|
||||
self.expect_primary_span().expect_ruff_file().clone()
|
||||
}
|
||||
|
||||
/// Returns the [`TextRange`] for the diagnostic.
|
||||
pub fn range(&self) -> Option<TextRange> {
|
||||
self.primary_span()?.range()
|
||||
}
|
||||
|
||||
/// Returns the [`TextRange`] for the diagnostic.
|
||||
///
|
||||
/// Panics if the diagnostic has no primary span or if the span has no range.
|
||||
pub fn expect_range(&self) -> TextRange {
|
||||
self.range().expect("Expected a range for the primary span")
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Diagnostic {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.partial_cmp(other).unwrap_or(std::cmp::Ordering::Equal)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Diagnostic {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(
|
||||
(self.ruff_source_file()?, self.range()?.start())
|
||||
.cmp(&(other.ruff_source_file()?, other.range()?.start())),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
@@ -471,10 +277,6 @@ struct DiagnosticInner {
message: DiagnosticMessage,
annotations: Vec<Annotation>,
subs: Vec<SubDiagnostic>,
fix: Option<Fix>,
parent: Option<TextSize>,
noqa_offset: Option<TextSize>,
secondary_code: Option<SecondaryCode>,
}

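// The `DiagnosticInner` fields above are edited through `Arc::make_mut` in the
// setters shown earlier in this file (`set_fix`, `set_parent`, `set_noqa_offset`,
// `set_secondary_code`). A minimal standalone sketch of that copy-on-write
// pattern follows; the names are hypothetical, not the crate's actual types.
use std::sync::Arc;

#[derive(Clone)]
struct InnerSketch {
    fix: Option<String>,
}

#[derive(Clone)]
struct DiagnosticSketch {
    inner: Arc<InnerSketch>,
}

impl DiagnosticSketch {
    fn set_fix(&mut self, fix: String) {
        // Clones the inner value only if another handle still shares it.
        Arc::make_mut(&mut self.inner).fix = Some(fix);
    }
}

fn main() {
    let mut a = DiagnosticSketch { inner: Arc::new(InnerSketch { fix: None }) };
    let b = a.clone(); // shares the same allocation until one side mutates
    a.set_fix("replace `==` with `is`".to_string());
    assert!(a.inner.fix.is_some());
    assert!(b.inner.fix.is_none()); // `b` kept the original, unmodified inner
}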
struct RenderingSortKey<'a> {
@@ -1095,15 +897,9 @@ impl Span {
///
/// Panics if the file is a [`UnifiedFile::Ty`] instead of a [`UnifiedFile::Ruff`].
pub fn expect_ruff_file(&self) -> &SourceFile {
self.as_ruff_file()
.expect("Expected a ruff `SourceFile`, found a ty `File`")
}

/// Returns the [`SourceFile`] attached to this [`Span`].
pub fn as_ruff_file(&self) -> Option<&SourceFile> {
match &self.file {
UnifiedFile::Ty(_) => None,
UnifiedFile::Ruff(file) => Some(file),
UnifiedFile::Ty(_) => panic!("Expected a ruff `SourceFile`, found a ty `File`"),
UnifiedFile::Ruff(file) => file,
}
}
}
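// The pair of accessors above follows a pattern used throughout this diff: an
// `as_*` method that reports the "wrong variant" case as `None`, plus an
// `expect_*` wrapper that panics with a fixed message. A short standalone
// sketch of the same shape, with illustrative types only:
enum UnifiedFileSketch {
    Ty(u32),
    Ruff(String),
}

struct SpanSketch {
    file: UnifiedFileSketch,
}

impl SpanSketch {
    fn as_ruff_file(&self) -> Option<&String> {
        match &self.file {
            UnifiedFileSketch::Ty(_) => None,
            UnifiedFileSketch::Ruff(file) => Some(file),
        }
    }

    fn expect_ruff_file(&self) -> &String {
        self.as_ruff_file()
            .expect("Expected a ruff `SourceFile`, found a ty `File`")
    }
}

fn main() {
    let span = SpanSketch { file: UnifiedFileSketch::Ruff("lib.rs".to_string()) };
    assert_eq!(span.expect_ruff_file(), "lib.rs");
    assert!(span.as_ruff_file().is_some());
}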
@@ -1351,52 +1147,41 @@ impl<T: std::fmt::Display> IntoDiagnosticMessage for T {
|
||||
}
|
||||
}
|
||||
|
||||
/// A secondary identifier for a lint diagnostic.
|
||||
/// Creates a `Diagnostic` from a parse error.
|
||||
///
|
||||
/// For Ruff rules this means the noqa code.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, get_size2::GetSize)]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Serialize), serde(transparent))]
|
||||
pub struct SecondaryCode(String);
|
||||
|
||||
impl SecondaryCode {
|
||||
pub fn new(code: String) -> Self {
|
||||
Self(code)
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
/// This should _probably_ be a method on `ruff_python_parser::ParseError`, but
|
||||
/// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
|
||||
/// the other way around. And since we want to do this conversion in a couple
|
||||
/// places, it makes sense to centralize it _somewhere_. So it's here for now.
|
||||
pub fn create_parse_diagnostic(file: File, err: &ruff_python_parser::ParseError) -> Diagnostic {
|
||||
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
|
||||
let span = Span::from(file).with_range(err.location);
|
||||
diag.annotate(Annotation::primary(span).message(&err.error));
|
||||
diag
|
||||
}
|
||||
|
||||
impl std::fmt::Display for SecondaryCode {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
/// Creates a `Diagnostic` from an unsupported syntax error.
|
||||
///
|
||||
/// See [`create_parse_diagnostic`] for more details.
|
||||
pub fn create_unsupported_syntax_diagnostic(
|
||||
file: File,
|
||||
err: &ruff_python_parser::UnsupportedSyntaxError,
|
||||
) -> Diagnostic {
|
||||
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
|
||||
let span = Span::from(file).with_range(err.range);
|
||||
diag.annotate(Annotation::primary(span).message(err.to_string()));
|
||||
diag
|
||||
}
|
||||
|
||||
impl std::ops::Deref for SecondaryCode {
|
||||
type Target = str;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<&str> for SecondaryCode {
|
||||
fn eq(&self, other: &&str) -> bool {
|
||||
self.0 == *other
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<SecondaryCode> for &str {
|
||||
fn eq(&self, other: &SecondaryCode) -> bool {
|
||||
other.eq(self)
|
||||
}
|
||||
}
|
||||
|
||||
// for `hashbrown::EntryRef`
|
||||
impl From<&SecondaryCode> for SecondaryCode {
|
||||
fn from(value: &SecondaryCode) -> Self {
|
||||
value.clone()
|
||||
}
|
||||
/// Creates a `Diagnostic` from a semantic syntax error.
///
/// See [`create_parse_diagnostic`] for more details.
pub fn create_semantic_syntax_diagnostic(
file: File,
err: &ruff_python_parser::semantic_errors::SemanticSyntaxError,
) -> Diagnostic {
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
let span = Span::from(file).with_range(err.range);
diag.annotate(Annotation::primary(span).message(err.to_string()));
diag
}

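// The `SecondaryCode` type shown earlier in this hunk is a thin newtype over
// `String` with `Deref`, `as_str`, and direct comparison against `&str`. A
// minimal standalone sketch of that newtype pattern, with an illustrative name:
use std::ops::Deref;

#[derive(Clone, Debug, PartialEq, Eq, Default)]
struct CodeSketch(String);

impl CodeSketch {
    fn new(code: impl Into<String>) -> Self {
        Self(code.into())
    }

    fn as_str(&self) -> &str {
        &self.0
    }
}

impl Deref for CodeSketch {
    type Target = str;

    fn deref(&self) -> &str {
        &self.0
    }
}

impl PartialEq<&str> for CodeSketch {
    fn eq(&self, other: &&str) -> bool {
        self.0 == *other
    }
}

fn main() {
    let code = CodeSketch::new("E501");
    assert!(code == "E501"); // direct comparison against a string literal
    assert!(code.starts_with('E')); // `str` methods come through `Deref`
    assert_eq!(code.as_str(), "E501");
}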
@@ -16,6 +16,5 @@ doctest = false
[dependencies]
ruff_text_size = { workspace = true }

get-size2 = { workspace = true }
is-macro = { workspace = true }
serde = { workspace = true, optional = true, features = [] }

@@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};

/// A text edit to be applied to a source file. Inserts, deletes, or replaces
/// content at a given location.
#[derive(Clone, Debug, PartialEq, Eq, Hash, get_size2::GetSize)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Edit {
/// The start location of the edit.

@@ -6,9 +6,7 @@ use ruff_text_size::{Ranged, TextSize};
|
||||
use crate::edit::Edit;
|
||||
|
||||
/// Indicates if a fix can be applied.
|
||||
#[derive(
|
||||
Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is, get_size2::GetSize,
|
||||
)]
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))]
|
||||
pub enum Applicability {
|
||||
@@ -32,7 +30,7 @@ pub enum Applicability {
|
||||
}
|
||||
|
||||
/// Indicates the level of isolation required to apply a fix.
|
||||
#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, get_size2::GetSize)]
|
||||
#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
pub enum IsolationLevel {
|
||||
/// The fix should be applied as long as no other fixes in the same group have been applied.
|
||||
@@ -43,7 +41,7 @@ pub enum IsolationLevel {
|
||||
}
|
||||
|
||||
/// A collection of [`Edit`] elements to be applied to a source file.
|
||||
#[derive(Debug, PartialEq, Eq, Clone, get_size2::GetSize)]
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
pub struct Fix {
|
||||
/// The [`Edit`] elements to be applied, sorted by [`Edit::start`] in ascending order.
|
||||
|
||||
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.12.2"
version = "0.12.1"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,7 +15,7 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["serde"] }
ruff_db = { workspace = true }
ruff_diagnostics = { workspace = true, features = ["serde"] }
ruff_notebook = { workspace = true }
ruff_macros = { workspace = true }

@@ -1,30 +1,29 @@
|
||||
for _ in []:
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
continue
|
||||
try:
|
||||
pass
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
try:
|
||||
pass
|
||||
except:
|
||||
continue
|
||||
try:
|
||||
pass
|
||||
except:
|
||||
continue
|
||||
|
||||
try:
|
||||
pass
|
||||
except (Exception,):
|
||||
continue
|
||||
try:
|
||||
pass
|
||||
except (Exception,):
|
||||
continue
|
||||
|
||||
try:
|
||||
pass
|
||||
except (Exception, ValueError):
|
||||
continue
|
||||
try:
|
||||
pass
|
||||
except (Exception, ValueError):
|
||||
continue
|
||||
|
||||
try:
|
||||
pass
|
||||
except ValueError:
|
||||
continue
|
||||
try:
|
||||
pass
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
try:
|
||||
pass
|
||||
except (ValueError,):
|
||||
continue
|
||||
try:
|
||||
pass
|
||||
except (ValueError,):
|
||||
continue
|
||||
|
||||
@@ -185,45 +185,38 @@ for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
collect_shop_items(shopper, section_items)
|
||||
|
||||
# Shouldn't trigger the warning when there is a return statement.
|
||||
def foo():
|
||||
for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
if _section == "greens":
|
||||
collect_shop_items(shopper, section_items)
|
||||
return
|
||||
elif _section == "frozen items":
|
||||
return section_items
|
||||
for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
if _section == "greens":
|
||||
collect_shop_items(shopper, section_items)
|
||||
return
|
||||
elif _section == "frozen items":
|
||||
return section_items
|
||||
collect_shop_items(shopper, section_items)
|
||||
|
||||
# Should trigger the warning for duplicate access, even if is a return statement after.
|
||||
def foo():
|
||||
from itertools import groupby
|
||||
for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
if _section == "greens":
|
||||
collect_shop_items(shopper, section_items)
|
||||
collect_shop_items(shopper, section_items)
|
||||
return
|
||||
for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
if _section == "greens":
|
||||
collect_shop_items(shopper, section_items)
|
||||
collect_shop_items(shopper, section_items)
|
||||
return
|
||||
|
||||
# Should trigger the warning for duplicate access, even if is a return in another branch.
|
||||
def foo():
|
||||
from itertools import groupby
|
||||
for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
if _section == "greens":
|
||||
collect_shop_items(shopper, section_items)
|
||||
return
|
||||
elif _section == "frozen items":
|
||||
collect_shop_items(shopper, section_items)
|
||||
collect_shop_items(shopper, section_items)
|
||||
for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
if _section == "greens":
|
||||
collect_shop_items(shopper, section_items)
|
||||
return
|
||||
elif _section == "frozen items":
|
||||
collect_shop_items(shopper, section_items)
|
||||
collect_shop_items(shopper, section_items)
|
||||
|
||||
# Should trigger, since only one branch has a return statement.
|
||||
def foo():
|
||||
from itertools import groupby
|
||||
for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
if _section == "greens":
|
||||
collect_shop_items(shopper, section_items)
|
||||
return
|
||||
elif _section == "frozen items":
|
||||
collect_shop_items(shopper, section_items)
|
||||
collect_shop_items(shopper, section_items) # B031
|
||||
for _section, section_items in groupby(items, key=lambda p: p[1]):
|
||||
if _section == "greens":
|
||||
collect_shop_items(shopper, section_items)
|
||||
return
|
||||
elif _section == "frozen items":
|
||||
collect_shop_items(shopper, section_items)
|
||||
collect_shop_items(shopper, section_items) # B031
|
||||
|
||||
# Let's redefine the `groupby` function to make sure we pick up the correct one.
|
||||
# NOTE: This should always be at the end of the file.
|
||||
|
||||
@@ -26,9 +26,8 @@ abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
|
||||
# Some values need to be parenthesized.
|
||||
def foo():
|
||||
abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/18036
|
||||
# The autofix for this is unsafe due to the comments inside the dictionary.
|
||||
|
||||
@@ -27,9 +27,8 @@ with contextlib.ExitStack() as stack:
|
||||
close_files = stack.pop_all().close
|
||||
|
||||
# OK
|
||||
async def foo():
|
||||
with contextlib.AsyncExitStack() as exit_stack:
|
||||
f = await exit_stack.enter_async_context(open("filename"))
|
||||
with contextlib.AsyncExitStack() as exit_stack:
|
||||
f = await exit_stack.enter_async_context(open("filename"))
|
||||
|
||||
# OK (false negative)
|
||||
with contextlib.ExitStack():
|
||||
@@ -276,10 +275,9 @@ class ExampleClassTests(TestCase):
|
||||
cls.enterClassContext(open("filename"))
|
||||
|
||||
# OK
|
||||
async def foo():
|
||||
class ExampleAsyncTests(IsolatedAsyncioTestCase):
|
||||
async def test_something(self):
|
||||
await self.enterAsyncContext(open("filename"))
|
||||
class ExampleAsyncTests(IsolatedAsyncioTestCase):
|
||||
async def test_something(self):
|
||||
await self.enterAsyncContext(open("filename"))
|
||||
|
||||
# OK
|
||||
class ExampleTests(TestCase):
|
||||
|
||||
@@ -1,99 +1,98 @@
|
||||
def foo():
|
||||
# Errors
|
||||
a = "hello"
|
||||
# Errors
|
||||
a = "hello"
|
||||
|
||||
# SIM116
|
||||
if a == "foo":
|
||||
return "bar"
|
||||
elif a == "bar":
|
||||
return "baz"
|
||||
elif a == "boo":
|
||||
return "ooh"
|
||||
# SIM116
|
||||
if a == "foo":
|
||||
return "bar"
|
||||
elif a == "bar":
|
||||
return "baz"
|
||||
elif a == "boo":
|
||||
return "ooh"
|
||||
else:
|
||||
return 42
|
||||
|
||||
# SIM116
|
||||
if a == 1:
|
||||
return (1, 2, 3)
|
||||
elif a == 2:
|
||||
return (4, 5, 6)
|
||||
elif a == 3:
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# SIM116
|
||||
if a == 1:
|
||||
return (1, 2, 3)
|
||||
elif a == 2:
|
||||
return (4, 5, 6)
|
||||
elif a == 3:
|
||||
return (7, 8, 9)
|
||||
|
||||
# SIM116
|
||||
if a == "hello 'sir'":
|
||||
return (1, 2, 3)
|
||||
elif a == 'goodbye "mam"':
|
||||
return (4, 5, 6)
|
||||
elif a == """Fairwell 'mister'""":
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# SIM116
|
||||
if a == b"one":
|
||||
return 1
|
||||
elif a == b"two":
|
||||
return 2
|
||||
elif a == b"three":
|
||||
return 3
|
||||
|
||||
# SIM116
|
||||
if a == "hello 'sir'":
|
||||
return ("hello'", 'hi"', 3)
|
||||
elif a == 'goodbye "mam"':
|
||||
return (4, 5, 6)
|
||||
elif a == """Fairwell 'mister'""":
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# OK
|
||||
if a == "foo":
|
||||
return "bar"
|
||||
elif a == "bar":
|
||||
return baz()
|
||||
elif a == "boo":
|
||||
return "ooh"
|
||||
else:
|
||||
return 42
|
||||
|
||||
# OK
|
||||
if a == b"one":
|
||||
return 1
|
||||
elif b == b"two":
|
||||
return 2
|
||||
elif a == b"three":
|
||||
return 3
|
||||
|
||||
# SIM116
|
||||
if func_name == "create":
|
||||
return "A"
|
||||
elif func_name == "modify":
|
||||
return "M"
|
||||
elif func_name == "remove":
|
||||
return "D"
|
||||
elif func_name == "move":
|
||||
return "MV"
|
||||
|
||||
# OK
|
||||
def no_return_in_else(platform):
|
||||
if platform == "linux":
|
||||
return "auditwheel repair -w {dest_dir} {wheel}"
|
||||
elif platform == "macos":
|
||||
return "delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel}"
|
||||
elif platform == "windows":
|
||||
return ""
|
||||
else:
|
||||
return 42
|
||||
|
||||
# SIM116
|
||||
if a == 1:
|
||||
return (1, 2, 3)
|
||||
elif a == 2:
|
||||
return (4, 5, 6)
|
||||
elif a == 3:
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# SIM116
|
||||
if a == 1:
|
||||
return (1, 2, 3)
|
||||
elif a == 2:
|
||||
return (4, 5, 6)
|
||||
elif a == 3:
|
||||
return (7, 8, 9)
|
||||
|
||||
# SIM116
|
||||
if a == "hello 'sir'":
|
||||
return (1, 2, 3)
|
||||
elif a == 'goodbye "mam"':
|
||||
return (4, 5, 6)
|
||||
elif a == """Fairwell 'mister'""":
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# SIM116
|
||||
if a == b"one":
|
||||
return 1
|
||||
elif a == b"two":
|
||||
return 2
|
||||
elif a == b"three":
|
||||
return 3
|
||||
|
||||
# SIM116
|
||||
if a == "hello 'sir'":
|
||||
return ("hello'", 'hi"', 3)
|
||||
elif a == 'goodbye "mam"':
|
||||
return (4, 5, 6)
|
||||
elif a == """Fairwell 'mister'""":
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# OK
|
||||
if a == "foo":
|
||||
return "bar"
|
||||
elif a == "bar":
|
||||
return baz()
|
||||
elif a == "boo":
|
||||
return "ooh"
|
||||
else:
|
||||
return 42
|
||||
|
||||
# OK
|
||||
if a == b"one":
|
||||
return 1
|
||||
elif b == b"two":
|
||||
return 2
|
||||
elif a == b"three":
|
||||
return 3
|
||||
|
||||
# SIM116
|
||||
if func_name == "create":
|
||||
return "A"
|
||||
elif func_name == "modify":
|
||||
return "M"
|
||||
elif func_name == "remove":
|
||||
return "D"
|
||||
elif func_name == "move":
|
||||
return "MV"
|
||||
|
||||
# OK
|
||||
def no_return_in_else(platform):
|
||||
if platform == "linux":
|
||||
return "auditwheel repair -w {dest_dir} {wheel}"
|
||||
elif platform == "macos":
|
||||
return "delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel}"
|
||||
elif platform == "windows":
|
||||
return ""
|
||||
else:
|
||||
msg = f"Unknown platform: {platform!r}"
|
||||
raise ValueError(msg)
|
||||
msg = f"Unknown platform: {platform!r}"
|
||||
raise ValueError(msg)
|
||||
|
||||
@@ -50,23 +50,3 @@ class Baz:
|
||||
class Nested:
|
||||
a: TypeAlias = 'Baz' # OK
|
||||
type A = 'Baz' # TC008
|
||||
|
||||
# O should have parenthesis added
|
||||
o: TypeAlias = """int
|
||||
| None"""
|
||||
type O = """int
|
||||
| None"""
|
||||
|
||||
# P, Q, and R should not have parenthesis added
|
||||
p: TypeAlias = ("""int
|
||||
| None""")
|
||||
type P = ("""int
|
||||
| None""")
|
||||
|
||||
q: TypeAlias = """(int
|
||||
| None)"""
|
||||
type Q = """(int
|
||||
| None)"""
|
||||
|
||||
r: TypeAlias = """int | None"""
|
||||
type R = """int | None"""
|
||||
@@ -1,4 +1,4 @@
|
||||
import os.path, pathlib
|
||||
import os.path
|
||||
from pathlib import Path
|
||||
from os.path import getatime
|
||||
|
||||
@@ -10,26 +10,3 @@ os.path.getatime(Path("filename"))
|
||||
getatime("filename")
|
||||
getatime(b"filename")
|
||||
getatime(Path("filename"))
|
||||
|
||||
|
||||
file = __file__
|
||||
|
||||
os.path.getatime(file)
|
||||
os.path.getatime(filename="filename")
|
||||
os.path.getatime(filename=Path("filename"))
|
||||
|
||||
os.path.getatime( # comment 1
|
||||
# comment 2
|
||||
"filename" # comment 3
|
||||
# comment 4
|
||||
, # comment 5
|
||||
# comment 6
|
||||
) # comment 7
|
||||
|
||||
os.path.getatime("file" + "name")
|
||||
|
||||
getatime(Path("filename").resolve())
|
||||
|
||||
os.path.getatime(pathlib.Path("filename"))
|
||||
|
||||
getatime(Path("dir") / "file.txt")
|
||||
|
||||
@@ -81,5 +81,4 @@ match(foo):
|
||||
# https://github.com/astral-sh/ruff/issues/12094
|
||||
pass;
|
||||
|
||||
def foo():
|
||||
yield, x
|
||||
yield, x
|
||||
|
||||
@@ -125,20 +125,3 @@ class J:
|
||||
class K:
|
||||
f: F = F()
|
||||
g: G = G()
|
||||
|
||||
|
||||
# Regression test for https://github.com/astral-sh/ruff/issues/19014
|
||||
# These are all valid field calls and should not cause diagnostics.
|
||||
@attr.define
|
||||
class TestAttrField:
|
||||
attr_field_factory: list[int] = attr.field(factory=list)
|
||||
attr_field_default: list[int] = attr.field(default=attr.Factory(list))
|
||||
attr_factory: list[int] = attr.Factory(list)
|
||||
attr_ib: list[int] = attr.ib(factory=list)
|
||||
attr_attr: list[int] = attr.attr(factory=list)
|
||||
attr_attrib: list[int] = attr.attrib(factory=list)
|
||||
|
||||
|
||||
@attr.attributes
|
||||
class TestAttrAttributes:
|
||||
x: list[int] = list() # RUF009
|
||||
|
||||
@@ -1062,6 +1062,9 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
Rule::OsPathSplitext,
|
||||
Rule::BuiltinOpen,
|
||||
Rule::PyPath,
|
||||
Rule::OsPathGetatime,
|
||||
Rule::OsPathGetmtime,
|
||||
Rule::OsPathGetctime,
|
||||
Rule::Glob,
|
||||
Rule::OsListdir,
|
||||
Rule::OsSymlink,
|
||||
@@ -1071,15 +1074,6 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
if checker.is_rule_enabled(Rule::OsPathGetsize) {
|
||||
flake8_use_pathlib::rules::os_path_getsize(checker, call);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsPathGetatime) {
|
||||
flake8_use_pathlib::rules::os_path_getatime(checker, call);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsPathGetctime) {
|
||||
flake8_use_pathlib::rules::os_path_getctime(checker, call);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::OsPathGetmtime) {
|
||||
flake8_use_pathlib::rules::os_path_getmtime(checker, call);
|
||||
}
|
||||
if checker.is_rule_enabled(Rule::PathConstructorCurrentDirectory) {
|
||||
flake8_use_pathlib::rules::path_constructor_current_directory(checker, call);
|
||||
}
|
||||
|
||||
@@ -28,7 +28,6 @@ use itertools::Itertools;
|
||||
use log::debug;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_diagnostics::{Applicability, Fix, IsolationLevel};
|
||||
use ruff_notebook::{CellOffsets, NotebookIndex};
|
||||
use ruff_python_ast::helpers::{collect_import_from_member, is_docstring_stmt, to_module_path};
|
||||
@@ -64,7 +63,6 @@ use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use crate::checkers::ast::annotation::AnnotationContext;
|
||||
use crate::docstrings::extraction::ExtractionTarget;
|
||||
use crate::importer::{ImportRequest, Importer, ResolutionError};
|
||||
use crate::message::diagnostic_from_violation;
|
||||
use crate::noqa::NoqaMapping;
|
||||
use crate::package::PackageRoot;
|
||||
use crate::preview::is_undefined_export_in_dunder_init_enabled;
|
||||
@@ -76,7 +74,7 @@ use crate::rules::pylint::rules::{AwaitOutsideAsync, LoadBeforeGlobalDeclaration
|
||||
use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
|
||||
use crate::settings::rule_table::RuleTable;
|
||||
use crate::settings::{LinterSettings, TargetVersion, flags};
|
||||
use crate::{Edit, Violation};
|
||||
use crate::{Edit, OldDiagnostic, Violation};
|
||||
use crate::{Locator, docstrings, noqa};
|
||||
|
||||
mod analyze;
|
||||
@@ -390,7 +388,7 @@ impl<'a> Checker<'a> {
|
||||
|
||||
/// Return a [`DiagnosticGuard`] for reporting a diagnostic.
|
||||
///
|
||||
/// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
|
||||
/// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
|
||||
/// before it is added to the collection in the checker on `Drop`.
|
||||
pub(crate) fn report_diagnostic<'chk, T: Violation>(
|
||||
&'chk self,
|
||||
@@ -403,7 +401,7 @@ impl<'a> Checker<'a> {
|
||||
/// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is
|
||||
/// enabled.
|
||||
///
|
||||
/// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
|
||||
/// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
|
||||
/// before it is added to the collection in the checker on `Drop`.
|
||||
pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>(
|
||||
&'chk self,
|
||||
@@ -3118,9 +3116,9 @@ pub(crate) fn check_ast(
|
||||
/// A type for collecting diagnostics in a given file.
|
||||
///
|
||||
/// [`LintContext::report_diagnostic`] can be used to obtain a [`DiagnosticGuard`], which will push
|
||||
/// a [`Violation`] to the contained [`Diagnostic`] collection on `Drop`.
|
||||
/// a [`Violation`] to the contained [`OldDiagnostic`] collection on `Drop`.
|
||||
pub(crate) struct LintContext<'a> {
|
||||
diagnostics: RefCell<Vec<Diagnostic>>,
|
||||
diagnostics: RefCell<Vec<OldDiagnostic>>,
|
||||
source_file: SourceFile,
|
||||
rules: RuleTable,
|
||||
settings: &'a LinterSettings,
|
||||
@@ -3128,7 +3126,7 @@ pub(crate) struct LintContext<'a> {
|
||||
|
||||
impl<'a> LintContext<'a> {
|
||||
/// Create a new collector with the given `source_file` and an empty collection of
|
||||
/// `Diagnostic`s.
|
||||
/// `OldDiagnostic`s.
|
||||
pub(crate) fn new(path: &Path, contents: &str, settings: &'a LinterSettings) -> Self {
|
||||
let source_file =
|
||||
SourceFileBuilder::new(path.to_string_lossy().as_ref(), contents).finish();
|
||||
@@ -3149,7 +3147,7 @@ impl<'a> LintContext<'a> {
|
||||
|
||||
/// Return a [`DiagnosticGuard`] for reporting a diagnostic.
|
||||
///
|
||||
/// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
|
||||
/// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
|
||||
/// before it is added to the collection in the context on `Drop`.
|
||||
pub(crate) fn report_diagnostic<'chk, T: Violation>(
|
||||
&'chk self,
|
||||
@@ -3158,7 +3156,7 @@ impl<'a> LintContext<'a> {
|
||||
) -> DiagnosticGuard<'chk, 'a> {
|
||||
DiagnosticGuard {
|
||||
context: self,
|
||||
diagnostic: Some(diagnostic_from_violation(kind, range, &self.source_file)),
|
||||
diagnostic: Some(OldDiagnostic::new(kind, range, &self.source_file)),
|
||||
rule: T::rule(),
|
||||
}
|
||||
}
|
||||
@@ -3166,7 +3164,7 @@ impl<'a> LintContext<'a> {
|
||||
/// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is
|
||||
/// enabled.
|
||||
///
|
||||
/// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
|
||||
/// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
|
||||
/// before it is added to the collection in the context on `Drop`.
|
||||
pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>(
|
||||
&'chk self,
|
||||
@@ -3177,7 +3175,7 @@ impl<'a> LintContext<'a> {
|
||||
if self.is_rule_enabled(rule) {
|
||||
Some(DiagnosticGuard {
|
||||
context: self,
|
||||
diagnostic: Some(diagnostic_from_violation(kind, range, &self.source_file)),
|
||||
diagnostic: Some(OldDiagnostic::new(kind, range, &self.source_file)),
|
||||
rule,
|
||||
})
|
||||
} else {
|
||||
@@ -3201,17 +3199,17 @@ impl<'a> LintContext<'a> {
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn into_parts(self) -> (Vec<Diagnostic>, SourceFile) {
|
||||
pub(crate) fn into_parts(self) -> (Vec<OldDiagnostic>, SourceFile) {
|
||||
(self.diagnostics.into_inner(), self.source_file)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn as_mut_vec(&mut self) -> &mut Vec<Diagnostic> {
|
||||
pub(crate) fn as_mut_vec(&mut self) -> &mut Vec<OldDiagnostic> {
|
||||
self.diagnostics.get_mut()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn iter(&mut self) -> impl Iterator<Item = &Diagnostic> {
|
||||
pub(crate) fn iter(&mut self) -> impl Iterator<Item = &OldDiagnostic> {
|
||||
self.diagnostics.get_mut().iter()
|
||||
}
|
||||
}
|
||||
@@ -3229,7 +3227,7 @@ pub(crate) struct DiagnosticGuard<'a, 'b> {
|
||||
/// The diagnostic that we want to report.
|
||||
///
|
||||
/// This is always `Some` until the `Drop` (or `defuse`) call.
|
||||
diagnostic: Option<Diagnostic>,
|
||||
diagnostic: Option<OldDiagnostic>,
|
||||
rule: Rule,
|
||||
}
|
||||
|
||||
@@ -3255,14 +3253,11 @@ impl DiagnosticGuard<'_, '_> {
|
||||
#[inline]
|
||||
pub(crate) fn set_fix(&mut self, fix: Fix) {
|
||||
if !self.context.rules.should_fix(self.rule) {
|
||||
self.diagnostic.as_mut().unwrap().remove_fix();
|
||||
self.fix = None;
|
||||
return;
|
||||
}
|
||||
let applicability = self.resolve_applicability(&fix);
|
||||
self.diagnostic
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.set_fix(fix.with_applicability(applicability));
|
||||
self.fix = Some(fix.with_applicability(applicability));
|
||||
}
|
||||
|
||||
/// Set the [`Fix`] used to fix the diagnostic, if the provided function returns `Ok`.
|
||||
@@ -3291,9 +3286,9 @@
}

impl std::ops::Deref for DiagnosticGuard<'_, '_> {
type Target = Diagnostic;
type Target = OldDiagnostic;

fn deref(&self) -> &Diagnostic {
fn deref(&self) -> &OldDiagnostic {
// OK because `self.diagnostic` is only `None` within `Drop`.
self.diagnostic.as_ref().unwrap()
}
@@ -3301,7 +3296,7 @@ impl std::ops::Deref for DiagnosticGuard<'_, '_> {

/// Return a mutable borrow of the diagnostic in this guard.
impl std::ops::DerefMut for DiagnosticGuard<'_, '_> {
fn deref_mut(&mut self) -> &mut Diagnostic {
fn deref_mut(&mut self) -> &mut OldDiagnostic {
// OK because `self.diagnostic` is only `None` within `Drop`.
self.diagnostic.as_mut().unwrap()
}

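// The `Deref`/`DerefMut` impls above are one half of the guard pattern described
// in the doc comments: the guard owns the diagnostic as an `Option`, hands out
// references for in-place edits, and pushes the value into the shared collection
// when dropped. A minimal standalone sketch of that shape, with illustrative
// types in place of the crate's:
use std::cell::RefCell;
use std::ops::{Deref, DerefMut};

struct ContextSketch {
    diagnostics: RefCell<Vec<String>>,
}

struct GuardSketch<'a> {
    context: &'a ContextSketch,
    // Always `Some` until `Drop` takes the value out.
    diagnostic: Option<String>,
}

impl Deref for GuardSketch<'_> {
    type Target = String;

    fn deref(&self) -> &String {
        self.diagnostic.as_ref().unwrap()
    }
}

impl DerefMut for GuardSketch<'_> {
    fn deref_mut(&mut self) -> &mut String {
        self.diagnostic.as_mut().unwrap()
    }
}

impl Drop for GuardSketch<'_> {
    fn drop(&mut self) {
        if let Some(diagnostic) = self.diagnostic.take() {
            self.context.diagnostics.borrow_mut().push(diagnostic);
        }
    }
}

fn main() {
    let context = ContextSketch { diagnostics: RefCell::new(Vec::new()) };
    {
        let mut guard = GuardSketch {
            context: &context,
            diagnostic: Some("unused-import".to_string()),
        };
        guard.push_str(" at line 3"); // edit through `DerefMut` before the drop
    } // the guard drops here and the diagnostic lands in the collection
    assert_eq!(context.diagnostics.borrow().len(), 1);
    assert_eq!(context.diagnostics.borrow()[0], "unused-import at line 3");
}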
@@ -66,9 +66,9 @@ pub(crate) fn check_noqa(
}

let noqa_offsets = diagnostic
.parent()
.parent
.into_iter()
.chain(std::iter::once(diagnostic.expect_range().start()))
.chain(std::iter::once(diagnostic.start()))
.map(|position| noqa_line_for.resolve(position))
.unique();

@@ -4,7 +4,6 @@
|
||||
/// `--select`. For pylint this is e.g. C0414 and E0118 but also C and E01.
|
||||
use std::fmt::Formatter;
|
||||
|
||||
use ruff_db::diagnostic::SecondaryCode;
|
||||
use strum_macros::EnumIter;
|
||||
|
||||
use crate::registry::Linter;
|
||||
@@ -53,18 +52,6 @@ impl PartialEq<NoqaCode> for &str {
}
}

impl PartialEq<NoqaCode> for SecondaryCode {
fn eq(&self, other: &NoqaCode) -> bool {
&self.as_str() == other
}
}

impl PartialEq<SecondaryCode> for NoqaCode {
fn eq(&self, other: &SecondaryCode) -> bool {
other.eq(self)
}
}

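// The pair of impls above keeps cross-type equality symmetric: one direction
// does the real comparison and the mirror impl just delegates to it. A short
// standalone sketch of the same idea with two illustrative code types:
struct NoqaCodeSketch(&'static str, &'static str); // prefix and number, e.g. ("E", "501")

struct SecondaryCodeSketch(String);

impl PartialEq<NoqaCodeSketch> for SecondaryCodeSketch {
    fn eq(&self, other: &NoqaCodeSketch) -> bool {
        self.0.len() == other.0.len() + other.1.len()
            && self.0.starts_with(other.0)
            && self.0.ends_with(other.1)
    }
}

impl PartialEq<SecondaryCodeSketch> for NoqaCodeSketch {
    fn eq(&self, other: &SecondaryCodeSketch) -> bool {
        // Delegate so `a == b` and `b == a` always agree.
        other.eq(self)
    }
}

fn main() {
    let noqa = NoqaCodeSketch("E", "501");
    let secondary = SecondaryCodeSketch("E501".to_string());
    assert!(secondary == noqa);
    assert!(noqa == secondary);
}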
impl serde::Serialize for NoqaCode {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where

@@ -618,8 +618,7 @@ mod tests {
|
||||
use crate::fix::edits::{
|
||||
add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
|
||||
};
|
||||
use crate::message::diagnostic_from_violation;
|
||||
use crate::{Edit, Fix, Locator};
|
||||
use crate::{Edit, Fix, Locator, OldDiagnostic};
|
||||
|
||||
/// Parse the given source using [`Mode::Module`] and return the first statement.
|
||||
fn parse_first_stmt(source: &str) -> Result<Stmt> {
|
||||
@@ -750,12 +749,12 @@ x = 1 \
|
||||
let diag = {
|
||||
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
|
||||
let mut iter = edits.into_iter();
|
||||
let mut diagnostic = diagnostic_from_violation(
|
||||
let mut diagnostic = OldDiagnostic::new(
|
||||
MissingNewlineAtEndOfFile, // The choice of rule here is arbitrary.
|
||||
TextRange::default(),
|
||||
&SourceFileBuilder::new("<filename>", "<code>").finish(),
|
||||
);
|
||||
diagnostic.set_fix(Fix::safe_edits(
|
||||
diagnostic.fix = Some(Fix::safe_edits(
|
||||
iter.next().ok_or(anyhow!("expected edits nonempty"))?,
|
||||
iter,
|
||||
));
|
||||
|
||||
@@ -3,12 +3,12 @@ use std::collections::BTreeSet;
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_diagnostics::{IsolationLevel, SourceMap};
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::Locator;
|
||||
use crate::linter::FixTable;
|
||||
use crate::message::OldDiagnostic;
|
||||
use crate::registry::Rule;
|
||||
use crate::settings::types::UnsafeFixes;
|
||||
use crate::{Edit, Fix};
|
||||
@@ -28,7 +28,7 @@ pub(crate) struct FixResult {
|
||||
|
||||
/// Fix errors in a file, and write the fixed source code to disk.
|
||||
pub(crate) fn fix_file(
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
locator: &Locator,
|
||||
unsafe_fixes: UnsafeFixes,
|
||||
) -> Option<FixResult> {
|
||||
@@ -52,7 +52,7 @@ pub(crate) fn fix_file(
|
||||
|
||||
/// Apply a series of fixes.
|
||||
fn apply_fixes<'a>(
|
||||
diagnostics: impl Iterator<Item = &'a Diagnostic>,
|
||||
diagnostics: impl Iterator<Item = &'a OldDiagnostic>,
|
||||
locator: &'a Locator<'a>,
|
||||
) -> FixResult {
|
||||
let mut output = String::with_capacity(locator.len());
|
||||
@@ -173,26 +173,25 @@ mod tests {
|
||||
use ruff_text_size::{Ranged, TextSize};
|
||||
|
||||
use crate::Locator;
|
||||
use crate::OldDiagnostic;
|
||||
use crate::fix::{FixResult, apply_fixes};
|
||||
use crate::message::diagnostic_from_violation;
|
||||
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
|
||||
use crate::{Edit, Fix};
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
|
||||
fn create_diagnostics(
|
||||
filename: &str,
|
||||
source: &str,
|
||||
edit: impl IntoIterator<Item = Edit>,
|
||||
) -> Vec<Diagnostic> {
|
||||
) -> Vec<OldDiagnostic> {
|
||||
edit.into_iter()
|
||||
.map(|edit| {
|
||||
// The choice of rule here is arbitrary.
|
||||
let mut diagnostic = diagnostic_from_violation(
|
||||
let mut diagnostic = OldDiagnostic::new(
|
||||
MissingNewlineAtEndOfFile,
|
||||
edit.range(),
|
||||
&SourceFileBuilder::new(filename, source).finish(),
|
||||
);
|
||||
diagnostic.set_fix(Fix::safe_edit(edit));
|
||||
diagnostic.fix = Some(Fix::safe_edit(edit));
|
||||
diagnostic
|
||||
})
|
||||
.collect()
|
||||
|
||||
@@ -14,6 +14,7 @@ pub use rule_selector::RuleSelector;
|
||||
pub use rule_selector::clap_completion::RuleSelectorParser;
|
||||
pub use rules::pycodestyle::rules::IOError;
|
||||
|
||||
pub use message::OldDiagnostic;
|
||||
pub(crate) use ruff_diagnostics::{Applicability, Edit, Fix};
|
||||
pub use violation::{AlwaysFixableViolation, FixAvailability, Violation, ViolationMetadata};
|
||||
|
||||
|
||||
@@ -7,14 +7,15 @@ use itertools::Itertools;
|
||||
use ruff_python_parser::semantic_errors::SemanticSyntaxError;
|
||||
use rustc_hash::FxBuildHasher;
|
||||
|
||||
use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
|
||||
use ruff_notebook::Notebook;
|
||||
use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::{ParseError, ParseOptions, Parsed, UnsupportedSyntaxError};
|
||||
use ruff_source_file::SourceFile;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::OldDiagnostic;
|
||||
use crate::checkers::ast::{LintContext, check_ast};
|
||||
use crate::checkers::filesystem::check_file_path;
|
||||
use crate::checkers::imports::check_imports;
|
||||
@@ -24,7 +25,7 @@ use crate::checkers::tokens::check_tokens;
|
||||
use crate::directives::Directives;
|
||||
use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
|
||||
use crate::fix::{FixResult, fix_file};
|
||||
use crate::message::create_syntax_error_diagnostic;
|
||||
use crate::message::SecondaryCode;
|
||||
use crate::noqa::add_noqa;
|
||||
use crate::package::PackageRoot;
|
||||
use crate::preview::is_py314_support_enabled;
|
||||
@@ -40,7 +41,7 @@ pub(crate) mod float;
|
||||
|
||||
pub struct LinterResult {
|
||||
/// A collection of diagnostic messages generated by the linter.
|
||||
pub diagnostics: Vec<Diagnostic>,
|
||||
pub diagnostics: Vec<OldDiagnostic>,
|
||||
/// Flag indicating that the parsed source code does not contain any
|
||||
/// [`ParseError`]s
|
||||
has_valid_syntax: bool,
|
||||
@@ -144,7 +145,7 @@ pub struct FixerResult<'a> {
|
||||
pub fixed: FixTable,
|
||||
}
|
||||
|
||||
/// Generate [`Diagnostic`]s from the source code contents at the given `Path`.
|
||||
/// Generate [`OldDiagnostic`]s from the source code contents at the given `Path`.
|
||||
#[expect(clippy::too_many_arguments)]
|
||||
pub fn check_path(
|
||||
path: &Path,
|
||||
@@ -159,7 +160,7 @@ pub fn check_path(
|
||||
source_type: PySourceType,
|
||||
parsed: &Parsed<ModModule>,
|
||||
target_version: TargetVersion,
|
||||
) -> Vec<Diagnostic> {
|
||||
) -> Vec<OldDiagnostic> {
|
||||
// Aggregate all diagnostics.
|
||||
let mut context = LintContext::new(path, locator.contents(), settings);
|
||||
|
||||
@@ -381,7 +382,7 @@ pub fn check_path(
|
||||
if !parsed.has_valid_syntax() {
|
||||
// Avoid fixing in case the source code contains syntax errors.
|
||||
for diagnostic in &mut diagnostics {
|
||||
diagnostic.remove_fix();
|
||||
diagnostic.fix = None;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -392,6 +393,7 @@ pub fn check_path(
|
||||
parsed.errors(),
|
||||
syntax_errors,
|
||||
&semantic_syntax_errors,
|
||||
locator,
|
||||
directives,
|
||||
&source_file,
|
||||
)
|
||||
@@ -457,7 +459,7 @@ pub fn add_noqa_to_path(
|
||||
)
|
||||
}
|
||||
|
||||
/// Generate a [`Diagnostic`] for each diagnostic triggered by the given source code.
|
||||
/// Generate an [`OldDiagnostic`] for each diagnostic triggered by the given source code.
|
||||
pub fn lint_only(
|
||||
path: &Path,
|
||||
package: Option<PackageRoot<'_>>,
|
||||
@@ -514,7 +516,7 @@ pub fn lint_only(
|
||||
|
||||
LinterResult {
|
||||
has_valid_syntax: parsed.has_valid_syntax(),
|
||||
has_no_syntax_errors: !diagnostics.iter().any(Diagnostic::is_syntax_error),
|
||||
has_no_syntax_errors: !diagnostics.iter().any(OldDiagnostic::is_syntax_error),
|
||||
diagnostics,
|
||||
}
|
||||
}
|
||||
@@ -523,32 +525,30 @@ pub fn lint_only(
|
||||
///
|
||||
/// Also use `directives` to attach noqa offsets to lint diagnostics.
|
||||
fn diagnostics_to_messages(
|
||||
diagnostics: Vec<Diagnostic>,
|
||||
diagnostics: Vec<OldDiagnostic>,
|
||||
parse_errors: &[ParseError],
|
||||
unsupported_syntax_errors: &[UnsupportedSyntaxError],
|
||||
semantic_syntax_errors: &[SemanticSyntaxError],
|
||||
locator: &Locator,
|
||||
directives: &Directives,
|
||||
source_file: &SourceFile,
|
||||
) -> Vec<Diagnostic> {
|
||||
) -> Vec<OldDiagnostic> {
|
||||
parse_errors
|
||||
.iter()
|
||||
.map(|parse_error| {
|
||||
create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
|
||||
OldDiagnostic::from_parse_error(parse_error, locator, source_file.clone())
|
||||
})
|
||||
.chain(unsupported_syntax_errors.iter().map(|syntax_error| {
|
||||
create_syntax_error_diagnostic(source_file.clone(), syntax_error, syntax_error)
|
||||
OldDiagnostic::from_unsupported_syntax_error(syntax_error, source_file.clone())
|
||||
}))
|
||||
.chain(
|
||||
semantic_syntax_errors
|
||||
.iter()
|
||||
.map(|error| create_syntax_error_diagnostic(source_file.clone(), error, error)),
|
||||
.map(|error| OldDiagnostic::from_semantic_syntax_error(error, source_file.clone())),
|
||||
)
|
||||
.chain(diagnostics.into_iter().map(|mut diagnostic| {
|
||||
let noqa_offset = directives
|
||||
.noqa_line_for
|
||||
.resolve(diagnostic.expect_range().start());
|
||||
diagnostic.set_noqa_offset(noqa_offset);
|
||||
diagnostic
|
||||
.chain(diagnostics.into_iter().map(|diagnostic| {
|
||||
let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
|
||||
diagnostic.with_noqa_offset(noqa_offset)
|
||||
}))
|
||||
.collect()
|
||||
}
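// The function above folds several error sources (parse errors, unsupported
// syntax errors, semantic errors, lint results) into one vector of the same
// message type, attaching the noqa offset only to the lint results. A minimal
// standalone sketch of that collection step, with illustrative types only:
struct MessageSketch {
    text: String,
    noqa_offset: Option<usize>,
}

fn collect_messages(
    parse_errors: &[&str],
    syntax_errors: &[&str],
    lints: Vec<(String, usize)>,
) -> Vec<MessageSketch> {
    parse_errors
        .iter()
        .map(|error| MessageSketch {
            text: format!("parse error: {error}"),
            noqa_offset: None,
        })
        .chain(syntax_errors.iter().map(|error| MessageSketch {
            text: format!("syntax error: {error}"),
            noqa_offset: None,
        }))
        .chain(lints.into_iter().map(|(text, offset)| MessageSketch {
            text,
            noqa_offset: Some(offset),
        }))
        .collect()
}

fn main() {
    let messages = collect_messages(
        &["unexpected indent"],
        &[],
        vec![("unused-import".to_string(), 12)],
    );
    assert_eq!(messages.len(), 2);
    assert_eq!(messages[1].noqa_offset, Some(12));
    assert_eq!(messages[0].text, "parse error: unexpected indent");
}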
|
||||
@@ -629,7 +629,7 @@ pub fn lint_fix<'a>(
|
||||
|
||||
if iterations == 0 {
|
||||
has_valid_syntax = parsed.has_valid_syntax();
|
||||
has_no_syntax_errors = !diagnostics.iter().any(Diagnostic::is_syntax_error);
|
||||
has_no_syntax_errors = !diagnostics.iter().any(OldDiagnostic::is_syntax_error);
|
||||
} else {
|
||||
// If the source code had no syntax errors on the first pass, but
|
||||
// does on a subsequent pass, then we've introduced a
|
||||
@@ -687,8 +687,8 @@ where
|
||||
}
|
||||
|
||||
#[expect(clippy::print_stderr)]
|
||||
fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[Diagnostic]) {
|
||||
let codes = collect_rule_codes(diagnostics.iter().filter_map(Diagnostic::secondary_code));
|
||||
fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[OldDiagnostic]) {
|
||||
let codes = collect_rule_codes(diagnostics.iter().filter_map(OldDiagnostic::secondary_code));
|
||||
if cfg!(debug_assertions) {
|
||||
eprintln!(
|
||||
"{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
|
||||
@@ -806,12 +806,13 @@ mod tests {
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::ParseOptions;
|
||||
use ruff_python_trivia::textwrap::dedent;
|
||||
use ruff_text_size::Ranged;
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_notebook::{Notebook, NotebookError};
|
||||
|
||||
use crate::linter::check_path;
|
||||
use crate::message::OldDiagnostic;
|
||||
use crate::registry::Rule;
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::source_kind::SourceKind;
|
||||
@@ -969,7 +970,7 @@ mod tests {
|
||||
|
||||
/// Wrapper around `test_contents_syntax_errors` for testing a snippet of code instead of a
|
||||
/// file.
|
||||
fn test_snippet_syntax_errors(contents: &str, settings: &LinterSettings) -> Vec<Diagnostic> {
|
||||
fn test_snippet_syntax_errors(contents: &str, settings: &LinterSettings) -> Vec<OldDiagnostic> {
|
||||
let contents = dedent(contents);
|
||||
test_contents_syntax_errors(
|
||||
&SourceKind::Python(contents.to_string()),
|
||||
@@ -984,7 +985,7 @@ mod tests {
|
||||
source_kind: &SourceKind,
|
||||
path: &Path,
|
||||
settings: &LinterSettings,
|
||||
) -> Vec<Diagnostic> {
|
||||
) -> Vec<OldDiagnostic> {
|
||||
let source_type = PySourceType::from(path);
|
||||
let target_version = settings.resolve_target_version(path);
|
||||
let options =
|
||||
@@ -1015,7 +1016,7 @@ mod tests {
|
||||
&parsed,
|
||||
target_version,
|
||||
);
|
||||
diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
|
||||
diagnostics.sort_by_key(Ranged::start);
|
||||
diagnostics
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::fmt::{Display, Formatter, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{LazyLock, Mutex};
|
||||
|
||||
@@ -6,7 +6,7 @@ use anyhow::Result;
|
||||
use colored::Colorize;
|
||||
use fern;
|
||||
use log::Level;
|
||||
use ruff_python_parser::ParseError;
|
||||
use ruff_python_parser::{ParseError, ParseErrorType};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use ruff_source_file::{LineColumn, LineIndex, OneIndexed, SourceCode};
|
||||
@@ -248,7 +248,7 @@ impl Display for DisplayParseError {
|
||||
row = location.line,
|
||||
column = location.column,
|
||||
colon = ":".cyan(),
|
||||
inner = self.error.error
|
||||
inner = &DisplayParseErrorType(&self.error.error)
|
||||
)
|
||||
}
|
||||
ErrorLocation::Cell(cell, location) => {
|
||||
@@ -259,13 +259,27 @@ impl Display for DisplayParseError {
|
||||
row = location.line,
|
||||
column = location.column,
|
||||
colon = ":".cyan(),
|
||||
inner = self.error.error
|
||||
inner = &DisplayParseErrorType(&self.error.error)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct DisplayParseErrorType<'a>(&'a ParseErrorType);
|
||||
|
||||
impl<'a> DisplayParseErrorType<'a> {
|
||||
pub(crate) fn new(error: &'a ParseErrorType) -> Self {
|
||||
Self(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for DisplayParseErrorType<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", TruncateAtNewline(&self.0))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ErrorLocation {
|
||||
/// The error occurred in a Python file.
|
||||
@@ -274,6 +288,44 @@ enum ErrorLocation {
Cell(OneIndexed, LineColumn),
}

/// Truncates the display text before the first newline character to avoid line breaks.
struct TruncateAtNewline<'a>(&'a dyn Display);

impl Display for TruncateAtNewline<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
struct TruncateAdapter<'a> {
inner: &'a mut dyn Write,
after_new_line: bool,
}

impl Write for TruncateAdapter<'_> {
fn write_str(&mut self, s: &str) -> std::fmt::Result {
if self.after_new_line {
Ok(())
} else {
if let Some(end) = s.find(['\n', '\r']) {
self.inner.write_str(&s[..end])?;
self.inner.write_str("\u{23ce}...")?;
self.after_new_line = true;
Ok(())
} else {
self.inner.write_str(s)
}
}
}
}

write!(
TruncateAdapter {
inner: f,
after_new_line: false,
},
"{}",
self.0
)
}
}

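// `TruncateAtNewline` above forwards `Display` output but stops at the first
// newline, appending a "⏎..." marker so multi-line parse errors stay on one log
// line. A condensed standalone restatement of the idea (buffering instead of a
// streaming `Write` adapter), not the crate's exact implementation:
use std::fmt::{Display, Formatter, Write};

struct TruncateAtNewlineSketch<'a>(&'a dyn Display);

impl Display for TruncateAtNewlineSketch<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Render the inner value once, then keep only its first line.
        let mut rendered = String::new();
        write!(rendered, "{}", self.0)?;
        match rendered.find(['\n', '\r']) {
            Some(end) => write!(f, "{}\u{23ce}...", &rendered[..end]),
            None => f.write_str(&rendered),
        }
    }
}

fn main() {
    let error = "unexpected token\nexpected one of: `)`, `,`";
    let shown = format!("{}", TruncateAtNewlineSketch(&error));
    assert_eq!(shown, "unexpected token\u{23ce}...");
}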
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::logging::LogLevel;
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
use std::io::Write;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::LineColumn;
|
||||
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
|
||||
|
||||
/// Generate error logging commands for Azure Pipelines format.
|
||||
/// See [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning)
|
||||
@@ -14,23 +13,23 @@ impl Emitter for AzureEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for diagnostic in diagnostics {
|
||||
let filename = diagnostic.expect_ruff_filename();
|
||||
let location = if context.is_notebook(&filename) {
|
||||
let location = if context.is_notebook(&diagnostic.filename()) {
|
||||
// We can't give a reasonable location for the structured formats,
|
||||
// so we show one that's clearly a fallback
|
||||
LineColumn::default()
|
||||
} else {
|
||||
diagnostic.expect_ruff_start_location()
|
||||
diagnostic.compute_start_location()
|
||||
};
|
||||
|
||||
writeln!(
|
||||
writer,
|
||||
"##vso[task.logissue type=error\
|
||||
;sourcepath={filename};linenumber={line};columnnumber={col};{code}]{body}",
|
||||
filename = diagnostic.filename(),
|
||||
line = location.line,
|
||||
col = location.column,
|
||||
code = diagnostic
|
||||
|
||||
@@ -2,12 +2,13 @@ use std::fmt::{Display, Formatter};
|
||||
use std::num::NonZeroUsize;
|
||||
|
||||
use colored::{Color, ColoredString, Colorize, Styles};
|
||||
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use similar::{ChangeTag, TextDiff};
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::{OneIndexed, SourceFile};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
use crate::message::OldDiagnostic;
|
||||
use crate::text_helpers::ShowNonprinting;
|
||||
use crate::{Applicability, Fix};
|
||||
|
||||
@@ -25,9 +26,9 @@ pub(super) struct Diff<'a> {
|
||||
}
|
||||
|
||||
impl<'a> Diff<'a> {
|
||||
pub(crate) fn from_message(message: &'a Diagnostic) -> Option<Diff<'a>> {
|
||||
pub(crate) fn from_message(message: &'a OldDiagnostic) -> Option<Diff<'a>> {
|
||||
message.fix().map(|fix| Diff {
|
||||
source_code: message.expect_ruff_source_file(),
|
||||
source_code: message.source_file(),
|
||||
fix,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use std::io::Write;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::LineColumn;
|
||||
|
||||
use crate::fs::relativize_path;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
|
||||
|
||||
/// Generate error workflow command in GitHub Actions format.
|
||||
/// See: [GitHub documentation](https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message)
|
||||
@@ -15,13 +14,12 @@ impl Emitter for GithubEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for diagnostic in diagnostics {
|
||||
let source_location = diagnostic.expect_ruff_start_location();
|
||||
let filename = diagnostic.expect_ruff_filename();
|
||||
let location = if context.is_notebook(&filename) {
|
||||
let source_location = diagnostic.compute_start_location();
|
||||
let location = if context.is_notebook(&diagnostic.filename()) {
|
||||
// We can't give a reasonable location for the structured formats,
|
||||
// so we show one that's clearly a fallback
|
||||
LineColumn::default()
|
||||
@@ -29,7 +27,7 @@ impl Emitter for GithubEmitter {
|
||||
source_location
|
||||
};
|
||||
|
||||
let end_location = diagnostic.expect_ruff_end_location();
|
||||
let end_location = diagnostic.compute_end_location();
|
||||
|
||||
write!(
|
||||
writer,
|
||||
@@ -37,7 +35,7 @@ impl Emitter for GithubEmitter {
|
||||
code = diagnostic
|
||||
.secondary_code()
|
||||
.map_or_else(String::new, |code| format!(" ({code})")),
|
||||
file = filename,
|
||||
file = diagnostic.filename(),
|
||||
row = source_location.line,
|
||||
column = source_location.column,
|
||||
end_row = end_location.line,
|
||||
@@ -47,7 +45,7 @@ impl Emitter for GithubEmitter {
|
||||
write!(
|
||||
writer,
|
||||
"{path}:{row}:{column}:",
|
||||
path = relativize_path(&filename),
|
||||
path = relativize_path(&*diagnostic.filename()),
|
||||
row = location.line,
|
||||
column = location.column,
|
||||
)?;
|
||||
|
||||
@@ -7,10 +7,8 @@ use serde::ser::SerializeSeq;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde_json::json;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
|
||||
use crate::fs::{relativize_path, relativize_path_to};
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
|
||||
|
||||
/// Generate JSON with violations in GitLab CI format
|
||||
// https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool
|
||||
@@ -30,7 +28,7 @@ impl Emitter for GitlabEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
serde_json::to_writer_pretty(
|
||||
@@ -47,7 +45,7 @@ impl Emitter for GitlabEmitter {
|
||||
}
|
||||
|
||||
struct SerializedMessages<'a> {
|
||||
diagnostics: &'a [Diagnostic],
|
||||
diagnostics: &'a [OldDiagnostic],
|
||||
context: &'a EmitterContext<'a>,
|
||||
project_dir: Option<&'a str>,
|
||||
}
|
||||
@@ -61,11 +59,10 @@ impl Serialize for SerializedMessages<'_> {
|
||||
let mut fingerprints = HashSet::<u64>::with_capacity(self.diagnostics.len());
|
||||
|
||||
for diagnostic in self.diagnostics {
|
||||
let start_location = diagnostic.expect_ruff_start_location();
|
||||
let end_location = diagnostic.expect_ruff_end_location();
|
||||
let start_location = diagnostic.compute_start_location();
|
||||
let end_location = diagnostic.compute_end_location();
|
||||
|
||||
let filename = diagnostic.expect_ruff_filename();
|
||||
let lines = if self.context.is_notebook(&filename) {
|
||||
let lines = if self.context.is_notebook(&diagnostic.filename()) {
|
||||
// We can't give a reasonable location for the structured formats,
|
||||
// so we show one that's clearly a fallback
|
||||
json!({
|
||||
@@ -80,8 +77,8 @@ impl Serialize for SerializedMessages<'_> {
|
||||
};
|
||||
|
||||
let path = self.project_dir.as_ref().map_or_else(
|
||||
|| relativize_path(&filename),
|
||||
|project_dir| relativize_path_to(&filename, project_dir),
|
||||
|| relativize_path(&*diagnostic.filename()),
|
||||
|project_dir| relativize_path_to(&*diagnostic.filename(), project_dir),
|
||||
);
|
||||
|
||||
let mut message_fingerprint = fingerprint(diagnostic, &path, 0);
|
||||
@@ -123,7 +120,7 @@ impl Serialize for SerializedMessages<'_> {
|
||||
}
|
||||
|
||||
/// Generate a unique fingerprint to identify a violation.
|
||||
fn fingerprint(message: &Diagnostic, project_path: &str, salt: u64) -> u64 {
|
||||
fn fingerprint(message: &OldDiagnostic, project_path: &str, salt: u64) -> u64 {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
|
||||
salt.hash(&mut hasher);
|
||||
|
||||
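For context on the fingerprint logic that this hunk leaves untouched apart from the message type: the emitter keeps a HashSet<u64> of fingerprints and passes a salt into the hash. A rough sketch of that pattern follows; the exact fields hashed and the collision handling are assumptions for illustration, not taken verbatim from ruff.

    use std::collections::HashSet;
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Hypothetical stand-in for the real `fingerprint` helper: hash a salt plus a few
    // stable identifying fields of the violation.
    fn fingerprint(code: &str, body: &str, project_path: &str, salt: u64) -> u64 {
        let mut hasher = DefaultHasher::new();
        salt.hash(&mut hasher);
        code.hash(&mut hasher);
        body.hash(&mut hasher);
        project_path.hash(&mut hasher);
        hasher.finish()
    }

    // Re-salt until the fingerprint is unique within one report, which is what the
    // `fingerprints: HashSet<u64>` in the emitter appears to be for.
    fn unique_fingerprint(code: &str, body: &str, path: &str, seen: &mut HashSet<u64>) -> u64 {
        let mut salt = 0;
        loop {
            let candidate = fingerprint(code, body, path, salt);
            if seen.insert(candidate) {
                return candidate;
            }
            salt += 1;
        }
    }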
@@ -4,14 +4,15 @@ use std::num::NonZeroUsize;
|
||||
|
||||
use colored::Colorize;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::OneIndexed;
|
||||
|
||||
use crate::fs::relativize_path;
|
||||
use crate::message::diff::calculate_print_width;
|
||||
use crate::message::text::{MessageCodeFrame, RuleCodeAndBody};
|
||||
use crate::message::{Emitter, EmitterContext, MessageWithLocation, group_diagnostics_by_filename};
|
||||
use crate::message::{
|
||||
Emitter, EmitterContext, MessageWithLocation, OldDiagnostic, group_diagnostics_by_filename,
|
||||
};
|
||||
use crate::settings::types::UnsafeFixes;
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -45,7 +46,7 @@ impl Emitter for GroupedEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for (filename, messages) in group_diagnostics_by_filename(diagnostics) {
|
||||
@@ -72,7 +73,7 @@ impl Emitter for GroupedEmitter {
|
||||
writer,
|
||||
"{}",
|
||||
DisplayGroupedMessage {
|
||||
notebook_index: context.notebook_index(&message.expect_ruff_filename()),
|
||||
notebook_index: context.notebook_index(&message.filename()),
|
||||
message,
|
||||
show_fix_status: self.show_fix_status,
|
||||
unsafe_fixes: self.unsafe_fixes,
|
||||
|
||||
@@ -4,13 +4,12 @@ use serde::ser::SerializeSeq;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed, SourceCode};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::Edit;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct JsonEmitter;
|
||||
@@ -19,7 +18,7 @@ impl Emitter for JsonEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
serde_json::to_writer_pretty(
|
||||
@@ -35,7 +34,7 @@ impl Emitter for JsonEmitter {
|
||||
}
|
||||
|
||||
struct ExpandedMessages<'a> {
|
||||
diagnostics: &'a [Diagnostic],
|
||||
diagnostics: &'a [OldDiagnostic],
|
||||
context: &'a EmitterContext<'a>,
|
||||
}
|
||||
|
||||
@@ -55,11 +54,10 @@ impl Serialize for ExpandedMessages<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn message_to_json_value(message: &Diagnostic, context: &EmitterContext) -> Value {
|
||||
let source_file = message.expect_ruff_source_file();
|
||||
pub(crate) fn message_to_json_value(message: &OldDiagnostic, context: &EmitterContext) -> Value {
|
||||
let source_file = message.source_file();
|
||||
let source_code = source_file.to_source_code();
|
||||
let filename = message.expect_ruff_filename();
|
||||
let notebook_index = context.notebook_index(&filename);
|
||||
let notebook_index = context.notebook_index(&message.filename());
|
||||
|
||||
let fix = message.fix().map(|fix| {
|
||||
json!({
|
||||
@@ -69,8 +67,8 @@ pub(crate) fn message_to_json_value(message: &Diagnostic, context: &EmitterConte
|
||||
})
|
||||
});
|
||||
|
||||
let mut start_location = source_code.line_column(message.expect_range().start());
|
||||
let mut end_location = source_code.line_column(message.expect_range().end());
|
||||
let mut start_location = source_code.line_column(message.start());
|
||||
let mut end_location = source_code.line_column(message.end());
|
||||
let mut noqa_location = message
|
||||
.noqa_offset()
|
||||
.map(|offset| source_code.line_column(offset));
|
||||
@@ -96,7 +94,7 @@ pub(crate) fn message_to_json_value(message: &Diagnostic, context: &EmitterConte
|
||||
"cell": notebook_cell_index,
|
||||
"location": location_to_json(start_location),
|
||||
"end_location": location_to_json(end_location),
|
||||
"filename": filename,
|
||||
"filename": message.filename(),
|
||||
"noqa_row": noqa_location.map(|location| location.line)
|
||||
})
|
||||
}
|
||||
|
||||
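The JSON emitter change above boils down to swapping the `expect_ruff_*` helpers for the new accessors. A minimal sketch of that accessor flow, assuming the `OldDiagnostic` type introduced later in this diff (the `location_fields` helper and its field names are illustrative, not part of the emitter):

    use ruff_text_size::Ranged;
    use serde_json::{Value, json};

    fn location_fields(message: &OldDiagnostic) -> Value {
        // Positions now come from `source_file()` plus the `Ranged` impl instead of
        // the old `expect_ruff_source_file()` / `expect_range()` pair.
        let source_file = message.source_file();
        let source_code = source_file.to_source_code();

        let start = source_code.line_column(message.start());
        let end = source_code.line_column(message.end());

        json!({
            "filename": message.filename(),
            "location": format!("{}:{}", start.line, start.column),
            "end_location": format!("{}:{}", end.line, end.column),
        })
    }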
@@ -1,9 +1,7 @@
|
||||
use std::io::Write;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
|
||||
use crate::message::json::message_to_json_value;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct JsonLinesEmitter;
|
||||
@@ -12,7 +10,7 @@ impl Emitter for JsonLinesEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for diagnostic in diagnostics {
|
||||
|
||||
@@ -3,10 +3,11 @@ use std::path::Path;
|
||||
|
||||
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::LineColumn;
|
||||
|
||||
use crate::message::{Emitter, EmitterContext, MessageWithLocation, group_diagnostics_by_filename};
|
||||
use crate::message::{
|
||||
Emitter, EmitterContext, MessageWithLocation, OldDiagnostic, group_diagnostics_by_filename,
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct JunitEmitter;
|
||||
@@ -15,7 +16,7 @@ impl Emitter for JunitEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
let mut report = Report::new("ruff");
|
||||
@@ -43,7 +44,7 @@ impl Emitter for JunitEmitter {
|
||||
} = message;
|
||||
let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
|
||||
status.set_message(message.body());
|
||||
let location = if context.is_notebook(&message.expect_ruff_filename()) {
|
||||
let location = if context.is_notebook(&message.filename()) {
|
||||
// We can't give a reasonable location for the structured formats,
|
||||
// so we show one that's clearly a fallback
|
||||
LineColumn::default()
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt::Display;
|
||||
use std::io::Write;
|
||||
use std::ops::Deref;
|
||||
|
||||
use ruff_db::diagnostic::{
|
||||
Annotation, Diagnostic, DiagnosticId, LintName, SecondaryCode, Severity, Span,
|
||||
};
|
||||
use ruff_db::diagnostic::{self as db, Annotation, DiagnosticId, LintName, Severity, Span};
|
||||
use ruff_python_parser::semantic_errors::SemanticSyntaxError;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
pub use azure::AzureEmitter;
|
||||
@@ -18,14 +18,17 @@ pub use junit::JunitEmitter;
|
||||
pub use pylint::PylintEmitter;
|
||||
pub use rdjson::RdjsonEmitter;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_python_parser::{ParseError, UnsupportedSyntaxError};
|
||||
use ruff_source_file::{LineColumn, SourceFile};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
pub use sarif::SarifEmitter;
|
||||
pub use text::TextEmitter;
|
||||
|
||||
use crate::Fix;
|
||||
use crate::Violation;
|
||||
use crate::codes::NoqaCode;
|
||||
use crate::logging::DisplayParseErrorType;
|
||||
use crate::registry::Rule;
|
||||
use crate::{Locator, Violation};
|
||||
|
||||
mod azure;
|
||||
mod diff;
|
||||
@@ -40,103 +43,292 @@ mod rdjson;
|
||||
mod sarif;
|
||||
mod text;
|
||||
|
||||
/// Creates a `Diagnostic` from a syntax error, with the format expected by Ruff.
/// `OldDiagnostic` represents either a diagnostic message corresponding to a rule violation or a
/// syntax error message.
///
/// This is almost identical to `ruff_db::diagnostic::create_syntax_error_diagnostic`, except the
/// `message` is stored as the primary diagnostic message instead of on the primary annotation, and
/// `SyntaxError: ` is prepended to the message.
/// All of the information for syntax errors is captured in the underlying [`db::Diagnostic`], while
/// rule violations can have the additional optional fields like fixes, suggestions, and (parent)
/// `noqa` offsets.
///
/// TODO(brent) These should be unified at some point, but we keep them separate for now to avoid a
/// ton of snapshot changes while combining ruff's diagnostic type with `Diagnostic`.
pub fn create_syntax_error_diagnostic(
span: impl Into<Span>,
message: impl std::fmt::Display,
range: impl Ranged,
) -> Diagnostic {
let mut diag = Diagnostic::new(
DiagnosticId::InvalidSyntax,
Severity::Error,
format_args!("SyntaxError: {message}"),
);
let span = span.into().with_range(range.range());
diag.annotate(Annotation::primary(span));
diag
/// For diagnostic messages, the [`db::Diagnostic`]'s primary message contains the
/// [`OldDiagnostic::body`], and the primary annotation optionally contains the suggestion
/// accompanying a fix. The `db::Diagnostic::id` field contains the kebab-case lint name derived
/// from the `Rule`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct OldDiagnostic {
pub diagnostic: db::Diagnostic,

// these fields are specific to rule violations
pub fix: Option<Fix>,
pub parent: Option<TextSize>,
pub(crate) noqa_offset: Option<TextSize>,
pub(crate) secondary_code: Option<SecondaryCode>,
}
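A short sketch of constructing the new type via `OldDiagnostic::lint` (whose signature is introduced just below) and reading it back through the accessors; `Rule::UnusedImport` is assumed here purely for illustration:

    use ruff_source_file::SourceFileBuilder;
    use ruff_text_size::{TextRange, TextSize};

    let file = SourceFileBuilder::new("example.py", "import os\n").finish();
    let diagnostic = OldDiagnostic::lint(
        "`os` imported but unused",
        Some("Remove unused import: `os`"),
        TextRange::new(TextSize::from(7), TextSize::from(9)),
        None, // fix
        None, // parent
        file,
        None, // noqa offset
        Rule::UnusedImport,
    );

    // The body lives in the wrapped `db::Diagnostic`; the secondary code is the noqa code.
    assert_eq!(diagnostic.body(), "`os` imported but unused");
    assert_eq!(diagnostic.filename(), "example.py");
    assert!(diagnostic.secondary_code().is_some());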
#[expect(clippy::too_many_arguments)]
|
||||
pub fn create_lint_diagnostic<B, S>(
|
||||
body: B,
|
||||
suggestion: Option<S>,
|
||||
range: TextRange,
|
||||
fix: Option<Fix>,
|
||||
parent: Option<TextSize>,
|
||||
file: SourceFile,
|
||||
noqa_offset: Option<TextSize>,
|
||||
rule: Rule,
|
||||
) -> Diagnostic
|
||||
where
|
||||
B: Display,
|
||||
S: Display,
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
DiagnosticId::Lint(LintName::of(rule.into())),
|
||||
Severity::Error,
|
||||
body,
|
||||
);
|
||||
|
||||
if let Some(fix) = fix {
|
||||
diagnostic.set_fix(fix);
|
||||
impl OldDiagnostic {
|
||||
pub fn syntax_error(
|
||||
message: impl Display,
|
||||
range: TextRange,
|
||||
file: SourceFile,
|
||||
) -> OldDiagnostic {
|
||||
let mut diag = db::Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, message);
|
||||
let span = Span::from(file).with_range(range);
|
||||
diag.annotate(Annotation::primary(span));
|
||||
Self {
|
||||
diagnostic: diag,
|
||||
fix: None,
|
||||
parent: None,
|
||||
noqa_offset: None,
|
||||
secondary_code: None,
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent) = parent {
|
||||
diagnostic.set_parent(parent);
|
||||
#[expect(clippy::too_many_arguments)]
|
||||
pub fn lint<B, S>(
|
||||
body: B,
|
||||
suggestion: Option<S>,
|
||||
range: TextRange,
|
||||
fix: Option<Fix>,
|
||||
parent: Option<TextSize>,
|
||||
file: SourceFile,
|
||||
noqa_offset: Option<TextSize>,
|
||||
rule: Rule,
|
||||
) -> OldDiagnostic
|
||||
where
|
||||
B: Display,
|
||||
S: Display,
|
||||
{
|
||||
let mut diagnostic = db::Diagnostic::new(
|
||||
DiagnosticId::Lint(LintName::of(rule.into())),
|
||||
Severity::Error,
|
||||
body,
|
||||
);
|
||||
let span = Span::from(file).with_range(range);
|
||||
let mut annotation = Annotation::primary(span);
|
||||
if let Some(suggestion) = suggestion {
|
||||
annotation = annotation.message(suggestion);
|
||||
}
|
||||
diagnostic.annotate(annotation);
|
||||
|
||||
OldDiagnostic {
|
||||
diagnostic,
|
||||
fix,
|
||||
parent,
|
||||
noqa_offset,
|
||||
secondary_code: Some(SecondaryCode(rule.noqa_code().to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(noqa_offset) = noqa_offset {
|
||||
diagnostic.set_noqa_offset(noqa_offset);
|
||||
/// Create an [`OldDiagnostic`] from the given [`ParseError`].
|
||||
pub fn from_parse_error(
|
||||
parse_error: &ParseError,
|
||||
locator: &Locator,
|
||||
file: SourceFile,
|
||||
) -> OldDiagnostic {
|
||||
// Try to create a non-empty range so that the diagnostic can print a caret at the right
|
||||
// position. This requires that we retrieve the next character, if any, and take its length
|
||||
// to maintain char-boundaries.
|
||||
let len = locator
|
||||
.after(parse_error.location.start())
|
||||
.chars()
|
||||
.next()
|
||||
.map_or(TextSize::new(0), TextLen::text_len);
|
||||
|
||||
OldDiagnostic::syntax_error(
|
||||
format_args!(
|
||||
"SyntaxError: {}",
|
||||
DisplayParseErrorType::new(&parse_error.error)
|
||||
),
|
||||
TextRange::at(parse_error.location.start(), len),
|
||||
file,
|
||||
)
|
||||
}
|
||||
|
||||
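The range handling in `from_parse_error` above widens a zero-width error location to the next character so the rendered caret has something to point at while staying on a char boundary. A standalone sketch of that same computation (the `caret_range` helper is illustrative, not a crate API):

    use ruff_text_size::{TextLen, TextRange, TextSize};

    fn caret_range(source: &str, offset: TextSize) -> TextRange {
        // Length of the next character after `offset`, or zero at end of file.
        let len = source[usize::from(offset)..]
            .chars()
            .next()
            .map_or(TextSize::new(0), TextLen::text_len);
        TextRange::at(offset, len)
    }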
let span = Span::from(file).with_range(range);
|
||||
let mut annotation = Annotation::primary(span);
|
||||
if let Some(suggestion) = suggestion {
|
||||
annotation = annotation.message(suggestion);
|
||||
/// Create an [`OldDiagnostic`] from the given [`UnsupportedSyntaxError`].
|
||||
pub fn from_unsupported_syntax_error(
|
||||
unsupported_syntax_error: &UnsupportedSyntaxError,
|
||||
file: SourceFile,
|
||||
) -> OldDiagnostic {
|
||||
OldDiagnostic::syntax_error(
|
||||
format_args!("SyntaxError: {unsupported_syntax_error}"),
|
||||
unsupported_syntax_error.range,
|
||||
file,
|
||||
)
|
||||
}
|
||||
diagnostic.annotate(annotation);
|
||||
|
||||
diagnostic.set_secondary_code(SecondaryCode::new(rule.noqa_code().to_string()));
|
||||
/// Create an [`OldDiagnostic`] from the given [`SemanticSyntaxError`].
|
||||
pub fn from_semantic_syntax_error(
|
||||
semantic_syntax_error: &SemanticSyntaxError,
|
||||
file: SourceFile,
|
||||
) -> OldDiagnostic {
|
||||
OldDiagnostic::syntax_error(
|
||||
format_args!("SyntaxError: {semantic_syntax_error}"),
|
||||
semantic_syntax_error.range,
|
||||
file,
|
||||
)
|
||||
}
|
||||
|
||||
diagnostic
|
||||
// TODO(brent) We temporarily allow this to avoid updating all of the call sites to add
|
||||
// references. I expect this method to go away or change significantly with the rest of the
|
||||
// diagnostic refactor, but if it still exists in this form at the end of the refactor, we
|
||||
// should just update the call sites.
|
||||
#[expect(clippy::needless_pass_by_value)]
|
||||
pub fn new<T: Violation>(kind: T, range: TextRange, file: &SourceFile) -> Self {
|
||||
Self::lint(
|
||||
Violation::message(&kind),
|
||||
Violation::fix_title(&kind),
|
||||
range,
|
||||
None,
|
||||
None,
|
||||
file.clone(),
|
||||
None,
|
||||
T::rule(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Consumes `self` and returns a new `Diagnostic` with the given parent node.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn with_parent(mut self, parent: TextSize) -> Self {
|
||||
self.set_parent(parent);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the location of the diagnostic's parent node.
|
||||
#[inline]
|
||||
pub fn set_parent(&mut self, parent: TextSize) {
|
||||
self.parent = Some(parent);
|
||||
}
|
||||
|
||||
/// Consumes `self` and returns a new `Diagnostic` with the given noqa offset.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn with_noqa_offset(mut self, noqa_offset: TextSize) -> Self {
|
||||
self.noqa_offset = Some(noqa_offset);
|
||||
self
|
||||
}
|
||||
|
||||
/// Returns `true` if `self` is a syntax error message.
|
||||
pub fn is_syntax_error(&self) -> bool {
|
||||
self.diagnostic.id().is_invalid_syntax()
|
||||
}
|
||||
|
||||
/// Returns the name used to represent the diagnostic.
|
||||
pub fn name(&self) -> &'static str {
|
||||
if self.is_syntax_error() {
|
||||
"syntax-error"
|
||||
} else {
|
||||
self.diagnostic.id().as_str()
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the message body to display to the user.
|
||||
pub fn body(&self) -> &str {
|
||||
self.diagnostic.primary_message()
|
||||
}
|
||||
|
||||
/// Returns the fix suggestion for the violation.
|
||||
pub fn suggestion(&self) -> Option<&str> {
|
||||
self.diagnostic.primary_annotation()?.get_message()
|
||||
}
|
||||
|
||||
/// Returns the offset at which the `noqa` comment will be placed if it's a diagnostic message.
|
||||
pub fn noqa_offset(&self) -> Option<TextSize> {
|
||||
self.noqa_offset
|
||||
}
|
||||
|
||||
/// Returns the [`Fix`] for the diagnostic, if there is any.
|
||||
pub fn fix(&self) -> Option<&Fix> {
|
||||
self.fix.as_ref()
|
||||
}
|
||||
|
||||
/// Returns `true` if the diagnostic contains a [`Fix`].
|
||||
pub fn fixable(&self) -> bool {
|
||||
self.fix().is_some()
|
||||
}
|
||||
|
||||
/// Returns the noqa code for the diagnostic message as a string.
|
||||
pub fn secondary_code(&self) -> Option<&SecondaryCode> {
|
||||
self.secondary_code.as_ref()
|
||||
}
|
||||
|
||||
/// Returns the URL for the rule documentation, if it exists.
|
||||
pub fn to_url(&self) -> Option<String> {
|
||||
if self.is_syntax_error() {
|
||||
None
|
||||
} else {
|
||||
Some(format!(
|
||||
"{}/rules/{}",
|
||||
env!("CARGO_PKG_HOMEPAGE"),
|
||||
self.name()
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the filename for the message.
|
||||
pub fn filename(&self) -> String {
|
||||
self.diagnostic
|
||||
.expect_primary_span()
|
||||
.expect_ruff_file()
|
||||
.name()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
/// Computes the start source location for the message.
|
||||
pub fn compute_start_location(&self) -> LineColumn {
|
||||
self.diagnostic
|
||||
.expect_primary_span()
|
||||
.expect_ruff_file()
|
||||
.to_source_code()
|
||||
.line_column(self.start())
|
||||
}
|
||||
|
||||
/// Computes the end source location for the message.
|
||||
pub fn compute_end_location(&self) -> LineColumn {
|
||||
self.diagnostic
|
||||
.expect_primary_span()
|
||||
.expect_ruff_file()
|
||||
.to_source_code()
|
||||
.line_column(self.end())
|
||||
}
|
||||
|
||||
/// Returns the [`SourceFile`] which the message belongs to.
|
||||
pub fn source_file(&self) -> SourceFile {
|
||||
self.diagnostic
|
||||
.expect_primary_span()
|
||||
.expect_ruff_file()
|
||||
.clone()
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(brent) We temporarily allow this to avoid updating all of the call sites to add
|
||||
// references. I expect this method to go away or change significantly with the rest of the
|
||||
// diagnostic refactor, but if it still exists in this form at the end of the refactor, we
|
||||
// should just update the call sites.
|
||||
#[expect(clippy::needless_pass_by_value)]
|
||||
pub fn diagnostic_from_violation<T: Violation>(
|
||||
kind: T,
|
||||
range: TextRange,
|
||||
file: &SourceFile,
|
||||
) -> Diagnostic {
|
||||
create_lint_diagnostic(
|
||||
Violation::message(&kind),
|
||||
Violation::fix_title(&kind),
|
||||
range,
|
||||
None,
|
||||
None,
|
||||
file.clone(),
|
||||
None,
|
||||
T::rule(),
|
||||
)
|
||||
impl Ord for OldDiagnostic {
fn cmp(&self, other: &Self) -> Ordering {
(self.source_file(), self.start()).cmp(&(other.source_file(), other.start()))
}
}

impl PartialOrd for OldDiagnostic {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}

impl Ranged for OldDiagnostic {
fn range(&self) -> TextRange {
self.diagnostic
.expect_primary_span()
.range()
.expect("Expected range for ruff span")
}
}
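Since `Ord` compares `(source_file(), start())`, a plain sort is enough to group diagnostics by file and order them by offset within each file; a one-line sketch:

    fn sort_for_output(mut diagnostics: Vec<OldDiagnostic>) -> Vec<OldDiagnostic> {
        // Sorts by source file first, then by start offset, per the `Ord` impl above.
        diagnostics.sort();
        diagnostics
    }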
struct MessageWithLocation<'a> {
message: &'a Diagnostic,
message: &'a OldDiagnostic,
start_location: LineColumn,
}

impl Deref for MessageWithLocation<'_> {
type Target = Diagnostic;
type Target = OldDiagnostic;

fn deref(&self) -> &Self::Target {
self.message
@@ -144,30 +336,30 @@ impl Deref for MessageWithLocation<'_> {
}

fn group_diagnostics_by_filename(
diagnostics: &[Diagnostic],
diagnostics: &[OldDiagnostic],
) -> BTreeMap<String, Vec<MessageWithLocation>> {
let mut grouped_messages = BTreeMap::default();
for diagnostic in diagnostics {
grouped_messages
.entry(diagnostic.expect_ruff_filename())
.entry(diagnostic.filename().to_string())
.or_insert_with(Vec::new)
.push(MessageWithLocation {
message: diagnostic,
start_location: diagnostic.expect_ruff_start_location(),
start_location: diagnostic.compute_start_location(),
});
}
grouped_messages
}
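A sketch of consuming the grouped map from within this module (the function and the `start_location` field are module-private): `MessageWithLocation` derefs to `OldDiagnostic`, so the body and the precomputed start location are both at hand.

    fn print_summary(diagnostics: &[OldDiagnostic]) {
        for (filename, messages) in group_diagnostics_by_filename(diagnostics) {
            println!("{filename}: {} diagnostic(s)", messages.len());
            for message in &messages {
                // `start_location` was computed once up front; `body()` comes via `Deref`.
                println!(
                    "  {}:{} {}",
                    message.start_location.line,
                    message.start_location.column,
                    message.body()
                );
            }
        }
    }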
/// Display format for [`Diagnostic`]s.
/// Display format for [`OldDiagnostic`]s.
///
/// The emitter serializes a slice of [`Diagnostic`]s and writes them to a [`Write`].
/// The emitter serializes a slice of [`OldDiagnostic`]s and writes them to a [`Write`].
pub trait Emitter {
/// Serializes the `diagnostics` and writes the output to `writer`.
fn emit(
&mut self,
writer: &mut dyn Write,
diagnostics: &[Diagnostic],
diagnostics: &[OldDiagnostic],
context: &EmitterContext,
) -> anyhow::Result<()>;
}
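A minimal sketch of a custom emitter against the updated trait; it only counts fixable diagnostics, which is enough to show the new `&[OldDiagnostic]` signature and accessors in use (the `CountEmitter` type is made up for this example):

    use std::io::Write;

    struct CountEmitter;

    impl Emitter for CountEmitter {
        fn emit(
            &mut self,
            writer: &mut dyn Write,
            diagnostics: &[OldDiagnostic],
            _context: &EmitterContext,
        ) -> anyhow::Result<()> {
            let fixable = diagnostics.iter().filter(|d| d.fixable()).count();
            writeln!(writer, "{} diagnostic(s), {} fixable", diagnostics.len(), fixable)?;
            Ok(())
        }
    }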
@@ -192,40 +384,101 @@ impl<'a> EmitterContext<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
/// A secondary identifier for a lint diagnostic.
///
/// For Ruff rules this means the noqa code.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, serde::Serialize)]
#[serde(transparent)]
pub struct SecondaryCode(String);

impl SecondaryCode {
pub fn new(code: String) -> Self {
Self(code)
}

pub fn as_str(&self) -> &str {
&self.0
}
}

impl Display for SecondaryCode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}

impl std::ops::Deref for SecondaryCode {
type Target = str;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl PartialEq<&str> for SecondaryCode {
fn eq(&self, other: &&str) -> bool {
self.0 == *other
}
}

impl PartialEq<SecondaryCode> for &str {
fn eq(&self, other: &SecondaryCode) -> bool {
other.eq(self)
}
}

impl PartialEq<NoqaCode> for SecondaryCode {
fn eq(&self, other: &NoqaCode) -> bool {
&self.as_str() == other
}
}

impl PartialEq<SecondaryCode> for NoqaCode {
fn eq(&self, other: &SecondaryCode) -> bool {
other.eq(self)
}
}

// for `hashbrown::EntryRef`
impl From<&SecondaryCode> for SecondaryCode {
fn from(value: &SecondaryCode) -> Self {
value.clone()
}
}
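A quick illustration of the comparison impls above: a `SecondaryCode` can be compared against `&str` (and `NoqaCode`) from either side, and it prints as the bare code.

    let code = SecondaryCode::new("E501".to_string());
    assert_eq!(code, "E501");
    assert_eq!("E501", code);
    assert_eq!(code.as_str(), "E501");
    assert_eq!(format!("ignored via noqa: {code}"), "ignored via noqa: E501");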
#[cfg(test)]
|
||||
mod tests {
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use crate::codes::Rule;
|
||||
use crate::{Edit, Fix};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
|
||||
use ruff_source_file::{OneIndexed, SourceFileBuilder};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use crate::codes::Rule;
|
||||
use crate::message::{Emitter, EmitterContext, create_lint_diagnostic};
|
||||
use crate::{Edit, Fix};
|
||||
use crate::Locator;
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
|
||||
|
||||
use super::create_syntax_error_diagnostic;
|
||||
|
||||
pub(super) fn create_syntax_error_diagnostics() -> Vec<Diagnostic> {
|
||||
pub(super) fn create_syntax_error_diagnostics() -> Vec<OldDiagnostic> {
|
||||
let source = r"from os import
|
||||
|
||||
if call(foo
|
||||
def bar():
|
||||
pass
|
||||
";
|
||||
let locator = Locator::new(source);
|
||||
let source_file = SourceFileBuilder::new("syntax_errors.py", source).finish();
|
||||
parse_unchecked(source, ParseOptions::from(Mode::Module))
|
||||
.errors()
|
||||
.iter()
|
||||
.map(|parse_error| {
|
||||
create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
|
||||
OldDiagnostic::from_parse_error(parse_error, &locator, source_file.clone())
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub(super) fn create_diagnostics() -> Vec<Diagnostic> {
|
||||
pub(super) fn create_diagnostics() -> Vec<OldDiagnostic> {
|
||||
let fib = r#"import os
|
||||
|
||||
|
||||
@@ -243,7 +496,7 @@ def fibonacci(n):
|
||||
let fib_source = SourceFileBuilder::new("fib.py", fib).finish();
|
||||
|
||||
let unused_import_start = TextSize::from(7);
|
||||
let unused_import = create_lint_diagnostic(
|
||||
let unused_import = OldDiagnostic::lint(
|
||||
"`os` imported but unused",
|
||||
Some("Remove unused import: `os`"),
|
||||
TextRange::new(unused_import_start, TextSize::from(9)),
|
||||
@@ -258,7 +511,7 @@ def fibonacci(n):
|
||||
);
|
||||
|
||||
let unused_variable_start = TextSize::from(94);
|
||||
let unused_variable = create_lint_diagnostic(
|
||||
let unused_variable = OldDiagnostic::lint(
|
||||
"Local variable `x` is assigned to but never used",
|
||||
Some("Remove assignment to unused variable `x`"),
|
||||
TextRange::new(unused_variable_start, TextSize::from(95)),
|
||||
@@ -275,7 +528,7 @@ def fibonacci(n):
|
||||
let file_2 = r"if a == 1: pass";
|
||||
|
||||
let undefined_name_start = TextSize::from(3);
|
||||
let undefined_name = create_lint_diagnostic(
|
||||
let undefined_name = OldDiagnostic::lint(
|
||||
"Undefined name `a`",
|
||||
Option::<&'static str>::None,
|
||||
TextRange::new(undefined_name_start, TextSize::from(4)),
|
||||
@@ -290,7 +543,7 @@ def fibonacci(n):
|
||||
}
|
||||
|
||||
pub(super) fn create_notebook_diagnostics()
|
||||
-> (Vec<Diagnostic>, FxHashMap<String, NotebookIndex>) {
|
||||
-> (Vec<OldDiagnostic>, FxHashMap<String, NotebookIndex>) {
|
||||
let notebook = r"# cell 1
|
||||
import os
|
||||
# cell 2
|
||||
@@ -306,7 +559,7 @@ def foo():
|
||||
let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish();
|
||||
|
||||
let unused_import_os_start = TextSize::from(16);
|
||||
let unused_import_os = create_lint_diagnostic(
|
||||
let unused_import_os = OldDiagnostic::lint(
|
||||
"`os` imported but unused",
|
||||
Some("Remove unused import: `os`"),
|
||||
TextRange::new(unused_import_os_start, TextSize::from(18)),
|
||||
@@ -321,7 +574,7 @@ def foo():
|
||||
);
|
||||
|
||||
let unused_import_math_start = TextSize::from(35);
|
||||
let unused_import_math = create_lint_diagnostic(
|
||||
let unused_import_math = OldDiagnostic::lint(
|
||||
"`math` imported but unused",
|
||||
Some("Remove unused import: `math`"),
|
||||
TextRange::new(unused_import_math_start, TextSize::from(39)),
|
||||
@@ -336,7 +589,7 @@ def foo():
|
||||
);
|
||||
|
||||
let unused_variable_start = TextSize::from(98);
|
||||
let unused_variable = create_lint_diagnostic(
|
||||
let unused_variable = OldDiagnostic::lint(
|
||||
"Local variable `x` is assigned to but never used",
|
||||
Some("Remove assignment to unused variable `x`"),
|
||||
TextRange::new(unused_variable_start, TextSize::from(99)),
|
||||
@@ -389,7 +642,7 @@ def foo():
|
||||
|
||||
pub(super) fn capture_emitter_output(
|
||||
emitter: &mut dyn Emitter,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
) -> String {
|
||||
let notebook_indexes = FxHashMap::default();
|
||||
let context = EmitterContext::new(¬ebook_indexes);
|
||||
@@ -401,7 +654,7 @@ def foo():
|
||||
|
||||
pub(super) fn capture_emitter_notebook_output(
|
||||
emitter: &mut dyn Emitter,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
notebook_indexes: &FxHashMap<String, NotebookIndex>,
|
||||
) -> String {
|
||||
let context = EmitterContext::new(notebook_indexes);
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use std::io::Write;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::OneIndexed;
|
||||
|
||||
use crate::fs::relativize_path;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
|
||||
|
||||
/// Generate violations in Pylint format.
|
||||
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
|
||||
@@ -15,17 +14,16 @@ impl Emitter for PylintEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for diagnostic in diagnostics {
|
||||
let filename = diagnostic.expect_ruff_filename();
|
||||
let row = if context.is_notebook(&filename) {
|
||||
let row = if context.is_notebook(&diagnostic.filename()) {
|
||||
// We can't give a reasonable location for the structured formats,
|
||||
// so we show one that's clearly a fallback
|
||||
OneIndexed::from_zero_indexed(0)
|
||||
} else {
|
||||
diagnostic.expect_ruff_start_location().line
|
||||
diagnostic.compute_start_location().line
|
||||
};
|
||||
|
||||
let body = if let Some(code) = diagnostic.secondary_code() {
|
||||
@@ -37,7 +35,7 @@ impl Emitter for PylintEmitter {
|
||||
writeln!(
|
||||
writer,
|
||||
"{path}:{row}: {body}",
|
||||
path = relativize_path(&filename),
|
||||
path = relativize_path(&*diagnostic.filename()),
|
||||
)?;
|
||||
}
|
||||
|
||||
|
||||
@@ -4,12 +4,11 @@ use serde::ser::SerializeSeq;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::SourceCode;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::Edit;
|
||||
use crate::message::{Emitter, EmitterContext, LineColumn};
|
||||
use crate::message::{Emitter, EmitterContext, LineColumn, OldDiagnostic};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct RdjsonEmitter;
|
||||
@@ -18,7 +17,7 @@ impl Emitter for RdjsonEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
_context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
serde_json::to_writer_pretty(
|
||||
@@ -38,7 +37,7 @@ impl Emitter for RdjsonEmitter {
|
||||
}
|
||||
|
||||
struct ExpandedMessages<'a> {
|
||||
diagnostics: &'a [Diagnostic],
|
||||
diagnostics: &'a [OldDiagnostic],
|
||||
}
|
||||
|
||||
impl Serialize for ExpandedMessages<'_> {
|
||||
@@ -57,18 +56,18 @@ impl Serialize for ExpandedMessages<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
fn message_to_rdjson_value(message: &Diagnostic) -> Value {
|
||||
let source_file = message.expect_ruff_source_file();
|
||||
fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
|
||||
let source_file = message.source_file();
|
||||
let source_code = source_file.to_source_code();
|
||||
|
||||
let start_location = source_code.line_column(message.expect_range().start());
|
||||
let end_location = source_code.line_column(message.expect_range().end());
|
||||
let start_location = source_code.line_column(message.start());
|
||||
let end_location = source_code.line_column(message.end());
|
||||
|
||||
if let Some(fix) = message.fix() {
|
||||
json!({
|
||||
"message": message.body(),
|
||||
"location": {
|
||||
"path": message.expect_ruff_filename(),
|
||||
"path": message.filename(),
|
||||
"range": rdjson_range(start_location, end_location),
|
||||
},
|
||||
"code": {
|
||||
@@ -81,7 +80,7 @@ fn message_to_rdjson_value(message: &Diagnostic) -> Value {
|
||||
json!({
|
||||
"message": message.body(),
|
||||
"location": {
|
||||
"path": message.expect_ruff_filename(),
|
||||
"path": message.filename(),
|
||||
"range": rdjson_range(start_location, end_location),
|
||||
},
|
||||
"code": {
|
||||
|
||||
@@ -5,12 +5,11 @@ use anyhow::Result;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde_json::json;
|
||||
|
||||
use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
|
||||
use ruff_source_file::OneIndexed;
|
||||
|
||||
use crate::VERSION;
|
||||
use crate::fs::normalize_path;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
|
||||
use crate::registry::{Linter, RuleNamespace};
|
||||
|
||||
pub struct SarifEmitter;
|
||||
@@ -19,7 +18,7 @@ impl Emitter for SarifEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
_context: &EmitterContext,
|
||||
) -> Result<()> {
|
||||
let results = diagnostics
|
||||
@@ -123,10 +122,10 @@ struct SarifResult<'a> {
|
||||
|
||||
impl<'a> SarifResult<'a> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
fn from_message(message: &'a Diagnostic) -> Result<Self> {
|
||||
let start_location = message.expect_ruff_start_location();
|
||||
let end_location = message.expect_ruff_end_location();
|
||||
let path = normalize_path(&*message.expect_ruff_filename());
|
||||
fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
|
||||
let start_location = message.compute_start_location();
|
||||
let end_location = message.compute_end_location();
|
||||
let path = normalize_path(&*message.filename());
|
||||
Ok(Self {
|
||||
code: message.secondary_code(),
|
||||
level: "error".to_string(),
|
||||
@@ -143,10 +142,10 @@ impl<'a> SarifResult<'a> {
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[expect(clippy::unnecessary_wraps)]
|
||||
fn from_message(message: &'a Diagnostic) -> Result<Self> {
|
||||
let start_location = message.expect_ruff_start_location();
|
||||
let end_location = message.expect_ruff_end_location();
|
||||
let path = normalize_path(&*message.expect_ruff_filename());
|
||||
fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
|
||||
let start_location = message.compute_start_location();
|
||||
let end_location = message.compute_end_location();
|
||||
let path = normalize_path(&*message.filename());
|
||||
Ok(Self {
|
||||
code: message.secondary_code(),
|
||||
level: "error".to_string(),
|
||||
|
||||
@@ -6,16 +6,15 @@ use bitflags::bitflags;
|
||||
use colored::Colorize;
|
||||
use ruff_annotate_snippets::{Level, Renderer, Snippet};
|
||||
|
||||
use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::Locator;
|
||||
use crate::fs::relativize_path;
|
||||
use crate::line_width::{IndentWidth, LineWidthBuilder};
|
||||
use crate::message::diff::Diff;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
|
||||
use crate::settings::types::UnsafeFixes;
|
||||
|
||||
bitflags! {
|
||||
@@ -67,20 +66,19 @@ impl Emitter for TextEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
for message in diagnostics {
|
||||
let filename = message.expect_ruff_filename();
|
||||
write!(
|
||||
writer,
|
||||
"{path}{sep}",
|
||||
path = relativize_path(&filename).bold(),
|
||||
path = relativize_path(&*message.filename()).bold(),
|
||||
sep = ":".cyan(),
|
||||
)?;
|
||||
|
||||
let start_location = message.expect_ruff_start_location();
|
||||
let notebook_index = context.notebook_index(&filename);
|
||||
let start_location = message.compute_start_location();
|
||||
let notebook_index = context.notebook_index(&message.filename());
|
||||
|
||||
// Check if we're working on a jupyter notebook and translate positions with cell accordingly
|
||||
let diagnostic_location = if let Some(notebook_index) = notebook_index {
|
||||
@@ -118,7 +116,7 @@ impl Emitter for TextEmitter {
|
||||
|
||||
if self.flags.intersects(EmitterFlags::SHOW_SOURCE) {
|
||||
// The `0..0` range is used to highlight file-level diagnostics.
|
||||
if message.expect_range() != TextRange::default() {
|
||||
if message.range() != TextRange::default() {
|
||||
writeln!(
|
||||
writer,
|
||||
"{}",
|
||||
@@ -142,7 +140,7 @@ impl Emitter for TextEmitter {
|
||||
}
|
||||
|
||||
pub(super) struct RuleCodeAndBody<'a> {
|
||||
pub(crate) message: &'a Diagnostic,
|
||||
pub(crate) message: &'a OldDiagnostic,
|
||||
pub(crate) show_fix_status: bool,
|
||||
pub(crate) unsafe_fixes: UnsafeFixes,
|
||||
}
|
||||
@@ -180,7 +178,7 @@ impl Display for RuleCodeAndBody<'_> {
|
||||
}
|
||||
|
||||
pub(super) struct MessageCodeFrame<'a> {
|
||||
pub(crate) message: &'a Diagnostic,
|
||||
pub(crate) message: &'a OldDiagnostic,
|
||||
pub(crate) notebook_index: Option<&'a NotebookIndex>,
|
||||
}
|
||||
|
||||
@@ -193,10 +191,10 @@ impl Display for MessageCodeFrame<'_> {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let source_file = self.message.expect_ruff_source_file();
|
||||
let source_file = self.message.source_file();
|
||||
let source_code = source_file.to_source_code();
|
||||
|
||||
let content_start_index = source_code.line_index(self.message.expect_range().start());
|
||||
let content_start_index = source_code.line_index(self.message.start());
|
||||
let mut start_index = content_start_index.saturating_sub(2);
|
||||
|
||||
// If we're working with a Jupyter Notebook, skip the lines which are
|
||||
@@ -219,7 +217,7 @@ impl Display for MessageCodeFrame<'_> {
|
||||
start_index = start_index.saturating_add(1);
|
||||
}
|
||||
|
||||
let content_end_index = source_code.line_index(self.message.expect_range().end());
|
||||
let content_end_index = source_code.line_index(self.message.end());
|
||||
let mut end_index = content_end_index
|
||||
.saturating_add(2)
|
||||
.min(OneIndexed::from_zero_indexed(source_code.line_count()));
|
||||
@@ -250,7 +248,7 @@ impl Display for MessageCodeFrame<'_> {
|
||||
|
||||
let source = replace_whitespace_and_unprintable(
|
||||
source_code.slice(TextRange::new(start_offset, end_offset)),
|
||||
self.message.expect_range() - start_offset,
|
||||
self.message.range() - start_offset,
|
||||
)
|
||||
.fix_up_empty_spans_after_line_terminator();
|
||||
|
||||
|
||||
@@ -9,7 +9,6 @@ use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
use log::warn;
|
||||
|
||||
use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
|
||||
use ruff_python_trivia::{CommentRanges, Cursor, indentation_at_offset};
|
||||
use ruff_source_file::{LineEnding, LineRanges};
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
@@ -18,6 +17,7 @@ use rustc_hash::FxHashSet;
|
||||
use crate::Edit;
|
||||
use crate::Locator;
|
||||
use crate::fs::relativize_path;
|
||||
use crate::message::{OldDiagnostic, SecondaryCode};
|
||||
use crate::registry::Rule;
|
||||
use crate::rule_redirects::get_redirect_target;
|
||||
|
||||
@@ -28,7 +28,7 @@ use crate::rule_redirects::get_redirect_target;
|
||||
/// simultaneously.
|
||||
pub fn generate_noqa_edits(
|
||||
path: &Path,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
locator: &Locator,
|
||||
comment_ranges: &CommentRanges,
|
||||
external: &[String],
|
||||
@@ -717,7 +717,7 @@ impl Error for LexicalError {}
|
||||
/// Adds noqa comments to suppress all messages of a file.
|
||||
pub(crate) fn add_noqa(
|
||||
path: &Path,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
locator: &Locator,
|
||||
comment_ranges: &CommentRanges,
|
||||
external: &[String],
|
||||
@@ -740,7 +740,7 @@ pub(crate) fn add_noqa(
|
||||
|
||||
fn add_noqa_inner(
|
||||
path: &Path,
|
||||
diagnostics: &[Diagnostic],
|
||||
diagnostics: &[OldDiagnostic],
|
||||
locator: &Locator,
|
||||
comment_ranges: &CommentRanges,
|
||||
external: &[String],
|
||||
@@ -845,7 +845,7 @@ struct NoqaComment<'a> {
|
||||
}
|
||||
|
||||
fn find_noqa_comments<'a>(
|
||||
diagnostics: &'a [Diagnostic],
|
||||
diagnostics: &'a [OldDiagnostic],
|
||||
locator: &'a Locator,
|
||||
exemption: &'a FileExemption,
|
||||
directives: &'a NoqaDirectives,
|
||||
@@ -867,7 +867,7 @@ fn find_noqa_comments<'a>(
|
||||
}
|
||||
|
||||
// Is the violation ignored by a `noqa` directive on the parent line?
|
||||
if let Some(parent) = message.parent() {
|
||||
if let Some(parent) = message.parent {
|
||||
if let Some(directive_line) =
|
||||
directives.find_line_with_directive(noqa_line_for.resolve(parent))
|
||||
{
|
||||
@@ -886,7 +886,7 @@ fn find_noqa_comments<'a>(
|
||||
}
|
||||
}
|
||||
|
||||
let noqa_offset = noqa_line_for.resolve(message.expect_range().start());
|
||||
let noqa_offset = noqa_line_for.resolve(message.range().start());
|
||||
|
||||
// Or ignored by the directive itself?
|
||||
if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) {
|
||||
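The lookup order visible in `find_noqa_comments` above: a violation is first checked against a directive on its parent statement's mapped line, then against the directive on its own mapped start offset. A simplified sketch of just that control flow, ignoring the per-code matching the real function performs (`NoqaMapping` and `NoqaDirectives` are the crate-internal types used in this hunk):

    fn has_covering_directive(
        message: &OldDiagnostic,
        noqa_line_for: &NoqaMapping,
        directives: &NoqaDirectives,
    ) -> bool {
        // A `# noqa` on the parent statement's line takes effect first...
        if let Some(parent) = message.parent {
            if directives
                .find_line_with_directive(noqa_line_for.resolve(parent))
                .is_some()
            {
                return true;
            }
        }
        // ...otherwise look at the line the violation itself maps to.
        let noqa_offset = noqa_line_for.resolve(message.range().start());
        directives.find_line_with_directive(noqa_offset).is_some()
    }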
@@ -1225,8 +1225,6 @@ mod tests {
|
||||
use ruff_source_file::{LineEnding, SourceFileBuilder};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::Edit;
|
||||
use crate::message::diagnostic_from_violation;
|
||||
use crate::noqa::{
|
||||
Directive, LexicalError, NoqaLexerOutput, NoqaMapping, add_noqa_inner, lex_codes,
|
||||
lex_file_exemption, lex_inline_noqa,
|
||||
@@ -1234,6 +1232,7 @@ mod tests {
|
||||
use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
|
||||
use crate::rules::pyflakes::rules::UnusedVariable;
|
||||
use crate::rules::pyupgrade::rules::PrintfStringFormatting;
|
||||
use crate::{Edit, OldDiagnostic};
|
||||
use crate::{Locator, generate_noqa_edits};
|
||||
|
||||
fn assert_lexed_ranges_match_slices(
|
||||
@@ -2832,7 +2831,7 @@ mod tests {
|
||||
assert_eq!(output, format!("{contents}"));
|
||||
|
||||
let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
|
||||
let messages = [diagnostic_from_violation(
|
||||
let messages = [OldDiagnostic::new(
|
||||
UnusedVariable {
|
||||
name: "x".to_string(),
|
||||
},
|
||||
@@ -2856,12 +2855,12 @@ mod tests {
|
||||
|
||||
let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
|
||||
let messages = [
|
||||
diagnostic_from_violation(
|
||||
OldDiagnostic::new(
|
||||
AmbiguousVariableName("x".to_string()),
|
||||
TextRange::new(TextSize::from(0), TextSize::from(0)),
|
||||
&source_file,
|
||||
),
|
||||
diagnostic_from_violation(
|
||||
OldDiagnostic::new(
|
||||
UnusedVariable {
|
||||
name: "x".to_string(),
|
||||
},
|
||||
@@ -2887,12 +2886,12 @@ mod tests {
|
||||
|
||||
let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
|
||||
let messages = [
|
||||
diagnostic_from_violation(
|
||||
OldDiagnostic::new(
|
||||
AmbiguousVariableName("x".to_string()),
|
||||
TextRange::new(TextSize::from(0), TextSize::from(0)),
|
||||
&source_file,
|
||||
),
|
||||
diagnostic_from_violation(
|
||||
OldDiagnostic::new(
|
||||
UnusedVariable {
|
||||
name: "x".to_string(),
|
||||
},
|
||||
@@ -2931,7 +2930,7 @@ print(
|
||||
"#;
|
||||
let noqa_line_for = [TextRange::new(8.into(), 68.into())].into_iter().collect();
|
||||
let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish();
|
||||
let messages = [diagnostic_from_violation(
|
||||
let messages = [OldDiagnostic::new(
|
||||
PrintfStringFormatting,
|
||||
TextRange::new(12.into(), 79.into()),
|
||||
&source_file,
|
||||
@@ -2964,7 +2963,7 @@ foo;
|
||||
bar =
|
||||
";
|
||||
let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish();
|
||||
let messages = [diagnostic_from_violation(
|
||||
let messages = [OldDiagnostic::new(
|
||||
UselessSemicolon,
|
||||
TextRange::new(4.into(), 5.into()),
|
||||
&source_file,
|
||||
|
||||
@@ -54,20 +54,6 @@ pub(crate) const fn is_fix_manual_list_comprehension_enabled(settings: &LinterSe
|
||||
pub(crate) const fn is_fix_os_path_getsize_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
// https://github.com/astral-sh/ruff/pull/18922
|
||||
pub(crate) const fn is_fix_os_path_getmtime_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/18922
|
||||
pub(crate) const fn is_fix_os_path_getatime_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/18922
|
||||
pub(crate) const fn is_fix_os_path_getctime_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/11436
|
||||
// https://github.com/astral-sh/ruff/pull/11168
|
||||
|
||||
@@ -3,17 +3,19 @@ use log::warn;
|
||||
use pyproject_toml::PyProjectToml;
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_source_file::SourceFile;
|
||||
|
||||
use crate::IOError;
|
||||
use crate::message::diagnostic_from_violation;
|
||||
use crate::OldDiagnostic;
|
||||
use crate::registry::Rule;
|
||||
use crate::rules::ruff::rules::InvalidPyprojectToml;
|
||||
use crate::settings::LinterSettings;
|
||||
|
||||
/// RUF200
|
||||
pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings) -> Vec<Diagnostic> {
|
||||
pub fn lint_pyproject_toml(
|
||||
source_file: &SourceFile,
|
||||
settings: &LinterSettings,
|
||||
) -> Vec<OldDiagnostic> {
|
||||
let Some(err) = toml::from_str::<PyProjectToml>(source_file.source_text()).err() else {
|
||||
return Vec::default();
|
||||
};
|
||||
@@ -30,11 +32,8 @@ pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings)
|
||||
source_file.name(),
|
||||
);
|
||||
if settings.rules.enabled(Rule::IOError) {
|
||||
let diagnostic = diagnostic_from_violation(
|
||||
IOError { message },
|
||||
TextRange::default(),
|
||||
source_file,
|
||||
);
|
||||
let diagnostic =
|
||||
OldDiagnostic::new(IOError { message }, TextRange::default(), source_file);
|
||||
messages.push(diagnostic);
|
||||
} else {
|
||||
warn!(
|
||||
@@ -56,7 +55,7 @@ pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings)
|
||||
|
||||
if settings.rules.enabled(Rule::InvalidPyprojectToml) {
|
||||
let toml_err = err.message().to_string();
|
||||
let diagnostic = diagnostic_from_violation(
|
||||
let diagnostic = OldDiagnostic::new(
|
||||
InvalidPyprojectToml { message: toml_err },
|
||||
range,
|
||||
source_file,
|
||||
|
||||
@@ -1,46 +1,46 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_bandit/mod.rs
|
||||
---
|
||||
S112.py:4:5: S112 `try`-`except`-`continue` detected, consider logging the exception
|
||||
S112.py:3:1: S112 `try`-`except`-`continue` detected, consider logging the exception
|
||||
|
|
||||
2 | try:
|
||||
3 | pass
|
||||
4 | / except Exception:
|
||||
5 | | continue
|
||||
| |________________^ S112
|
||||
6 |
|
||||
7 | try:
|
||||
1 | try:
|
||||
2 | pass
|
||||
3 | / except Exception:
|
||||
4 | | continue
|
||||
| |____________^ S112
|
||||
5 |
|
||||
6 | try:
|
||||
|
|
||||
|
||||
S112.py:9:5: S112 `try`-`except`-`continue` detected, consider logging the exception
|
||||
S112.py:8:1: S112 `try`-`except`-`continue` detected, consider logging the exception
|
||||
|
|
||||
7 | try:
|
||||
8 | pass
|
||||
9 | / except:
|
||||
10 | | continue
|
||||
| |________________^ S112
|
||||
11 |
|
||||
12 | try:
|
||||
6 | try:
|
||||
7 | pass
|
||||
8 | / except:
|
||||
9 | | continue
|
||||
| |____________^ S112
|
||||
10 |
|
||||
11 | try:
|
||||
|
|
||||
|
||||
S112.py:14:5: S112 `try`-`except`-`continue` detected, consider logging the exception
|
||||
S112.py:13:1: S112 `try`-`except`-`continue` detected, consider logging the exception
|
||||
|
|
||||
12 | try:
|
||||
13 | pass
|
||||
14 | / except (Exception,):
|
||||
15 | | continue
|
||||
| |________________^ S112
|
||||
16 |
|
||||
17 | try:
|
||||
11 | try:
|
||||
12 | pass
|
||||
13 | / except (Exception,):
|
||||
14 | | continue
|
||||
| |____________^ S112
|
||||
15 |
|
||||
16 | try:
|
||||
|
|
||||
|
||||
S112.py:19:5: S112 `try`-`except`-`continue` detected, consider logging the exception
|
||||
S112.py:18:1: S112 `try`-`except`-`continue` detected, consider logging the exception
|
||||
|
|
||||
17 | try:
|
||||
18 | pass
|
||||
19 | / except (Exception, ValueError):
|
||||
20 | | continue
|
||||
| |________________^ S112
|
||||
21 |
|
||||
22 | try:
|
||||
16 | try:
|
||||
17 | pass
|
||||
18 | / except (Exception, ValueError):
|
||||
19 | | continue
|
||||
| |____________^ S112
|
||||
20 |
|
||||
21 | try:
|
||||
|
|
||||
|
||||
@@ -195,31 +195,31 @@ B031.py:144:33: B031 Using the generator returned from `itertools.groupby()` mor
|
||||
146 | for group in groupby(items, key=lambda p: p[1]):
|
||||
|
|
||||
|
||||
B031.py:203:41: B031 Using the generator returned from `itertools.groupby()` more than once will do nothing on the second usage
|
||||
B031.py:200:37: B031 Using the generator returned from `itertools.groupby()` more than once will do nothing on the second usage
|
||||
|
|
||||
201 | if _section == "greens":
|
||||
202 | collect_shop_items(shopper, section_items)
|
||||
203 | collect_shop_items(shopper, section_items)
|
||||
| ^^^^^^^^^^^^^ B031
|
||||
204 | return
|
||||
|
|
||||
|
||||
B031.py:215:41: B031 Using the generator returned from `itertools.groupby()` more than once will do nothing on the second usage
|
||||
|
|
||||
213 | elif _section == "frozen items":
|
||||
214 | collect_shop_items(shopper, section_items)
|
||||
215 | collect_shop_items(shopper, section_items)
|
||||
| ^^^^^^^^^^^^^ B031
|
||||
216 |
|
||||
217 | # Should trigger, since only one branch has a return statement.
|
||||
|
|
||||
|
||||
B031.py:226:37: B031 Using the generator returned from `itertools.groupby()` more than once will do nothing on the second usage
|
||||
|
|
||||
224 | elif _section == "frozen items":
|
||||
225 | collect_shop_items(shopper, section_items)
|
||||
226 | collect_shop_items(shopper, section_items) # B031
|
||||
198 | if _section == "greens":
|
||||
199 | collect_shop_items(shopper, section_items)
|
||||
200 | collect_shop_items(shopper, section_items)
|
||||
| ^^^^^^^^^^^^^ B031
|
||||
227 |
|
||||
228 | # Let's redefine the `groupby` function to make sure we pick up the correct one.
|
||||
201 | return
|
||||
|
|
||||
|
||||
B031.py:210:37: B031 Using the generator returned from `itertools.groupby()` more than once will do nothing on the second usage
|
||||
|
|
||||
208 | elif _section == "frozen items":
|
||||
209 | collect_shop_items(shopper, section_items)
|
||||
210 | collect_shop_items(shopper, section_items)
|
||||
| ^^^^^^^^^^^^^ B031
|
||||
211 |
|
||||
212 | # Should trigger, since only one branch has a return statement.
|
||||
|
|
||||
|
||||
B031.py:219:33: B031 Using the generator returned from `itertools.groupby()` more than once will do nothing on the second usage
|
||||
|
|
||||
217 | elif _section == "frozen items":
|
||||
218 | collect_shop_items(shopper, section_items)
|
||||
219 | collect_shop_items(shopper, section_items) # B031
|
||||
| ^^^^^^^^^^^^^ B031
|
||||
220 |
|
||||
221 | # Let's redefine the `groupby` function to make sure we pick up the correct one.
|
||||
|
|
||||
|
||||
@@ -355,7 +355,7 @@ fn check_token(
|
||||
if let Some(mut diagnostic) =
|
||||
lint_context.report_diagnostic_if_enabled(ProhibitedTrailingComma, prev.range())
|
||||
{
|
||||
let range = diagnostic.expect_range();
|
||||
let range = diagnostic.range();
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ COM81_syntax_error.py:3:5: SyntaxError: Starred expression cannot be used here
|
||||
1 | # Check for `flake8-commas` violation for a file containing syntax errors.
|
||||
2 | (
|
||||
3 | *args
|
||||
| ^^^^^
|
||||
| ^
|
||||
4 | )
|
||||
|
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ ISC_syntax_error.py:2:5: SyntaxError: missing closing quote in string literal
1 | # The lexer doesn't emit a string token if it's unterminated
2 | "a" "b
  | ^^
  | ^
3 | "a" "b" "c
4 | "a" """b

@@ -36,7 +36,7 @@ ISC_syntax_error.py:3:9: SyntaxError: missing closing quote in string literal
1 | # The lexer doesn't emit a string token if it's unterminated
2 | "a" "b
3 | "a" "b" "c
  | ^^
  | ^
4 | "a" """b
5 | c""" "d

@@ -68,7 +68,7 @@ ISC_syntax_error.py:5:6: SyntaxError: missing closing quote in string literal
3 | "a" "b" "c
4 | "a" """b
5 | c""" "d
  | ^^
  | ^
6 |
7 | # For f-strings, the `FStringRanges` won't contain the range for

@@ -153,21 +153,19 @@ ISC_syntax_error.py:16:5: SyntaxError: missing closing quote in string literal
14 | (
15 | "a"
16 | "b
   | ^^
   | ^
17 | "c"
18 | "d"

ISC_syntax_error.py:26:9: SyntaxError: f-string: unterminated triple-quoted string
24 | (
25 | """abc"""
26 | f"""def
   | _________^
27 | | "g" "h"
28 | | "i" "j"
29 | | )
   | |__^
24 | (
25 | """abc"""
26 | f"""def
   | ^
27 | "g" "h"
28 | "i" "j"

ISC_syntax_error.py:30:1: SyntaxError: unexpected EOF while parsing
@@ -5,7 +5,7 @@ ISC_syntax_error.py:2:5: SyntaxError: missing closing quote in string literal
1 | # The lexer doesn't emit a string token if it's unterminated
2 | "a" "b
  | ^^
  | ^
3 | "a" "b" "c
4 | "a" """b

@@ -25,7 +25,7 @@ ISC_syntax_error.py:3:9: SyntaxError: missing closing quote in string literal
1 | # The lexer doesn't emit a string token if it's unterminated
2 | "a" "b
3 | "a" "b" "c
  | ^^
  | ^
4 | "a" """b
5 | c""" "d

@@ -45,7 +45,7 @@ ISC_syntax_error.py:5:6: SyntaxError: missing closing quote in string literal
3 | "a" "b" "c
4 | "a" """b
5 | c""" "d
  | ^^
  | ^
6 |
7 | # For f-strings, the `FStringRanges` won't contain the range for

@@ -107,21 +107,19 @@ ISC_syntax_error.py:16:5: SyntaxError: missing closing quote in string literal
14 | (
15 | "a"
16 | "b
   | ^^
   | ^
17 | "c"
18 | "d"

ISC_syntax_error.py:26:9: SyntaxError: f-string: unterminated triple-quoted string
24 | (
25 | """abc"""
26 | f"""def
   | _________^
27 | | "g" "h"
28 | | "i" "j"
29 | | )
   | |__^
24 | (
25 | """abc"""
26 | f"""def
   | ^
27 | "g" "h"
28 | "i" "j"

ISC_syntax_error.py:30:1: SyntaxError: unexpected EOF while parsing
@@ -190,73 +190,72 @@ PIE804.py:26:22: PIE804 [*] Unnecessary `dict` kwargs
|
||||
26 |+abc(a=1, **{'a': c}, b=c) # PIE804
|
||||
27 27 |
|
||||
28 28 | # Some values need to be parenthesized.
|
||||
29 29 | def foo():
|
||||
29 29 | abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
|
||||
PIE804.py:30:16: PIE804 [*] Unnecessary `dict` kwargs
|
||||
PIE804.py:29:12: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
28 | # Some values need to be parenthesized.
|
||||
29 | def foo():
|
||||
30 | abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
31 | abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
29 | abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
30 | abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
ℹ Safe fix
|
||||
26 26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
27 27 |
|
||||
28 28 | # Some values need to be parenthesized.
|
||||
29 |-abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
29 |+abc(foo=1, bar=(bar := 1)) # PIE804
|
||||
30 30 | abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
31 31 |
|
||||
32 32 | # https://github.com/astral-sh/ruff/issues/18036
|
||||
|
||||
PIE804.py:30:12: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
28 | # Some values need to be parenthesized.
|
||||
29 | abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
30 | abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
31 |
|
||||
32 | # https://github.com/astral-sh/ruff/issues/18036
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
ℹ Safe fix
|
||||
27 27 |
|
||||
28 28 | # Some values need to be parenthesized.
|
||||
29 29 | def foo():
|
||||
30 |- abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
30 |+ abc(foo=1, bar=(bar := 1)) # PIE804
|
||||
31 31 | abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
32 32 |
|
||||
33 33 | # https://github.com/astral-sh/ruff/issues/18036
|
||||
29 29 | abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
30 |-abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
30 |+abc(foo=1, bar=(yield 1)) # PIE804
|
||||
31 31 |
|
||||
32 32 | # https://github.com/astral-sh/ruff/issues/18036
|
||||
33 33 | # The autofix for this is unsafe due to the comments inside the dictionary.
|
||||
|
||||
PIE804.py:31:16: PIE804 [*] Unnecessary `dict` kwargs
|
||||
PIE804.py:35:5: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
29 | def foo():
|
||||
30 | abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
31 | abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
| ^^^^^^^^^^^^^^^^^^^^ PIE804
|
||||
32 |
|
||||
33 | # https://github.com/astral-sh/ruff/issues/18036
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
ℹ Safe fix
|
||||
28 28 | # Some values need to be parenthesized.
|
||||
29 29 | def foo():
|
||||
30 30 | abc(foo=1, **{'bar': (bar := 1)}) # PIE804
|
||||
31 |- abc(foo=1, **{'bar': (yield 1)}) # PIE804
|
||||
31 |+ abc(foo=1, bar=(yield 1)) # PIE804
|
||||
32 32 |
|
||||
33 33 | # https://github.com/astral-sh/ruff/issues/18036
|
||||
34 34 | # The autofix for this is unsafe due to the comments inside the dictionary.
|
||||
|
||||
PIE804.py:36:5: PIE804 [*] Unnecessary `dict` kwargs
|
||||
|
|
||||
34 | # The autofix for this is unsafe due to the comments inside the dictionary.
|
||||
35 | foo(
|
||||
36 | / **{
|
||||
37 | | # Comment 1
|
||||
38 | | "x": 1.0,
|
||||
39 | | # Comment 2
|
||||
40 | | "y": 2.0,
|
||||
41 | | }
|
||||
33 | # The autofix for this is unsafe due to the comments inside the dictionary.
|
||||
34 | foo(
|
||||
35 | / **{
|
||||
36 | | # Comment 1
|
||||
37 | | "x": 1.0,
|
||||
38 | | # Comment 2
|
||||
39 | | "y": 2.0,
|
||||
40 | | }
|
||||
| |_____^ PIE804
|
||||
42 | )
|
||||
41 | )
|
||||
|
|
||||
= help: Remove unnecessary kwargs
|
||||
|
||||
ℹ Unsafe fix
|
||||
33 33 | # https://github.com/astral-sh/ruff/issues/18036
|
||||
34 34 | # The autofix for this is unsafe due to the comments inside the dictionary.
|
||||
35 35 | foo(
|
||||
36 |- **{
|
||||
37 |- # Comment 1
|
||||
38 |- "x": 1.0,
|
||||
39 |- # Comment 2
|
||||
40 |- "y": 2.0,
|
||||
41 |- }
|
||||
36 |+ x=1.0, y=2.0
|
||||
42 37 | )
|
||||
32 32 | # https://github.com/astral-sh/ruff/issues/18036
|
||||
33 33 | # The autofix for this is unsafe due to the comments inside the dictionary.
|
||||
34 34 | foo(
|
||||
35 |- **{
|
||||
36 |- # Comment 1
|
||||
37 |- "x": 1.0,
|
||||
38 |- # Comment 2
|
||||
39 |- "y": 2.0,
|
||||
40 |- }
|
||||
35 |+ x=1.0, y=2.0
|
||||
41 36 | )
|
||||
|
||||
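As a minimal sketch of the rewrite exercised by the PIE804 fixtures above (illustrative only; the `abc` name mirrors the fixture, the body is a made-up stand-in):

```python
# PIE804 flags a dict literal that is immediately unpacked into keyword arguments.
def abc(**kwargs):
    return kwargs

before = abc(foo=1, **{"bar": 2})  # unnecessary `dict` kwargs
after = abc(foo=1, bar=2)          # the fix passes the keyword directly
assert before == after == {"foo": 1, "bar": 2}
```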
@@ -21,9 +21,7 @@ use crate::registry::Rule;
///
/// ## Example
/// ```pyi
/// import sys
///
/// if sys.platform == "xunil"[::-1]:
/// if sys.platform.startswith("linux"):
/// # Linux specific definitions
/// ...
/// else:
@@ -33,8 +31,6 @@ use crate::registry::Rule;
///
/// Instead, use a simple string comparison, such as `==` or `!=`:
/// ```pyi
/// import sys
///
/// if sys.platform == "linux":
/// # Linux specific definitions
/// ...
@@ -69,15 +65,11 @@ impl Violation for UnrecognizedPlatformCheck {
///
/// ## Example
/// ```pyi
/// import sys
///
/// if sys.platform == "linus": ...
/// ```
///
/// Use instead:
/// ```pyi
/// import sys
///
/// if sys.platform == "linux": ...
/// ```
///
@@ -31,7 +31,7 @@ use crate::rules::flake8_pytest_style::helpers::{Parentheses, get_mark_decorator
/// import pytest
///
///
/// @pytest.mark.foo()
/// @pytest.mark.foo
/// def test_something(): ...
/// ```
///
@@ -41,7 +41,7 @@ use crate::rules::flake8_pytest_style::helpers::{Parentheses, get_mark_decorator
/// import pytest
///
///
/// @pytest.mark.foo
/// @pytest.mark.foo()
/// def test_something(): ...
/// ```
///
@@ -76,11 +76,11 @@ impl Violation for PytestWarnsWithMultipleStatements {
///
///
/// def test_foo():
/// with pytest.warns(Warning):
/// with pytest.warns(RuntimeWarning):
/// ...
///
/// # empty string is also an error
/// with pytest.warns(Warning, match=""):
/// with pytest.warns(RuntimeWarning, match=""):
/// ...
/// ```
///
@@ -90,7 +90,7 @@ impl Violation for PytestWarnsWithMultipleStatements {
///
///
/// def test_foo():
/// with pytest.warns(Warning, match="expected message"):
/// with pytest.warns(RuntimeWarning, match="expected message"):
/// ...
/// ```
///
@@ -19,12 +19,12 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
///
/// ## Example
/// ```python
/// foo = "bar\"s"
/// foo = 'bar\'s'
/// ```
///
/// Use instead:
/// ```python
/// foo = 'bar"s'
/// foo = "bar's"
/// ```
///
/// ## Formatter compatibility
@@ -20,7 +20,6 @@ use crate::checkers::ast::Checker;
/// ## Example
/// ```python
/// fruits = ["apple", "banana", "cherry"]
/// i = 0
/// for fruit in fruits:
/// print(f"{i + 1}. {fruit}")
/// i += 1
@@ -27,7 +27,6 @@ use crate::{Edit, Fix, FixAvailability, Violation};
///
/// ## Example
/// ```python
/// foo = {}
/// if "bar" in foo:
/// value = foo["bar"]
/// else:
@@ -36,7 +35,6 @@ use crate::{Edit, Fix, FixAvailability, Violation};
///
/// Use instead:
/// ```python
/// foo = {}
/// value = foo.get("bar", 0)
/// ```
///
@@ -18,22 +18,17 @@ use crate::checkers::ast::Checker;
///
/// ## Example
/// ```python
/// def find_phrase(x):
/// if x == 1:
/// return "Hello"
/// elif x == 2:
/// return "Goodbye"
/// elif x == 3:
/// return "Good morning"
/// else:
/// return "Goodnight"
/// if x == 1:
/// return "Hello"
/// elif x == 2:
/// return "Goodbye"
/// else:
/// return "Goodnight"
/// ```
///
/// Use instead:
/// ```python
/// def find_phrase(x):
/// phrases = {1: "Hello", 2: "Goodye", 3: "Good morning"}
/// return phrases.get(x, "Goodnight")
/// return {1: "Hello", 2: "Goodbye"}.get(x, "Goodnight")
/// ```
#[derive(ViolationMetadata)]
pub(crate) struct IfElseBlockInsteadOfDictLookup;
@@ -23,17 +23,15 @@ use crate::{Edit, Fix, FixAvailability, Violation};
///
/// ## Example
/// ```python
/// def foo():
/// for item in iterable:
/// if predicate(item):
/// return True
/// return False
/// for item in iterable:
/// if predicate(item):
/// return True
/// return False
/// ```
///
/// Use instead:
/// ```python
/// def foo():
/// return any(predicate(item) for item in iterable)
/// return any(predicate(item) for item in iterable)
/// ```
///
/// ## Fix safety
@@ -50,285 +50,285 @@ SIM115.py:12:5: SIM115 Use a context manager for opening files
|
||||
14 | f.close()
|
||||
|
|
||||
|
||||
SIM115.py:40:9: SIM115 Use a context manager for opening files
|
||||
SIM115.py:39:9: SIM115 Use a context manager for opening files
|
||||
|
|
||||
38 | # SIM115
|
||||
39 | with contextlib.ExitStack():
|
||||
40 | f = open("filename")
|
||||
37 | # SIM115
|
||||
38 | with contextlib.ExitStack():
|
||||
39 | f = open("filename")
|
||||
| ^^^^ SIM115
|
||||
41 |
|
||||
42 | # OK
|
||||
40 |
|
||||
41 | # OK
|
||||
|
|
||||
|
||||
SIM115.py:80:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
78 | import fileinput
|
||||
79 |
|
||||
80 | f = tempfile.NamedTemporaryFile()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
81 | f = tempfile.TemporaryFile()
|
||||
82 | f = tempfile.SpooledTemporaryFile()
|
||||
|
|
||||
|
||||
SIM115.py:81:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
79 | import fileinput
|
||||
80 |
|
||||
81 | f = tempfile.NamedTemporaryFile()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
82 | f = tempfile.TemporaryFile()
|
||||
83 | f = tempfile.SpooledTemporaryFile()
|
||||
80 | f = tempfile.NamedTemporaryFile()
|
||||
81 | f = tempfile.TemporaryFile()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
82 | f = tempfile.SpooledTemporaryFile()
|
||||
83 | f = tarfile.open("foo.tar")
|
||||
|
|
||||
|
||||
SIM115.py:82:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
81 | f = tempfile.NamedTemporaryFile()
|
||||
82 | f = tempfile.TemporaryFile()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
83 | f = tempfile.SpooledTemporaryFile()
|
||||
84 | f = tarfile.open("foo.tar")
|
||||
80 | f = tempfile.NamedTemporaryFile()
|
||||
81 | f = tempfile.TemporaryFile()
|
||||
82 | f = tempfile.SpooledTemporaryFile()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
83 | f = tarfile.open("foo.tar")
|
||||
84 | f = TarFile("foo.tar").open()
|
||||
|
|
||||
|
||||
SIM115.py:83:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
81 | f = tempfile.NamedTemporaryFile()
|
||||
82 | f = tempfile.TemporaryFile()
|
||||
83 | f = tempfile.SpooledTemporaryFile()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
84 | f = tarfile.open("foo.tar")
|
||||
85 | f = TarFile("foo.tar").open()
|
||||
81 | f = tempfile.TemporaryFile()
|
||||
82 | f = tempfile.SpooledTemporaryFile()
|
||||
83 | f = tarfile.open("foo.tar")
|
||||
| ^^^^^^^^^^^^ SIM115
|
||||
84 | f = TarFile("foo.tar").open()
|
||||
85 | f = tarfile.TarFile("foo.tar").open()
|
||||
|
|
||||
|
||||
SIM115.py:84:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
82 | f = tempfile.TemporaryFile()
|
||||
83 | f = tempfile.SpooledTemporaryFile()
|
||||
84 | f = tarfile.open("foo.tar")
|
||||
| ^^^^^^^^^^^^ SIM115
|
||||
85 | f = TarFile("foo.tar").open()
|
||||
86 | f = tarfile.TarFile("foo.tar").open()
|
||||
82 | f = tempfile.SpooledTemporaryFile()
|
||||
83 | f = tarfile.open("foo.tar")
|
||||
84 | f = TarFile("foo.tar").open()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
85 | f = tarfile.TarFile("foo.tar").open()
|
||||
86 | f = tarfile.TarFile().open()
|
||||
|
|
||||
|
||||
SIM115.py:85:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
83 | f = tempfile.SpooledTemporaryFile()
|
||||
84 | f = tarfile.open("foo.tar")
|
||||
85 | f = TarFile("foo.tar").open()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
86 | f = tarfile.TarFile("foo.tar").open()
|
||||
87 | f = tarfile.TarFile().open()
|
||||
83 | f = tarfile.open("foo.tar")
|
||||
84 | f = TarFile("foo.tar").open()
|
||||
85 | f = tarfile.TarFile("foo.tar").open()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
86 | f = tarfile.TarFile().open()
|
||||
87 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:86:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
84 | f = tarfile.open("foo.tar")
|
||||
85 | f = TarFile("foo.tar").open()
|
||||
86 | f = tarfile.TarFile("foo.tar").open()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
87 | f = tarfile.TarFile().open()
|
||||
88 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
84 | f = TarFile("foo.tar").open()
|
||||
85 | f = tarfile.TarFile("foo.tar").open()
|
||||
86 | f = tarfile.TarFile().open()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
87 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
88 | f = io.open("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:87:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
85 | f = TarFile("foo.tar").open()
|
||||
86 | f = tarfile.TarFile("foo.tar").open()
|
||||
87 | f = tarfile.TarFile().open()
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
88 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
89 | f = io.open("foo.txt")
|
||||
85 | f = tarfile.TarFile("foo.tar").open()
|
||||
86 | f = tarfile.TarFile().open()
|
||||
87 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
88 | f = io.open("foo.txt")
|
||||
89 | f = io.open_code("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:88:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
86 | f = tarfile.TarFile("foo.tar").open()
|
||||
87 | f = tarfile.TarFile().open()
|
||||
88 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
89 | f = io.open("foo.txt")
|
||||
90 | f = io.open_code("foo.txt")
|
||||
86 | f = tarfile.TarFile().open()
|
||||
87 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
88 | f = io.open("foo.txt")
|
||||
| ^^^^^^^ SIM115
|
||||
89 | f = io.open_code("foo.txt")
|
||||
90 | f = codecs.open("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:89:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
87 | f = tarfile.TarFile().open()
|
||||
88 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
89 | f = io.open("foo.txt")
|
||||
| ^^^^^^^ SIM115
|
||||
90 | f = io.open_code("foo.txt")
|
||||
91 | f = codecs.open("foo.txt")
|
||||
87 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
88 | f = io.open("foo.txt")
|
||||
89 | f = io.open_code("foo.txt")
|
||||
| ^^^^^^^^^^^^ SIM115
|
||||
90 | f = codecs.open("foo.txt")
|
||||
91 | f = bz2.open("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:90:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
88 | f = zipfile.ZipFile("foo.zip").open("foo.txt")
|
||||
89 | f = io.open("foo.txt")
|
||||
90 | f = io.open_code("foo.txt")
|
||||
| ^^^^^^^^^^^^ SIM115
|
||||
91 | f = codecs.open("foo.txt")
|
||||
92 | f = bz2.open("foo.txt")
|
||||
88 | f = io.open("foo.txt")
|
||||
89 | f = io.open_code("foo.txt")
|
||||
90 | f = codecs.open("foo.txt")
|
||||
| ^^^^^^^^^^^ SIM115
|
||||
91 | f = bz2.open("foo.txt")
|
||||
92 | f = gzip.open("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:91:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
89 | f = io.open("foo.txt")
|
||||
90 | f = io.open_code("foo.txt")
|
||||
91 | f = codecs.open("foo.txt")
|
||||
| ^^^^^^^^^^^ SIM115
|
||||
92 | f = bz2.open("foo.txt")
|
||||
93 | f = gzip.open("foo.txt")
|
||||
89 | f = io.open_code("foo.txt")
|
||||
90 | f = codecs.open("foo.txt")
|
||||
91 | f = bz2.open("foo.txt")
|
||||
| ^^^^^^^^ SIM115
|
||||
92 | f = gzip.open("foo.txt")
|
||||
93 | f = dbm.open("foo.db")
|
||||
|
|
||||
|
||||
SIM115.py:92:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
90 | f = io.open_code("foo.txt")
|
||||
91 | f = codecs.open("foo.txt")
|
||||
92 | f = bz2.open("foo.txt")
|
||||
| ^^^^^^^^ SIM115
|
||||
93 | f = gzip.open("foo.txt")
|
||||
94 | f = dbm.open("foo.db")
|
||||
90 | f = codecs.open("foo.txt")
|
||||
91 | f = bz2.open("foo.txt")
|
||||
92 | f = gzip.open("foo.txt")
|
||||
| ^^^^^^^^^ SIM115
|
||||
93 | f = dbm.open("foo.db")
|
||||
94 | f = dbm.gnu.open("foo.db")
|
||||
|
|
||||
|
||||
SIM115.py:93:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
91 | f = codecs.open("foo.txt")
|
||||
92 | f = bz2.open("foo.txt")
|
||||
93 | f = gzip.open("foo.txt")
|
||||
| ^^^^^^^^^ SIM115
|
||||
94 | f = dbm.open("foo.db")
|
||||
95 | f = dbm.gnu.open("foo.db")
|
||||
91 | f = bz2.open("foo.txt")
|
||||
92 | f = gzip.open("foo.txt")
|
||||
93 | f = dbm.open("foo.db")
|
||||
| ^^^^^^^^ SIM115
|
||||
94 | f = dbm.gnu.open("foo.db")
|
||||
95 | f = dbm.ndbm.open("foo.db")
|
||||
|
|
||||
|
||||
SIM115.py:94:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
92 | f = bz2.open("foo.txt")
|
||||
93 | f = gzip.open("foo.txt")
|
||||
94 | f = dbm.open("foo.db")
|
||||
| ^^^^^^^^ SIM115
|
||||
95 | f = dbm.gnu.open("foo.db")
|
||||
96 | f = dbm.ndbm.open("foo.db")
|
||||
92 | f = gzip.open("foo.txt")
|
||||
93 | f = dbm.open("foo.db")
|
||||
94 | f = dbm.gnu.open("foo.db")
|
||||
| ^^^^^^^^^^^^ SIM115
|
||||
95 | f = dbm.ndbm.open("foo.db")
|
||||
96 | f = dbm.dumb.open("foo.db")
|
||||
|
|
||||
|
||||
SIM115.py:95:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
93 | f = gzip.open("foo.txt")
|
||||
94 | f = dbm.open("foo.db")
|
||||
95 | f = dbm.gnu.open("foo.db")
|
||||
| ^^^^^^^^^^^^ SIM115
|
||||
96 | f = dbm.ndbm.open("foo.db")
|
||||
97 | f = dbm.dumb.open("foo.db")
|
||||
93 | f = dbm.open("foo.db")
|
||||
94 | f = dbm.gnu.open("foo.db")
|
||||
95 | f = dbm.ndbm.open("foo.db")
|
||||
| ^^^^^^^^^^^^^ SIM115
|
||||
96 | f = dbm.dumb.open("foo.db")
|
||||
97 | f = lzma.open("foo.xz")
|
||||
|
|
||||
|
||||
SIM115.py:96:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
94 | f = dbm.open("foo.db")
|
||||
95 | f = dbm.gnu.open("foo.db")
|
||||
96 | f = dbm.ndbm.open("foo.db")
|
||||
94 | f = dbm.gnu.open("foo.db")
|
||||
95 | f = dbm.ndbm.open("foo.db")
|
||||
96 | f = dbm.dumb.open("foo.db")
|
||||
| ^^^^^^^^^^^^^ SIM115
|
||||
97 | f = dbm.dumb.open("foo.db")
|
||||
98 | f = lzma.open("foo.xz")
|
||||
97 | f = lzma.open("foo.xz")
|
||||
98 | f = lzma.LZMAFile("foo.xz")
|
||||
|
|
||||
|
||||
SIM115.py:97:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
95 | f = dbm.gnu.open("foo.db")
|
||||
96 | f = dbm.ndbm.open("foo.db")
|
||||
97 | f = dbm.dumb.open("foo.db")
|
||||
| ^^^^^^^^^^^^^ SIM115
|
||||
98 | f = lzma.open("foo.xz")
|
||||
99 | f = lzma.LZMAFile("foo.xz")
|
||||
95 | f = dbm.ndbm.open("foo.db")
|
||||
96 | f = dbm.dumb.open("foo.db")
|
||||
97 | f = lzma.open("foo.xz")
|
||||
| ^^^^^^^^^ SIM115
|
||||
98 | f = lzma.LZMAFile("foo.xz")
|
||||
99 | f = shelve.open("foo.db")
|
||||
|
|
||||
|
||||
SIM115.py:98:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
96 | f = dbm.ndbm.open("foo.db")
|
||||
97 | f = dbm.dumb.open("foo.db")
|
||||
98 | f = lzma.open("foo.xz")
|
||||
| ^^^^^^^^^ SIM115
|
||||
99 | f = lzma.LZMAFile("foo.xz")
|
||||
100 | f = shelve.open("foo.db")
|
||||
96 | f = dbm.dumb.open("foo.db")
|
||||
97 | f = lzma.open("foo.xz")
|
||||
98 | f = lzma.LZMAFile("foo.xz")
|
||||
| ^^^^^^^^^^^^^ SIM115
|
||||
99 | f = shelve.open("foo.db")
|
||||
100 | f = tokenize.open("foo.py")
|
||||
|
|
||||
|
||||
SIM115.py:99:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
97 | f = dbm.dumb.open("foo.db")
|
||||
98 | f = lzma.open("foo.xz")
|
||||
99 | f = lzma.LZMAFile("foo.xz")
|
||||
| ^^^^^^^^^^^^^ SIM115
|
||||
100 | f = shelve.open("foo.db")
|
||||
101 | f = tokenize.open("foo.py")
|
||||
97 | f = lzma.open("foo.xz")
|
||||
98 | f = lzma.LZMAFile("foo.xz")
|
||||
99 | f = shelve.open("foo.db")
|
||||
| ^^^^^^^^^^^ SIM115
|
||||
100 | f = tokenize.open("foo.py")
|
||||
101 | f = wave.open("foo.wav")
|
||||
|
|
||||
|
||||
SIM115.py:100:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
98 | f = lzma.open("foo.xz")
|
||||
99 | f = lzma.LZMAFile("foo.xz")
|
||||
100 | f = shelve.open("foo.db")
|
||||
| ^^^^^^^^^^^ SIM115
|
||||
101 | f = tokenize.open("foo.py")
|
||||
102 | f = wave.open("foo.wav")
|
||||
98 | f = lzma.LZMAFile("foo.xz")
|
||||
99 | f = shelve.open("foo.db")
|
||||
100 | f = tokenize.open("foo.py")
|
||||
| ^^^^^^^^^^^^^ SIM115
|
||||
101 | f = wave.open("foo.wav")
|
||||
102 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
|
|
||||
|
||||
SIM115.py:101:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
99 | f = lzma.LZMAFile("foo.xz")
|
||||
100 | f = shelve.open("foo.db")
|
||||
101 | f = tokenize.open("foo.py")
|
||||
| ^^^^^^^^^^^^^ SIM115
|
||||
102 | f = wave.open("foo.wav")
|
||||
103 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
99 | f = shelve.open("foo.db")
|
||||
100 | f = tokenize.open("foo.py")
|
||||
101 | f = wave.open("foo.wav")
|
||||
| ^^^^^^^^^ SIM115
|
||||
102 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
103 | f = fileinput.input("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:102:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
100 | f = shelve.open("foo.db")
|
||||
101 | f = tokenize.open("foo.py")
|
||||
102 | f = wave.open("foo.wav")
|
||||
| ^^^^^^^^^ SIM115
|
||||
103 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
104 | f = fileinput.input("foo.txt")
|
||||
100 | f = tokenize.open("foo.py")
|
||||
101 | f = wave.open("foo.wav")
|
||||
102 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
103 | f = fileinput.input("foo.txt")
|
||||
104 | f = fileinput.FileInput("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:103:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
101 | f = tokenize.open("foo.py")
|
||||
102 | f = wave.open("foo.wav")
|
||||
103 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
104 | f = fileinput.input("foo.txt")
|
||||
105 | f = fileinput.FileInput("foo.txt")
|
||||
101 | f = wave.open("foo.wav")
|
||||
102 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
103 | f = fileinput.input("foo.txt")
|
||||
| ^^^^^^^^^^^^^^^ SIM115
|
||||
104 | f = fileinput.FileInput("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:104:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
102 | f = wave.open("foo.wav")
|
||||
103 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
104 | f = fileinput.input("foo.txt")
|
||||
| ^^^^^^^^^^^^^^^ SIM115
|
||||
105 | f = fileinput.FileInput("foo.txt")
|
||||
|
|
||||
|
||||
SIM115.py:105:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
103 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
104 | f = fileinput.input("foo.txt")
|
||||
105 | f = fileinput.FileInput("foo.txt")
|
||||
102 | f = tarfile.TarFile.taropen("foo.tar")
|
||||
103 | f = fileinput.input("foo.txt")
|
||||
104 | f = fileinput.FileInput("foo.txt")
|
||||
| ^^^^^^^^^^^^^^^^^^^ SIM115
|
||||
106 |
|
||||
107 | with contextlib.suppress(Exception):
|
||||
105 |
|
||||
106 | with contextlib.suppress(Exception):
|
||||
|
|
||||
|
||||
SIM115.py:241:9: SIM115 Use a context manager for opening files
|
||||
SIM115.py:240:9: SIM115 Use a context manager for opening files
|
||||
|
|
||||
239 | def aliased():
|
||||
240 | from shelve import open as open_shelf
|
||||
241 | x = open_shelf("foo.dbm")
|
||||
238 | def aliased():
|
||||
239 | from shelve import open as open_shelf
|
||||
240 | x = open_shelf("foo.dbm")
|
||||
| ^^^^^^^^^^ SIM115
|
||||
242 | x.close()
|
||||
241 | x.close()
|
||||
|
|
||||
|
||||
SIM115.py:245:9: SIM115 Use a context manager for opening files
|
||||
SIM115.py:244:9: SIM115 Use a context manager for opening files
|
||||
|
|
||||
244 | from tarfile import TarFile as TF
|
||||
245 | f = TF("foo").open()
|
||||
243 | from tarfile import TarFile as TF
|
||||
244 | f = TF("foo").open()
|
||||
| ^^^^^^^^^^^^^^ SIM115
|
||||
246 | f.close()
|
||||
245 | f.close()
|
||||
|
|
||||
|
||||
SIM115.py:258:5: SIM115 Use a context manager for opening files
|
||||
SIM115.py:257:5: SIM115 Use a context manager for opening files
|
||||
|
|
||||
257 | # SIM115
|
||||
258 | f = dbm.sqlite3.open("foo.db")
|
||||
256 | # SIM115
|
||||
257 | f = dbm.sqlite3.open("foo.db")
|
||||
| ^^^^^^^^^^^^^^^^ SIM115
|
||||
259 | f.close()
|
||||
258 | f.close()
|
||||
|
|
||||
|
||||
@@ -1,110 +1,110 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_simplify/mod.rs
|
||||
---
|
||||
SIM116.py:6:5: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
SIM116.py:5:1: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
|
|
||||
5 | # SIM116
|
||||
6 | / if a == "foo":
|
||||
7 | | return "bar"
|
||||
8 | | elif a == "bar":
|
||||
9 | | return "baz"
|
||||
10 | | elif a == "boo":
|
||||
11 | | return "ooh"
|
||||
12 | | else:
|
||||
13 | | return 42
|
||||
| |_________________^ SIM116
|
||||
14 |
|
||||
15 | # SIM116
|
||||
4 | # SIM116
|
||||
5 | / if a == "foo":
|
||||
6 | | return "bar"
|
||||
7 | | elif a == "bar":
|
||||
8 | | return "baz"
|
||||
9 | | elif a == "boo":
|
||||
10 | | return "ooh"
|
||||
11 | | else:
|
||||
12 | | return 42
|
||||
| |_____________^ SIM116
|
||||
13 |
|
||||
14 | # SIM116
|
||||
|
|
||||
|
||||
SIM116.py:16:5: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
SIM116.py:15:1: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
|
|
||||
15 | # SIM116
|
||||
16 | / if a == 1:
|
||||
17 | | return (1, 2, 3)
|
||||
18 | | elif a == 2:
|
||||
19 | | return (4, 5, 6)
|
||||
20 | | elif a == 3:
|
||||
21 | | return (7, 8, 9)
|
||||
22 | | else:
|
||||
23 | | return (10, 11, 12)
|
||||
| |___________________________^ SIM116
|
||||
24 |
|
||||
25 | # SIM116
|
||||
14 | # SIM116
|
||||
15 | / if a == 1:
|
||||
16 | | return (1, 2, 3)
|
||||
17 | | elif a == 2:
|
||||
18 | | return (4, 5, 6)
|
||||
19 | | elif a == 3:
|
||||
20 | | return (7, 8, 9)
|
||||
21 | | else:
|
||||
22 | | return (10, 11, 12)
|
||||
| |_______________________^ SIM116
|
||||
23 |
|
||||
24 | # SIM116
|
||||
|
|
||||
|
||||
SIM116.py:26:5: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
SIM116.py:25:1: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
|
|
||||
25 | # SIM116
|
||||
26 | / if a == 1:
|
||||
27 | | return (1, 2, 3)
|
||||
28 | | elif a == 2:
|
||||
29 | | return (4, 5, 6)
|
||||
30 | | elif a == 3:
|
||||
31 | | return (7, 8, 9)
|
||||
| |________________________^ SIM116
|
||||
32 |
|
||||
33 | # SIM116
|
||||
24 | # SIM116
|
||||
25 | / if a == 1:
|
||||
26 | | return (1, 2, 3)
|
||||
27 | | elif a == 2:
|
||||
28 | | return (4, 5, 6)
|
||||
29 | | elif a == 3:
|
||||
30 | | return (7, 8, 9)
|
||||
| |____________________^ SIM116
|
||||
31 |
|
||||
32 | # SIM116
|
||||
|
|
||||
|
||||
SIM116.py:34:5: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
SIM116.py:33:1: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
|
|
||||
33 | # SIM116
|
||||
34 | / if a == "hello 'sir'":
|
||||
35 | | return (1, 2, 3)
|
||||
36 | | elif a == 'goodbye "mam"':
|
||||
37 | | return (4, 5, 6)
|
||||
38 | | elif a == """Fairwell 'mister'""":
|
||||
39 | | return (7, 8, 9)
|
||||
40 | | else:
|
||||
41 | | return (10, 11, 12)
|
||||
| |___________________________^ SIM116
|
||||
42 |
|
||||
43 | # SIM116
|
||||
32 | # SIM116
|
||||
33 | / if a == "hello 'sir'":
|
||||
34 | | return (1, 2, 3)
|
||||
35 | | elif a == 'goodbye "mam"':
|
||||
36 | | return (4, 5, 6)
|
||||
37 | | elif a == """Fairwell 'mister'""":
|
||||
38 | | return (7, 8, 9)
|
||||
39 | | else:
|
||||
40 | | return (10, 11, 12)
|
||||
| |_______________________^ SIM116
|
||||
41 |
|
||||
42 | # SIM116
|
||||
|
|
||||
|
||||
SIM116.py:44:5: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
SIM116.py:43:1: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
|
|
||||
43 | # SIM116
|
||||
44 | / if a == b"one":
|
||||
45 | | return 1
|
||||
46 | | elif a == b"two":
|
||||
47 | | return 2
|
||||
48 | | elif a == b"three":
|
||||
49 | | return 3
|
||||
| |________________^ SIM116
|
||||
50 |
|
||||
51 | # SIM116
|
||||
42 | # SIM116
|
||||
43 | / if a == b"one":
|
||||
44 | | return 1
|
||||
45 | | elif a == b"two":
|
||||
46 | | return 2
|
||||
47 | | elif a == b"three":
|
||||
48 | | return 3
|
||||
| |____________^ SIM116
|
||||
49 |
|
||||
50 | # SIM116
|
||||
|
|
||||
|
||||
SIM116.py:52:5: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
SIM116.py:51:1: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
|
|
||||
51 | # SIM116
|
||||
52 | / if a == "hello 'sir'":
|
||||
53 | | return ("hello'", 'hi"', 3)
|
||||
54 | | elif a == 'goodbye "mam"':
|
||||
55 | | return (4, 5, 6)
|
||||
56 | | elif a == """Fairwell 'mister'""":
|
||||
57 | | return (7, 8, 9)
|
||||
58 | | else:
|
||||
59 | | return (10, 11, 12)
|
||||
| |___________________________^ SIM116
|
||||
60 |
|
||||
61 | # OK
|
||||
50 | # SIM116
|
||||
51 | / if a == "hello 'sir'":
|
||||
52 | | return ("hello'", 'hi"', 3)
|
||||
53 | | elif a == 'goodbye "mam"':
|
||||
54 | | return (4, 5, 6)
|
||||
55 | | elif a == """Fairwell 'mister'""":
|
||||
56 | | return (7, 8, 9)
|
||||
57 | | else:
|
||||
58 | | return (10, 11, 12)
|
||||
| |_______________________^ SIM116
|
||||
59 |
|
||||
60 | # OK
|
||||
|
|
||||
|
||||
SIM116.py:80:5: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
SIM116.py:79:1: SIM116 Use a dictionary instead of consecutive `if` statements
|
||||
|
|
||||
79 | # SIM116
|
||||
80 | / if func_name == "create":
|
||||
81 | | return "A"
|
||||
82 | | elif func_name == "modify":
|
||||
83 | | return "M"
|
||||
84 | | elif func_name == "remove":
|
||||
85 | | return "D"
|
||||
86 | | elif func_name == "move":
|
||||
87 | | return "MV"
|
||||
| |___________________^ SIM116
|
||||
88 |
|
||||
89 | # OK
|
||||
78 | # SIM116
|
||||
79 | / if func_name == "create":
|
||||
80 | | return "A"
|
||||
81 | | elif func_name == "modify":
|
||||
82 | | return "M"
|
||||
83 | | elif func_name == "remove":
|
||||
84 | | return "D"
|
||||
85 | | elif func_name == "move":
|
||||
86 | | return "MV"
|
||||
| |_______________^ SIM116
|
||||
87 |
|
||||
88 | # OK
|
||||
|
|
||||
|
||||
@@ -11,7 +11,6 @@ use crate::registry::Rule;
|
||||
use crate::rules::flake8_type_checking::helpers::quote_type_expression;
|
||||
use crate::{AlwaysFixableViolation, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_python_ast::PythonVersion;
|
||||
use ruff_python_ast::parenthesize::parenthesized_range;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks if [PEP 613] explicit type aliases contain references to
|
||||
@@ -88,15 +87,11 @@ impl Violation for UnquotedTypeAlias {
|
||||
/// ## Example
|
||||
/// Given:
|
||||
/// ```python
|
||||
/// from typing import TypeAlias
|
||||
///
|
||||
/// OptInt: TypeAlias = "int | None"
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from typing import TypeAlias
|
||||
///
|
||||
/// OptInt: TypeAlias = int | None
|
||||
/// ```
|
||||
///
|
||||
@@ -292,30 +287,7 @@ pub(crate) fn quoted_type_alias(
|
||||
|
||||
let range = annotation_expr.range();
|
||||
let mut diagnostic = checker.report_diagnostic(QuotedTypeAlias, range);
|
||||
let fix_string = annotation_expr.value.to_string();
|
||||
let fix_string = if (fix_string.contains('\n') || fix_string.contains('\r'))
|
||||
&& parenthesized_range(
|
||||
// Check for parenthesis outside string ("""...""")
|
||||
annotation_expr.into(),
|
||||
checker.semantic().current_statement().into(),
|
||||
checker.comment_ranges(),
|
||||
checker.locator().contents(),
|
||||
)
|
||||
.is_none()
|
||||
&& parenthesized_range(
|
||||
// Check for parenthesis inside string """(...)"""
|
||||
expr.into(),
|
||||
annotation_expr.into(),
|
||||
checker.comment_ranges(),
|
||||
checker.locator().contents(),
|
||||
)
|
||||
.is_none()
|
||||
{
|
||||
format!("({fix_string})")
|
||||
} else {
|
||||
fix_string
|
||||
};
|
||||
let edit = Edit::range_replacement(fix_string, range);
|
||||
let edit = Edit::range_replacement(annotation_expr.value.to_string(), range);
|
||||
if checker.comment_ranges().intersects(range) {
|
||||
diagnostic.set_fix(Fix::unsafe_edit(edit));
|
||||
} else {
|
||||
|
||||
@@ -44,7 +44,7 @@ use crate::{Fix, FixAvailability, Violation};
|
||||
/// ```python
|
||||
/// from __future__ import annotations
|
||||
///
|
||||
/// from . import local_module
|
||||
/// import local_module
|
||||
///
|
||||
///
|
||||
/// def func(sized: local_module.Container) -> int:
|
||||
@@ -58,7 +58,7 @@ use crate::{Fix, FixAvailability, Violation};
|
||||
/// from typing import TYPE_CHECKING
|
||||
///
|
||||
/// if TYPE_CHECKING:
|
||||
/// from . import local_module
|
||||
/// import local_module
|
||||
///
|
||||
///
|
||||
/// def func(sized: local_module.Container) -> int:
|
||||
|
||||
@@ -409,8 +409,6 @@ TC008.py:52:18: TC008 [*] Remove quotes from type alias
|
||||
51 | a: TypeAlias = 'Baz' # OK
|
||||
52 | type A = 'Baz' # TC008
|
||||
| ^^^^^ TC008
|
||||
53 |
|
||||
54 | # O should have parenthesis added
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
@@ -420,187 +418,3 @@ TC008.py:52:18: TC008 [*] Remove quotes from type alias
|
||||
51 51 | a: TypeAlias = 'Baz' # OK
|
||||
52 |- type A = 'Baz' # TC008
|
||||
52 |+ type A = Baz # TC008
|
||||
53 53 |
|
||||
54 54 | # O should have parenthesis added
|
||||
55 55 | o: TypeAlias = """int
|
||||
|
||||
TC008.py:55:16: TC008 [*] Remove quotes from type alias
|
||||
|
|
||||
54 | # O should have parenthesis added
|
||||
55 | o: TypeAlias = """int
|
||||
| ________________^
|
||||
56 | | | None"""
|
||||
| |_________^ TC008
|
||||
57 | type O = """int
|
||||
58 | | None"""
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
52 52 | type A = 'Baz' # TC008
|
||||
53 53 |
|
||||
54 54 | # O should have parenthesis added
|
||||
55 |-o: TypeAlias = """int
|
||||
56 |-| None"""
|
||||
55 |+o: TypeAlias = (int
|
||||
56 |+| None)
|
||||
57 57 | type O = """int
|
||||
58 58 | | None"""
|
||||
59 59 |
|
||||
|
||||
TC008.py:57:10: TC008 [*] Remove quotes from type alias
|
||||
|
|
||||
55 | o: TypeAlias = """int
|
||||
56 | | None"""
|
||||
57 | type O = """int
|
||||
| __________^
|
||||
58 | | | None"""
|
||||
| |_________^ TC008
|
||||
59 |
|
||||
60 | # P, Q, and R should not have parenthesis added
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
54 54 | # O should have parenthesis added
|
||||
55 55 | o: TypeAlias = """int
|
||||
56 56 | | None"""
|
||||
57 |-type O = """int
|
||||
58 |-| None"""
|
||||
57 |+type O = (int
|
||||
58 |+| None)
|
||||
59 59 |
|
||||
60 60 | # P, Q, and R should not have parenthesis added
|
||||
61 61 | p: TypeAlias = ("""int
|
||||
|
||||
TC008.py:61:17: TC008 [*] Remove quotes from type alias
|
||||
|
|
||||
60 | # P, Q, and R should not have parenthesis added
|
||||
61 | p: TypeAlias = ("""int
|
||||
| _________________^
|
||||
62 | | | None""")
|
||||
| |_________^ TC008
|
||||
63 | type P = ("""int
|
||||
64 | | None""")
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
58 58 | | None"""
|
||||
59 59 |
|
||||
60 60 | # P, Q, and R should not have parenthesis added
|
||||
61 |-p: TypeAlias = ("""int
|
||||
62 |-| None""")
|
||||
61 |+p: TypeAlias = (int
|
||||
62 |+| None)
|
||||
63 63 | type P = ("""int
|
||||
64 64 | | None""")
|
||||
65 65 |
|
||||
|
||||
TC008.py:63:11: TC008 [*] Remove quotes from type alias
|
||||
|
|
||||
61 | p: TypeAlias = ("""int
|
||||
62 | | None""")
|
||||
63 | type P = ("""int
|
||||
| ___________^
|
||||
64 | | | None""")
|
||||
| |_________^ TC008
|
||||
65 |
|
||||
66 | q: TypeAlias = """(int
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
60 60 | # P, Q, and R should not have parenthesis added
|
||||
61 61 | p: TypeAlias = ("""int
|
||||
62 62 | | None""")
|
||||
63 |-type P = ("""int
|
||||
64 |-| None""")
|
||||
63 |+type P = (int
|
||||
64 |+| None)
|
||||
65 65 |
|
||||
66 66 | q: TypeAlias = """(int
|
||||
67 67 | | None)"""
|
||||
|
||||
TC008.py:66:16: TC008 [*] Remove quotes from type alias
|
||||
|
|
||||
64 | | None""")
|
||||
65 |
|
||||
66 | q: TypeAlias = """(int
|
||||
| ________________^
|
||||
67 | | | None)"""
|
||||
| |__________^ TC008
|
||||
68 | type Q = """(int
|
||||
69 | | None)"""
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
63 63 | type P = ("""int
|
||||
64 64 | | None""")
|
||||
65 65 |
|
||||
66 |-q: TypeAlias = """(int
|
||||
67 |-| None)"""
|
||||
66 |+q: TypeAlias = (int
|
||||
67 |+| None)
|
||||
68 68 | type Q = """(int
|
||||
69 69 | | None)"""
|
||||
70 70 |
|
||||
|
||||
TC008.py:68:10: TC008 [*] Remove quotes from type alias
|
||||
|
|
||||
66 | q: TypeAlias = """(int
|
||||
67 | | None)"""
|
||||
68 | type Q = """(int
|
||||
| __________^
|
||||
69 | | | None)"""
|
||||
| |__________^ TC008
|
||||
70 |
|
||||
71 | r: TypeAlias = """int | None"""
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
65 65 |
|
||||
66 66 | q: TypeAlias = """(int
|
||||
67 67 | | None)"""
|
||||
68 |-type Q = """(int
|
||||
69 |-| None)"""
|
||||
68 |+type Q = (int
|
||||
69 |+| None)
|
||||
70 70 |
|
||||
71 71 | r: TypeAlias = """int | None"""
|
||||
72 72 | type R = """int | None"""
|
||||
|
||||
TC008.py:71:16: TC008 [*] Remove quotes from type alias
|
||||
|
|
||||
69 | | None)"""
|
||||
70 |
|
||||
71 | r: TypeAlias = """int | None"""
|
||||
| ^^^^^^^^^^^^^^^^ TC008
|
||||
72 | type R = """int | None"""
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
68 68 | type Q = """(int
|
||||
69 69 | | None)"""
|
||||
70 70 |
|
||||
71 |-r: TypeAlias = """int | None"""
|
||||
71 |+r: TypeAlias = int | None
|
||||
72 72 | type R = """int | None"""
|
||||
|
||||
TC008.py:72:10: TC008 [*] Remove quotes from type alias
|
||||
|
|
||||
71 | r: TypeAlias = """int | None"""
|
||||
72 | type R = """int | None"""
|
||||
| ^^^^^^^^^^^^^^^^ TC008
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
69 69 | | None)"""
|
||||
70 70 |
|
||||
71 71 | r: TypeAlias = """int | None"""
|
||||
72 |-type R = """int | None"""
|
||||
72 |+type R = int | None
|
||||
|
||||
@@ -1,72 +0,0 @@
use crate::checkers::ast::Checker;
use crate::importer::ImportRequest;
use crate::{Applicability, Edit, Fix, Violation};
use ruff_python_ast::{Expr, ExprCall};
use ruff_text_size::Ranged;

pub(crate) fn is_path_call(checker: &Checker, expr: &Expr) -> bool {
    expr.as_call_expr().is_some_and(|expr_call| {
        checker
            .semantic()
            .resolve_qualified_name(&expr_call.func)
            .is_some_and(|name| matches!(name.segments(), ["pathlib", "Path"]))
    })
}

pub(crate) fn check_os_path_get_calls(
    checker: &Checker,
    call: &ExprCall,
    fn_name: &str,
    attr: &str,
    fix_enabled: bool,
    violation: impl Violation,
) {
    if checker
        .semantic()
        .resolve_qualified_name(&call.func)
        .is_none_or(|qualified_name| qualified_name.segments() != ["os", "path", fn_name])
    {
        return;
    }

    if call.arguments.len() != 1 {
        return;
    }

    let Some(arg) = call.arguments.find_argument_value("filename", 0) else {
        return;
    };

    let arg_code = checker.locator().slice(arg.range());
    let range = call.range();

    let mut diagnostic = checker.report_diagnostic(violation, call.func.range());

    if fix_enabled {
        diagnostic.try_set_fix(|| {
            let (import_edit, binding) = checker.importer().get_or_import_symbol(
                &ImportRequest::import("pathlib", "Path"),
                call.start(),
                checker.semantic(),
            )?;

            let applicability = if checker.comment_ranges().intersects(range) {
                Applicability::Unsafe
            } else {
                Applicability::Safe
            };

            let replacement = if is_path_call(checker, arg) {
                format!("{arg_code}.stat().{attr}")
            } else {
                format!("{binding}({arg_code}).stat().{attr}")
            };

            Ok(Fix::applicable_edits(
                Edit::range_replacement(replacement, range),
                [import_edit],
                applicability,
            ))
        });
    }
}
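The helper above builds replacements that map `os.path.get*` calls onto `Path(...).stat()` attributes, matching the PTH202-PTH205 messages elsewhere in this diff. A minimal before/after sketch (illustrative only; uses this file as the example path):

```python
# The rewrite suggested by PTH202/PTH204: os.path helpers -> pathlib stat attributes.
import os
from pathlib import Path

p = __file__

size_old = os.path.getsize(p)        # flagged by PTH202
size_new = Path(p).stat().st_size    # suggested replacement

mtime_old = os.path.getmtime(p)      # flagged by PTH204
mtime_new = Path(p).stat().st_mtime  # suggested replacement

assert size_old == size_new
assert mtime_old == mtime_new
```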
@@ -1,5 +1,4 @@
|
||||
//! Rules from [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/).
|
||||
mod helpers;
|
||||
pub(crate) mod rules;
|
||||
pub(crate) mod violations;
|
||||
|
||||
@@ -82,9 +81,6 @@ mod tests {
|
||||
|
||||
#[test_case(Rule::OsPathGetsize, Path::new("PTH202.py"))]
|
||||
#[test_case(Rule::OsPathGetsize, Path::new("PTH202_2.py"))]
|
||||
#[test_case(Rule::OsPathGetatime, Path::new("PTH203.py"))]
|
||||
#[test_case(Rule::OsPathGetmtime, Path::new("PTH204.py"))]
|
||||
#[test_case(Rule::OsPathGetctime, Path::new("PTH205.py"))]
|
||||
fn preview_flake8_use_pathlib(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"preview__{}_{}",
|
||||
|
||||
@@ -19,20 +19,12 @@ use ruff_text_size::Ranged;
|
||||
/// ## Example
|
||||
///
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// path = Path()
|
||||
///
|
||||
/// path.with_suffix("py")
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
///
|
||||
/// ```python
|
||||
/// from pathlib import Path
|
||||
///
|
||||
/// path = Path()
|
||||
///
|
||||
/// path.with_suffix(".py")
|
||||
/// ```
|
||||
///
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_path_getatime_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::check_os_path_get_calls;
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
use crate::Violation;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.path.getatime`.
|
||||
@@ -35,9 +32,6 @@ use ruff_python_ast::ExprCall;
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.stat`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.stat)
|
||||
/// - [Python documentation: `os.path.getatime`](https://docs.python.org/3/library/os.path.html#os.path.getatime)
|
||||
@@ -49,25 +43,8 @@ use ruff_python_ast::ExprCall;
|
||||
pub(crate) struct OsPathGetatime;
|
||||
|
||||
impl Violation for OsPathGetatime {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.path.getatime` should be replaced by `Path.stat().st_atime`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path.stat(...).st_atime`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH203
|
||||
pub(crate) fn os_path_getatime(checker: &Checker, call: &ExprCall) {
|
||||
check_os_path_get_calls(
|
||||
checker,
|
||||
call,
|
||||
"getatime",
|
||||
"st_atime",
|
||||
is_fix_os_path_getatime_enabled(checker.settings()),
|
||||
OsPathGetatime,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_path_getctime_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::check_os_path_get_calls;
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
use crate::Violation;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.path.getctime`.
|
||||
@@ -35,9 +32,6 @@ use ruff_python_ast::ExprCall;
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.stat`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.stat)
|
||||
/// - [Python documentation: `os.path.getctime`](https://docs.python.org/3/library/os.path.html#os.path.getctime)
|
||||
@@ -49,26 +43,8 @@ use ruff_python_ast::ExprCall;
|
||||
pub(crate) struct OsPathGetctime;
|
||||
|
||||
impl Violation for OsPathGetctime {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.path.getctime` should be replaced by `Path.stat().st_ctime`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path.stat(...).st_ctime`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH205
|
||||
pub(crate) fn os_path_getctime(checker: &Checker, call: &ExprCall) {
|
||||
check_os_path_get_calls(
|
||||
checker,
|
||||
call,
|
||||
"getctime",
|
||||
"st_ctime",
|
||||
is_fix_os_path_getctime_enabled(checker.settings()),
|
||||
OsPathGetctime,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_fix_os_path_getmtime_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::check_os_path_get_calls;
|
||||
use crate::{FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
|
||||
use crate::Violation;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.path.getmtime`.
|
||||
@@ -35,9 +32,6 @@ use ruff_python_ast::ExprCall;
|
||||
/// it can be less performant than the lower-level alternatives that work directly with strings,
|
||||
/// especially on older versions of Python.
|
||||
///
|
||||
/// ## Fix Safety
|
||||
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `Path.stat`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.stat)
|
||||
/// - [Python documentation: `os.path.getmtime`](https://docs.python.org/3/library/os.path.html#os.path.getmtime)
|
||||
@@ -49,26 +43,8 @@ use ruff_python_ast::ExprCall;
|
||||
pub(crate) struct OsPathGetmtime;
|
||||
|
||||
impl Violation for OsPathGetmtime {
|
||||
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
|
||||
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
"`os.path.getmtime` should be replaced by `Path.stat().st_mtime`".to_string()
|
||||
}
|
||||
|
||||
fn fix_title(&self) -> Option<String> {
|
||||
Some("Replace with `Path.stat(...).st_mtime`".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// PTH204
|
||||
pub(crate) fn os_path_getmtime(checker: &Checker, call: &ExprCall) {
|
||||
check_os_path_get_calls(
|
||||
checker,
|
||||
call,
|
||||
"getmtime",
|
||||
"st_mtime",
|
||||
is_fix_os_path_getmtime_enabled(checker.settings()),
|
||||
OsPathGetmtime,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::importer::ImportRequest;
|
||||
use crate::preview::is_fix_os_path_getsize_enabled;
|
||||
use crate::rules::flake8_use_pathlib::helpers::check_os_path_get_calls;
|
||||
use crate::{FixAvailability, Violation};
|
||||
use crate::{Applicability, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_python_ast::ExprCall;
|
||||
use ruff_python_ast::name::QualifiedName;
|
||||
use ruff_python_ast::{Expr, ExprCall};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of `os.path.getsize`.
|
||||
@@ -63,12 +65,63 @@ impl Violation for OsPathGetsize {
|
||||
|
||||
/// PTH202
|
||||
pub(crate) fn os_path_getsize(checker: &Checker, call: &ExprCall) {
|
||||
check_os_path_get_calls(
|
||||
checker,
|
||||
call,
|
||||
"getsize",
|
||||
"st_size",
|
||||
is_fix_os_path_getsize_enabled(checker.settings()),
|
||||
OsPathGetsize,
|
||||
);
|
||||
if !matches!(
|
||||
checker
|
||||
.semantic()
|
||||
.resolve_qualified_name(&call.func)
|
||||
.as_ref()
|
||||
.map(QualifiedName::segments),
|
||||
Some(["os", "path", "getsize"])
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
if call.arguments.len() != 1 {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(arg) = call.arguments.find_argument_value("filename", 0) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let arg_code = checker.locator().slice(arg.range());
|
||||
let range = call.range();
|
||||
|
||||
let applicability = if checker.comment_ranges().intersects(range) {
|
||||
Applicability::Unsafe
|
||||
} else {
|
||||
Applicability::Safe
|
||||
};
|
||||
|
||||
let mut diagnostic = checker.report_diagnostic(OsPathGetsize, range);
|
||||
|
||||
if is_fix_os_path_getsize_enabled(checker.settings()) {
|
||||
diagnostic.try_set_fix(|| {
|
||||
let (import_edit, binding) = checker.importer().get_or_import_symbol(
|
||||
&ImportRequest::import("pathlib", "Path"),
|
||||
call.start(),
|
||||
checker.semantic(),
|
||||
)?;
|
||||
|
||||
let replacement = if is_path_call(checker, arg) {
|
||||
format!("{arg_code}.stat().st_size")
|
||||
} else {
|
||||
format!("{binding}({arg_code}).stat().st_size")
|
||||
};
|
||||
|
||||
Ok(
|
||||
Fix::safe_edits(Edit::range_replacement(replacement, range), [import_edit])
|
||||
.with_applicability(applicability),
|
||||
)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn is_path_call(checker: &Checker, expr: &Expr) -> bool {
|
||||
expr.as_call_expr().is_some_and(|expr_call| {
|
||||
checker
|
||||
.semantic()
|
||||
.resolve_qualified_name(&expr_call.func)
|
||||
.is_some_and(|name| matches!(name.segments(), ["pathlib", "Path"]))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -4,7 +4,9 @@ use ruff_python_semantic::analyze::typing;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::flake8_use_pathlib::rules::Glob;
|
||||
use crate::rules::flake8_use_pathlib::rules::{
|
||||
Glob, OsPathGetatime, OsPathGetctime, OsPathGetmtime,
|
||||
};
|
||||
use crate::rules::flake8_use_pathlib::violations::{
|
||||
BuiltinOpen, Joiner, OsChmod, OsGetcwd, OsListdir, OsMakedirs, OsMkdir, OsPathAbspath,
|
||||
OsPathBasename, OsPathDirname, OsPathExists, OsPathExpanduser, OsPathIsabs, OsPathIsdir,
|
||||
@@ -192,6 +194,12 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
|
||||
["os", "path", "samefile"] => checker.report_diagnostic_if_enabled(OsPathSamefile, range),
|
||||
// PTH122
|
||||
["os", "path", "splitext"] => checker.report_diagnostic_if_enabled(OsPathSplitext, range),
|
||||
// PTH203
|
||||
["os", "path", "getatime"] => checker.report_diagnostic_if_enabled(OsPathGetatime, range),
|
||||
// PTH204
|
||||
["os", "path", "getmtime"] => checker.report_diagnostic_if_enabled(OsPathGetmtime, range),
|
||||
// PTH205
|
||||
["os", "path", "getctime"] => checker.report_diagnostic_if_enabled(OsPathGetctime, range),
|
||||
// PTH211
|
||||
["os", "symlink"] => {
|
||||
// `dir_fd` is not supported by pathlib, so check if there are non-default values.
|
||||
|
||||
@@ -4,7 +4,7 @@ source: crates/ruff_linter/src/rules/flake8_use_pathlib/mod.rs
|
||||
PTH202.py:10:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
10 | os.path.getsize("filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
11 | os.path.getsize(b"filename")
|
||||
12 | os.path.getsize(Path("filename"))
|
||||
|
|
||||
@@ -14,7 +14,7 @@ PTH202.py:11:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
|
|
||||
10 | os.path.getsize("filename")
|
||||
11 | os.path.getsize(b"filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
12 | os.path.getsize(Path("filename"))
|
||||
13 | os.path.getsize(__file__)
|
||||
|
|
||||
@@ -25,7 +25,7 @@ PTH202.py:12:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
10 | os.path.getsize("filename")
|
||||
11 | os.path.getsize(b"filename")
|
||||
12 | os.path.getsize(Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
13 | os.path.getsize(__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -35,7 +35,7 @@ PTH202.py:13:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
11 | os.path.getsize(b"filename")
|
||||
12 | os.path.getsize(Path("filename"))
|
||||
13 | os.path.getsize(__file__)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
14 |
|
||||
15 | os.path.getsize(filename)
|
||||
|
|
||||
@@ -46,7 +46,7 @@ PTH202.py:15:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
13 | os.path.getsize(__file__)
|
||||
14 |
|
||||
15 | os.path.getsize(filename)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
16 | os.path.getsize(filename1)
|
||||
17 | os.path.getsize(filename2)
|
||||
|
|
||||
@@ -56,7 +56,7 @@ PTH202.py:16:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
|
|
||||
15 | os.path.getsize(filename)
|
||||
16 | os.path.getsize(filename1)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
17 | os.path.getsize(filename2)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -66,7 +66,7 @@ PTH202.py:17:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
15 | os.path.getsize(filename)
|
||||
16 | os.path.getsize(filename1)
|
||||
17 | os.path.getsize(filename2)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
18 |
|
||||
19 | os.path.getsize(filename="filename")
|
||||
|
|
||||
@@ -77,7 +77,7 @@ PTH202.py:19:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
17 | os.path.getsize(filename2)
|
||||
18 |
|
||||
19 | os.path.getsize(filename="filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
20 | os.path.getsize(filename=b"filename")
|
||||
21 | os.path.getsize(filename=Path("filename"))
|
||||
|
|
||||
@@ -87,7 +87,7 @@ PTH202.py:20:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
|
|
||||
19 | os.path.getsize(filename="filename")
|
||||
20 | os.path.getsize(filename=b"filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
21 | os.path.getsize(filename=Path("filename"))
|
||||
22 | os.path.getsize(filename=__file__)
|
||||
|
|
||||
@@ -98,7 +98,7 @@ PTH202.py:21:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
19 | os.path.getsize(filename="filename")
|
||||
20 | os.path.getsize(filename=b"filename")
|
||||
21 | os.path.getsize(filename=Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
22 | os.path.getsize(filename=__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -108,7 +108,7 @@ PTH202.py:22:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
20 | os.path.getsize(filename=b"filename")
|
||||
21 | os.path.getsize(filename=Path("filename"))
|
||||
22 | os.path.getsize(filename=__file__)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
23 |
|
||||
24 | getsize("filename")
|
||||
|
|
||||
@@ -119,7 +119,7 @@ PTH202.py:24:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
22 | os.path.getsize(filename=__file__)
|
||||
23 |
|
||||
24 | getsize("filename")
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
25 | getsize(b"filename")
|
||||
26 | getsize(Path("filename"))
|
||||
|
|
||||
@@ -129,7 +129,7 @@ PTH202.py:25:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
|
|
||||
24 | getsize("filename")
|
||||
25 | getsize(b"filename")
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
26 | getsize(Path("filename"))
|
||||
27 | getsize(__file__)
|
||||
|
|
||||
@@ -140,7 +140,7 @@ PTH202.py:26:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
24 | getsize("filename")
|
||||
25 | getsize(b"filename")
|
||||
26 | getsize(Path("filename"))
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
27 | getsize(__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -150,7 +150,7 @@ PTH202.py:27:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
25 | getsize(b"filename")
|
||||
26 | getsize(Path("filename"))
|
||||
27 | getsize(__file__)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^ PTH202
|
||||
28 |
|
||||
29 | getsize(filename="filename")
|
||||
|
|
||||
@@ -161,7 +161,7 @@ PTH202.py:29:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
27 | getsize(__file__)
|
||||
28 |
|
||||
29 | getsize(filename="filename")
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
30 | getsize(filename=b"filename")
|
||||
31 | getsize(filename=Path("filename"))
|
||||
|
|
||||
@@ -171,7 +171,7 @@ PTH202.py:30:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
|
|
||||
29 | getsize(filename="filename")
|
||||
30 | getsize(filename=b"filename")
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
31 | getsize(filename=Path("filename"))
|
||||
32 | getsize(filename=__file__)
|
||||
|
|
||||
@@ -182,7 +182,7 @@ PTH202.py:31:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
29 | getsize(filename="filename")
|
||||
30 | getsize(filename=b"filename")
|
||||
31 | getsize(filename=Path("filename"))
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
32 | getsize(filename=__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -192,7 +192,7 @@ PTH202.py:32:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
30 | getsize(filename=b"filename")
|
||||
31 | getsize(filename=Path("filename"))
|
||||
32 | getsize(filename=__file__)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
33 |
|
||||
34 | getsize(filename)
|
||||
|
|
||||
@@ -203,7 +203,7 @@ PTH202.py:34:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
32 | getsize(filename=__file__)
|
||||
33 |
|
||||
34 | getsize(filename)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^ PTH202
|
||||
35 | getsize(filename1)
|
||||
36 | getsize(filename2)
|
||||
|
|
||||
@@ -213,7 +213,7 @@ PTH202.py:35:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
|
|
||||
34 | getsize(filename)
|
||||
35 | getsize(filename1)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^ PTH202
|
||||
36 | getsize(filename2)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -223,70 +223,89 @@ PTH202.py:36:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
34 | getsize(filename)
|
||||
35 | getsize(filename1)
|
||||
36 | getsize(filename2)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^ PTH202
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
PTH202.py:39:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
39 | os.path.getsize(
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
40 | "filename", # comment
|
||||
41 | )
|
||||
39 | / os.path.getsize(
|
||||
40 | | "filename", # comment
|
||||
41 | | )
|
||||
| |_^ PTH202
|
||||
42 |
|
||||
43 | os.path.getsize(
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
PTH202.py:43:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
41 | )
|
||||
41 | )
|
||||
42 |
|
||||
43 | os.path.getsize(
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
44 | # comment
|
||||
45 | "filename"
|
||||
43 | / os.path.getsize(
|
||||
44 | | # comment
|
||||
45 | | "filename"
|
||||
46 | | ,
|
||||
47 | | # comment
|
||||
48 | | )
|
||||
| |_^ PTH202
|
||||
49 |
|
||||
50 | os.path.getsize(
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
PTH202.py:50:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
48 | )
|
||||
48 | )
|
||||
49 |
|
||||
50 | os.path.getsize(
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
51 | # comment
|
||||
52 | b"filename"
|
||||
50 | / os.path.getsize(
|
||||
51 | | # comment
|
||||
52 | | b"filename"
|
||||
53 | | # comment
|
||||
54 | | )
|
||||
| |_^ PTH202
|
||||
55 |
|
||||
56 | os.path.getsize( # comment
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
PTH202.py:56:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
54 | )
|
||||
54 | )
|
||||
55 |
|
||||
56 | os.path.getsize( # comment
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
57 | Path(__file__)
|
||||
58 | # comment
|
||||
56 | / os.path.getsize( # comment
|
||||
57 | | Path(__file__)
|
||||
58 | | # comment
|
||||
59 | | ) # comment
|
||||
| |_^ PTH202
|
||||
60 |
|
||||
61 | getsize( # comment
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
PTH202.py:61:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
59 | ) # comment
|
||||
59 | ) # comment
|
||||
60 |
|
||||
61 | getsize( # comment
|
||||
| ^^^^^^^ PTH202
|
||||
62 | "filename")
|
||||
61 | / getsize( # comment
|
||||
62 | | "filename")
|
||||
| |_______________^ PTH202
|
||||
63 |
|
||||
64 | getsize( # comment
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
PTH202.py:64:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
62 | "filename")
|
||||
62 | "filename")
|
||||
63 |
|
||||
64 | getsize( # comment
|
||||
| ^^^^^^^ PTH202
|
||||
65 | b"filename",
|
||||
66 | #comment
|
||||
64 | / getsize( # comment
|
||||
65 | | b"filename",
|
||||
66 | | #comment
|
||||
67 | | )
|
||||
| |_^ PTH202
|
||||
68 |
|
||||
69 | os.path.getsize("file" + "name")
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -295,7 +314,7 @@ PTH202.py:69:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
67 | )
|
||||
68 |
|
||||
69 | os.path.getsize("file" + "name")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
70 |
|
||||
71 | getsize \
|
||||
|
|
||||
@@ -303,12 +322,17 @@ PTH202.py:69:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
|
||||
PTH202.py:71:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
69 | os.path.getsize("file" + "name")
|
||||
69 | os.path.getsize("file" + "name")
|
||||
70 |
|
||||
71 | getsize \
|
||||
| ^^^^^^^ PTH202
|
||||
72 | \
|
||||
73 | \
|
||||
71 | / getsize \
|
||||
72 | | \
|
||||
73 | | \
|
||||
74 | | ( # comment
|
||||
75 | | "filename",
|
||||
76 | | )
|
||||
| |_____^ PTH202
|
||||
77 |
|
||||
78 | getsize(Path("filename").resolve())
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -317,7 +341,7 @@ PTH202.py:78:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
76 | )
|
||||
77 |
|
||||
78 | getsize(Path("filename").resolve())
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
79 |
|
||||
80 | import pathlib
|
||||
|
|
||||
@@ -328,6 +352,6 @@ PTH202.py:82:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_s
|
||||
80 | import pathlib
|
||||
81 |
|
||||
82 | os.path.getsize(pathlib.Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -6,7 +6,7 @@ PTH202_2.py:3:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_
|
||||
1 | import os
|
||||
2 |
|
||||
3 | os.path.getsize(filename="filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
4 | os.path.getsize(filename=b"filename")
|
||||
5 | os.path.getsize(filename=__file__)
|
||||
|
|
||||
@@ -16,7 +16,7 @@ PTH202_2.py:4:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_
|
||||
|
|
||||
3 | os.path.getsize(filename="filename")
|
||||
4 | os.path.getsize(filename=b"filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
5 | os.path.getsize(filename=__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -26,6 +26,6 @@ PTH202_2.py:5:1: PTH202 `os.path.getsize` should be replaced by `Path.stat().st_
|
||||
3 | os.path.getsize(filename="filename")
|
||||
4 | os.path.getsize(filename=b"filename")
|
||||
5 | os.path.getsize(filename=__file__)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -10,7 +10,6 @@ PTH203.py:5:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_a
|
||||
6 | os.path.getatime(b"filename")
|
||||
7 | os.path.getatime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:6:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
@@ -19,7 +18,6 @@ PTH203.py:6:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_a
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
7 | os.path.getatime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:7:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
@@ -28,7 +26,6 @@ PTH203.py:7:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_a
|
||||
7 | os.path.getatime(Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:10:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
@@ -37,7 +34,6 @@ PTH203.py:10:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_
|
||||
11 | getatime(b"filename")
|
||||
12 | getatime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:11:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
@@ -46,7 +42,6 @@ PTH203.py:11:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_
|
||||
| ^^^^^^^^ PTH203
|
||||
12 | getatime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:12:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
@@ -55,88 +50,3 @@ PTH203.py:12:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_
|
||||
12 | getatime(Path("filename"))
|
||||
| ^^^^^^^^ PTH203
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:17:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
15 | file = __file__
|
||||
16 |
|
||||
17 | os.path.getatime(file)
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
18 | os.path.getatime(filename="filename")
|
||||
19 | os.path.getatime(filename=Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:18:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
17 | os.path.getatime(file)
|
||||
18 | os.path.getatime(filename="filename")
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
19 | os.path.getatime(filename=Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:19:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
17 | os.path.getatime(file)
|
||||
18 | os.path.getatime(filename="filename")
|
||||
19 | os.path.getatime(filename=Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
20 |
|
||||
21 | os.path.getatime( # comment 1
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:21:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
19 | os.path.getatime(filename=Path("filename"))
|
||||
20 |
|
||||
21 | os.path.getatime( # comment 1
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
22 | # comment 2
|
||||
23 | "filename" # comment 3
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:29:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
27 | ) # comment 7
|
||||
28 |
|
||||
29 | os.path.getatime("file" + "name")
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
30 |
|
||||
31 | getatime(Path("filename").resolve())
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:31:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
29 | os.path.getatime("file" + "name")
|
||||
30 |
|
||||
31 | getatime(Path("filename").resolve())
|
||||
| ^^^^^^^^ PTH203
|
||||
32 |
|
||||
33 | os.path.getatime(pathlib.Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:33:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
31 | getatime(Path("filename").resolve())
|
||||
32 |
|
||||
33 | os.path.getatime(pathlib.Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
34 |
|
||||
35 | getatime(Path("dir") / "file.txt")
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
PTH203.py:35:1: PTH203 `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
33 | os.path.getatime(pathlib.Path("filename"))
|
||||
34 |
|
||||
35 | getatime(Path("dir") / "file.txt")
|
||||
| ^^^^^^^^ PTH203
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_use_pathlib/mod.rs
|
||||
snapshot_kind: text
|
||||
---
|
||||
PTH204.py:6:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
@@ -8,7 +9,6 @@ PTH204.py:6:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_m
|
||||
7 | os.path.getmtime(b"filename")
|
||||
8 | os.path.getmtime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
PTH204.py:7:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
@@ -17,7 +17,6 @@ PTH204.py:7:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_m
|
||||
| ^^^^^^^^^^^^^^^^ PTH204
|
||||
8 | os.path.getmtime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
PTH204.py:8:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
@@ -26,7 +25,6 @@ PTH204.py:8:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_m
|
||||
8 | os.path.getmtime(Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH204
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
PTH204.py:11:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
@@ -35,7 +33,6 @@ PTH204.py:11:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_
|
||||
12 | getmtime(b"filename")
|
||||
13 | getmtime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
PTH204.py:12:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
@@ -44,7 +41,6 @@ PTH204.py:12:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_
|
||||
| ^^^^^^^^ PTH204
|
||||
13 | getmtime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
PTH204.py:13:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
@@ -53,4 +49,3 @@ PTH204.py:13:1: PTH204 `os.path.getmtime` should be replaced by `Path.stat().st_
|
||||
13 | getmtime(Path("filename"))
|
||||
| ^^^^^^^^ PTH204
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
@@ -8,7 +8,6 @@ PTH205.py:6:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_c
|
||||
7 | os.path.getctime(b"filename")
|
||||
8 | os.path.getctime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
PTH205.py:7:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
@@ -17,7 +16,6 @@ PTH205.py:7:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_c
|
||||
| ^^^^^^^^^^^^^^^^ PTH205
|
||||
8 | os.path.getctime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
PTH205.py:8:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
@@ -28,7 +26,6 @@ PTH205.py:8:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_c
|
||||
9 |
|
||||
10 | getctime("filename")
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
PTH205.py:10:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
@@ -39,7 +36,6 @@ PTH205.py:10:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_
|
||||
11 | getctime(b"filename")
|
||||
12 | getctime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
PTH205.py:11:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
@@ -48,7 +44,6 @@ PTH205.py:11:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_
|
||||
| ^^^^^^^^ PTH205
|
||||
12 | getctime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
PTH205.py:12:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
@@ -57,4 +52,3 @@ PTH205.py:12:1: PTH205 `os.path.getctime` should be replaced by `Path.stat().st_
|
||||
12 | getctime(Path("filename"))
|
||||
| ^^^^^^^^ PTH205
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
@@ -4,7 +4,7 @@ source: crates/ruff_linter/src/rules/flake8_use_pathlib/mod.rs
|
||||
PTH202.py:10:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
10 | os.path.getsize("filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
11 | os.path.getsize(b"filename")
|
||||
12 | os.path.getsize(Path("filename"))
|
||||
|
|
||||
@@ -24,7 +24,7 @@ PTH202.py:11:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
|
||||
10 | os.path.getsize("filename")
|
||||
11 | os.path.getsize(b"filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
12 | os.path.getsize(Path("filename"))
|
||||
13 | os.path.getsize(__file__)
|
||||
|
|
||||
@@ -45,7 +45,7 @@ PTH202.py:12:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
10 | os.path.getsize("filename")
|
||||
11 | os.path.getsize(b"filename")
|
||||
12 | os.path.getsize(Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
13 | os.path.getsize(__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -65,7 +65,7 @@ PTH202.py:13:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
11 | os.path.getsize(b"filename")
|
||||
12 | os.path.getsize(Path("filename"))
|
||||
13 | os.path.getsize(__file__)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
14 |
|
||||
15 | os.path.getsize(filename)
|
||||
|
|
||||
@@ -86,7 +86,7 @@ PTH202.py:15:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
13 | os.path.getsize(__file__)
|
||||
14 |
|
||||
15 | os.path.getsize(filename)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
16 | os.path.getsize(filename1)
|
||||
17 | os.path.getsize(filename2)
|
||||
|
|
||||
@@ -106,7 +106,7 @@ PTH202.py:16:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
|
||||
15 | os.path.getsize(filename)
|
||||
16 | os.path.getsize(filename1)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
17 | os.path.getsize(filename2)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -126,7 +126,7 @@ PTH202.py:17:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
15 | os.path.getsize(filename)
|
||||
16 | os.path.getsize(filename1)
|
||||
17 | os.path.getsize(filename2)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
18 |
|
||||
19 | os.path.getsize(filename="filename")
|
||||
|
|
||||
@@ -147,7 +147,7 @@ PTH202.py:19:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
17 | os.path.getsize(filename2)
|
||||
18 |
|
||||
19 | os.path.getsize(filename="filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
20 | os.path.getsize(filename=b"filename")
|
||||
21 | os.path.getsize(filename=Path("filename"))
|
||||
|
|
||||
@@ -167,7 +167,7 @@ PTH202.py:20:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
|
||||
19 | os.path.getsize(filename="filename")
|
||||
20 | os.path.getsize(filename=b"filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
21 | os.path.getsize(filename=Path("filename"))
|
||||
22 | os.path.getsize(filename=__file__)
|
||||
|
|
||||
@@ -188,7 +188,7 @@ PTH202.py:21:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
19 | os.path.getsize(filename="filename")
|
||||
20 | os.path.getsize(filename=b"filename")
|
||||
21 | os.path.getsize(filename=Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
22 | os.path.getsize(filename=__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -208,7 +208,7 @@ PTH202.py:22:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
20 | os.path.getsize(filename=b"filename")
|
||||
21 | os.path.getsize(filename=Path("filename"))
|
||||
22 | os.path.getsize(filename=__file__)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
23 |
|
||||
24 | getsize("filename")
|
||||
|
|
||||
@@ -229,7 +229,7 @@ PTH202.py:24:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
22 | os.path.getsize(filename=__file__)
|
||||
23 |
|
||||
24 | getsize("filename")
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
25 | getsize(b"filename")
|
||||
26 | getsize(Path("filename"))
|
||||
|
|
||||
@@ -249,7 +249,7 @@ PTH202.py:25:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
|
||||
24 | getsize("filename")
|
||||
25 | getsize(b"filename")
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
26 | getsize(Path("filename"))
|
||||
27 | getsize(__file__)
|
||||
|
|
||||
@@ -270,7 +270,7 @@ PTH202.py:26:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
24 | getsize("filename")
|
||||
25 | getsize(b"filename")
|
||||
26 | getsize(Path("filename"))
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
27 | getsize(__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -290,7 +290,7 @@ PTH202.py:27:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
25 | getsize(b"filename")
|
||||
26 | getsize(Path("filename"))
|
||||
27 | getsize(__file__)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^ PTH202
|
||||
28 |
|
||||
29 | getsize(filename="filename")
|
||||
|
|
||||
@@ -311,7 +311,7 @@ PTH202.py:29:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
27 | getsize(__file__)
|
||||
28 |
|
||||
29 | getsize(filename="filename")
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
30 | getsize(filename=b"filename")
|
||||
31 | getsize(filename=Path("filename"))
|
||||
|
|
||||
@@ -331,7 +331,7 @@ PTH202.py:30:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
|
||||
29 | getsize(filename="filename")
|
||||
30 | getsize(filename=b"filename")
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
31 | getsize(filename=Path("filename"))
|
||||
32 | getsize(filename=__file__)
|
||||
|
|
||||
@@ -352,7 +352,7 @@ PTH202.py:31:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
29 | getsize(filename="filename")
|
||||
30 | getsize(filename=b"filename")
|
||||
31 | getsize(filename=Path("filename"))
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
32 | getsize(filename=__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -372,7 +372,7 @@ PTH202.py:32:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
30 | getsize(filename=b"filename")
|
||||
31 | getsize(filename=Path("filename"))
|
||||
32 | getsize(filename=__file__)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
33 |
|
||||
34 | getsize(filename)
|
||||
|
|
||||
@@ -393,7 +393,7 @@ PTH202.py:34:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
32 | getsize(filename=__file__)
|
||||
33 |
|
||||
34 | getsize(filename)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^ PTH202
|
||||
35 | getsize(filename1)
|
||||
36 | getsize(filename2)
|
||||
|
|
||||
@@ -413,7 +413,7 @@ PTH202.py:35:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
|
||||
34 | getsize(filename)
|
||||
35 | getsize(filename1)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^ PTH202
|
||||
36 | getsize(filename2)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -433,7 +433,7 @@ PTH202.py:36:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
34 | getsize(filename)
|
||||
35 | getsize(filename1)
|
||||
36 | getsize(filename2)
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^ PTH202
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -449,10 +449,12 @@ PTH202.py:36:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
||||
PTH202.py:39:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
39 | os.path.getsize(
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
40 | "filename", # comment
|
||||
41 | )
|
||||
39 | / os.path.getsize(
|
||||
40 | | "filename", # comment
|
||||
41 | | )
|
||||
| |_^ PTH202
|
||||
42 |
|
||||
43 | os.path.getsize(
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -470,12 +472,17 @@ PTH202.py:39:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
||||
PTH202.py:43:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
41 | )
|
||||
41 | )
|
||||
42 |
|
||||
43 | os.path.getsize(
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
44 | # comment
|
||||
45 | "filename"
|
||||
43 | / os.path.getsize(
|
||||
44 | | # comment
|
||||
45 | | "filename"
|
||||
46 | | ,
|
||||
47 | | # comment
|
||||
48 | | )
|
||||
| |_^ PTH202
|
||||
49 |
|
||||
50 | os.path.getsize(
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -496,12 +503,16 @@ PTH202.py:43:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
||||
PTH202.py:50:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
48 | )
|
||||
48 | )
|
||||
49 |
|
||||
50 | os.path.getsize(
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
51 | # comment
|
||||
52 | b"filename"
|
||||
50 | / os.path.getsize(
|
||||
51 | | # comment
|
||||
52 | | b"filename"
|
||||
53 | | # comment
|
||||
54 | | )
|
||||
| |_^ PTH202
|
||||
55 |
|
||||
56 | os.path.getsize( # comment
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -521,12 +532,15 @@ PTH202.py:50:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
||||
PTH202.py:56:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
54 | )
|
||||
54 | )
|
||||
55 |
|
||||
56 | os.path.getsize( # comment
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
57 | Path(__file__)
|
||||
58 | # comment
|
||||
56 | / os.path.getsize( # comment
|
||||
57 | | Path(__file__)
|
||||
58 | | # comment
|
||||
59 | | ) # comment
|
||||
| |_^ PTH202
|
||||
60 |
|
||||
61 | getsize( # comment
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -545,11 +559,13 @@ PTH202.py:56:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
||||
PTH202.py:61:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
59 | ) # comment
|
||||
59 | ) # comment
|
||||
60 |
|
||||
61 | getsize( # comment
|
||||
| ^^^^^^^ PTH202
|
||||
62 | "filename")
|
||||
61 | / getsize( # comment
|
||||
62 | | "filename")
|
||||
| |_______________^ PTH202
|
||||
63 |
|
||||
64 | getsize( # comment
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -566,12 +582,15 @@ PTH202.py:61:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
||||
PTH202.py:64:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
62 | "filename")
|
||||
62 | "filename")
|
||||
63 |
|
||||
64 | getsize( # comment
|
||||
| ^^^^^^^ PTH202
|
||||
65 | b"filename",
|
||||
66 | #comment
|
||||
64 | / getsize( # comment
|
||||
65 | | b"filename",
|
||||
66 | | #comment
|
||||
67 | | )
|
||||
| |_^ PTH202
|
||||
68 |
|
||||
69 | os.path.getsize("file" + "name")
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -593,7 +612,7 @@ PTH202.py:69:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
67 | )
|
||||
68 |
|
||||
69 | os.path.getsize("file" + "name")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
70 |
|
||||
71 | getsize \
|
||||
|
|
||||
@@ -611,12 +630,17 @@ PTH202.py:69:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
|
||||
PTH202.py:71:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().st_size`
|
||||
|
|
||||
69 | os.path.getsize("file" + "name")
|
||||
69 | os.path.getsize("file" + "name")
|
||||
70 |
|
||||
71 | getsize \
|
||||
| ^^^^^^^ PTH202
|
||||
72 | \
|
||||
73 | \
|
||||
71 | / getsize \
|
||||
72 | | \
|
||||
73 | | \
|
||||
74 | | ( # comment
|
||||
75 | | "filename",
|
||||
76 | | )
|
||||
| |_____^ PTH202
|
||||
77 |
|
||||
78 | getsize(Path("filename").resolve())
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
@@ -640,7 +664,7 @@ PTH202.py:78:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
76 | )
|
||||
77 |
|
||||
78 | getsize(Path("filename").resolve())
|
||||
| ^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
79 |
|
||||
80 | import pathlib
|
||||
|
|
||||
@@ -661,7 +685,7 @@ PTH202.py:82:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat().
|
||||
80 | import pathlib
|
||||
81 |
|
||||
82 | os.path.getsize(pathlib.Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ PTH202_2.py:3:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat()
|
||||
1 | import os
|
||||
2 |
|
||||
3 | os.path.getsize(filename="filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
4 | os.path.getsize(filename=b"filename")
|
||||
5 | os.path.getsize(filename=__file__)
|
||||
|
|
||||
@@ -25,7 +25,7 @@ PTH202_2.py:4:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat()
|
||||
|
|
||||
3 | os.path.getsize(filename="filename")
|
||||
4 | os.path.getsize(filename=b"filename")
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
5 | os.path.getsize(filename=__file__)
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
@@ -44,7 +44,7 @@ PTH202_2.py:5:1: PTH202 [*] `os.path.getsize` should be replaced by `Path.stat()
|
||||
3 | os.path.getsize(filename="filename")
|
||||
4 | os.path.getsize(filename=b"filename")
|
||||
5 | os.path.getsize(filename=__file__)
|
||||
| ^^^^^^^^^^^^^^^ PTH202
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PTH202
|
||||
|
|
||||
= help: Replace with `Path(...).stat().st_size`
|
||||
|
||||
|
||||
@@ -1,284 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_use_pathlib/mod.rs
|
||||
---
|
||||
PTH203.py:5:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
3 | from os.path import getatime
|
||||
4 |
|
||||
5 | os.path.getatime("filename")
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
6 | os.path.getatime(b"filename")
|
||||
7 | os.path.getatime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
2 2 | from pathlib import Path
|
||||
3 3 | from os.path import getatime
|
||||
4 4 |
|
||||
5 |-os.path.getatime("filename")
|
||||
5 |+Path("filename").stat().st_atime
|
||||
6 6 | os.path.getatime(b"filename")
|
||||
7 7 | os.path.getatime(Path("filename"))
|
||||
8 8 |
|
||||
|
||||
PTH203.py:6:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
5 | os.path.getatime("filename")
|
||||
6 | os.path.getatime(b"filename")
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
7 | os.path.getatime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 | from os.path import getatime
|
||||
4 4 |
|
||||
5 5 | os.path.getatime("filename")
|
||||
6 |-os.path.getatime(b"filename")
|
||||
6 |+Path(b"filename").stat().st_atime
|
||||
7 7 | os.path.getatime(Path("filename"))
|
||||
8 8 |
|
||||
9 9 |
|
||||
|
||||
PTH203.py:7:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
5 | os.path.getatime("filename")
|
||||
6 | os.path.getatime(b"filename")
|
||||
7 | os.path.getatime(Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
4 4 |
|
||||
5 5 | os.path.getatime("filename")
|
||||
6 6 | os.path.getatime(b"filename")
|
||||
7 |-os.path.getatime(Path("filename"))
|
||||
7 |+Path("filename").stat().st_atime
|
||||
8 8 |
|
||||
9 9 |
|
||||
10 10 | getatime("filename")
|
||||
|
||||
PTH203.py:10:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
10 | getatime("filename")
|
||||
| ^^^^^^^^ PTH203
|
||||
11 | getatime(b"filename")
|
||||
12 | getatime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
7 7 | os.path.getatime(Path("filename"))
|
||||
8 8 |
|
||||
9 9 |
|
||||
10 |-getatime("filename")
|
||||
10 |+Path("filename").stat().st_atime
|
||||
11 11 | getatime(b"filename")
|
||||
12 12 | getatime(Path("filename"))
|
||||
13 13 |
|
||||
|
||||
PTH203.py:11:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
10 | getatime("filename")
|
||||
11 | getatime(b"filename")
|
||||
| ^^^^^^^^ PTH203
|
||||
12 | getatime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
8 8 |
|
||||
9 9 |
|
||||
10 10 | getatime("filename")
|
||||
11 |-getatime(b"filename")
|
||||
11 |+Path(b"filename").stat().st_atime
|
||||
12 12 | getatime(Path("filename"))
|
||||
13 13 |
|
||||
14 14 |
|
||||
|
||||
PTH203.py:12:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
10 | getatime("filename")
|
||||
11 | getatime(b"filename")
|
||||
12 | getatime(Path("filename"))
|
||||
| ^^^^^^^^ PTH203
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
9 9 |
|
||||
10 10 | getatime("filename")
|
||||
11 11 | getatime(b"filename")
|
||||
12 |-getatime(Path("filename"))
|
||||
12 |+Path("filename").stat().st_atime
|
||||
13 13 |
|
||||
14 14 |
|
||||
15 15 | file = __file__
|
||||
|
||||
PTH203.py:17:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
15 | file = __file__
|
||||
16 |
|
||||
17 | os.path.getatime(file)
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
18 | os.path.getatime(filename="filename")
|
||||
19 | os.path.getatime(filename=Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
14 14 |
|
||||
15 15 | file = __file__
|
||||
16 16 |
|
||||
17 |-os.path.getatime(file)
|
||||
17 |+Path(file).stat().st_atime
|
||||
18 18 | os.path.getatime(filename="filename")
|
||||
19 19 | os.path.getatime(filename=Path("filename"))
|
||||
20 20 |
|
||||
|
||||
PTH203.py:18:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
17 | os.path.getatime(file)
|
||||
18 | os.path.getatime(filename="filename")
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
19 | os.path.getatime(filename=Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
15 15 | file = __file__
|
||||
16 16 |
|
||||
17 17 | os.path.getatime(file)
|
||||
18 |-os.path.getatime(filename="filename")
|
||||
18 |+Path("filename").stat().st_atime
|
||||
19 19 | os.path.getatime(filename=Path("filename"))
|
||||
20 20 |
|
||||
21 21 | os.path.getatime( # comment 1
|
||||
|
||||
PTH203.py:19:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
17 | os.path.getatime(file)
|
||||
18 | os.path.getatime(filename="filename")
|
||||
19 | os.path.getatime(filename=Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
20 |
|
||||
21 | os.path.getatime( # comment 1
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
16 16 |
|
||||
17 17 | os.path.getatime(file)
|
||||
18 18 | os.path.getatime(filename="filename")
|
||||
19 |-os.path.getatime(filename=Path("filename"))
|
||||
19 |+Path("filename").stat().st_atime
|
||||
20 20 |
|
||||
21 21 | os.path.getatime( # comment 1
|
||||
22 22 | # comment 2
|
||||
|
||||
PTH203.py:21:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
19 | os.path.getatime(filename=Path("filename"))
|
||||
20 |
|
||||
21 | os.path.getatime( # comment 1
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
22 | # comment 2
|
||||
23 | "filename" # comment 3
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Unsafe fix
|
||||
18 18 | os.path.getatime(filename="filename")
|
||||
19 19 | os.path.getatime(filename=Path("filename"))
|
||||
20 20 |
|
||||
21 |-os.path.getatime( # comment 1
|
||||
22 |- # comment 2
|
||||
23 |- "filename" # comment 3
|
||||
24 |- # comment 4
|
||||
25 |- , # comment 5
|
||||
26 |- # comment 6
|
||||
27 |-) # comment 7
|
||||
21 |+Path("filename").stat().st_atime # comment 7
|
||||
28 22 |
|
||||
29 23 | os.path.getatime("file" + "name")
|
||||
30 24 |
|
||||
|
||||
PTH203.py:29:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
27 | ) # comment 7
|
||||
28 |
|
||||
29 | os.path.getatime("file" + "name")
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
30 |
|
||||
31 | getatime(Path("filename").resolve())
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
26 26 | # comment 6
|
||||
27 27 | ) # comment 7
|
||||
28 28 |
|
||||
29 |-os.path.getatime("file" + "name")
|
||||
29 |+Path("file" + "name").stat().st_atime
|
||||
30 30 |
|
||||
31 31 | getatime(Path("filename").resolve())
|
||||
32 32 |
|
||||
|
||||
PTH203.py:31:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
29 | os.path.getatime("file" + "name")
|
||||
30 |
|
||||
31 | getatime(Path("filename").resolve())
|
||||
| ^^^^^^^^ PTH203
|
||||
32 |
|
||||
33 | os.path.getatime(pathlib.Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
28 28 |
|
||||
29 29 | os.path.getatime("file" + "name")
|
||||
30 30 |
|
||||
31 |-getatime(Path("filename").resolve())
|
||||
31 |+Path(Path("filename").resolve()).stat().st_atime
|
||||
32 32 |
|
||||
33 33 | os.path.getatime(pathlib.Path("filename"))
|
||||
34 34 |
|
||||
|
||||
PTH203.py:33:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
31 | getatime(Path("filename").resolve())
|
||||
32 |
|
||||
33 | os.path.getatime(pathlib.Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH203
|
||||
34 |
|
||||
35 | getatime(Path("dir") / "file.txt")
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
30 30 |
|
||||
31 31 | getatime(Path("filename").resolve())
|
||||
32 32 |
|
||||
33 |-os.path.getatime(pathlib.Path("filename"))
|
||||
33 |+pathlib.Path("filename").stat().st_atime
|
||||
34 34 |
|
||||
35 35 | getatime(Path("dir") / "file.txt")
|
||||
|
||||
PTH203.py:35:1: PTH203 [*] `os.path.getatime` should be replaced by `Path.stat().st_atime`
|
||||
|
|
||||
33 | os.path.getatime(pathlib.Path("filename"))
|
||||
34 |
|
||||
35 | getatime(Path("dir") / "file.txt")
|
||||
| ^^^^^^^^ PTH203
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_atime`
|
||||
|
||||
ℹ Safe fix
|
||||
32 32 |
|
||||
33 33 | os.path.getatime(pathlib.Path("filename"))
|
||||
34 34 |
|
||||
35 |-getatime(Path("dir") / "file.txt")
|
||||
35 |+Path(Path("dir") / "file.txt").stat().st_atime
|
||||
@@ -1,110 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_use_pathlib/mod.rs
|
||||
---
|
||||
PTH204.py:6:1: PTH204 [*] `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
6 | os.path.getmtime("filename")
|
||||
| ^^^^^^^^^^^^^^^^ PTH204
|
||||
7 | os.path.getmtime(b"filename")
|
||||
8 | os.path.getmtime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 | from os.path import getmtime
|
||||
4 4 |
|
||||
5 5 |
|
||||
6 |-os.path.getmtime("filename")
|
||||
6 |+Path("filename").stat().st_mtime
|
||||
7 7 | os.path.getmtime(b"filename")
|
||||
8 8 | os.path.getmtime(Path("filename"))
|
||||
9 9 |
|
||||
|
||||
PTH204.py:7:1: PTH204 [*] `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
6 | os.path.getmtime("filename")
|
||||
7 | os.path.getmtime(b"filename")
|
||||
| ^^^^^^^^^^^^^^^^ PTH204
|
||||
8 | os.path.getmtime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
ℹ Safe fix
|
||||
4 4 |
|
||||
5 5 |
|
||||
6 6 | os.path.getmtime("filename")
|
||||
7 |-os.path.getmtime(b"filename")
|
||||
7 |+Path(b"filename").stat().st_mtime
|
||||
8 8 | os.path.getmtime(Path("filename"))
|
||||
9 9 |
|
||||
10 10 |
|
||||
|
||||
PTH204.py:8:1: PTH204 [*] `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
6 | os.path.getmtime("filename")
|
||||
7 | os.path.getmtime(b"filename")
|
||||
8 | os.path.getmtime(Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH204
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
ℹ Safe fix
|
||||
5 5 |
|
||||
6 6 | os.path.getmtime("filename")
|
||||
7 7 | os.path.getmtime(b"filename")
|
||||
8 |-os.path.getmtime(Path("filename"))
|
||||
8 |+Path("filename").stat().st_mtime
|
||||
9 9 |
|
||||
10 10 |
|
||||
11 11 | getmtime("filename")
|
||||
|
||||
PTH204.py:11:1: PTH204 [*] `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
11 | getmtime("filename")
|
||||
| ^^^^^^^^ PTH204
|
||||
12 | getmtime(b"filename")
|
||||
13 | getmtime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
ℹ Safe fix
|
||||
8 8 | os.path.getmtime(Path("filename"))
|
||||
9 9 |
|
||||
10 10 |
|
||||
11 |-getmtime("filename")
|
||||
11 |+Path("filename").stat().st_mtime
|
||||
12 12 | getmtime(b"filename")
|
||||
13 13 | getmtime(Path("filename"))
|
||||
|
||||
PTH204.py:12:1: PTH204 [*] `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
11 | getmtime("filename")
|
||||
12 | getmtime(b"filename")
|
||||
| ^^^^^^^^ PTH204
|
||||
13 | getmtime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
ℹ Safe fix
|
||||
9 9 |
|
||||
10 10 |
|
||||
11 11 | getmtime("filename")
|
||||
12 |-getmtime(b"filename")
|
||||
12 |+Path(b"filename").stat().st_mtime
|
||||
13 13 | getmtime(Path("filename"))
|
||||
|
||||
PTH204.py:13:1: PTH204 [*] `os.path.getmtime` should be replaced by `Path.stat().st_mtime`
|
||||
|
|
||||
11 | getmtime("filename")
|
||||
12 | getmtime(b"filename")
|
||||
13 | getmtime(Path("filename"))
|
||||
| ^^^^^^^^ PTH204
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_mtime`
|
||||
|
||||
ℹ Safe fix
|
||||
10 10 |
|
||||
11 11 | getmtime("filename")
|
||||
12 12 | getmtime(b"filename")
|
||||
13 |-getmtime(Path("filename"))
|
||||
13 |+Path("filename").stat().st_mtime
|
||||
@@ -1,114 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_use_pathlib/mod.rs
|
||||
---
|
||||
PTH205.py:6:1: PTH205 [*] `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
6 | os.path.getctime("filename")
|
||||
| ^^^^^^^^^^^^^^^^ PTH205
|
||||
7 | os.path.getctime(b"filename")
|
||||
8 | os.path.getctime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 | from os.path import getctime
|
||||
4 4 |
|
||||
5 5 |
|
||||
6 |-os.path.getctime("filename")
|
||||
6 |+Path("filename").stat().st_ctime
|
||||
7 7 | os.path.getctime(b"filename")
|
||||
8 8 | os.path.getctime(Path("filename"))
|
||||
9 9 |
|
||||
|
||||
PTH205.py:7:1: PTH205 [*] `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
6 | os.path.getctime("filename")
|
||||
7 | os.path.getctime(b"filename")
|
||||
| ^^^^^^^^^^^^^^^^ PTH205
|
||||
8 | os.path.getctime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
ℹ Safe fix
|
||||
4 4 |
|
||||
5 5 |
|
||||
6 6 | os.path.getctime("filename")
|
||||
7 |-os.path.getctime(b"filename")
|
||||
7 |+Path(b"filename").stat().st_ctime
|
||||
8 8 | os.path.getctime(Path("filename"))
|
||||
9 9 |
|
||||
10 10 | getctime("filename")
|
||||
|
||||
PTH205.py:8:1: PTH205 [*] `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
6 | os.path.getctime("filename")
|
||||
7 | os.path.getctime(b"filename")
|
||||
8 | os.path.getctime(Path("filename"))
|
||||
| ^^^^^^^^^^^^^^^^ PTH205
|
||||
9 |
|
||||
10 | getctime("filename")
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
ℹ Safe fix
|
||||
5 5 |
|
||||
6 6 | os.path.getctime("filename")
|
||||
7 7 | os.path.getctime(b"filename")
|
||||
8 |-os.path.getctime(Path("filename"))
|
||||
8 |+Path("filename").stat().st_ctime
|
||||
9 9 |
|
||||
10 10 | getctime("filename")
|
||||
11 11 | getctime(b"filename")
|
||||
|
||||
PTH205.py:10:1: PTH205 [*] `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
8 | os.path.getctime(Path("filename"))
|
||||
9 |
|
||||
10 | getctime("filename")
|
||||
| ^^^^^^^^ PTH205
|
||||
11 | getctime(b"filename")
|
||||
12 | getctime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
ℹ Safe fix
|
||||
7 7 | os.path.getctime(b"filename")
|
||||
8 8 | os.path.getctime(Path("filename"))
|
||||
9 9 |
|
||||
10 |-getctime("filename")
|
||||
10 |+Path("filename").stat().st_ctime
|
||||
11 11 | getctime(b"filename")
|
||||
12 12 | getctime(Path("filename"))
|
||||
|
||||
PTH205.py:11:1: PTH205 [*] `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
10 | getctime("filename")
|
||||
11 | getctime(b"filename")
|
||||
| ^^^^^^^^ PTH205
|
||||
12 | getctime(Path("filename"))
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
ℹ Safe fix
|
||||
8 8 | os.path.getctime(Path("filename"))
|
||||
9 9 |
|
||||
10 10 | getctime("filename")
|
||||
11 |-getctime(b"filename")
|
||||
11 |+Path(b"filename").stat().st_ctime
|
||||
12 12 | getctime(Path("filename"))
|
||||
|
||||
PTH205.py:12:1: PTH205 [*] `os.path.getctime` should be replaced by `Path.stat().st_ctime`
|
||||
|
|
||||
10 | getctime("filename")
|
||||
11 | getctime(b"filename")
|
||||
12 | getctime(Path("filename"))
|
||||
| ^^^^^^^^ PTH205
|
||||
|
|
||||
= help: Replace with `Path.stat(...).st_ctime`
|
||||
|
||||
ℹ Safe fix
|
||||
9 9 |
|
||||
10 10 | getctime("filename")
|
||||
11 11 | getctime(b"filename")
|
||||
12 |-getctime(Path("filename"))
|
||||
12 |+Path("filename").stat().st_ctime
|
||||
@@ -290,6 +290,7 @@ mod tests {
     use test_case::test_case;
 
     use ruff_python_semantic::{MemberNameImport, ModuleNameImport, NameImport};
+    use ruff_text_size::Ranged;
 
     use crate::assert_diagnostics;
     use crate::registry::Rule;
@@ -657,7 +658,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -685,7 +686,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -715,7 +716,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
        assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -743,7 +744,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -765,7 +766,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -785,7 +786,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -1129,7 +1130,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -1154,7 +1155,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -1176,7 +1177,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }
@@ -1197,7 +1198,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(&*snapshot, diagnostics);
         Ok(())
     }
@@ -1216,7 +1217,7 @@ mod tests {
                 ..LinterSettings::for_rule(Rule::UnsortedImports)
             },
         )?;
-        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
+        diagnostics.sort_by_key(Ranged::start);
         assert_diagnostics!(snapshot, diagnostics);
         Ok(())
     }

@@ -168,7 +168,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                     WhitespaceAfterOpenBracket { symbol },
                     TextRange::at(token.end(), trailing_len),
                 ) {
-                    let range = diagnostic.expect_range();
+                    let range = diagnostic.range();
                     diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
                 }
             }
@@ -182,7 +182,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                     WhitespaceBeforeCloseBracket { symbol },
                     TextRange::at(token.start() - offset, offset),
                 ) {
-                    let range = diagnostic.expect_range();
+                    let range = diagnostic.range();
                     diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
                 }
             }
@@ -210,7 +210,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                         TextRange::at(token.start() - offset, offset),
                     )
                     {
-                        let range = diagnostic.expect_range();
+                        let range = diagnostic.range();
                         diagnostic
                             .set_fix(Fix::safe_edit(Edit::range_deletion(range)));
                     }
@@ -227,7 +227,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                         TextRange::at(token.start() - offset, offset),
                     )
                     {
-                        let range = diagnostic.expect_range();
+                        let range = diagnostic.range();
                         diagnostic.set_fix(Fix::safe_edit(
                             Edit::range_deletion(range),
                         ));
@@ -255,7 +255,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                         TextRange::at(token.start() - offset, offset),
                     )
                     {
-                        let range = diagnostic.expect_range();
+                        let range = diagnostic.range();
                         diagnostic.set_fix(Fix::safe_edits(
                             Edit::range_deletion(range),
                             [Edit::insertion(
@@ -278,7 +278,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                         TextRange::at(token.start() - offset, offset),
                     )
                     {
-                        let range = diagnostic.expect_range();
+                        let range = diagnostic.range();
                         diagnostic.set_fix(Fix::safe_edit(
                             Edit::range_deletion(range),
                         ));
@@ -297,7 +297,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                     WhitespaceBeforePunctuation { symbol },
                     TextRange::at(token.start() - offset, offset),
                 ) {
-                    let range = diagnostic.expect_range();
+                    let range = diagnostic.range();
                     diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
                 }
             }

Some files were not shown because too many files have changed in this diff.