Compare commits


1 Commit

Author: Zanie Blue
SHA1: e1c6e2e26a
Message: Add test case for missing VIRTUAL_ENV
Date: 2025-12-17 15:27:47 -06:00
1219 changed files with 7243 additions and 17959 deletions


@@ -1,67 +0,0 @@
{
"permissions": {
"allow": [
"Bash(cargo build:*)",
"Read(//home/micha/astral/discord.py/discord/**)",
"Bash(source:*)",
"Bash(./target/profiling/ty check:*)",
"Bash(tee:*)",
"Read(//home/micha/astral/discord.py/.venv/**)",
"Read(//home/micha/astral/discord.py/**)",
"Bash(perf record:*)",
"Bash(perf report:*)",
"Bash(time:*)",
"Bash(../ruff/target/profiling/ty check discord/audit_logs.py -vv)",
"Bash(sed:*)",
"Read(//home/micha/git/TypeScript/**)",
"Bash(cargo test:*)",
"Bash(MDTEST_TEST_FILTER='union_types.md - Union types - Unions of tuples' cargo test -p ty_python_semantic --test mdtest -- mdtest__union_types)",
"Bash(timeout 10 cargo test --package ty_python_semantic --lib types::tests::divergent_type)",
"Bash(timeout 30 cargo test:*)",
"Bash(git stash:*)",
"Bash(timeout 60 time:*)",
"Bash(for i in 1 2 3 4 5)",
"Bash(do echo \"Run $i:\")",
"Bash(done)",
"Bash(for i in 1 2 3)",
"Bash(cargo fuzz run:*)",
"Bash(timeout 60 cargo fuzz run -s none ty_check_invalid_syntax -- -timeout=1)",
"Bash(for:*)",
"Bash(do echo \"=== Checking $crash ===\")",
"Bash(uvx ty@latest check \"$crash\")",
"Bash(do echo \"=== $crash ===\")",
"Bash(while read crash)",
"Bash(cargo fuzz cmin:*)",
"Bash(cargo +nightly fuzz cmin:*)",
"Bash(timeout 120 cargo fuzz run -s none ty_check_invalid_syntax -- -timeout=1)",
"Bash(awk:*)",
"Bash(while read file)",
"Bash(cat:*)",
"Bash(uvx ty@latest:*)",
"Bash(do cp \"$crash\" /tmp/isolated_crash.py)",
"Bash(echo \"=== $crash ===\")",
"Bash(do echo \"=== test$i.py ===\")",
"Bash(do echo \"=== Testing $crash ===\")",
"Bash(tree:*)",
"Bash(cut:*)",
"Bash(grep:*)",
"Bash(ls:*)",
"Bash(xargs basename:*)",
"Bash(wc:*)",
"Bash(find:*)",
"Bash({} ;)",
"Bash(git checkout:*)",
"Bash(do)",
"Bash(if ! grep -q \"use crate::types\" \"$f\")",
"Bash(! grep -q \"crate::types::\" \"$f\")",
"Bash(then)",
"Bash(else)",
"Bash(fi)",
"Bash(1)",
"Bash(__NEW_LINE__ echo \"Done\")",
"Bash(rm:*)"
],
"deny": [],
"ask": []
}
}

.gitattributes

@@ -22,7 +22,6 @@ crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_CR.py text eol=cr
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_LF.py text eol=lf
crates/ruff_python_parser/resources/inline linguist-generated=true
crates/ty_python_semantic/resources/mdtest/external/*.lock linguist-generated=true
ruff.schema.json -diff linguist-generated=true text=auto eol=lf
ty.schema.json -diff linguist-generated=true text=auto eol=lf


@@ -4,12 +4,10 @@
self-hosted-runner:
# Various runners we use that aren't recognized out-of-the-box by actionlint:
labels:
- depot-ubuntu-24.04-4
- depot-ubuntu-latest-8
- depot-ubuntu-22.04-16
- depot-ubuntu-22.04-32
- depot-windows-2022-16
- depot-ubuntu-22.04-arm-4
- github-windows-2025-x86_64-8
- github-windows-2025-x86_64-16
- codspeed-macro


@@ -4,6 +4,5 @@
# Enable off-by-default rules.
[rules]
possibly-unresolved-reference = "warn"
possibly-missing-import = "warn"
unused-ignore-comment = "warn"
division-by-zero = "warn"


@@ -952,7 +952,7 @@ jobs:
tool: cargo-codspeed
- name: "Build benchmarks"
run: cargo codspeed build --features "codspeed,ruff_instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
- name: "Run benchmarks"
uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
@@ -960,9 +960,9 @@ jobs:
mode: simulation
run: cargo codspeed run
benchmarks-instrumented-ty-build:
name: "benchmarks instrumented ty (build)"
runs-on: depot-ubuntu-24.04-4
benchmarks-instrumented-ty:
name: "benchmarks instrumented (ty)"
runs-on: ubuntu-24.04
needs: determine_changes
if: |
github.repository == 'astral-sh/ruff' &&
@@ -971,6 +971,9 @@ jobs:
needs.determine_changes.outputs.ty == 'true'
)
timeout-minutes: 20
permissions:
contents: read # required for actions/checkout
id-token: write # required for OIDC authentication with CodSpeed
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -980,6 +983,7 @@ jobs:
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- name: "Install Rust toolchain"
run: rustup show
@@ -990,64 +994,28 @@ jobs:
tool: cargo-codspeed
- name: "Build benchmarks"
run: cargo codspeed build -m instrumentation --features "codspeed,ty_instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty
- name: "Upload benchmark binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: benchmarks-instrumented-ty-binary
path: target/codspeed/simulation/ruff_benchmark
retention-days: 1
benchmarks-instrumented-ty-run:
name: "benchmarks instrumented ty (${{ matrix.benchmark }})"
runs-on: ubuntu-24.04
needs: benchmarks-instrumented-ty-build
timeout-minutes: 20
permissions:
contents: read # required for actions/checkout
id-token: write # required for OIDC authentication with CodSpeed
strategy:
fail-fast: false
matrix:
benchmark:
- "check_file|micro|anyio"
- "attrs|hydra|datetype"
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- name: "Install codspeed"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
with:
tool: cargo-codspeed
- name: "Download benchmark binary"
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v4.3.0
with:
name: benchmarks-instrumented-ty-binary
path: target/codspeed/simulation/ruff_benchmark
- name: "Restore binary permissions"
run: chmod +x target/codspeed/simulation/ruff_benchmark/ty
run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty
- name: "Run benchmarks"
uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
with:
mode: simulation
run: cargo codspeed run --bench ty "${{ matrix.benchmark }}"
run: cargo codspeed run
benchmarks-walltime-build:
name: "benchmarks walltime (build)"
# We only run this job if `github.repository == 'astral-sh/ruff'`,
# so hardcoding depot here is fine
runs-on: depot-ubuntu-22.04-arm-4
benchmarks-walltime:
name: "benchmarks walltime (${{ matrix.benchmarks }})"
runs-on: codspeed-macro
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.ty == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
permissions:
contents: read # required for actions/checkout
id-token: write # required for OIDC authentication with CodSpeed
strategy:
matrix:
benchmarks:
- "medium|multithreaded"
- "small|large"
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -1068,51 +1036,7 @@ jobs:
tool: cargo-codspeed
- name: "Build benchmarks"
run: cargo codspeed build -m walltime --features "codspeed,ty_walltime" --profile profiling --no-default-features -p ruff_benchmark
- name: "Upload benchmark binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: benchmarks-walltime-binary
path: target/codspeed/walltime/ruff_benchmark
retention-days: 1
benchmarks-walltime-run:
name: "benchmarks walltime (${{ matrix.benchmark }})"
runs-on: codspeed-macro
needs: benchmarks-walltime-build
timeout-minutes: 20
permissions:
contents: read # required for actions/checkout
id-token: write # required for OIDC authentication with CodSpeed
strategy:
matrix:
benchmark:
- colour_science
- "pandas|tanjun|altair"
- "static_frame|sympy"
- "pydantic|multithreaded|freqtrade"
steps:
- name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- name: "Install codspeed"
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
with:
tool: cargo-codspeed
- name: "Download benchmark binary"
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: benchmarks-walltime-binary
path: target/codspeed/walltime/ruff_benchmark
- name: "Restore binary permissions"
run: chmod +x target/codspeed/walltime/ruff_benchmark/ty_walltime
run: cargo codspeed build --features "codspeed,walltime" --profile profiling --no-default-features -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
@@ -1123,4 +1047,4 @@ jobs:
CODSPEED_PERF_ENABLED: false
with:
mode: walltime
run: cargo codspeed run --bench ty_walltime -m walltime "${{ matrix.benchmark }}"
run: cargo codspeed run --bench ty_walltime "${{ matrix.benchmarks }}"


@@ -62,7 +62,7 @@ jobs:
name: Create an issue if the daily fuzz surfaced any bugs
runs-on: ubuntu-latest
needs: fuzz
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result != 'success' }}
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.fuzz.result == 'failure' }}
permissions:
issues: write
steps:


@@ -6,11 +6,6 @@ on:
pull_request:
paths:
- "crates/ty*/**"
- "!crates/ty_ide/**"
- "!crates/ty_server/**"
- "!crates/ty_test/**"
- "!crates/ty_completion_eval/**"
- "!crates/ty_wasm/**"
- "crates/ruff_db"
- "crates/ruff_python_ast"
- "crates/ruff_python_parser"


@@ -16,7 +16,8 @@ name: Sync typeshed
# 3. Once the Windows worker is done, a MacOS worker:
# a. Checks out the branch created by the Linux worker
# b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
# c. Formats the code again
# c. Attempts to update any snapshots that might have changed
# (this sub-step is allowed to fail)
# d. Commits the changes and pushes them to the same upstream branch
# e. Creates a PR against the `main` branch using the branch all three workers have pushed to
# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
@@ -197,6 +198,42 @@ jobs:
run: |
rm "${VENDORED_TYPESHED}/pyproject.toml"
git commit -am "Remove pyproject.toml file"
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- name: "Install Rust toolchain"
if: ${{ success() }}
run: rustup show
- name: "Install mold"
if: ${{ success() }}
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
- name: "Install cargo nextest"
if: ${{ success() }}
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
with:
tool: cargo-nextest
- name: "Install cargo insta"
if: ${{ success() }}
uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
with:
tool: cargo-insta
- name: Update snapshots
if: ${{ success() }}
run: |
cargo r \
--profile=profiling \
-p ty_completion_eval \
-- all --tasks ./crates/ty_completion_eval/completion-evaluation-tasks.csv
# The `cargo insta` docs indicate that `--unreferenced=delete` might be a good option,
# but from local testing it appears to just revert all changes made by `cargo insta test --accept`.
#
# If there were only snapshot-related failures, `cargo insta test --accept` will have exit code 0,
# but if there were also other mdtest failures (for example), it will return a nonzero exit code.
# We don't care about other tests failing here, we just want snapshots updated where possible,
# so we use `|| true` here to ignore the exit code.
cargo insta test --accept --color=always --all-features --test-runner=nextest || true
- name: Commit snapshot changes
if: ${{ success() }}
run: git commit -am "Update snapshots" || echo "No snapshot changes to commit"
- name: Push changes upstream and create a PR
if: ${{ success() }}
run: |
@@ -208,7 +245,7 @@ jobs:
name: Create an issue if the typeshed sync failed
runs-on: ubuntu-latest
needs: [sync, docstrings-windows, docstrings-macos-and-pr]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result != 'success' || needs.docstrings-windows.result != 'success' || needs.docstrings-macos-and-pr.result != 'success') }}
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
permissions:
issues: write
steps:


@@ -17,6 +17,7 @@ env:
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
REF_NAME: ${{ github.ref_name }}
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
jobs:
ty-ecosystem-analyzer:
@@ -66,7 +67,7 @@ jobs:
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@55df3c868f3fa9ab34cff0498dd6106722aac205"
ecosystem-analyzer \
--repository ruff \
@@ -111,13 +112,22 @@ jobs:
cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.
- name: "Upload full report"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
id: deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
name: full-report
path: dist/
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages deploy dist --project-name=ty-ecosystem --branch ${{ github.head_ref }} --commit-hash ${GITHUB_SHA}
- name: "Append deployment URL"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
env:
DEPLOYMENT_URL: ${{ steps.deploy.outputs.pages-deployment-alias-url }}
run: |
echo >> comment.md
echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)** ([timing results]($DEPLOYMENT_URL/timing))" >> comment.md
# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
# Make sure to update the bot if you rename the artifact.


@@ -14,6 +14,7 @@ env:
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
jobs:
ty-ecosystem-report:
@@ -30,12 +31,12 @@ jobs:
- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
with:
enable-cache: true
enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
workspaces: "ruff"
lookup-only: false
lookup-only: false # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
- name: Install Rust toolchain
run: rustup show
@@ -51,7 +52,7 @@ jobs:
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@55df3c868f3fa9ab34cff0498dd6106722aac205"
ecosystem-analyzer \
--verbose \
@@ -69,10 +70,11 @@ jobs:
ecosystem-diagnostics.json \
--output dist/index.html
# NOTE: astral-sh-bot uses this artifact to publish the ecosystem report.
# Make sure to update the bot if you rename the artifact.
- name: "Upload ecosystem report"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
id: deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
name: full-report
path: dist/
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}


@@ -6,11 +6,6 @@ on:
pull_request:
paths:
- "crates/ty*/**"
- "!crates/ty_ide/**"
- "!crates/ty_server/**"
- "!crates/ty_test/**"
- "!crates/ty_completion_eval/**"
- "!crates/ty_wasm/**"
- "crates/ruff_db"
- "crates/ruff_python_ast"
- "crates/ruff_python_parser"


@@ -5,7 +5,7 @@ exclude: |
.github/workflows/release.yml|
crates/ty_vendored/vendor/.*|
crates/ty_project/resources/.*|
crates/ty_python_types/resources/corpus/.*|
crates/ty_python_semantic/resources/corpus/.*|
crates/ty/docs/(configuration|rules|cli|environment).md|
crates/ruff_benchmark/resources/.*|
crates/ruff_linter/resources/.*|


@@ -1,54 +1,5 @@
# Changelog
## 0.14.10
Released on 2025-12-18.
### Preview features
- [formatter] Fluent formatting of method chains ([#21369](https://github.com/astral-sh/ruff/pull/21369))
- [formatter] Keep lambda parameters on one line and parenthesize the body if it expands ([#21385](https://github.com/astral-sh/ruff/pull/21385))
- \[`flake8-implicit-str-concat`\] New rule to prevent implicit string concatenation in collections (`ISC004`) ([#21972](https://github.com/astral-sh/ruff/pull/21972))
- \[`flake8-use-pathlib`\] Make fixes unsafe when types change in compound statements (`PTH104`, `PTH105`, `PTH109`, `PTH115`) ([#22009](https://github.com/astral-sh/ruff/pull/22009))
- \[`refurb`\] Extend support for `Path.open` (`FURB101`, `FURB103`) ([#21080](https://github.com/astral-sh/ruff/pull/21080))
### Bug fixes
- \[`pyupgrade`\] Fix parsing named Unicode escape sequences (`UP032`) ([#21901](https://github.com/astral-sh/ruff/pull/21901))
### Rule changes
- \[`eradicate`\] Ignore `ruff:disable` and `ruff:enable` comments in `ERA001` ([#22038](https://github.com/astral-sh/ruff/pull/22038))
- \[`flake8-pytest-style`\] Allow `match` and `check` keyword arguments without an expected exception type (`PT010`) ([#21964](https://github.com/astral-sh/ruff/pull/21964))
- [syntax-errors] Annotated name cannot be global ([#20868](https://github.com/astral-sh/ruff/pull/20868))
### Documentation
- Add `uv` and `ty` to the Ruff README ([#21996](https://github.com/astral-sh/ruff/pull/21996))
- Document known lambda formatting deviations from Black ([#21954](https://github.com/astral-sh/ruff/pull/21954))
- Update `setup.md` ([#22024](https://github.com/astral-sh/ruff/pull/22024))
- \[`flake8-bandit`\] Fix broken link (`S704`) ([#22039](https://github.com/astral-sh/ruff/pull/22039))
### Other changes
- Fix playground Share button showing "Copied!" before clipboard copy completes ([#21942](https://github.com/astral-sh/ruff/pull/21942))
### Contributors
- [@dylwil3](https://github.com/dylwil3)
- [@charliecloudberry](https://github.com/charliecloudberry)
- [@charliermarsh](https://github.com/charliermarsh)
- [@chirizxc](https://github.com/chirizxc)
- [@ntBre](https://github.com/ntBre)
- [@zanieb](https://github.com/zanieb)
- [@amyreese](https://github.com/amyreese)
- [@hauntsaninja](https://github.com/hauntsaninja)
- [@11happy](https://github.com/11happy)
- [@mahiro72](https://github.com/mahiro72)
- [@MichaReiser](https://github.com/MichaReiser)
- [@phongddo](https://github.com/phongddo)
- [@PeterJCLaw](https://github.com/PeterJCLaw)
## 0.14.9
Released on 2025-12-11.

Cargo.lock

@@ -1004,6 +1004,27 @@ dependencies = [
"crypto-common",
]
[[package]]
name = "dir-test"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62c013fe825864f3e4593f36426c1fa7a74f5603f13ca8d1af7a990c1cd94a79"
dependencies = [
"dir-test-macros",
]
[[package]]
name = "dir-test-macros"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d42f54d7b4a6bc2400fe5b338e35d1a335787585375322f49c5d5fe7b243da7e"
dependencies = [
"glob",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "dirs"
version = "6.0.0"
@@ -2887,7 +2908,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.14.10"
version = "0.14.9"
dependencies = [
"anyhow",
"argfile",
@@ -3083,7 +3104,6 @@ dependencies = [
"ty",
"ty_project",
"ty_python_semantic",
"ty_python_types",
"ty_static",
"url",
]
@@ -3130,9 +3150,7 @@ dependencies = [
"salsa",
"schemars",
"serde",
"ty_module_resolver",
"ty_python_semantic",
"ty_python_types",
"zip",
]
@@ -3148,7 +3166,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.14.10"
version = "0.14.9"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3507,7 +3525,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.14.10"
version = "0.14.9"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3622,8 +3640,8 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
version = "0.24.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1"
dependencies = [
"boxcar",
"compact_str",
@@ -3647,13 +3665,13 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
version = "0.24.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1"
[[package]]
name = "salsa-macros"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
version = "0.24.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1"
dependencies = [
"proc-macro2",
"quote",
@@ -4378,7 +4396,6 @@ dependencies = [
"tracing-flame",
"tracing-subscriber",
"ty_combine",
"ty_module_resolver",
"ty_project",
"ty_python_semantic",
"ty_server",
@@ -4393,6 +4410,7 @@ dependencies = [
"ordermap",
"ruff_db",
"ruff_python_ast",
"ty_python_semantic",
]
[[package]]
@@ -4410,8 +4428,8 @@ dependencies = [
"tempfile",
"toml",
"ty_ide",
"ty_module_resolver",
"ty_project",
"ty_python_semantic",
"walkdir",
]
@@ -4441,33 +4459,8 @@ dependencies = [
"salsa",
"smallvec",
"tracing",
"ty_module_resolver",
"ty_project",
"ty_python_semantic",
"ty_python_types",
"ty_vendored",
]
[[package]]
name = "ty_module_resolver"
version = "0.0.0"
dependencies = [
"anyhow",
"camino",
"compact_str",
"get-size2",
"insta",
"ruff_db",
"ruff_memory_usage",
"ruff_python_ast",
"ruff_python_stdlib",
"rustc-hash",
"salsa",
"strum",
"strum_macros",
"tempfile",
"thiserror 2.0.17",
"tracing",
"ty_vendored",
]
@@ -4505,9 +4498,7 @@ dependencies = [
"toml",
"tracing",
"ty_combine",
"ty_module_resolver",
"ty_python_semantic",
"ty_python_types",
"ty_static",
"ty_vendored",
]
@@ -4521,12 +4512,19 @@ dependencies = [
"bitvec",
"camino",
"colored 3.0.0",
"compact_str",
"dir-test",
"drop_bomb",
"get-size2",
"glob",
"hashbrown 0.16.1",
"indexmap",
"indoc",
"insta",
"itertools 0.14.0",
"memchr",
"ordermap",
"pretty_assertions",
"quickcheck",
"quickcheck_macros",
"ruff_annotate_snippets",
@@ -4536,7 +4534,9 @@ dependencies = [
"ruff_macros",
"ruff_memory_usage",
"ruff_python_ast",
"ruff_python_literal",
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
@@ -4550,57 +4550,10 @@ dependencies = [
"strsim",
"strum",
"strum_macros",
"tempfile",
"test-case",
"thiserror 2.0.17",
"tracing",
"ty_combine",
"ty_module_resolver",
"ty_static",
"ty_vendored",
]
[[package]]
name = "ty_python_types"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.10.0",
"camino",
"compact_str",
"datatest-stable",
"drop_bomb",
"get-size2",
"glob",
"indexmap",
"indoc",
"insta",
"itertools 0.14.0",
"memchr",
"ordermap",
"pretty_assertions",
"quickcheck",
"quickcheck_macros",
"ruff_db",
"ruff_diagnostics",
"ruff_macros",
"ruff_memory_usage",
"ruff_python_ast",
"ruff_python_literal",
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"salsa",
"schemars",
"serde",
"serde_json",
"smallvec",
"static_assertions",
"strum",
"strum_macros",
"test-case",
"tracing",
"ty_module_resolver",
"ty_python_semantic",
"ty_static",
"ty_test",
@@ -4640,7 +4593,6 @@ dependencies = [
"tracing-subscriber",
"ty_combine",
"ty_ide",
"ty_module_resolver",
"ty_project",
"ty_python_semantic",
]
@@ -4681,9 +4633,7 @@ dependencies = [
"thiserror 2.0.17",
"toml",
"tracing",
"ty_module_resolver",
"ty_python_semantic",
"ty_python_types",
"ty_static",
"ty_vendored",
]


@@ -45,10 +45,8 @@ ty = { path = "crates/ty" }
ty_combine = { path = "crates/ty_combine" }
ty_completion_eval = { path = "crates/ty_completion_eval" }
ty_ide = { path = "crates/ty_ide" }
ty_module_resolver = { path = "crates/ty_module_resolver" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }
ty_python_types = { path = "crates/ty_python_types" }
ty_server = { path = "crates/ty_server" }
ty_static = { path = "crates/ty_static" }
ty_test = { path = "crates/ty_test" }
@@ -84,6 +82,7 @@ criterion = { version = "0.7.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
datatest-stable = { version = "0.3.3" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }
etcetera = { version = "0.11.0" }
@@ -148,7 +147,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "ce80691fa0b87dc2fd2235a26544e63e5e43d8d3", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "55e5e7d32fa3fc189276f35bb04c9438f9aedbd1", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",


@@ -150,8 +150,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.14.10/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.10/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.14.9/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.9/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -184,7 +184,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.10
rev: v0.14.9
hooks:
# Run the linter.
- id: ruff-check


@@ -4,7 +4,6 @@ extend-exclude = [
"crates/ty_vendored/vendor/**/*",
"**/resources/**/*",
"**/snapshots/**/*",
"crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/collection_literal.rs",
# Completion tests tend to have a lot of incomplete
# words naturally. It's annoying to have to make all
# of them actually words. So just ignore typos here.


@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.14.10"
version = "0.14.9"
publish = true
authors = { workspace = true }
edition = { workspace = true }


@@ -14,6 +14,6 @@ info:
success: false
exit_code: 1
----- stdout -----
::error title=Ruff (unformatted),file=[TMP]/input.py,line=1,endLine=2::input.py:1:1: unformatted: File would be reformatted
::error title=Ruff (unformatted),file=[TMP]/input.py,line=1,col=1,endLine=2,endColumn=1::input.py:1:1: unformatted: File would be reformatted
----- stderr -----


@@ -19,32 +19,32 @@ doctest = false
[[bench]]
name = "linter"
harness = false
required-features = ["ruff_instrumented"]
required-features = ["instrumented"]
[[bench]]
name = "lexer"
harness = false
required-features = ["ruff_instrumented"]
required-features = ["instrumented"]
[[bench]]
name = "parser"
harness = false
required-features = ["ruff_instrumented"]
required-features = ["instrumented"]
[[bench]]
name = "formatter"
harness = false
required-features = ["ruff_instrumented"]
required-features = ["instrumented"]
[[bench]]
name = "ty"
harness = false
required-features = ["ty_instrumented"]
required-features = ["instrumented"]
[[bench]]
name = "ty_walltime"
harness = false
required-features = ["ty_walltime"]
required-features = ["walltime"]
[dependencies]
ruff_db = { workspace = true, features = ["testing"] }
@@ -67,32 +67,25 @@ tracing = { workspace = true }
workspace = true
[features]
default = ["ty_instrumented", "ty_walltime", "ruff_instrumented"]
# Enables the ruff instrumented benchmarks
ruff_instrumented = [
default = ["instrumented", "walltime"]
# Enables the benchmark that should only run with codspeed's instrumented runner
instrumented = [
"criterion",
"ruff_linter",
"ruff_python_formatter",
"ruff_python_parser",
"ruff_python_trivia",
"mimalloc",
"tikv-jemallocator",
]
# Enables the ty instrumented benchmarks
ty_instrumented = [
"criterion",
"ty_project",
"ruff_python_trivia",
]
codspeed = ["codspeed-criterion-compat"]
# Enables the ty_walltime benchmarks
ty_walltime = ["ruff_db/os", "ty_project", "divan"]
# Enables benchmark that should only run with codspeed's walltime runner.
walltime = ["ruff_db/os", "ty_project", "divan"]
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true, optional = true }
[target.'cfg(target_os = "windows")'.dev-dependencies]
mimalloc = { workspace = true }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
tikv-jemallocator = { workspace = true, optional = true }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dev-dependencies]
tikv-jemallocator = { workspace = true }
[dev-dependencies]
rustc-hash = { workspace = true }


@@ -194,7 +194,7 @@ static SYMPY: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
13106,
13100,
);
static TANJUN: Benchmark = Benchmark::new(
@@ -235,55 +235,30 @@ fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
});
}
#[bench(sample_size = 2, sample_count = 3)]
fn altair(bencher: Bencher) {
run_single_threaded(bencher, &ALTAIR);
#[bench(args=[&ALTAIR, &FREQTRADE, &TANJUN], sample_size=2, sample_count=3)]
fn small(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark);
}
#[bench(sample_size = 2, sample_count = 3)]
fn freqtrade(bencher: Bencher) {
run_single_threaded(bencher, &FREQTRADE);
#[bench(args=[&COLOUR_SCIENCE, &PANDAS, &STATIC_FRAME], sample_size=1, sample_count=3)]
fn medium(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark);
}
#[bench(sample_size = 2, sample_count = 3)]
fn tanjun(bencher: Bencher) {
run_single_threaded(bencher, &TANJUN);
#[bench(args=[&SYMPY, &PYDANTIC], sample_size=1, sample_count=2)]
fn large(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark);
}
#[bench(sample_size = 2, sample_count = 3)]
fn pydantic(bencher: Bencher) {
run_single_threaded(bencher, &PYDANTIC);
}
#[bench(sample_size = 1, sample_count = 3)]
fn static_frame(bencher: Bencher) {
run_single_threaded(bencher, &STATIC_FRAME);
}
#[bench(sample_size = 1, sample_count = 2)]
fn colour_science(bencher: Bencher) {
run_single_threaded(bencher, &COLOUR_SCIENCE);
}
#[bench(sample_size = 1, sample_count = 2)]
fn pandas(bencher: Bencher) {
run_single_threaded(bencher, &PANDAS);
}
#[bench(sample_size = 1, sample_count = 2)]
fn sympy(bencher: Bencher) {
run_single_threaded(bencher, &SYMPY);
}
#[bench(sample_size = 3, sample_count = 8)]
fn multithreaded(bencher: Bencher) {
#[bench(args=[&ALTAIR], sample_size=3, sample_count=8)]
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
let thread_pool = ThreadPoolBuilder::new().build().unwrap();
bencher
.with_inputs(|| ALTAIR.setup_iteration())
.with_inputs(|| benchmark.setup_iteration())
.bench_local_values(|db| {
thread_pool.install(|| {
check_project(&db, ALTAIR.project.name, ALTAIR.max_diagnostics);
check_project(&db, benchmark.project.name, benchmark.max_diagnostics);
db
})
});
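For reference, a minimal standalone sketch of the divan `#[bench(args = ...)]` grouping pattern that the hunk above switches to. The input values and workload below are illustrative only, not from the repository:

```rust
use divan::Bencher;

fn main() {
    divan::main();
}

// Illustrative workload; the real benchmarks type-check whole projects.
fn expensive_check(input: &str) -> usize {
    input.chars().count()
}

// One parameterized benchmark covering several inputs, mirroring the grouped
// `#[bench(args = [...], sample_size = ..., sample_count = ...)]` functions
// introduced above in place of one function per project.
#[divan::bench(args = ["anyio", "attrs", "hydra"], sample_size = 2, sample_count = 3)]
fn small(bencher: Bencher, input: &str) {
    bencher.bench(|| expensive_check(input));
}
```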


@@ -1,6 +1,6 @@
use std::path::PathBuf;
#[cfg(any(feature = "ty_instrumented", feature = "ruff_instrumented"))]
#[cfg(feature = "instrumented")]
pub mod criterion;
pub mod real_world_projects;


@@ -1,51 +0,0 @@
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
/// Signals a [`CancellationToken`] that it should be canceled.
#[derive(Debug, Clone)]
pub struct CancellationTokenSource {
cancelled: Arc<AtomicBool>,
}
impl Default for CancellationTokenSource {
fn default() -> Self {
Self::new()
}
}
impl CancellationTokenSource {
pub fn new() -> Self {
Self {
cancelled: Arc::new(AtomicBool::new(false)),
}
}
pub fn is_cancellation_requested(&self) -> bool {
self.cancelled.load(std::sync::atomic::Ordering::Relaxed)
}
/// Creates a new token that uses this source.
pub fn token(&self) -> CancellationToken {
CancellationToken {
cancelled: self.cancelled.clone(),
}
}
/// Requests cancellation for operations using this token.
pub fn cancel(&self) {
self.cancelled
.store(true, std::sync::atomic::Ordering::Relaxed);
}
}
/// Token signals whether an operation should be canceled.
#[derive(Debug, Clone)]
pub struct CancellationToken {
cancelled: Arc<AtomicBool>,
}
impl CancellationToken {
pub fn is_cancelled(&self) -> bool {
self.cancelled.load(std::sync::atomic::Ordering::Relaxed)
}
}
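For context, a hedged sketch of how a source/token pair like the one in the deleted file above is typically used: one thread requests cancellation, and workers poll the token between units of work. The worker loop is illustrative; the types are assumed to be in scope from the file above:

```rust
// Illustrative usage of the CancellationTokenSource / CancellationToken pair.
fn main() {
    let source = CancellationTokenSource::new();
    let token = source.token();

    let worker = std::thread::spawn(move || {
        // Poll the token between units of interruptible work.
        while !token.is_cancelled() {
            std::thread::yield_now();
        }
    });

    source.cancel(); // request cancellation; the worker sees it on its next check
    worker.join().unwrap();
}
```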


@@ -1,4 +1,4 @@
use std::{borrow::Cow, fmt::Formatter, path::Path, sync::Arc};
use std::{fmt::Formatter, path::Path, sync::Arc};
use ruff_diagnostics::{Applicability, Fix};
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
@@ -11,7 +11,6 @@ pub use self::render::{
ceil_char_boundary,
github::{DisplayGithubDiagnostics, GithubRenderer},
};
use crate::cancellation::CancellationToken;
use crate::{Db, files::File};
mod render;
@@ -411,6 +410,11 @@ impl Diagnostic {
self.id().is_invalid_syntax()
}
/// Returns the message body to display to the user.
pub fn body(&self) -> &str {
self.primary_message()
}
/// Returns the message of the first sub-diagnostic with a `Help` severity.
///
/// Note that this is used as the fix title/suggestion for some of Ruff's output formats, but in
@@ -1308,8 +1312,6 @@ pub struct DisplayDiagnosticConfig {
show_fix_diff: bool,
/// The lowest applicability that should be shown when reporting diagnostics.
fix_applicability: Applicability,
cancellation_token: Option<CancellationToken>,
}
impl DisplayDiagnosticConfig {
@@ -1383,20 +1385,6 @@ impl DisplayDiagnosticConfig {
pub fn fix_applicability(&self) -> Applicability {
self.fix_applicability
}
pub fn with_cancellation_token(
mut self,
token: Option<CancellationToken>,
) -> DisplayDiagnosticConfig {
self.cancellation_token = token;
self
}
pub fn is_canceled(&self) -> bool {
self.cancellation_token
.as_ref()
.is_some_and(|token| token.is_cancelled())
}
}
impl Default for DisplayDiagnosticConfig {
@@ -1410,7 +1398,6 @@ impl Default for DisplayDiagnosticConfig {
show_fix_status: false,
show_fix_diff: false,
fix_applicability: Applicability::Safe,
cancellation_token: None,
}
}
}
@@ -1487,15 +1474,6 @@ pub enum ConciseMessage<'a> {
Custom(&'a str),
}
impl<'a> ConciseMessage<'a> {
pub fn to_str(&self) -> Cow<'a, str> {
match self {
ConciseMessage::MainDiagnostic(s) | ConciseMessage::Custom(s) => Cow::Borrowed(s),
ConciseMessage::Both { .. } => Cow::Owned(self.to_string()),
}
}
}
impl std::fmt::Display for ConciseMessage<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
@@ -1512,16 +1490,6 @@ impl std::fmt::Display for ConciseMessage<'_> {
}
}
#[cfg(feature = "serde")]
impl serde::Serialize for ConciseMessage<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.collect_str(self)
}
}
/// A diagnostic message string.
///
/// This is, for all intents and purposes, equivalent to a `Box<str>`.


@@ -52,7 +52,7 @@ impl AzureRenderer<'_> {
f,
"code={code};]{body}",
code = diag.secondary_code_or_id(),
body = diag.concise_message(),
body = diag.body(),
)?;
}


@@ -28,10 +28,6 @@ impl<'a> ConciseRenderer<'a> {
let sep = fmt_styled(":", stylesheet.separator);
for diag in diagnostics {
if self.config.is_canceled() {
return Ok(());
}
if let Some(span) = diag.primary_span() {
write!(
f,


@@ -53,10 +53,6 @@ impl<'a> FullRenderer<'a> {
.hyperlink(stylesheet.hyperlink);
for diag in diagnostics {
if self.config.is_canceled() {
return Ok(());
}
let resolved = Resolved::new(self.resolver, diag, self.config);
let renderable = resolved.to_renderable(self.config.context);
for diag in renderable.diagnostics.iter() {


@@ -49,26 +49,14 @@ impl<'a> GithubRenderer<'a> {
}
.unwrap_or_default();
// GitHub Actions workflow commands have constraints on error annotations:
// - `col` and `endColumn` cannot be set if `line` and `endLine` are different
// See: https://github.com/astral-sh/ruff/issues/22074
if start_location.line == end_location.line {
write!(
f,
",line={row},col={column},endLine={end_row},endColumn={end_column}::",
row = start_location.line,
column = start_location.column,
end_row = end_location.line,
end_column = end_location.column,
)?;
} else {
write!(
f,
",line={row},endLine={end_row}::",
row = start_location.line,
end_row = end_location.line,
)?;
}
write!(
f,
",line={row},col={column},endLine={end_row},endColumn={end_column}::",
row = start_location.line,
column = start_location.column,
end_row = end_location.line,
end_column = end_location.column,
)?;
write!(
f,
@@ -87,7 +75,7 @@ impl<'a> GithubRenderer<'a> {
write!(f, "{id}:", id = diagnostic.id())?;
}
writeln!(f, " {}", diagnostic.concise_message())?;
writeln!(f, " {}", diagnostic.body())?;
}
Ok(())


@@ -98,7 +98,7 @@ impl Serialize for SerializedMessages<'_> {
}
fingerprints.insert(message_fingerprint);
let description = diagnostic.concise_message();
let description = diagnostic.body();
let check_name = diagnostic.secondary_code_or_id();
let severity = match diagnostic.severity() {
Severity::Info => "info",


@@ -6,7 +6,7 @@ use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed};
use ruff_text_size::Ranged;
use crate::diagnostic::{ConciseMessage, Diagnostic, DiagnosticSource, DisplayDiagnosticConfig};
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig};
use super::FileResolver;
@@ -101,7 +101,7 @@ pub(super) fn diagnostic_to_json<'a>(
JsonDiagnostic {
code: diagnostic.secondary_code_or_id(),
url: diagnostic.documentation_url(),
message: diagnostic.concise_message(),
message: diagnostic.body(),
fix,
cell: notebook_cell_index,
location: start_location.map(JsonLocation::from),
@@ -113,7 +113,7 @@ pub(super) fn diagnostic_to_json<'a>(
JsonDiagnostic {
code: diagnostic.secondary_code_or_id(),
url: diagnostic.documentation_url(),
message: diagnostic.concise_message(),
message: diagnostic.body(),
fix,
cell: notebook_cell_index,
location: Some(start_location.unwrap_or_default().into()),
@@ -226,7 +226,7 @@ pub(crate) struct JsonDiagnostic<'a> {
filename: Option<&'a str>,
fix: Option<JsonFix<'a>>,
location: Option<JsonLocation>,
message: ConciseMessage<'a>,
message: &'a str,
noqa_row: Option<OneIndexed>,
url: Option<&'a str>,
}


@@ -56,17 +56,17 @@ impl<'a> JunitRenderer<'a> {
start_location: location,
} = diagnostic;
let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
status.set_message(diagnostic.concise_message().to_str());
status.set_message(diagnostic.body());
if let Some(location) = location {
status.set_description(format!(
"line {row}, col {col}, {body}",
row = location.line,
col = location.column,
body = diagnostic.concise_message()
body = diagnostic.body()
));
} else {
status.set_description(diagnostic.concise_message().to_str());
status.set_description(diagnostic.body());
}
let code = diagnostic


@@ -55,7 +55,7 @@ impl PylintRenderer<'_> {
f,
"{path}:{row}: [{code}] {body}",
path = filename,
body = diagnostic.concise_message()
body = diagnostic.body()
)?;
}


@@ -5,7 +5,7 @@ use ruff_diagnostics::{Edit, Fix};
use ruff_source_file::{LineColumn, SourceCode};
use ruff_text_size::Ranged;
use crate::diagnostic::{ConciseMessage, Diagnostic};
use crate::diagnostic::Diagnostic;
use super::FileResolver;
@@ -76,7 +76,7 @@ fn diagnostic_to_rdjson<'a>(
let edits = diagnostic.fix().map(Fix::edits).unwrap_or_default();
RdjsonDiagnostic {
message: diagnostic.concise_message(),
message: diagnostic.body(),
location,
code: RdjsonCode {
value: diagnostic
@@ -155,7 +155,7 @@ struct RdjsonDiagnostic<'a> {
code: RdjsonCode<'a>,
#[serde(skip_serializing_if = "Option::is_none")]
location: Option<RdjsonLocation<'a>>,
message: ConciseMessage<'a>,
message: &'a str,
#[serde(skip_serializing_if = "Vec::is_empty")]
suggestions: Vec<RdjsonSuggestion<'a>>,
}


@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/github.rs
expression: env.render_diagnostics(&diagnostics)
---
::error title=ty (invalid-syntax),file=/syntax_errors.py,line=1,endLine=2::syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import
::error title=ty (invalid-syntax),file=/syntax_errors.py,line=3,endLine=4::syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline
::error title=ty (invalid-syntax),file=/syntax_errors.py,line=1,col=15,endLine=2,endColumn=1::syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import
::error title=ty (invalid-syntax),file=/syntax_errors.py,line=3,col=12,endLine=4,endColumn=1::syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline


@@ -12,7 +12,6 @@ use std::hash::BuildHasherDefault;
use std::num::NonZeroUsize;
use ty_static::EnvVars;
pub mod cancellation;
pub mod diagnostic;
pub mod display;
pub mod file_revision;


@@ -275,16 +275,16 @@ impl OsSystem {
/// instead of at least one system call for each component between `path` and `prefix`.
///
/// However, using `canonicalize` to resolve the path's casing doesn't work in two cases:
/// * if `path` is a symlink, `canonicalize` returns the symlink's target and not the symlink's source path.
/// * on Windows: If `path` is a mapped network drive, `canonicalize` returns the UNC path
/// (e.g. `Z:\` is mapped to `\\server\share` and `canonicalize` returns `\\?\UNC\server\share`).
/// * if `path` is a symlink because `canonicalize` then returns the symlink's target and not the symlink's source path.
/// * on Windows: If `path` is a mapped network drive because `canonicalize` then returns the UNC path
/// (e.g. `Z:\` is mapped to `\\server\share` and `canonicalize` then returns `\\?\UNC\server\share`).
///
/// Symlinks and mapped network drives should be rare enough that this fast path is worth trying first,
/// even if it comes at a cost for those rare use cases.
fn path_exists_case_sensitive_fast(&self, path: &SystemPath) -> Option<bool> {
// This is a more forgiving version of `dunce::simplified` that removes all `\\?\` prefixes on Windows.
// We use this more forgiving version because we don't intend on using either path for anything other than comparison
// and the prefix is only relevant when passing the path to other programs and it's longer than 200 something
// and the prefix is only relevant when passing the path to other programs and its longer than 200 something
// characters.
fn simplify_ignore_verbatim(path: &SystemPath) -> &SystemPath {
if cfg!(windows) {
@@ -298,7 +298,9 @@ impl OsSystem {
}
}
let Ok(canonicalized) = path.as_std_path().canonicalize() else {
let simplified = simplify_ignore_verbatim(path);
let Ok(canonicalized) = simplified.as_std_path().canonicalize() else {
// The path doesn't exist or can't be accessed. The path doesn't exist.
return Some(false);
};
@@ -307,13 +309,12 @@ impl OsSystem {
// The original path is valid UTF8 but the canonicalized path isn't. This definitely suggests
// that a symlink is involved. Fall back to the slow path.
tracing::debug!(
"Falling back to the slow case-sensitive path existence check because the canonicalized path of `{path}` is not valid UTF-8"
"Falling back to the slow case-sensitive path existence check because the canonicalized path of `{simplified}` is not valid UTF-8"
);
return None;
};
let simplified_canonicalized = simplify_ignore_verbatim(&canonicalized);
let simplified = simplify_ignore_verbatim(path);
// Test if the paths differ by anything other than casing. If so, that suggests that
// `path` pointed to a symlink (or some other none reversible path normalization happened).
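As a rough standalone sketch of the canonicalize-and-compare idea described in the hunk above, assuming an absolute input path and ASCII-only casing. The real implementation additionally simplifies Windows `\\?\` prefixes and falls back to a slower per-component check when this fast path can't decide:

```rust
use std::path::Path;

// Rough sketch: does `path` exist on disk with exactly this casing?
// Returns None when the fast path can't decide (e.g. a symlink changed the
// path's shape, so casing can't be compared directly).
fn exists_case_sensitive_fast(path: &Path) -> Option<bool> {
    let canonical = match path.canonicalize() {
        Ok(p) => p,
        Err(_) => return Some(false), // path doesn't exist or can't be accessed
    };
    let (a, b) = (canonical.to_string_lossy(), path.to_string_lossy());
    if !a.eq_ignore_ascii_case(&b) {
        return None; // differs by more than casing: fall back to the slow check
    }
    Some(a == b) // exact match means the requested casing matches on disk
}
```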


@@ -56,7 +56,10 @@ impl TestSystem {
/// Removes an environment variable override, making it appear as not set.
pub fn remove_env_var(&self, name: impl Into<String>) {
self.env_overrides.lock().unwrap().insert(name.into(), None);
self.env_overrides
.lock()
.unwrap()
.insert(name.into(), None);
}
/// Returns the [`InMemorySystem`].


@@ -14,7 +14,6 @@ license = { workspace = true }
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ty_python_semantic = { workspace = true }
ty_python_types = { workspace = true }
ty_static = { workspace = true }
ruff = { workspace = true }
ruff_formatter = { workspace = true }


@@ -1,13 +1,11 @@
//! Generate a Markdown-compatible listing of configuration options for `pyproject.toml`.
use std::borrow::Cow;
use std::{fmt::Write, path::PathBuf};
use anyhow::bail;
use itertools::Itertools;
use pretty_assertions::StrComparison;
use std::{fmt::Write, path::PathBuf};
use ruff_options_metadata::{OptionField, OptionSet, OptionsMetadata, Visit};
use ruff_python_trivia::textwrap;
use ty_project::metadata::Options;
use crate::{
@@ -167,69 +165,62 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
let _ = writeln!(output, "**Default value**: `{}`", field.default);
output.push('\n');
let _ = writeln!(output, "**Type**: `{}`", field.value_type);
output.push('\n');
output.push_str("**Example usage**:\n\n");
output.push_str(&format_example(
"pyproject.toml",
&format_header(
field.scope,
field.example,
parents,
ConfigurationFile::PyprojectToml,
),
field.example,
));
output.push('\n');
}
for configuration_file in [ConfigurationFile::PyprojectToml, ConfigurationFile::TyToml] {
let (header, example) =
format_snippet(field.scope, field.example, parents, configuration_file);
output.push_str(&format_tab(configuration_file.name(), &header, &example));
output.push('\n');
fn format_example(title: &str, header: &str, content: &str) -> String {
if header.is_empty() {
format!("```toml title=\"{title}\"\n{content}\n```\n",)
} else {
format!("```toml title=\"{title}\"\n{header}\n{content}\n```\n",)
}
}
fn format_tab(tab_name: &str, header: &str, content: &str) -> String {
let header = if header.is_empty() {
String::new()
} else {
format!("\n {header}")
};
format!(
"=== \"{}\"\n\n ```toml{}\n{}\n ```\n",
tab_name,
header,
textwrap::indent(content, " ")
)
}
/// Format the TOML header for the example usage for a given option.
///
/// For example: `[tool.ty.rules]`.
fn format_snippet<'a>(
/// For example: `[tool.ruff.format]` or `[tool.ruff.lint.isort]`.
fn format_header(
scope: Option<&str>,
example: &'a str,
example: &str,
parents: &[Set],
configuration: ConfigurationFile,
) -> (String, Cow<'a, str>) {
let mut example = Cow::Borrowed(example);
) -> String {
let tool_parent = match configuration {
ConfigurationFile::PyprojectToml => Some("tool.ty"),
ConfigurationFile::TyToml => None,
};
let header = configuration
.parent_table()
let header = tool_parent
.into_iter()
.chain(parents.iter().filter_map(|parent| parent.name()))
.chain(scope)
.join(".");
// Rewrite examples starting with `[tool.ty]` or `[[tool.ty]]` to their `ty.toml` equivalent.
if matches!(configuration, ConfigurationFile::TyToml) {
example = example.replace("[tool.ty.", "[").into();
}
// Ex) `[[tool.ty.xx]]`
if example.starts_with(&format!("[[{header}")) {
return (String::new(), example);
return String::new();
}
// Ex) `[tool.ty.rules]`
if example.starts_with(&format!("[{header}")) {
return (String::new(), example);
return String::new();
}
if header.is_empty() {
(String::new(), example)
String::new()
} else {
(format!("[{header}]"), example)
format!("[{header}]")
}
}
@@ -252,25 +243,10 @@ impl Visit for CollectOptionsVisitor {
#[derive(Debug, Copy, Clone)]
enum ConfigurationFile {
PyprojectToml,
#[expect(dead_code)]
TyToml,
}
impl ConfigurationFile {
const fn name(self) -> &'static str {
match self {
Self::PyprojectToml => "pyproject.toml",
Self::TyToml => "ty.toml",
}
}
const fn parent_table(self) -> Option<&'static str> {
match self {
Self::PyprojectToml => Some("tool.ty"),
Self::TyToml => None,
}
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;


@@ -52,7 +52,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
}
fn generate_markdown() -> String {
let registry = ty_python_types::default_lint_registry();
let registry = ty_python_semantic::default_lint_registry();
let mut output = String::new();
@@ -63,7 +63,12 @@ fn generate_markdown() -> String {
let _ = writeln!(&mut output, "# Rules\n");
let mut lints: Vec<_> = registry.lints().iter().collect();
lints.sort_by_key(|a| a.name());
lints.sort_by(|a, b| {
a.default_level()
.cmp(&b.default_level())
.reverse()
.then_with(|| a.name().cmp(&b.name()))
});
for lint in lints {
let _ = writeln!(&mut output, "## `{rule_name}`\n", rule_name = lint.name());
@@ -114,7 +119,7 @@ fn generate_markdown() -> String {
let _ = writeln!(
&mut output,
r#"<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of '{level}'."><code>{level}</code></a> ·
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of '{level}'."><code>{level}</code></a> ·
{status_text} ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20{encoded_name}" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/{file}#L{line}" target="_blank">View source</a>


@@ -16,9 +16,7 @@ ruff_linter = { workspace = true }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ty_module_resolver = { workspace = true }
ty_python_semantic = { workspace = true }
ty_python_types = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true, optional = true }


@@ -3,7 +3,7 @@ use ruff_python_ast::visitor::source_order::{
SourceOrderVisitor, walk_expr, walk_module, walk_stmt,
};
use ruff_python_ast::{self as ast, Expr, Mod, Stmt};
use ty_module_resolver::ModuleName;
use ty_python_semantic::ModuleName;
/// Collect all imports for a given Python file.
#[derive(Default, Debug)]


@@ -7,13 +7,11 @@ use ruff_db::files::{File, Files};
use ruff_db::system::{OsSystem, System, SystemPathBuf};
use ruff_db::vendored::{VendoredFileSystem, VendoredFileSystemBuilder};
use ruff_python_ast::PythonVersion;
use ty_module_resolver::{SearchPathSettings, SearchPaths};
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{
AnalysisSettings, Db, Program, ProgramSettings, PythonEnvironment, PythonPlatform,
PythonVersionSource, PythonVersionWithSource, SysPrefixPathOrigin,
Db, Program, ProgramSettings, PythonEnvironment, PythonPlatform, PythonVersionSource,
PythonVersionWithSource, SearchPathSettings, SysPrefixPathOrigin, default_lint_registry,
};
use ty_python_types::default_lint_registry;
static EMPTY_VENDORED: std::sync::LazyLock<VendoredFileSystem> = std::sync::LazyLock::new(|| {
let mut builder = VendoredFileSystemBuilder::new(CompressionMethod::Stored);
@@ -28,7 +26,6 @@ pub struct ModuleDb {
files: Files,
system: OsSystem,
rule_selection: Arc<RuleSelection>,
analysis_settings: Arc<AnalysisSettings>,
}
impl ModuleDb {
@@ -88,13 +85,6 @@ impl SourceDb for ModuleDb {
}
}
#[salsa::db]
impl ty_module_resolver::Db for ModuleDb {
fn search_paths(&self) -> &SearchPaths {
Program::get(self).search_paths(self)
}
}
#[salsa::db]
impl Db for ModuleDb {
fn should_check_file(&self, file: File) -> bool {
@@ -112,10 +102,6 @@ impl Db for ModuleDb {
fn verbose(&self) -> bool {
false
}
fn analysis_settings(&self) -> &AnalysisSettings {
&self.analysis_settings
}
}
#[salsa::db]


@@ -1,6 +1,6 @@
use ruff_db::files::{File, FilePath, system_path_to_file};
use ruff_db::system::SystemPath;
use ty_module_resolver::{
use ty_python_semantic::{
ModuleName, resolve_module, resolve_module_confident, resolve_real_module,
resolve_real_module_confident,
};


@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.14.10"
version = "0.14.9"
publish = false
authors = { workspace = true }
edition = { workspace = true }


@@ -1,66 +0,0 @@
facts = (
"Lobsters have blue blood.",
"The liver is the only human organ that can fully regenerate itself.",
"Clarinets are made almost entirely out of wood from the mpingo tree."
"In 1971, astronaut Alan Shepard played golf on the moon.",
)
facts = [
"Lobsters have blue blood.",
"The liver is the only human organ that can fully regenerate itself.",
"Clarinets are made almost entirely out of wood from the mpingo tree."
"In 1971, astronaut Alan Shepard played golf on the moon.",
]
facts = {
"Lobsters have blue blood.",
"The liver is the only human organ that can fully regenerate itself.",
"Clarinets are made almost entirely out of wood from the mpingo tree."
"In 1971, astronaut Alan Shepard played golf on the moon.",
}
facts = {
(
"Clarinets are made almost entirely out of wood from the mpingo tree."
"In 1971, astronaut Alan Shepard played golf on the moon."
),
}
facts = (
"Octopuses have three hearts."
# Missing comma here.
"Honey never spoils.",
)
facts = [
"Octopuses have three hearts."
# Missing comma here.
"Honey never spoils.",
]
facts = {
"Octopuses have three hearts."
# Missing comma here.
"Honey never spoils.",
}
facts = (
(
"Clarinets are made almost entirely out of wood from the mpingo tree."
"In 1971, astronaut Alan Shepard played golf on the moon."
),
)
facts = [
(
"Clarinets are made almost entirely out of wood from the mpingo tree."
"In 1971, astronaut Alan Shepard played golf on the moon."
),
]
facts = (
"Lobsters have blue blood.\n"
"The liver is the only human organ that can fully regenerate itself.\n"
"Clarinets are made almost entirely out of wood from the mpingo tree.\n"
"In 1971, astronaut Alan Shepard played golf on the moon.\n"
)


@@ -9,15 +9,3 @@ def test_ok():
def test_error():
with pytest.raises(UnicodeError):
pass
def test_match_only():
with pytest.raises(match="some error message"):
pass
def test_check_only():
with pytest.raises(check=lambda e: True):
pass
def test_match_and_check():
with pytest.raises(match="some error message", check=lambda e: True):
pass


@@ -136,38 +136,4 @@ os.chmod("pth1_file", 0o700, None, True, 1, *[1], **{"x": 1}, foo=1)
os.rename("pth1_file", "pth1_file1", None, None, 1, *[1], **{"x": 1}, foo=1)
os.replace("pth1_file1", "pth1_file", None, None, 1, *[1], **{"x": 1}, foo=1)
os.path.samefile("pth1_file", "pth1_link", 1, *[1], **{"x": 1}, foo=1)
# See: https://github.com/astral-sh/ruff/issues/21794
import sys
if os.rename("pth1.py", "pth1.py.bak"):
print("rename: truthy")
else:
print("rename: falsey")
if os.replace("pth1.py.bak", "pth1.py"):
print("replace: truthy")
else:
print("replace: falsey")
try:
for _ in os.getcwd():
print("getcwd: iterable")
break
except TypeError as e:
print("getcwd: not iterable")
try:
for _ in os.getcwdb():
print("getcwdb: iterable")
break
except TypeError as e:
print("getcwdb: not iterable")
try:
for _ in os.readlink(sys.executable):
print("readlink: iterable")
break
except TypeError as e:
print("readlink: not iterable")
os.path.samefile("pth1_file", "pth1_link", 1, *[1], **{"x": 1}, foo=1)


@@ -138,6 +138,5 @@ with open("file.txt", encoding="utf-8") as f:
with open("file.txt", encoding="utf-8") as f:
contents = process_contents(f.read())
with open("file1.txt", encoding="utf-8") as f:
with open("file.txt", encoding="utf-8") as f:
contents: str = process_contents(f.read())

View File

@@ -1,8 +0,0 @@
from pathlib import Path
with Path("file.txt").open() as f:
contents = f.read()
with Path("file.txt").open("r") as f:
contents = f.read()

View File

@@ -1,26 +0,0 @@
from pathlib import Path
with Path("file.txt").open("w") as f:
f.write("test")
with Path("file.txt").open("wb") as f:
f.write(b"test")
with Path("file.txt").open(mode="w") as f:
f.write("test")
with Path("file.txt").open("w", encoding="utf8") as f:
f.write("test")
with Path("file.txt").open("w", errors="ignore") as f:
f.write("test")
with Path(foo()).open("w") as f:
f.write("test")
p = Path("file.txt")
with p.open("w") as f:
f.write("test")
with Path("foo", "bar", "baz").open("w") as f:
f.write("test")

View File

@@ -86,26 +86,3 @@ def f():
# Multiple codes but none are used
# ruff: disable[E741, F401, F841]
print("hello")
def f():
# Unknown rule codes
# ruff: disable[YF829]
# ruff: disable[F841, RQW320]
value = 0
# ruff: enable[F841, RQW320]
# ruff: enable[YF829]
def f():
# External rule codes should be ignored
# ruff: disable[TK421]
print("hello")
# ruff: enable[TK421]
def f():
# Empty or missing rule codes
# ruff: disable
# ruff: disable[]
print("hello")

View File

@@ -214,13 +214,6 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
range: _,
node_index: _,
}) => {
if checker.is_rule_enabled(Rule::ImplicitStringConcatenationInCollectionLiteral) {
flake8_implicit_str_concat::rules::implicit_string_concatenation_in_collection_literal(
checker,
expr,
elts,
);
}
if ctx.is_store() {
let check_too_many_expressions =
checker.is_rule_enabled(Rule::ExpressionsInStarAssignment);
@@ -1336,13 +1329,6 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
}
}
Expr::Set(set) => {
if checker.is_rule_enabled(Rule::ImplicitStringConcatenationInCollectionLiteral) {
flake8_implicit_str_concat::rules::implicit_string_concatenation_in_collection_literal(
checker,
expr,
&set.elts,
);
}
if checker.is_rule_enabled(Rule::DuplicateValue) {
flake8_bugbear::rules::duplicate_value(checker, set);
}

View File

@@ -454,7 +454,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Flake8ImplicitStrConcat, "001") => rules::flake8_implicit_str_concat::rules::SingleLineImplicitStringConcatenation,
(Flake8ImplicitStrConcat, "002") => rules::flake8_implicit_str_concat::rules::MultiLineImplicitStringConcatenation,
(Flake8ImplicitStrConcat, "003") => rules::flake8_implicit_str_concat::rules::ExplicitStringConcatenation,
(Flake8ImplicitStrConcat, "004") => rules::flake8_implicit_str_concat::rules::ImplicitStringConcatenationInCollectionLiteral,
// flake8-print
(Flake8Print, "1") => rules::flake8_print::rules::Print,
@@ -1064,8 +1063,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Ruff, "100") => rules::ruff::rules::UnusedNOQA,
(Ruff, "101") => rules::ruff::rules::RedirectedNOQA,
(Ruff, "102") => rules::ruff::rules::InvalidRuleCode,
(Ruff, "103") => rules::ruff::rules::InvalidSuppressionComment,
(Ruff, "104") => rules::ruff::rules::UnmatchedSuppressionComment,
(Ruff, "200") => rules::ruff::rules::InvalidPyprojectToml,
#[cfg(any(feature = "test-rules", test))]

View File

@@ -197,7 +197,7 @@ impl Display for RuleCodeAndBody<'_> {
f,
"{fix}{body}",
fix = format_args!("[{}] ", "*".cyan()),
body = self.message.concise_message(),
body = self.message.body(),
);
}
}
@@ -208,14 +208,14 @@ impl Display for RuleCodeAndBody<'_> {
f,
"{code} {body}",
code = code.red().bold(),
body = self.message.concise_message(),
body = self.message.body(),
)
} else {
write!(
f,
"{code}: {body}",
code = self.message.id().as_str().red().bold(),
body = self.message.concise_message(),
body = self.message.body(),
)
}
}

View File

@@ -334,7 +334,7 @@ impl<'a> SarifResult<'a> {
rule_id: RuleCode::from(diagnostic),
level: "error".to_string(),
message: SarifMessage {
text: diagnostic.concise_message().to_string(),
text: diagnostic.body().to_string(),
},
fixes: Self::fix(diagnostic, &uri).into_iter().collect(),
locations: vec![SarifLocation {

View File

@@ -22,7 +22,6 @@ static ALLOWLIST_REGEX: LazyLock<Regex> = LazyLock::new(|| {
# Case-sensitive
pyright
| pyrefly
| ruff\s*:\s*(disable|enable)
| mypy:
| type:\s*ignore
| SPDX-License-Identifier:
@@ -149,8 +148,6 @@ mod tests {
assert!(!comment_contains_code("# 123", &[]));
assert!(!comment_contains_code("# 123.1", &[]));
assert!(!comment_contains_code("# 1, 2, 3", &[]));
assert!(!comment_contains_code("# ruff: disable[E501]", &[]));
assert!(!comment_contains_code("#ruff:enable[E501, F84]", &[]));
assert!(!comment_contains_code(
"# pylint: disable=redefined-outer-name",
&[]

View File

@@ -70,7 +70,7 @@ fn is_open_call(func: &Expr, semantic: &SemanticModel) -> bool {
}
/// Returns `true` if an expression resolves to a call to `pathlib.Path.open`.
pub(crate) fn is_open_call_from_pathlib(func: &Expr, semantic: &SemanticModel) -> bool {
fn is_open_call_from_pathlib(func: &Expr, semantic: &SemanticModel) -> bool {
let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = func else {
return false;
};

View File

@@ -18,7 +18,7 @@ mod async_zero_sleep;
mod blocking_http_call;
mod blocking_http_call_httpx;
mod blocking_input;
pub(crate) mod blocking_open_call;
mod blocking_open_call;
mod blocking_path_methods;
mod blocking_process_invocation;
mod blocking_sleep;

View File

@@ -12,7 +12,7 @@ use crate::{checkers::ast::Checker, settings::LinterSettings};
/// Checks for non-literal strings being passed to [`markupsafe.Markup`][markupsafe-markup].
///
/// ## Why is this bad?
/// [`markupsafe.Markup`][markupsafe-markup] does not perform any escaping, so passing dynamic
/// [`markupsafe.Markup`] does not perform any escaping, so passing dynamic
/// content, like f-strings, variables, or interpolated strings, will potentially
/// lead to XSS vulnerabilities.
///
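
To illustrate the rationale: `Markup` trusts its input wholesale, so interpolating untrusted data before construction bypasses escaping, whereas escaping APIs such as `Markup.format` handle it safely. A sketch:

```python
from markupsafe import Markup

user_input = "<script>alert('xss')</script>"

Markup(f"<p>{user_input}</p>")           # no escaping: the script tag survives intact
Markup("<p>{}</p>").format(user_input)   # arguments are escaped: &lt;script&gt;...
```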

View File

@@ -32,10 +32,6 @@ mod tests {
Path::new("ISC_syntax_error_2.py")
)]
#[test_case(Rule::ExplicitStringConcatenation, Path::new("ISC.py"))]
#[test_case(
Rule::ImplicitStringConcatenationInCollectionLiteral,
Path::new("ISC004.py")
)]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(

View File

@@ -1,103 +0,0 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::{Expr, StringLike};
use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::{Edit, Fix, FixAvailability, Violation};
/// ## What it does
/// Checks for implicitly concatenated strings inside list, tuple, and set literals.
///
/// ## Why is this bad?
/// In collection literals, implicit string concatenation is often the result of
/// a missing comma between elements, which can silently merge items together.
///
/// ## Example
/// ```python
/// facts = (
/// "Lobsters have blue blood.",
/// "The liver is the only human organ that can fully regenerate itself.",
/// "Clarinets are made almost entirely out of wood from the mpingo tree."
/// "In 1971, astronaut Alan Shepard played golf on the moon.",
/// )
/// ```
///
/// Instead, you likely intended:
/// ```python
/// facts = (
/// "Lobsters have blue blood.",
/// "The liver is the only human organ that can fully regenerate itself.",
/// "Clarinets are made almost entirely out of wood from the mpingo tree.",
/// "In 1971, astronaut Alan Shepard played golf on the moon.",
/// )
/// ```
///
/// If the concatenation is intentional, wrap it in parentheses to make it
/// explicit:
/// ```python
/// facts = (
/// "Lobsters have blue blood.",
/// "The liver is the only human organ that can fully regenerate itself.",
/// (
/// "Clarinets are made almost entirely out of wood from the mpingo tree."
/// "In 1971, astronaut Alan Shepard played golf on the moon."
/// ),
/// )
/// ```
///
/// ## Fix safety
/// Wrapping the strings in parentheses preserves your code's current
/// semantics. However, the fix is marked unsafe because the diagnostic
/// often indicates a missing comma, and the intended resolution is to
/// add one, which does change semantics.
#[derive(ViolationMetadata)]
#[violation_metadata(preview_since = "0.14.10")]
pub(crate) struct ImplicitStringConcatenationInCollectionLiteral;
impl Violation for ImplicitStringConcatenationInCollectionLiteral {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Always;
#[derive_message_formats]
fn message(&self) -> String {
"Unparenthesized implicit string concatenation in collection".to_string()
}
fn fix_title(&self) -> Option<String> {
Some("Wrap implicitly concatenated strings in parentheses".to_string())
}
}
/// ISC004
pub(crate) fn implicit_string_concatenation_in_collection_literal(
checker: &Checker,
expr: &Expr,
elements: &[Expr],
) {
for element in elements {
let Ok(string_like) = StringLike::try_from(element) else {
continue;
};
if !string_like.is_implicit_concatenated() {
continue;
}
if parenthesized_range(
string_like.as_expression_ref(),
expr.into(),
checker.tokens(),
)
.is_some()
{
continue;
}
let mut diagnostic = checker.report_diagnostic(
ImplicitStringConcatenationInCollectionLiteral,
string_like.range(),
);
diagnostic.help("Did you forget a comma?");
diagnostic.set_fix(Fix::unsafe_edits(
Edit::insertion("(".to_string(), string_like.range().start()),
[Edit::insertion(")".to_string(), string_like.range().end())],
));
}
}

View File

@@ -1,7 +1,5 @@
pub(crate) use collection_literal::*;
pub(crate) use explicit::*;
pub(crate) use implicit::*;
mod collection_literal;
mod explicit;
mod implicit;

View File

@@ -1,149 +0,0 @@
---
source: crates/ruff_linter/src/rules/flake8_implicit_str_concat/mod.rs
---
ISC004 [*] Unparenthesized implicit string concatenation in collection
--> ISC004.py:4:5
|
2 | "Lobsters have blue blood.",
3 | "The liver is the only human organ that can fully regenerate itself.",
4 | / "Clarinets are made almost entirely out of wood from the mpingo tree."
5 | | "In 1971, astronaut Alan Shepard played golf on the moon.",
| |______________________________________________________________^
6 | )
|
help: Wrap implicitly concatenated strings in parentheses
help: Did you forget a comma?
1 | facts = (
2 | "Lobsters have blue blood.",
3 | "The liver is the only human organ that can fully regenerate itself.",
- "Clarinets are made almost entirely out of wood from the mpingo tree."
- "In 1971, astronaut Alan Shepard played golf on the moon.",
4 + ("Clarinets are made almost entirely out of wood from the mpingo tree."
5 + "In 1971, astronaut Alan Shepard played golf on the moon."),
6 | )
7 |
8 | facts = [
note: This is an unsafe fix and may change runtime behavior
ISC004 [*] Unparenthesized implicit string concatenation in collection
--> ISC004.py:11:5
|
9 | "Lobsters have blue blood.",
10 | "The liver is the only human organ that can fully regenerate itself.",
11 | / "Clarinets are made almost entirely out of wood from the mpingo tree."
12 | | "In 1971, astronaut Alan Shepard played golf on the moon.",
| |______________________________________________________________^
13 | ]
|
help: Wrap implicitly concatenated strings in parentheses
help: Did you forget a comma?
8 | facts = [
9 | "Lobsters have blue blood.",
10 | "The liver is the only human organ that can fully regenerate itself.",
- "Clarinets are made almost entirely out of wood from the mpingo tree."
- "In 1971, astronaut Alan Shepard played golf on the moon.",
11 + ("Clarinets are made almost entirely out of wood from the mpingo tree."
12 + "In 1971, astronaut Alan Shepard played golf on the moon."),
13 | ]
14 |
15 | facts = {
note: This is an unsafe fix and may change runtime behavior
ISC004 [*] Unparenthesized implicit string concatenation in collection
--> ISC004.py:18:5
|
16 | "Lobsters have blue blood.",
17 | "The liver is the only human organ that can fully regenerate itself.",
18 | / "Clarinets are made almost entirely out of wood from the mpingo tree."
19 | | "In 1971, astronaut Alan Shepard played golf on the moon.",
| |______________________________________________________________^
20 | }
|
help: Wrap implicitly concatenated strings in parentheses
help: Did you forget a comma?
15 | facts = {
16 | "Lobsters have blue blood.",
17 | "The liver is the only human organ that can fully regenerate itself.",
- "Clarinets are made almost entirely out of wood from the mpingo tree."
- "In 1971, astronaut Alan Shepard played golf on the moon.",
18 + ("Clarinets are made almost entirely out of wood from the mpingo tree."
19 + "In 1971, astronaut Alan Shepard played golf on the moon."),
20 | }
21 |
22 | facts = {
note: This is an unsafe fix and may change runtime behavior
ISC004 [*] Unparenthesized implicit string concatenation in collection
--> ISC004.py:30:5
|
29 | facts = (
30 | / "Octopuses have three hearts."
31 | | # Missing comma here.
32 | | "Honey never spoils.",
| |_________________________^
33 | )
|
help: Wrap implicitly concatenated strings in parentheses
help: Did you forget a comma?
27 | }
28 |
29 | facts = (
- "Octopuses have three hearts."
30 + ("Octopuses have three hearts."
31 | # Missing comma here.
- "Honey never spoils.",
32 + "Honey never spoils."),
33 | )
34 |
35 | facts = [
note: This is an unsafe fix and may change runtime behavior
ISC004 [*] Unparenthesized implicit string concatenation in collection
--> ISC004.py:36:5
|
35 | facts = [
36 | / "Octopuses have three hearts."
37 | | # Missing comma here.
38 | | "Honey never spoils.",
| |_________________________^
39 | ]
|
help: Wrap implicitly concatenated strings in parentheses
help: Did you forget a comma?
33 | )
34 |
35 | facts = [
- "Octopuses have three hearts."
36 + ("Octopuses have three hearts."
37 | # Missing comma here.
- "Honey never spoils.",
38 + "Honey never spoils."),
39 | ]
40 |
41 | facts = {
note: This is an unsafe fix and may change runtime behavior
ISC004 [*] Unparenthesized implicit string concatenation in collection
--> ISC004.py:42:5
|
41 | facts = {
42 | / "Octopuses have three hearts."
43 | | # Missing comma here.
44 | | "Honey never spoils.",
| |_________________________^
45 | }
|
help: Wrap implicitly concatenated strings in parentheses
help: Did you forget a comma?
39 | ]
40 |
41 | facts = {
- "Octopuses have three hearts."
42 + ("Octopuses have three hearts."
43 | # Missing comma here.
- "Honey never spoils.",
44 + "Honey never spoils."),
45 | }
46 |
47 | facts = (
note: This is an unsafe fix and may change runtime behavior

View File

@@ -37,11 +37,10 @@ use crate::{Fix, FixAvailability, Violation};
/// import logging
///
/// logging.basicConfig(level=logging.INFO)
/// logger = logging.getLogger(__name__)
///
///
/// def sum_less_than_four(a, b):
/// logger.debug("Calling sum_less_than_four")
/// logging.debug("Calling sum_less_than_four")
/// return a + b < 4
/// ```
///

View File

@@ -125,9 +125,6 @@ impl Violation for PytestRaisesTooBroad {
/// ## Why is this bad?
/// `pytest.raises` expects to receive an expected exception as its first
/// argument. If omitted, the `pytest.raises` call will fail at runtime.
/// The rule also accepts calls without an expected exception but with
/// `match` and/or `check` keyword arguments, which are valid as of
/// pytest 8.4.0.
///
/// ## Example
/// ```python
@@ -184,8 +181,6 @@ pub(crate) fn raises_call(checker: &Checker, call: &ast::ExprCall) {
.arguments
.find_argument("expected_exception", 0)
.is_none()
&& call.arguments.find_keyword("match").is_none()
&& call.arguments.find_keyword("check").is_none()
{
checker.report_diagnostic(PytestRaisesWithoutException, call.func.range());
}
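
A minimal sketch of the call shapes this change distinguishes (the `match`-only and `check`-only forms are valid on pytest >= 8.4.0, per the rule docs above):

```python
import pytest

with pytest.raises(ValueError):               # ok: expected exception provided
    int("not a number")

with pytest.raises(match="invalid literal"):  # ok on pytest >= 8.4.0: match-only
    int("not a number")

with pytest.raises():                         # still flagged: no exception, `match`, or `check` given
    int("not a number")
```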

View File

@@ -210,7 +210,6 @@ pub(crate) fn is_argument_non_default(arguments: &Arguments, name: &str, positio
/// Returns `true` if the given call is a top-level expression in its statement.
/// This means the call's return value is not used, so return type changes don't matter.
pub(crate) fn is_top_level_expression_in_statement(checker: &Checker) -> bool {
pub(crate) fn is_top_level_expression_call(checker: &Checker) -> bool {
checker.semantic().current_expression_parent().is_none()
&& checker.semantic().current_statement().is_expr_stmt()
}
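
A sketch of the distinction this helper draws, with hypothetical paths; only the first call below is a top-level expression statement:

```python
import os

os.rename("a.txt", "b.txt")            # expression statement: return value unused
result = os.rename("a.txt", "b.txt")   # assignment: the return value is used
if os.rename("a.txt", "b.txt"):        # condition: the return value is used
    ...
```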

View File

@@ -6,7 +6,7 @@ use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::importer::ImportRequest;
use crate::preview::is_fix_os_getcwd_enabled;
use crate::rules::flake8_use_pathlib::helpers::is_top_level_expression_in_statement;
use crate::rules::flake8_use_pathlib::helpers::is_top_level_expression_call;
use crate::{FixAvailability, Violation};
/// ## What it does
@@ -89,7 +89,7 @@ pub(crate) fn os_getcwd(checker: &Checker, call: &ExprCall, segments: &[&str]) {
// Unsafe when the fix would delete comments or change a used return value
let applicability = if checker.comment_ranges().intersects(range)
|| !is_top_level_expression_in_statement(checker)
|| !is_top_level_expression_call(checker)
{
Applicability::Unsafe
} else {
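
The used-return-value case matters because the fix changes the return type from `str` to `Path`, as the new fixture for issue 21794 exercises. A minimal sketch:

```python
import os
from pathlib import Path

for ch in os.getcwd():       # a str iterates over its characters
    break

try:
    for part in Path.cwd():  # a Path is not iterable
        break
except TypeError:
    pass
```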

View File

@@ -6,7 +6,7 @@ use crate::checkers::ast::Checker;
use crate::preview::is_fix_os_readlink_enabled;
use crate::rules::flake8_use_pathlib::helpers::{
check_os_pathlib_single_arg_calls, is_keyword_only_argument_non_default,
is_top_level_expression_in_statement,
is_top_level_expression_call,
};
use crate::{FixAvailability, Violation};
@@ -86,7 +86,7 @@ pub(crate) fn os_readlink(checker: &Checker, call: &ExprCall, segments: &[&str])
return;
}
let applicability = if !is_top_level_expression_in_statement(checker) {
let applicability = if !is_top_level_expression_call(checker) {
// Unsafe because the return type changes (str/bytes -> Path)
Applicability::Unsafe
} else {
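
A sketch of why a used return value makes this fix unsafe; `link` is a hypothetical existing symlink:

```python
import os
from pathlib import Path

link = "my_symlink"              # hypothetical: an existing symlink
target = os.readlink(link)       # -> str: "/prefix" + target works
target = Path(link).readlink()   # -> Path: "/prefix" + target raises TypeError
```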

View File

@@ -6,7 +6,7 @@ use crate::checkers::ast::Checker;
use crate::preview::is_fix_os_rename_enabled;
use crate::rules::flake8_use_pathlib::helpers::{
check_os_pathlib_two_arg_calls, has_unknown_keywords_or_starred_expr,
is_keyword_only_argument_non_default, is_top_level_expression_in_statement,
is_keyword_only_argument_non_default, is_top_level_expression_call,
};
use crate::{FixAvailability, Violation};
@@ -92,7 +92,7 @@ pub(crate) fn os_rename(checker: &Checker, call: &ExprCall, segments: &[&str]) {
);
// Unsafe when the fix would delete comments or change a used return value
let applicability = if !is_top_level_expression_in_statement(checker) {
let applicability = if !is_top_level_expression_call(checker) {
// Unsafe because the return type changes (None -> Path)
Applicability::Unsafe
} else {
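
A sketch of the truthiness change behind this (the same applies to `os.replace` vs. `Path.replace`); the paths are hypothetical and assumed to exist:

```python
import os
from pathlib import Path

# os.rename returns None, which is falsey:
assert os.rename("a.txt", "b.txt") is None

# Path.rename returns the destination Path, which is truthy:
assert Path("b.txt").rename("a.txt") == Path("a.txt")
```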

View File

@@ -6,7 +6,7 @@ use crate::checkers::ast::Checker;
use crate::preview::is_fix_os_replace_enabled;
use crate::rules::flake8_use_pathlib::helpers::{
check_os_pathlib_two_arg_calls, has_unknown_keywords_or_starred_expr,
is_keyword_only_argument_non_default, is_top_level_expression_in_statement,
is_keyword_only_argument_non_default, is_top_level_expression_call,
};
use crate::{FixAvailability, Violation};
@@ -95,7 +95,7 @@ pub(crate) fn os_replace(checker: &Checker, call: &ExprCall, segments: &[&str])
);
// Unsafe when the fix would delete comments or change a used return value
let applicability = if !is_top_level_expression_in_statement(checker) {
let applicability = if !is_top_level_expression_call(checker) {
// Unsafe because the return type changes (None -> Path)
Applicability::Unsafe
} else {

View File

@@ -567,64 +567,5 @@ PTH121 `os.path.samefile()` should be replaced by `Path.samefile()`
138 |
139 | os.path.samefile("pth1_file", "pth1_link", 1, *[1], **{"x": 1}, foo=1)
| ^^^^^^^^^^^^^^^^
140 |
141 | # See: https://github.com/astral-sh/ruff/issues/21794
|
help: Replace with `Path(...).samefile()`
PTH104 `os.rename()` should be replaced by `Path.rename()`
--> full_name.py:144:4
|
142 | import sys
143 |
144 | if os.rename("pth1.py", "pth1.py.bak"):
| ^^^^^^^^^
145 | print("rename: truthy")
146 | else:
|
help: Replace with `Path(...).rename(...)`
PTH105 `os.replace()` should be replaced by `Path.replace()`
--> full_name.py:149:4
|
147 | print("rename: falsey")
148 |
149 | if os.replace("pth1.py.bak", "pth1.py"):
| ^^^^^^^^^^
150 | print("replace: truthy")
151 | else:
|
help: Replace with `Path(...).replace(...)`
PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
--> full_name.py:155:14
|
154 | try:
155 | for _ in os.getcwd():
| ^^^^^^^^^
156 | print("getcwd: iterable")
157 | break
|
help: Replace with `Path.cwd()`
PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
--> full_name.py:162:14
|
161 | try:
162 | for _ in os.getcwdb():
| ^^^^^^^^^^
163 | print("getcwdb: iterable")
164 | break
|
help: Replace with `Path.cwd()`
PTH115 `os.readlink()` should be replaced by `Path.readlink()`
--> full_name.py:169:14
|
168 | try:
169 | for _ in os.readlink(sys.executable):
| ^^^^^^^^^^^
170 | print("readlink: iterable")
171 | break
|
help: Replace with `Path(...).readlink()`

View File

@@ -1037,142 +1037,5 @@ PTH121 `os.path.samefile()` should be replaced by `Path.samefile()`
138 |
139 | os.path.samefile("pth1_file", "pth1_link", 1, *[1], **{"x": 1}, foo=1)
| ^^^^^^^^^^^^^^^^
140 |
141 | # See: https://github.com/astral-sh/ruff/issues/21794
|
help: Replace with `Path(...).samefile()`
PTH104 [*] `os.rename()` should be replaced by `Path.rename()`
--> full_name.py:144:4
|
142 | import sys
143 |
144 | if os.rename("pth1.py", "pth1.py.bak"):
| ^^^^^^^^^
145 | print("rename: truthy")
146 | else:
|
help: Replace with `Path(...).rename(...)`
140 |
141 | # See: https://github.com/astral-sh/ruff/issues/21794
142 | import sys
143 + import pathlib
144 |
- if os.rename("pth1.py", "pth1.py.bak"):
145 + if pathlib.Path("pth1.py").rename("pth1.py.bak"):
146 | print("rename: truthy")
147 | else:
148 | print("rename: falsey")
note: This is an unsafe fix and may change runtime behavior
PTH105 [*] `os.replace()` should be replaced by `Path.replace()`
--> full_name.py:149:4
|
147 | print("rename: falsey")
148 |
149 | if os.replace("pth1.py.bak", "pth1.py"):
| ^^^^^^^^^^
150 | print("replace: truthy")
151 | else:
|
help: Replace with `Path(...).replace(...)`
140 |
141 | # See: https://github.com/astral-sh/ruff/issues/21794
142 | import sys
143 + import pathlib
144 |
145 | if os.rename("pth1.py", "pth1.py.bak"):
146 | print("rename: truthy")
147 | else:
148 | print("rename: falsey")
149 |
- if os.replace("pth1.py.bak", "pth1.py"):
150 + if pathlib.Path("pth1.py.bak").replace("pth1.py"):
151 | print("replace: truthy")
152 | else:
153 | print("replace: falsey")
note: This is an unsafe fix and may change runtime behavior
PTH109 [*] `os.getcwd()` should be replaced by `Path.cwd()`
--> full_name.py:155:14
|
154 | try:
155 | for _ in os.getcwd():
| ^^^^^^^^^
156 | print("getcwd: iterable")
157 | break
|
help: Replace with `Path.cwd()`
140 |
141 | # See: https://github.com/astral-sh/ruff/issues/21794
142 | import sys
143 + import pathlib
144 |
145 | if os.rename("pth1.py", "pth1.py.bak"):
146 | print("rename: truthy")
--------------------------------------------------------------------------------
153 | print("replace: falsey")
154 |
155 | try:
- for _ in os.getcwd():
156 + for _ in pathlib.Path.cwd():
157 | print("getcwd: iterable")
158 | break
159 | except TypeError as e:
note: This is an unsafe fix and may change runtime behavior
PTH109 [*] `os.getcwd()` should be replaced by `Path.cwd()`
--> full_name.py:162:14
|
161 | try:
162 | for _ in os.getcwdb():
| ^^^^^^^^^^
163 | print("getcwdb: iterable")
164 | break
|
help: Replace with `Path.cwd()`
140 |
141 | # See: https://github.com/astral-sh/ruff/issues/21794
142 | import sys
143 + import pathlib
144 |
145 | if os.rename("pth1.py", "pth1.py.bak"):
146 | print("rename: truthy")
--------------------------------------------------------------------------------
160 | print("getcwd: not iterable")
161 |
162 | try:
- for _ in os.getcwdb():
163 + for _ in pathlib.Path.cwd():
164 | print("getcwdb: iterable")
165 | break
166 | except TypeError as e:
note: This is an unsafe fix and may change runtime behavior
PTH115 [*] `os.readlink()` should be replaced by `Path.readlink()`
--> full_name.py:169:14
|
168 | try:
169 | for _ in os.readlink(sys.executable):
| ^^^^^^^^^^^
170 | print("readlink: iterable")
171 | break
|
help: Replace with `Path(...).readlink()`
140 |
141 | # See: https://github.com/astral-sh/ruff/issues/21794
142 | import sys
143 + import pathlib
144 |
145 | if os.rename("pth1.py", "pth1.py.bak"):
146 | print("rename: truthy")
--------------------------------------------------------------------------------
167 | print("getcwdb: not iterable")
168 |
169 | try:
- for _ in os.readlink(sys.executable):
170 + for _ in pathlib.Path(sys.executable).readlink():
171 | print("readlink: iterable")
172 | break
173 | except TypeError as e:
note: This is an unsafe fix and may change runtime behavior

View File

@@ -3,11 +3,10 @@ use std::borrow::Cow;
use ruff_python_ast::PythonVersion;
use ruff_python_ast::{self as ast, Expr, name::Name, token::parenthesized_range};
use ruff_python_codegen::Generator;
use ruff_python_semantic::{ResolvedReference, SemanticModel};
use ruff_python_semantic::{BindingId, ResolvedReference, SemanticModel};
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::rules::flake8_async::rules::blocking_open_call::is_open_call_from_pathlib;
use crate::{Applicability, Edit, Fix};
/// Format a code snippet to call `name.method()`.
@@ -120,13 +119,14 @@ impl OpenMode {
pub(super) struct FileOpen<'a> {
/// With item where the open happens; we use it for the reporting range.
pub(super) item: &'a ast::WithItem,
/// Filename expression used as the first argument in `open`; we use it in the diagnostic message.
pub(super) filename: &'a Expr,
/// The file open mode.
pub(super) mode: OpenMode,
/// The file open keywords.
pub(super) keywords: Vec<&'a ast::Keyword>,
/// We only check `open` operations whose file handles are used exactly once.
pub(super) reference: &'a ResolvedReference,
pub(super) argument: OpenArgument<'a>,
}
impl FileOpen<'_> {
@@ -137,45 +137,6 @@ impl FileOpen<'_> {
}
}
#[derive(Debug, Clone, Copy)]
pub(super) enum OpenArgument<'a> {
/// The filename argument to `open`, e.g. "foo.txt" in:
///
/// ```py
/// f = open("foo.txt")
/// ```
Builtin { filename: &'a Expr },
/// The `Path` receiver of a `pathlib.Path.open` call, e.g. the `p` in the
/// context manager in:
///
/// ```py
/// p = Path("foo.txt")
/// with p.open() as f: ...
/// ```
///
/// or `Path("foo.txt")` in
///
/// ```py
/// with Path("foo.txt").open() as f: ...
/// ```
Pathlib { path: &'a Expr },
}
impl OpenArgument<'_> {
pub(super) fn display<'src>(&self, source: &'src str) -> &'src str {
&source[self.range()]
}
}
impl Ranged for OpenArgument<'_> {
fn range(&self) -> TextRange {
match self {
OpenArgument::Builtin { filename } => filename.range(),
OpenArgument::Pathlib { path } => path.range(),
}
}
}
/// Find and return all `open` operations in the given `with` statement.
pub(super) fn find_file_opens<'a>(
with: &'a ast::StmtWith,
@@ -185,65 +146,10 @@ pub(super) fn find_file_opens<'a>(
) -> Vec<FileOpen<'a>> {
with.items
.iter()
.filter_map(|item| {
find_file_open(item, with, semantic, read_mode, python_version)
.or_else(|| find_path_open(item, with, semantic, read_mode, python_version))
})
.filter_map(|item| find_file_open(item, with, semantic, read_mode, python_version))
.collect()
}
fn resolve_file_open<'a>(
item: &'a ast::WithItem,
with: &'a ast::StmtWith,
semantic: &'a SemanticModel<'a>,
read_mode: bool,
mode: OpenMode,
keywords: Vec<&'a ast::Keyword>,
argument: OpenArgument<'a>,
) -> Option<FileOpen<'a>> {
match mode {
OpenMode::ReadText | OpenMode::ReadBytes => {
if !read_mode {
return None;
}
}
OpenMode::WriteText | OpenMode::WriteBytes => {
if read_mode {
return None;
}
}
}
if matches!(mode, OpenMode::ReadBytes | OpenMode::WriteBytes) && !keywords.is_empty() {
return None;
}
let var = item.optional_vars.as_deref()?.as_name_expr()?;
let scope = semantic.current_scope();
let binding = scope.get_all(var.id.as_str()).find_map(|id| {
let b = semantic.binding(id);
(b.range() == var.range()).then_some(b)
})?;
let references: Vec<&ResolvedReference> = binding
.references
.iter()
.map(|id| semantic.reference(*id))
.filter(|reference| with.range().contains_range(reference.range()))
.collect();
let [reference] = references.as_slice() else {
return None;
};
Some(FileOpen {
item,
mode,
keywords,
reference,
argument,
})
}
/// Find `open` operation in the given `with` item.
fn find_file_open<'a>(
item: &'a ast::WithItem,
@@ -259,6 +165,8 @@ fn find_file_open<'a>(
..
} = item.context_expr.as_call_expr()?;
let var = item.optional_vars.as_deref()?.as_name_expr()?;
// Ignore calls with `*args` and `**kwargs`. In the exact case of `open(*filename, mode="w")`,
// it could be a match; but in all other cases, the call _could_ contain unsupported keyword
// arguments, like `buffering`.
@@ -279,57 +187,58 @@ fn find_file_open<'a>(
let (keywords, kw_mode) = match_open_keywords(keywords, read_mode, python_version)?;
let mode = kw_mode.unwrap_or(pos_mode);
resolve_file_open(
item,
with,
semantic,
read_mode,
mode,
keywords,
OpenArgument::Builtin { filename },
)
}
fn find_path_open<'a>(
item: &'a ast::WithItem,
with: &'a ast::StmtWith,
semantic: &'a SemanticModel<'a>,
read_mode: bool,
python_version: PythonVersion,
) -> Option<FileOpen<'a>> {
let ast::ExprCall {
func,
arguments: ast::Arguments { args, keywords, .. },
..
} = item.context_expr.as_call_expr()?;
if args.iter().any(Expr::is_starred_expr)
|| keywords.iter().any(|keyword| keyword.arg.is_none())
{
match mode {
OpenMode::ReadText | OpenMode::ReadBytes => {
if !read_mode {
return None;
}
}
OpenMode::WriteText | OpenMode::WriteBytes => {
if read_mode {
return None;
}
}
}
// Path.read_bytes and Path.write_bytes do not support any kwargs.
if matches!(mode, OpenMode::ReadBytes | OpenMode::WriteBytes) && !keywords.is_empty() {
return None;
}
if !is_open_call_from_pathlib(func, semantic) {
// Now we need to find what this variable is bound to...
let scope = semantic.current_scope();
let bindings: Vec<BindingId> = scope.get_all(var.id.as_str()).collect();
let binding = bindings
.iter()
.map(|id| semantic.binding(*id))
// We might have many bindings with the same name, but we only care
// about the one we are looking at right now.
.find(|binding| binding.range() == var.range())?;
// Since many references can share the same binding, we can limit our attention
// exclusively to the body of the current `with` statement.
let references: Vec<&ResolvedReference> = binding
.references
.iter()
.map(|id| semantic.reference(*id))
.filter(|reference| with.range().contains_range(reference.range()))
.collect();
// And even with all these restrictions, if the file handle is used anything other
// than exactly once, it doesn't fit the bill.
let [reference] = references.as_slice() else {
return None;
}
let attr = func.as_attribute_expr()?;
let mode = if args.is_empty() {
OpenMode::ReadText
} else {
match_open_mode(args.first()?)?
};
let (keywords, kw_mode) = match_open_keywords(keywords, read_mode, python_version)?;
let mode = kw_mode.unwrap_or(mode);
resolve_file_open(
Some(FileOpen {
item,
with,
semantic,
read_mode,
filename,
mode,
keywords,
OpenArgument::Pathlib {
path: attr.value.as_ref(),
},
)
reference,
})
}
/// Match positional arguments. Return expression for the file name and open mode.
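
A sketch of the "exactly once" restriction the comments above describe, assuming `file.txt` exists:

```python
with open("file.txt") as f:
    contents = f.read()   # the handle is referenced exactly once: a candidate

with open("file.txt") as f:
    header = f.read(10)
    footer = f.read()     # two references to `f`: not a candidate
```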

View File

@@ -15,8 +15,7 @@ mod tests {
use crate::test::test_path;
use crate::{assert_diagnostics, settings};
#[test_case(Rule::ReadWholeFile, Path::new("FURB101_0.py"))]
#[test_case(Rule::ReadWholeFile, Path::new("FURB101_1.py"))]
#[test_case(Rule::ReadWholeFile, Path::new("FURB101.py"))]
#[test_case(Rule::RepeatedAppend, Path::new("FURB113.py"))]
#[test_case(Rule::IfExpInsteadOfOrOperator, Path::new("FURB110.py"))]
#[test_case(Rule::ReimplementedOperator, Path::new("FURB118.py"))]
@@ -47,8 +46,7 @@ mod tests {
#[test_case(Rule::MetaClassABCMeta, Path::new("FURB180.py"))]
#[test_case(Rule::HashlibDigestHex, Path::new("FURB181.py"))]
#[test_case(Rule::ListReverseCopy, Path::new("FURB187.py"))]
#[test_case(Rule::WriteWholeFile, Path::new("FURB103_0.py"))]
#[test_case(Rule::WriteWholeFile, Path::new("FURB103_1.py"))]
#[test_case(Rule::WriteWholeFile, Path::new("FURB103.py"))]
#[test_case(Rule::FStringNumberFormat, Path::new("FURB116.py"))]
#[test_case(Rule::SortedMinMax, Path::new("FURB192.py"))]
#[test_case(Rule::SliceToRemovePrefixOrSuffix, Path::new("FURB188.py"))]
@@ -67,7 +65,7 @@ mod tests {
#[test]
fn write_whole_file_python_39() -> Result<()> {
let diagnostics = test_path(
Path::new("refurb/FURB103_0.py"),
Path::new("refurb/FURB103.py"),
&settings::LinterSettings::for_rule(Rule::WriteWholeFile)
.with_target_version(PythonVersion::PY39),
)?;

View File

@@ -10,7 +10,7 @@ use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::fix::snippet::SourceCodeSnippet;
use crate::importer::ImportRequest;
use crate::rules::refurb::helpers::{FileOpen, OpenArgument, find_file_opens};
use crate::rules::refurb::helpers::{FileOpen, find_file_opens};
use crate::{FixAvailability, Violation};
/// ## What it does
@@ -42,41 +42,27 @@ use crate::{FixAvailability, Violation};
/// - [Python documentation: `Path.read_text`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.read_text)
#[derive(ViolationMetadata)]
#[violation_metadata(preview_since = "v0.1.2")]
pub(crate) struct ReadWholeFile<'a> {
pub(crate) struct ReadWholeFile {
filename: SourceCodeSnippet,
suggestion: SourceCodeSnippet,
argument: OpenArgument<'a>,
}
impl Violation for ReadWholeFile<'_> {
impl Violation for ReadWholeFile {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
#[derive_message_formats]
fn message(&self) -> String {
let filename = self.filename.truncated_display();
let suggestion = self.suggestion.truncated_display();
match self.argument {
OpenArgument::Pathlib { .. } => {
format!(
"`Path.open()` followed by `read()` can be replaced by `{filename}.{suggestion}`"
)
}
OpenArgument::Builtin { .. } => {
format!("`open` and `read` should be replaced by `Path({filename}).{suggestion}`")
}
}
format!("`open` and `read` should be replaced by `Path({filename}).{suggestion}`")
}
fn fix_title(&self) -> Option<String> {
let filename = self.filename.truncated_display();
let suggestion = self.suggestion.truncated_display();
match self.argument {
OpenArgument::Pathlib { .. } => Some(format!("Replace with `{filename}.{suggestion}`")),
OpenArgument::Builtin { .. } => {
Some(format!("Replace with `Path({filename}).{suggestion}`"))
}
}
Some(format!(
"Replace with `Path({}).{}`",
self.filename.truncated_display(),
self.suggestion.truncated_display(),
))
}
}
@@ -128,13 +114,13 @@ impl<'a> Visitor<'a> for ReadMatcher<'a, '_> {
.position(|open| open.is_ref(read_from))
{
let open = self.candidates.remove(open);
let filename_display = open.argument.display(self.checker.source());
let suggestion = make_suggestion(&open, self.checker.generator());
let mut diagnostic = self.checker.report_diagnostic(
ReadWholeFile {
filename: SourceCodeSnippet::from_str(filename_display),
filename: SourceCodeSnippet::from_str(
&self.checker.generator().expr(open.filename),
),
suggestion: SourceCodeSnippet::from_str(&suggestion),
argument: open.argument,
},
open.item.range(),
);
@@ -202,6 +188,8 @@ fn generate_fix(
let locator = checker.locator();
let filename_code = locator.slice(open.filename.range());
let (import_edit, binding) = checker
.importer()
.get_or_import_symbol(
@@ -218,15 +206,10 @@ fn generate_fix(
[Stmt::Assign(ast::StmtAssign { targets, value, .. })] if value.range() == expr.range() => {
match targets.as_slice() {
[Expr::Name(name)] => {
let target = match open.argument {
OpenArgument::Builtin { filename } => {
let filename_code = locator.slice(filename.range());
format!("{binding}({filename_code})")
}
OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
};
format!("{name} = {target}.{suggestion}", name = name.id)
format!(
"{name} = {binding}({filename_code}).{suggestion}",
name = name.id
)
}
_ => return None,
}
@@ -240,16 +223,8 @@ fn generate_fix(
}),
] if value.range() == expr.range() => match target.as_ref() {
Expr::Name(name) => {
let target = match open.argument {
OpenArgument::Builtin { filename } => {
let filename_code = locator.slice(filename.range());
format!("{binding}({filename_code})")
}
OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
};
format!(
"{var}: {ann} = {target}.{suggestion}",
"{var}: {ann} = {binding}({filename_code}).{suggestion}",
var = name.id,
ann = locator.slice(annotation.range())
)

View File

@@ -176,7 +176,7 @@ fn match_consecutive_appends<'a>(
let suite = if semantic.at_top_level() {
// If the statement is at the top level, we should go to the parent module.
// Module is available in the definitions list.
EnclosingSuite::new(semantic.definitions.python_ast()?, stmt.into())?
EnclosingSuite::new(semantic.definitions.python_ast()?, stmt)?
} else {
// Otherwise, go to the parent, and take its body as a sequence of siblings.
semantic

View File

@@ -9,7 +9,7 @@ use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::fix::snippet::SourceCodeSnippet;
use crate::importer::ImportRequest;
use crate::rules::refurb::helpers::{FileOpen, OpenArgument, find_file_opens};
use crate::rules::refurb::helpers::{FileOpen, find_file_opens};
use crate::{FixAvailability, Locator, Violation};
/// ## What it does
@@ -42,40 +42,26 @@ use crate::{FixAvailability, Locator, Violation};
/// - [Python documentation: `Path.write_text`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.write_text)
#[derive(ViolationMetadata)]
#[violation_metadata(preview_since = "v0.3.6")]
pub(crate) struct WriteWholeFile<'a> {
pub(crate) struct WriteWholeFile {
filename: SourceCodeSnippet,
suggestion: SourceCodeSnippet,
argument: OpenArgument<'a>,
}
impl Violation for WriteWholeFile<'_> {
impl Violation for WriteWholeFile {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
#[derive_message_formats]
fn message(&self) -> String {
let filename = self.filename.truncated_display();
let suggestion = self.suggestion.truncated_display();
match self.argument {
OpenArgument::Pathlib { .. } => {
format!(
"`Path.open()` followed by `write()` can be replaced by `{filename}.{suggestion}`"
)
}
OpenArgument::Builtin { .. } => {
format!("`open` and `write` should be replaced by `Path({filename}).{suggestion}`")
}
}
format!("`open` and `write` should be replaced by `Path({filename}).{suggestion}`")
}
fn fix_title(&self) -> Option<String> {
let filename = self.filename.truncated_display();
let suggestion = self.suggestion.truncated_display();
match self.argument {
OpenArgument::Pathlib { .. } => Some(format!("Replace with `{filename}.{suggestion}`")),
OpenArgument::Builtin { .. } => {
Some(format!("Replace with `Path({filename}).{suggestion}`"))
}
}
Some(format!(
"Replace with `Path({}).{}`",
self.filename.truncated_display(),
self.suggestion.truncated_display(),
))
}
}
@@ -139,15 +125,16 @@ impl<'a> Visitor<'a> for WriteMatcher<'a, '_> {
.position(|open| open.is_ref(write_to))
{
let open = self.candidates.remove(open);
if self.loop_counter == 0 {
let filename_display = open.argument.display(self.checker.source());
let suggestion = make_suggestion(&open, content, self.checker.locator());
let mut diagnostic = self.checker.report_diagnostic(
WriteWholeFile {
filename: SourceCodeSnippet::from_str(filename_display),
filename: SourceCodeSnippet::from_str(
&self.checker.generator().expr(open.filename),
),
suggestion: SourceCodeSnippet::from_str(&suggestion),
argument: open.argument,
},
open.item.range(),
);
@@ -211,6 +198,7 @@ fn generate_fix(
}
let locator = checker.locator();
let filename_code = locator.slice(open.filename.range());
let (import_edit, binding) = checker
.importer()
@@ -221,15 +209,7 @@ fn generate_fix(
)
.ok()?;
let target = match open.argument {
OpenArgument::Builtin { filename } => {
let filename_code = locator.slice(filename.range());
format!("{binding}({filename_code})")
}
OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
};
let replacement = format!("{target}.{suggestion}");
let replacement = format!("{binding}({filename_code}).{suggestion}");
let applicability = if checker.comment_ranges().intersects(with_stmt.range()) {
Applicability::Unsafe
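
For context, the `loop_counter` guard above appears to exist because a write inside a loop cannot be collapsed into a single `write_text` call; a sketch with hypothetical data:

```python
data = "hello\n"
lines = ["a\n", "b\n"]

with open("out.txt", "w") as f:
    f.write(data)         # single write: collapsible to Path("out.txt").write_text(data)

with open("out.txt", "w") as f:
    for line in lines:
        f.write(line)     # write inside a loop: not collapsible
```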

View File

@@ -2,7 +2,7 @@
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
--> FURB101_0.py:12:6
--> FURB101.py:12:6
|
11 | # FURB101
12 | with open("file.txt") as f:
@@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").read_text()`
16 | with open("file.txt", "rb") as f:
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
--> FURB101_0.py:16:6
--> FURB101.py:16:6
|
15 | # FURB101
16 | with open("file.txt", "rb") as f:
@@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").read_bytes()`
20 | with open("file.txt", mode="rb") as f:
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
--> FURB101_0.py:20:6
--> FURB101.py:20:6
|
19 | # FURB101
20 | with open("file.txt", mode="rb") as f:
@@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").read_bytes()`
24 | with open("file.txt", encoding="utf8") as f:
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")`
--> FURB101_0.py:24:6
--> FURB101.py:24:6
|
23 | # FURB101
24 | with open("file.txt", encoding="utf8") as f:
@@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").read_text(encoding="utf8")`
28 | with open("file.txt", errors="ignore") as f:
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")`
--> FURB101_0.py:28:6
--> FURB101.py:28:6
|
27 | # FURB101
28 | with open("file.txt", errors="ignore") as f:
@@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").read_text(errors="ignore")`
32 | with open("file.txt", mode="r") as f: # noqa: FURB120
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
--> FURB101_0.py:32:6
--> FURB101.py:32:6
|
31 | # FURB101
32 | with open("file.txt", mode="r") as f: # noqa: FURB120
@@ -147,7 +147,7 @@ help: Replace with `Path("file.txt").read_text()`
note: This is an unsafe fix and may change runtime behavior
FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()`
--> FURB101_0.py:36:6
--> FURB101.py:36:6
|
35 | # FURB101
36 | with open(foo(), "rb") as f:
@@ -158,7 +158,7 @@ FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()`
help: Replace with `Path(foo()).read_bytes()`
FURB101 `open` and `read` should be replaced by `Path("a.txt").read_text()`
--> FURB101_0.py:44:6
--> FURB101.py:44:6
|
43 | # FURB101
44 | with open("a.txt") as a, open("b.txt", "rb") as b:
@@ -169,7 +169,7 @@ FURB101 `open` and `read` should be replaced by `Path("a.txt").read_text()`
help: Replace with `Path("a.txt").read_text()`
FURB101 `open` and `read` should be replaced by `Path("b.txt").read_bytes()`
--> FURB101_0.py:44:26
--> FURB101.py:44:26
|
43 | # FURB101
44 | with open("a.txt") as a, open("b.txt", "rb") as b:
@@ -180,7 +180,7 @@ FURB101 `open` and `read` should be replaced by `Path("b.txt").read_bytes()`
help: Replace with `Path("b.txt").read_bytes()`
FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
--> FURB101_0.py:49:18
--> FURB101.py:49:18
|
48 | # FURB101
49 | with foo() as a, open("file.txt") as b, foo() as c:
@@ -191,7 +191,7 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
help: Replace with `Path("file.txt").read_text()`
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
--> FURB101_0.py:130:6
--> FURB101.py:130:6
|
129 | # FURB101
130 | with open("file.txt", encoding="utf-8") as f:
@@ -215,7 +215,7 @@ help: Replace with `Path("file.txt").read_text(encoding="utf-8")`
134 | with open("file.txt", encoding="utf-8") as f:
FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
--> FURB101_0.py:134:6
--> FURB101.py:134:6
|
133 | # FURB101 but no fix because it would remove the assignment to `x`
134 | with open("file.txt", encoding="utf-8") as f:
@@ -225,7 +225,7 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(enco
help: Replace with `Path("file.txt").read_text(encoding="utf-8")`
FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
--> FURB101_0.py:138:6
--> FURB101.py:138:6
|
137 | # FURB101 but no fix because it would remove the `process_contents` call
138 | with open("file.txt", encoding="utf-8") as f:
@@ -234,13 +234,13 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(enco
|
help: Replace with `Path("file.txt").read_text(encoding="utf-8")`
FURB101 `open` and `read` should be replaced by `Path("file1.txt").read_text(encoding="utf-8")`
--> FURB101_0.py:141:6
FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
--> FURB101.py:141:6
|
139 | contents = process_contents(f.read())
140 |
141 | with open("file1.txt", encoding="utf-8") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
141 | with open("file.txt", encoding="utf-8") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
142 | contents: str = process_contents(f.read())
|
help: Replace with `Path("file1.txt").read_text(encoding="utf-8")`
help: Replace with `Path("file.txt").read_text(encoding="utf-8")`

View File

@@ -1,39 +0,0 @@
---
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB101 [*] `Path.open()` followed by `read()` can be replaced by `Path("file.txt").read_text()`
--> FURB101_1.py:4:6
|
2 | from pathlib import Path
3 |
4 | with Path("file.txt").open() as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
5 | contents = f.read()
|
help: Replace with `Path("file.txt").read_text()`
1 |
2 | from pathlib import Path
3 |
- with Path("file.txt").open() as f:
- contents = f.read()
4 + contents = Path("file.txt").read_text()
5 |
6 | with Path("file.txt").open("r") as f:
7 | contents = f.read()
FURB101 [*] `Path.open()` followed by `read()` can be replaced by `Path("file.txt").read_text()`
--> FURB101_1.py:7:6
|
5 | contents = f.read()
6 |
7 | with Path("file.txt").open("r") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
8 | contents = f.read()
|
help: Replace with `Path("file.txt").read_text()`
4 | with Path("file.txt").open() as f:
5 | contents = f.read()
6 |
- with Path("file.txt").open("r") as f:
- contents = f.read()
7 + contents = Path("file.txt").read_text()

View File

@@ -2,7 +2,7 @@
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
--> FURB103_0.py:12:6
--> FURB103.py:12:6
|
11 | # FURB103
12 | with open("file.txt", "w") as f:
@@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").write_text("test")`
16 | with open("file.txt", "wb") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
--> FURB103_0.py:16:6
--> FURB103.py:16:6
|
15 | # FURB103
16 | with open("file.txt", "wb") as f:
@@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").write_bytes(foobar)`
20 | with open("file.txt", mode="wb") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
--> FURB103_0.py:20:6
--> FURB103.py:20:6
|
19 | # FURB103
20 | with open("file.txt", mode="wb") as f:
@@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").write_bytes(b"abc")`
24 | with open("file.txt", "w", encoding="utf8") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
--> FURB103_0.py:24:6
--> FURB103.py:24:6
|
23 | # FURB103
24 | with open("file.txt", "w", encoding="utf8") as f:
@@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
28 | with open("file.txt", "w", errors="ignore") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
--> FURB103_0.py:28:6
--> FURB103.py:28:6
|
27 | # FURB103
28 | with open("file.txt", "w", errors="ignore") as f:
@@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
32 | with open("file.txt", mode="w") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
--> FURB103_0.py:32:6
--> FURB103.py:32:6
|
31 | # FURB103
32 | with open("file.txt", mode="w") as f:
@@ -146,7 +146,7 @@ help: Replace with `Path("file.txt").write_text(foobar)`
36 | with open(foo(), "wb") as f:
FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
--> FURB103_0.py:36:6
--> FURB103.py:36:6
|
35 | # FURB103
36 | with open(foo(), "wb") as f:
@@ -157,7 +157,7 @@ FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())
help: Replace with `Path(foo()).write_bytes(bar())`
FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
--> FURB103_0.py:44:6
--> FURB103.py:44:6
|
43 | # FURB103
44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@@ -168,7 +168,7 @@ FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
help: Replace with `Path("a.txt").write_text(x)`
FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
--> FURB103_0.py:44:31
--> FURB103.py:44:31
|
43 | # FURB103
44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@@ -179,7 +179,7 @@ FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
help: Replace with `Path("b.txt").write_bytes(y)`
FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(bar(bar(a + x)))`
--> FURB103_0.py:49:18
--> FURB103.py:49:18
|
48 | # FURB103
49 | with foo() as a, open("file.txt", "w") as b, foo() as c:
@@ -190,7 +190,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba
help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
--> FURB103_0.py:58:6
--> FURB103.py:58:6
|
57 | # FURB103
58 | with open("file.txt", "w", newline="\r\n") as f:
@@ -214,7 +214,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
62 | import builtins
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
--> FURB103_0.py:66:6
--> FURB103.py:66:6
|
65 | # FURB103
66 | with builtins.open("file.txt", "w", newline="\r\n") as f:
@@ -237,7 +237,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
70 | from builtins import open as o
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
--> FURB103_0.py:74:6
--> FURB103.py:74:6
|
73 | # FURB103
74 | with o("file.txt", "w", newline="\r\n") as f:
@@ -260,7 +260,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
78 |
FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
--> FURB103_0.py:154:6
--> FURB103.py:154:6
|
152 | data = {"price": 100}
153 |
@@ -284,7 +284,7 @@ help: Replace with `Path("test.json")....`
158 | with open("tmp_path/pyproject.toml", "w") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("tmp_path/pyproject.toml")....`
--> FURB103_0.py:158:6
--> FURB103.py:158:6
|
157 | # See: https://github.com/astral-sh/ruff/issues/21381
158 | with open("tmp_path/pyproject.toml", "w") as f:

View File

@@ -1,157 +0,0 @@
---
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test")`
--> FURB103_1.py:3:6
|
1 | from pathlib import Path
2 |
3 | with Path("file.txt").open("w") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
4 | f.write("test")
|
help: Replace with `Path("file.txt").write_text("test")`
1 | from pathlib import Path
2 |
- with Path("file.txt").open("w") as f:
- f.write("test")
3 + Path("file.txt").write_text("test")
4 |
5 | with Path("file.txt").open("wb") as f:
6 | f.write(b"test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_bytes(b"test")`
--> FURB103_1.py:6:6
|
4 | f.write("test")
5 |
6 | with Path("file.txt").open("wb") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
7 | f.write(b"test")
|
help: Replace with `Path("file.txt").write_bytes(b"test")`
3 | with Path("file.txt").open("w") as f:
4 | f.write("test")
5 |
- with Path("file.txt").open("wb") as f:
- f.write(b"test")
6 + Path("file.txt").write_bytes(b"test")
7 |
8 | with Path("file.txt").open(mode="w") as f:
9 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test")`
--> FURB103_1.py:9:6
|
7 | f.write(b"test")
8 |
9 | with Path("file.txt").open(mode="w") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
10 | f.write("test")
|
help: Replace with `Path("file.txt").write_text("test")`
6 | with Path("file.txt").open("wb") as f:
7 | f.write(b"test")
8 |
- with Path("file.txt").open(mode="w") as f:
- f.write("test")
9 + Path("file.txt").write_text("test")
10 |
11 | with Path("file.txt").open("w", encoding="utf8") as f:
12 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test", encoding="utf8")`
--> FURB103_1.py:12:6
|
10 | f.write("test")
11 |
12 | with Path("file.txt").open("w", encoding="utf8") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
13 | f.write("test")
|
help: Replace with `Path("file.txt").write_text("test", encoding="utf8")`
9 | with Path("file.txt").open(mode="w") as f:
10 | f.write("test")
11 |
- with Path("file.txt").open("w", encoding="utf8") as f:
- f.write("test")
12 + Path("file.txt").write_text("test", encoding="utf8")
13 |
14 | with Path("file.txt").open("w", errors="ignore") as f:
15 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test", errors="ignore")`
--> FURB103_1.py:15:6
|
13 | f.write("test")
14 |
15 | with Path("file.txt").open("w", errors="ignore") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
16 | f.write("test")
|
help: Replace with `Path("file.txt").write_text("test", errors="ignore")`
12 | with Path("file.txt").open("w", encoding="utf8") as f:
13 | f.write("test")
14 |
- with Path("file.txt").open("w", errors="ignore") as f:
- f.write("test")
15 + Path("file.txt").write_text("test", errors="ignore")
16 |
17 | with Path(foo()).open("w") as f:
18 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path(foo()).write_text("test")`
--> FURB103_1.py:18:6
|
16 | f.write("test")
17 |
18 | with Path(foo()).open("w") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
19 | f.write("test")
|
help: Replace with `Path(foo()).write_text("test")`
15 | with Path("file.txt").open("w", errors="ignore") as f:
16 | f.write("test")
17 |
- with Path(foo()).open("w") as f:
- f.write("test")
18 + Path(foo()).write_text("test")
19 |
20 | p = Path("file.txt")
21 | with p.open("w") as f:
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `p.write_text("test")`
--> FURB103_1.py:22:6
|
21 | p = Path("file.txt")
22 | with p.open("w") as f:
| ^^^^^^^^^^^^^^^^
23 | f.write("test")
|
help: Replace with `p.write_text("test")`
19 | f.write("test")
20 |
21 | p = Path("file.txt")
- with p.open("w") as f:
- f.write("test")
22 + p.write_text("test")
23 |
24 | with Path("foo", "bar", "baz").open("w") as f:
25 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("foo", "bar", "baz").write_text("test")`
--> FURB103_1.py:25:6
|
23 | f.write("test")
24 |
25 | with Path("foo", "bar", "baz").open("w") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
26 | f.write("test")
|
help: Replace with `Path("foo", "bar", "baz").write_text("test")`
22 | with p.open("w") as f:
23 | f.write("test")
24 |
- with Path("foo", "bar", "baz").open("w") as f:
- f.write("test")
25 + Path("foo", "bar", "baz").write_text("test")

View File

@@ -2,7 +2,7 @@
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
--> FURB103_0.py:12:6
--> FURB103.py:12:6
|
11 | # FURB103
12 | with open("file.txt", "w") as f:
@@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").write_text("test")`
16 | with open("file.txt", "wb") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
--> FURB103_0.py:16:6
--> FURB103.py:16:6
|
15 | # FURB103
16 | with open("file.txt", "wb") as f:
@@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").write_bytes(foobar)`
20 | with open("file.txt", mode="wb") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
--> FURB103_0.py:20:6
--> FURB103.py:20:6
|
19 | # FURB103
20 | with open("file.txt", mode="wb") as f:
@@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").write_bytes(b"abc")`
24 | with open("file.txt", "w", encoding="utf8") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
--> FURB103_0.py:24:6
--> FURB103.py:24:6
|
23 | # FURB103
24 | with open("file.txt", "w", encoding="utf8") as f:
@@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
28 | with open("file.txt", "w", errors="ignore") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
--> FURB103_0.py:28:6
--> FURB103.py:28:6
|
27 | # FURB103
28 | with open("file.txt", "w", errors="ignore") as f:
@@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
32 | with open("file.txt", mode="w") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
--> FURB103_0.py:32:6
--> FURB103.py:32:6
|
31 | # FURB103
32 | with open("file.txt", mode="w") as f:
@@ -146,7 +146,7 @@ help: Replace with `Path("file.txt").write_text(foobar)`
36 | with open(foo(), "wb") as f:
FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
--> FURB103_0.py:36:6
--> FURB103.py:36:6
|
35 | # FURB103
36 | with open(foo(), "wb") as f:
@@ -157,7 +157,7 @@ FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())
help: Replace with `Path(foo()).write_bytes(bar())`
FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
--> FURB103_0.py:44:6
--> FURB103.py:44:6
|
43 | # FURB103
44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@@ -168,7 +168,7 @@ FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
help: Replace with `Path("a.txt").write_text(x)`
FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
--> FURB103_0.py:44:31
--> FURB103.py:44:31
|
43 | # FURB103
44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@@ -179,7 +179,7 @@ FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
help: Replace with `Path("b.txt").write_bytes(y)`
FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(bar(bar(a + x)))`
--> FURB103_0.py:49:18
--> FURB103.py:49:18
|
48 | # FURB103
49 | with foo() as a, open("file.txt", "w") as b, foo() as c:
@@ -190,7 +190,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba
help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`
FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
--> FURB103_0.py:154:6
--> FURB103.py:154:6
|
152 | data = {"price": 100}
153 |
@@ -214,7 +214,7 @@ help: Replace with `Path("test.json")....`
158 | with open("tmp_path/pyproject.toml", "w") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("tmp_path/pyproject.toml")....`
--> FURB103_0.py:158:6
--> FURB103.py:158:6
|
157 | # See: https://github.com/astral-sh/ruff/issues/21381
158 | with open("tmp_path/pyproject.toml", "w") as f:

View File

@@ -313,20 +313,12 @@ mod tests {
Rule::UnusedVariable,
Rule::AmbiguousVariableName,
Rule::UnusedNOQA,
Rule::InvalidRuleCode,
Rule::InvalidSuppressionComment,
Rule::UnmatchedSuppressionComment,
])
.with_external_rules(&["TK421"]),
]),
&settings::LinterSettings::for_rules(vec![
Rule::UnusedVariable,
Rule::AmbiguousVariableName,
Rule::UnusedNOQA,
Rule::InvalidRuleCode,
Rule::InvalidSuppressionComment,
Rule::UnmatchedSuppressionComment,
])
.with_external_rules(&["TK421"])
.with_preview_mode(),
);
Ok(())

View File

@@ -9,21 +9,6 @@ use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;
use crate::{AlwaysFixableViolation, Edit, Fix};
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum InvalidRuleCodeKind {
Noqa,
Suppression,
}
impl InvalidRuleCodeKind {
fn as_str(&self) -> &str {
match self {
InvalidRuleCodeKind::Noqa => "`# noqa`",
InvalidRuleCodeKind::Suppression => "suppression",
}
}
}
/// ## What it does
/// Checks for `noqa` codes that are invalid.
///
@@ -51,17 +36,12 @@ impl InvalidRuleCodeKind {
#[violation_metadata(preview_since = "0.11.4")]
pub(crate) struct InvalidRuleCode {
pub(crate) rule_code: String,
pub(crate) kind: InvalidRuleCodeKind,
}
impl AlwaysFixableViolation for InvalidRuleCode {
#[derive_message_formats]
fn message(&self) -> String {
format!(
"Invalid rule code in {}: {}",
self.kind.as_str(),
self.rule_code
)
format!("Invalid rule code in `# noqa`: {}", self.rule_code)
}
fn fix_title(&self) -> String {
@@ -81,9 +61,7 @@ pub(crate) fn invalid_noqa_code(
continue;
};
let all_valid = directive
.iter()
.all(|code| code_is_valid(code.as_str(), external));
let all_valid = directive.iter().all(|code| code_is_valid(code, external));
if all_valid {
continue;
@@ -91,7 +69,7 @@ pub(crate) fn invalid_noqa_code(
let (valid_codes, invalid_codes): (Vec<_>, Vec<_>) = directive
.iter()
.partition(|&code| code_is_valid(code.as_str(), external));
.partition(|&code| code_is_valid(code, external));
if valid_codes.is_empty() {
all_codes_invalid_diagnostic(directive, invalid_codes, context);
@@ -103,9 +81,10 @@ pub(crate) fn invalid_noqa_code(
}
}
pub(crate) fn code_is_valid(code: &str, external: &[String]) -> bool {
Rule::from_code(get_redirect_target(code).unwrap_or(code)).is_ok()
|| external.iter().any(|ext| code.starts_with(ext))
fn code_is_valid(code: &Code, external: &[String]) -> bool {
let code_str = code.as_str();
Rule::from_code(get_redirect_target(code_str).unwrap_or(code_str)).is_ok()
|| external.iter().any(|ext| code_str.starts_with(ext))
}
fn all_codes_invalid_diagnostic(
@@ -121,7 +100,6 @@ fn all_codes_invalid_diagnostic(
.map(Code::as_str)
.collect::<Vec<_>>()
.join(", "),
kind: InvalidRuleCodeKind::Noqa,
},
directive.range(),
)
@@ -138,7 +116,6 @@ fn some_codes_are_invalid_diagnostic(
.report_diagnostic(
InvalidRuleCode {
rule_code: invalid_code.to_string(),
kind: InvalidRuleCodeKind::Noqa,
},
invalid_code.range(),
)
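`code_is_valid` above accepts a code when it resolves to a known rule (after following redirects for renamed codes) or when it starts with a prefix listed in the `external` setting; anything else is reported by RUF102 with a fix that removes the offending code. A hedged illustration in Python (`TK421` mirrors the external prefix used by the tests in this diff; `XYZ999` is a made-up invalid code):

```python
import os  # noqa: F401    # valid: F401 is a known rule code
x = 1  # noqa: XYZ999      # invalid: RUF102 reports and removes the code
y = 2  # noqa: TK421       # valid only if "TK421" matches a configured external prefix
```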

View File

@@ -1,59 +0,0 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use crate::AlwaysFixableViolation;
use crate::suppression::{InvalidSuppressionKind, ParseErrorKind};
/// ## What it does
/// Checks for invalid suppression comments.
///
/// ## Why is this bad?
/// Invalid suppression comments are ignored by Ruff, and should either
/// be fixed or removed to avoid confusion.
///
/// ## Example
/// ```python
/// # ruff: disable  # missing codes
/// ```
///
/// Use instead:
/// ```python
/// # ruff: disable[E501]
/// ```
///
/// Or delete the invalid suppression comment.
///
/// ## References
/// - [Ruff error suppression](https://docs.astral.sh/ruff/linter/#error-suppression)
#[derive(ViolationMetadata)]
#[violation_metadata(preview_since = "0.14.11")]
pub(crate) struct InvalidSuppressionComment {
pub(crate) kind: InvalidSuppressionCommentKind,
}
impl AlwaysFixableViolation for InvalidSuppressionComment {
#[derive_message_formats]
fn message(&self) -> String {
let msg = match self.kind {
InvalidSuppressionCommentKind::Invalid(InvalidSuppressionKind::Indentation) => {
"unexpected indentation".to_string()
}
InvalidSuppressionCommentKind::Invalid(InvalidSuppressionKind::Trailing) => {
"trailing comments are not supported".to_string()
}
InvalidSuppressionCommentKind::Invalid(InvalidSuppressionKind::Unmatched) => {
"no matching 'disable' comment".to_string()
}
InvalidSuppressionCommentKind::Error(error) => format!("{error}"),
};
format!("Invalid suppression comment: {msg}")
}
fn fix_title(&self) -> String {
"Remove suppression comment".to_string()
}
}
pub(crate) enum InvalidSuppressionCommentKind {
Invalid(InvalidSuppressionKind),
Error(ParseErrorKind),
}
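The deleted rule above covers three invalid suppression-comment shapes plus parse errors. Based on its message variants, a sketch of comments it is meant to flag (hypothetical snippets, not taken from the test suite):

```python
def f():
    x = 1  # ruff: disable[E501]   # invalid: trailing comments are not supported

# ruff: enable[E501]               # invalid: no matching 'disable' comment

# ruff: disable                    # invalid: missing suppression codes like [E501, ...]
```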

View File

@@ -22,7 +22,6 @@ pub(crate) use invalid_formatter_suppression_comment::*;
pub(crate) use invalid_index_type::*;
pub(crate) use invalid_pyproject_toml::*;
pub(crate) use invalid_rule_code::*;
pub(crate) use invalid_suppression_comment::*;
pub(crate) use legacy_form_pytest_raises::*;
pub(crate) use logging_eager_conversion::*;
pub(crate) use map_int_version_parsing::*;
@@ -47,7 +46,6 @@ pub(crate) use starmap_zip::*;
pub(crate) use static_key_dict_comprehension::*;
#[cfg(any(feature = "test-rules", test))]
pub(crate) use test_rules::*;
pub(crate) use unmatched_suppression_comment::*;
pub(crate) use unnecessary_cast_to_int::*;
pub(crate) use unnecessary_iterable_allocation_for_first_element::*;
pub(crate) use unnecessary_key_check::*;
@@ -89,7 +87,6 @@ mod invalid_formatter_suppression_comment;
mod invalid_index_type;
mod invalid_pyproject_toml;
mod invalid_rule_code;
mod invalid_suppression_comment;
mod legacy_form_pytest_raises;
mod logging_eager_conversion;
mod map_int_version_parsing;
@@ -116,7 +113,6 @@ mod static_key_dict_comprehension;
mod suppression_comment_visitor;
#[cfg(any(feature = "test-rules", test))]
pub(crate) mod test_rules;
mod unmatched_suppression_comment;
mod unnecessary_cast_to_int;
mod unnecessary_iterable_allocation_for_first_element;
mod unnecessary_key_check;

View File

@@ -1,42 +0,0 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use crate::Violation;
/// ## What it does
/// Checks for unmatched range suppression comments.
///
/// ## Why is this bad?
/// Unmatched range suppression comments can inadvertently suppress violations
/// over larger sections of code than intended, particularly at module scope.
///
/// ## Example
/// ```python
/// def foo():
/// # ruff: disable[E501] # unmatched
/// REALLY_LONG_VALUES = [...]
///
/// print(REALLY_LONG_VALUES)
/// ```
///
/// Use instead:
/// ```python
/// def foo():
/// # ruff: disable[E501]
/// REALLY_LONG_VALUES = [...]
/// # ruff: enable[E501]
///
/// print(REALLY_LONG_VALUES)
/// ```
///
/// ## References
/// - [Ruff error suppression](https://docs.astral.sh/ruff/linter/#error-suppression)
#[derive(ViolationMetadata)]
#[violation_metadata(preview_since = "0.14.11")]
pub(crate) struct UnmatchedSuppressionComment;
impl Violation for UnmatchedSuppressionComment {
#[derive_message_formats]
fn message(&self) -> String {
"Suppression comment without matching `#ruff:enable` comment".to_string()
}
}

View File

@@ -6,8 +6,8 @@ source: crates/ruff_linter/src/rules/ruff/mod.rs
+linter.preview = enabled
--- Summary ---
Removed: 15
Added: 23
Removed: 14
Added: 11
--- Removed ---
E741 Ambiguous variable name: `I`
@@ -238,60 +238,8 @@ help: Remove assignment to unused variable `I`
note: This is an unsafe fix and may change runtime behavior
F841 [*] Local variable `value` is assigned to but never used
--> suppressions.py:95:5
|
93 | # ruff: disable[YF829]
94 | # ruff: disable[F841, RQW320]
95 | value = 0
| ^^^^^
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
|
help: Remove assignment to unused variable `value`
92 | # Unknown rule codes
93 | # ruff: disable[YF829]
94 | # ruff: disable[F841, RQW320]
- value = 0
95 + pass
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
98 |
note: This is an unsafe fix and may change runtime behavior
--- Added ---
RUF104 Suppression comment without matching `#ruff:enable` comment
--> suppressions.py:11:5
|
9 | # These should both be ignored by the implicit range suppression.
10 | # Should also generate an "unmatched suppression" warning.
11 | # ruff:disable[E741,F841]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
12 | I = 1
|
RUF103 [*] Invalid suppression comment: no matching 'disable' comment
--> suppressions.py:19:5
|
17 | # should be generated.
18 | I = 1
19 | # ruff: enable[E741, F841]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Remove suppression comment
16 | # Neither warning is ignored, and an "unmatched suppression"
17 | # should be generated.
18 | I = 1
- # ruff: enable[E741, F841]
19 |
20 |
21 | def f():
note: This is an unsafe fix and may change runtime behavior
RUF100 [*] Unused suppression (non-enabled: `E501`)
--> suppressions.py:46:5
|
@@ -350,17 +298,6 @@ help: Remove unused `noqa` directive
58 |
RUF104 Suppression comment without matching `#ruff:enable` comment
--> suppressions.py:61:5
|
59 | def f():
60 | # TODO: Duplicate codes should be counted as duplicate, not unused
61 | # ruff: disable[F841, F841]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
62 | foo = 0
|
RUF100 [*] Unused suppression (unused: `F841`)
--> suppressions.py:61:21
|
@@ -381,18 +318,6 @@ help: Remove unused suppression
64 |
RUF104 Suppression comment without matching `#ruff:enable` comment
--> suppressions.py:68:5
|
66 | # Overlapping range suppressions, one should be marked as used,
67 | # and the other should trigger an unused suppression diagnostic
68 | # ruff: disable[F841]
| ^^^^^^^^^^^^^^^^^^^^^
69 | # ruff: disable[F841]
70 | foo = 0
|
RUF100 [*] Unused suppression (unused: `F841`)
--> suppressions.py:69:5
|
@@ -412,17 +337,6 @@ help: Remove unused suppression
71 |
RUF104 Suppression comment without matching `#ruff:enable` comment
--> suppressions.py:75:5
|
73 | def f():
74 | # Multiple codes but only one is used
75 | # ruff: disable[E741, F401, F841]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
76 | foo = 0
|
RUF100 [*] Unused suppression (unused: `E741`)
--> suppressions.py:75:21
|
@@ -463,17 +377,6 @@ help: Remove unused suppression
78 |
RUF104 Suppression comment without matching `#ruff:enable` comment
--> suppressions.py:81:5
|
79 | def f():
80 | # Multiple codes but only two are used
81 | # ruff: disable[E741, F401, F841]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
82 | I = 0
|
RUF100 [*] Unused suppression (non-enabled: `F401`)
--> suppressions.py:81:27
|
@@ -510,8 +413,6 @@ help: Remove unused suppression
- # ruff: disable[E741, F401, F841]
87 + # ruff: disable[F401, F841]
88 | print("hello")
89 |
90 |
RUF100 [*] Unused suppression (non-enabled: `F401`)
@@ -530,8 +431,6 @@ help: Remove unused suppression
- # ruff: disable[E741, F401, F841]
87 + # ruff: disable[E741, F841]
88 | print("hello")
89 |
90 |
RUF100 [*] Unused suppression (unused: `F841`)
@@ -550,122 +449,3 @@ help: Remove unused suppression
- # ruff: disable[E741, F401, F841]
87 + # ruff: disable[E741, F401]
88 | print("hello")
89 |
90 |
RUF102 [*] Invalid rule code in suppression: YF829
--> suppressions.py:93:21
|
91 | def f():
92 | # Unknown rule codes
93 | # ruff: disable[YF829]
| ^^^^^
94 | # ruff: disable[F841, RQW320]
95 | value = 0
|
help: Remove the rule code
90 |
91 | def f():
92 | # Unknown rule codes
- # ruff: disable[YF829]
93 | # ruff: disable[F841, RQW320]
94 | value = 0
95 | # ruff: enable[F841, RQW320]
RUF102 [*] Invalid rule code in suppression: RQW320
--> suppressions.py:94:27
|
92 | # Unknown rule codes
93 | # ruff: disable[YF829]
94 | # ruff: disable[F841, RQW320]
| ^^^^^^
95 | value = 0
96 | # ruff: enable[F841, RQW320]
|
help: Remove the rule code
91 | def f():
92 | # Unknown rule codes
93 | # ruff: disable[YF829]
- # ruff: disable[F841, RQW320]
94 + # ruff: disable[F841]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
RUF102 [*] Invalid rule code in suppression: RQW320
--> suppressions.py:96:26
|
94 | # ruff: disable[F841, RQW320]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
| ^^^^^^
97 | # ruff: enable[YF829]
|
help: Remove the rule code
93 | # ruff: disable[YF829]
94 | # ruff: disable[F841, RQW320]
95 | value = 0
- # ruff: enable[F841, RQW320]
96 + # ruff: enable[F841]
97 | # ruff: enable[YF829]
98 |
99 |
RUF102 [*] Invalid rule code in suppression: YF829
--> suppressions.py:97:20
|
95 | value = 0
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
| ^^^^^
|
help: Remove the rule code
94 | # ruff: disable[F841, RQW320]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
- # ruff: enable[YF829]
97 |
98 |
99 | def f():
RUF103 [*] Invalid suppression comment: missing suppression codes like `[E501, ...]`
--> suppressions.py:109:5
|
107 | def f():
108 | # Empty or missing rule codes
109 | # ruff: disable
| ^^^^^^^^^^^^^^^
110 | # ruff: disable[]
111 | print("hello")
|
help: Remove suppression comment
106 |
107 | def f():
108 | # Empty or missing rule codes
- # ruff: disable
109 | # ruff: disable[]
110 | print("hello")
note: This is an unsafe fix and may change runtime behavior
RUF103 [*] Invalid suppression comment: missing suppression codes like `[E501, ...]`
--> suppressions.py:110:5
|
108 | # Empty or missing rule codes
109 | # ruff: disable
110 | # ruff: disable[]
| ^^^^^^^^^^^^^^^^^
111 | print("hello")
|
help: Remove suppression comment
107 | def f():
108 | # Empty or missing rule codes
109 | # ruff: disable
- # ruff: disable[]
110 | print("hello")
note: This is an unsafe fix and may change runtime behavior

View File

@@ -471,13 +471,6 @@ impl LinterSettings {
self
}
#[must_use]
pub fn with_external_rules(mut self, rules: &[&str]) -> Self {
self.external
.extend(rules.iter().map(std::string::ToString::to_string));
self
}
/// Resolve the [`TargetVersion`] to use for linting.
///
/// This method respects the per-file version overrides in

View File

@@ -4,7 +4,6 @@ use ruff_db::diagnostic::Diagnostic;
use ruff_diagnostics::{Edit, Fix};
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_ast::whitespace::indentation;
use rustc_hash::FxHashSet;
use std::cell::Cell;
use std::{error::Error, fmt::Formatter};
use thiserror::Error;
@@ -18,11 +17,7 @@ use crate::checkers::ast::LintContext;
use crate::codes::Rule;
use crate::fix::edits::delete_comment;
use crate::preview::is_range_suppressions_enabled;
use crate::rule_redirects::get_redirect_target;
use crate::rules::ruff::rules::{
InvalidRuleCode, InvalidRuleCodeKind, InvalidSuppressionComment, InvalidSuppressionCommentKind,
UnmatchedSuppressionComment, UnusedCodes, UnusedNOQA, UnusedNOQAKind, code_is_valid,
};
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA, UnusedNOQAKind};
use crate::settings::LinterSettings;
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -135,7 +130,7 @@ impl Suppressions {
}
pub(crate) fn is_empty(&self) -> bool {
self.valid.is_empty() && self.invalid.is_empty() && self.errors.is_empty()
self.valid.is_empty()
}
/// Check if a diagnostic is suppressed by any known range suppressions
@@ -155,9 +150,7 @@ impl Suppressions {
};
for suppression in &self.valid {
let suppression_code =
get_redirect_target(suppression.code.as_str()).unwrap_or(suppression.code.as_str());
if *code == suppression_code && suppression.range.contains_range(range) {
if *code == suppression.code.as_str() && suppression.range.contains_range(range) {
suppression.used.set(true);
return true;
}
@@ -166,140 +159,81 @@ impl Suppressions {
}
pub(crate) fn check_suppressions(&self, context: &LintContext, locator: &Locator) {
let mut unmatched_ranges = FxHashSet::default();
for suppression in &self.valid {
if !code_is_valid(&suppression.code, &context.settings().external) {
// InvalidRuleCode
if context.is_rule_enabled(Rule::InvalidRuleCode) {
for comment in &suppression.comments {
let (range, edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
comment,
true,
);
context
.report_diagnostic(
InvalidRuleCode {
rule_code: suppression.code.to_string(),
kind: InvalidRuleCodeKind::Suppression,
},
range,
)
.set_fix(Fix::safe_edit(edit));
}
}
} else if !suppression.used.get() {
// UnusedNOQA
if context.is_rule_enabled(Rule::UnusedNOQA) {
let Ok(rule) = Rule::from_code(
get_redirect_target(&suppression.code).unwrap_or(&suppression.code),
) else {
continue; // "external" lint code, don't treat it as unused
};
for comment in &suppression.comments {
let (range, edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
comment,
false,
);
let codes = if context.is_rule_enabled(rule) {
UnusedCodes {
unmatched: vec![suppression.code.to_string()],
..Default::default()
}
} else {
UnusedCodes {
disabled: vec![suppression.code.to_string()],
..Default::default()
}
};
context
.report_diagnostic(
UnusedNOQA {
codes: Some(codes),
kind: UnusedNOQAKind::Suppression,
},
range,
)
.set_fix(Fix::safe_edit(edit));
}
}
} else if suppression.comments.len() == 1 {
// UnmatchedSuppressionComment
let range = suppression.comments[0].range;
if unmatched_ranges.insert(range) {
context.report_diagnostic_if_enabled(UnmatchedSuppressionComment {}, range);
}
}
if !context.any_rule_enabled(&[Rule::UnusedNOQA, Rule::InvalidRuleCode]) {
return;
}
if context.is_rule_enabled(Rule::InvalidSuppressionComment) {
for error in &self.errors {
context
.report_diagnostic(
InvalidSuppressionComment {
kind: InvalidSuppressionCommentKind::Error(error.kind),
},
error.range,
)
.set_fix(Fix::unsafe_edit(delete_comment(error.range, locator)));
}
}
let unused = self
.valid
.iter()
.filter(|suppression| !suppression.used.get());
if context.is_rule_enabled(Rule::InvalidSuppressionComment) {
for invalid in &self.invalid {
context
.report_diagnostic(
InvalidSuppressionComment {
kind: InvalidSuppressionCommentKind::Invalid(invalid.kind),
},
invalid.comment.range,
)
.set_fix(Fix::unsafe_edit(delete_comment(
invalid.comment.range,
locator,
)));
}
}
}
fn delete_code_or_comment(
locator: &Locator<'_>,
suppression: &Suppression,
comment: &SuppressionComment,
highlight_only_code: bool,
) -> (TextRange, Edit) {
let mut range = comment.range;
let edit = if comment.codes.len() == 1 {
if highlight_only_code {
range = comment.codes[0];
}
delete_comment(comment.range, locator)
} else {
let code_index = comment
.codes
.iter()
.position(|range| locator.slice(range) == suppression.code)
.unwrap();
range = comment.codes[code_index];
let code_range = if code_index < (comment.codes.len() - 1) {
TextRange::new(
comment.codes[code_index].start(),
comment.codes[code_index + 1].start(),
)
} else {
TextRange::new(
comment.codes[code_index - 1].end(),
comment.codes[code_index].end(),
)
for suppression in unused {
let Ok(rule) = Rule::from_code(&suppression.code) else {
continue; // TODO: invalid code
};
Edit::range_deletion(code_range)
};
(range, edit)
for comment in &suppression.comments {
let mut range = comment.range;
let edit = if comment.codes.len() == 1 {
delete_comment(comment.range, locator)
} else {
let code_index = comment
.codes
.iter()
.position(|range| locator.slice(range) == suppression.code)
.unwrap();
range = comment.codes[code_index];
let code_range = if code_index < (comment.codes.len() - 1) {
TextRange::new(
comment.codes[code_index].start(),
comment.codes[code_index + 1].start(),
)
} else {
TextRange::new(
comment.codes[code_index - 1].end(),
comment.codes[code_index].end(),
)
};
Edit::range_deletion(code_range)
};
let codes = if context.is_rule_enabled(rule) {
UnusedCodes {
unmatched: vec![suppression.code.to_string()],
..Default::default()
}
} else {
UnusedCodes {
disabled: vec![suppression.code.to_string()],
..Default::default()
}
};
let mut diagnostic = context.report_diagnostic(
UnusedNOQA {
codes: Some(codes),
kind: UnusedNOQAKind::Suppression,
},
range,
);
diagnostic.set_fix(Fix::safe_edit(edit));
}
}
for error in self
.errors
.iter()
.filter(|error| error.kind == ParseErrorKind::MissingCodes)
{
let mut diagnostic = context.report_diagnostic(
UnusedNOQA {
codes: Some(UnusedCodes::default()),
kind: UnusedNOQAKind::Suppression,
},
error.range,
);
diagnostic.set_fix(Fix::safe_edit(delete_comment(error.range, locator)));
}
}
}
@@ -457,7 +391,7 @@ impl<'a> SuppressionsBuilder<'a> {
}
#[derive(Copy, Clone, Debug, Eq, Error, PartialEq)]
pub(crate) enum ParseErrorKind {
enum ParseErrorKind {
#[error("not a suppression comment")]
NotASuppression,
@@ -467,7 +401,7 @@ pub(crate) enum ParseErrorKind {
#[error("unknown ruff directive")]
UnknownAction,
#[error("missing suppression codes like `[E501, ...]`")]
#[error("missing suppression codes")]
MissingCodes,
#[error("missing closing bracket")]

View File

@@ -12,7 +12,6 @@ pub use python_version::*;
pub mod comparable;
pub mod docstrings;
mod expression;
pub mod find_node;
mod generated;
pub mod helpers;
pub mod identifier;

View File

@@ -2,25 +2,18 @@
use crate::{self as ast, AnyNodeRef, ExceptHandler, Stmt};
/// Given a [`Stmt`] and its parent, return the [`ast::Suite`] that contains the [`Stmt`].
pub fn suite<'a>(
stmt: impl Into<AnyNodeRef<'a>>,
parent: impl Into<AnyNodeRef<'a>>,
) -> Option<EnclosingSuite<'a>> {
pub fn suite<'a>(stmt: &'a Stmt, parent: &'a Stmt) -> Option<EnclosingSuite<'a>> {
// TODO: refactor this to work without a parent, i.e., when `stmt` is at the top level
let stmt = stmt.into();
match parent.into() {
AnyNodeRef::ModModule(ast::ModModule { body, .. }) => EnclosingSuite::new(body, stmt),
AnyNodeRef::StmtFunctionDef(ast::StmtFunctionDef { body, .. }) => {
EnclosingSuite::new(body, stmt)
}
AnyNodeRef::StmtClassDef(ast::StmtClassDef { body, .. }) => EnclosingSuite::new(body, stmt),
AnyNodeRef::StmtFor(ast::StmtFor { body, orelse, .. }) => [body, orelse]
match parent {
Stmt::FunctionDef(ast::StmtFunctionDef { body, .. }) => EnclosingSuite::new(body, stmt),
Stmt::ClassDef(ast::StmtClassDef { body, .. }) => EnclosingSuite::new(body, stmt),
Stmt::For(ast::StmtFor { body, orelse, .. }) => [body, orelse]
.iter()
.find_map(|suite| EnclosingSuite::new(suite, stmt)),
AnyNodeRef::StmtWhile(ast::StmtWhile { body, orelse, .. }) => [body, orelse]
Stmt::While(ast::StmtWhile { body, orelse, .. }) => [body, orelse]
.iter()
.find_map(|suite| EnclosingSuite::new(suite, stmt)),
AnyNodeRef::StmtIf(ast::StmtIf {
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
..
@@ -28,12 +21,12 @@ pub fn suite<'a>(
.into_iter()
.chain(elif_else_clauses.iter().map(|clause| &clause.body))
.find_map(|suite| EnclosingSuite::new(suite, stmt)),
AnyNodeRef::StmtWith(ast::StmtWith { body, .. }) => EnclosingSuite::new(body, stmt),
AnyNodeRef::StmtMatch(ast::StmtMatch { cases, .. }) => cases
Stmt::With(ast::StmtWith { body, .. }) => EnclosingSuite::new(body, stmt),
Stmt::Match(ast::StmtMatch { cases, .. }) => cases
.iter()
.map(|case| &case.body)
.find_map(|body| EnclosingSuite::new(body, stmt)),
AnyNodeRef::StmtTry(ast::StmtTry {
Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
@@ -58,10 +51,10 @@ pub struct EnclosingSuite<'a> {
}
impl<'a> EnclosingSuite<'a> {
pub fn new(suite: &'a [Stmt], stmt: AnyNodeRef<'a>) -> Option<Self> {
pub fn new(suite: &'a [Stmt], stmt: &'a Stmt) -> Option<Self> {
let position = suite
.iter()
.position(|sibling| AnyNodeRef::ptr_eq(sibling.into(), stmt))?;
.position(|sibling| AnyNodeRef::ptr_eq(sibling.into(), stmt.into()))?;
Some(EnclosingSuite { suite, position })
}
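`suite` resolves the statement list (function body, loop body, `if`/`elif`/`else` branch, `match` case, `try` part, and so on) that directly contains a given statement, and `EnclosingSuite` records that list together with the statement's index in it. In Python terms (a hypothetical illustration):

```python
cond = True

def f():
    if cond:
        a = 1
        b = 2  # the enclosing suite of this statement is the `if` body,
               # i.e. the list [a = 1, b = 2], with position index 1
    else:
        pass
```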

View File

@@ -222,17 +222,6 @@ where
visitor.leave_node(node);
}
pub fn walk_node<'a, V>(visitor: &mut V, node: AnyNodeRef<'a>)
where
V: SourceOrderVisitor<'a> + ?Sized,
{
if visitor.enter_node(node).is_traverse() {
node.visit_source_order(visitor);
}
visitor.leave_node(node);
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum TraversalSignal {
Traverse,

View File

@@ -243,7 +243,7 @@ fn to_lsp_diagnostic(
) -> (usize, lsp_types::Diagnostic) {
let diagnostic_range = diagnostic.range().unwrap_or_default();
let name = diagnostic.name();
let body = diagnostic.concise_message().to_string();
let body = diagnostic.body().to_string();
let fix = diagnostic.fix();
let suggestion = diagnostic.first_help_text();
let code = diagnostic.secondary_code();

View File

@@ -179,45 +179,42 @@ impl LineIndex {
let line = self.line_index(offset);
let line_start = self.line_start(line, text);
let character_offset =
self.characters_between(TextRange::new(line_start, offset), text, encoding);
SourceLocation {
line,
character_offset: OneIndexed::from_zero_indexed(character_offset),
}
}
fn characters_between(
&self,
range: TextRange,
text: &str,
encoding: PositionEncoding,
) -> usize {
if self.is_ascii() {
return (range.end() - range.start()).to_usize();
return SourceLocation {
line,
character_offset: OneIndexed::from_zero_indexed((offset - line_start).to_usize()),
};
}
match encoding {
PositionEncoding::Utf8 => (range.end() - range.start()).to_usize(),
PositionEncoding::Utf8 => {
let character_offset = offset - line_start;
SourceLocation {
line,
character_offset: OneIndexed::from_zero_indexed(character_offset.to_usize()),
}
}
PositionEncoding::Utf16 => {
let up_to_character = &text[range];
up_to_character.encode_utf16().count()
let up_to_character = &text[TextRange::new(line_start, offset)];
let character = up_to_character.encode_utf16().count();
SourceLocation {
line,
character_offset: OneIndexed::from_zero_indexed(character),
}
}
PositionEncoding::Utf32 => {
let up_to_character = &text[range];
up_to_character.chars().count()
let up_to_character = &text[TextRange::new(line_start, offset)];
let character = up_to_character.chars().count();
SourceLocation {
line,
character_offset: OneIndexed::from_zero_indexed(character),
}
}
}
}
/// Returns the length of the line in characters, respecting the given encoding
pub fn line_len(&self, line: OneIndexed, text: &str, encoding: PositionEncoding) -> usize {
let line_range = self.line_range(line, text);
self.characters_between(line_range, text, encoding)
}
/// Return the number of lines in the source code.
pub fn line_count(&self) -> usize {
self.line_starts().len()
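The `character_offset` in a `SourceLocation` depends on the negotiated position encoding: UTF-8 counts bytes, UTF-16 counts code units, UTF-32 counts code points, and the ASCII fast path treats all three as equal. A hedged Python illustration of how the counts diverge on non-ASCII text:

```python
prefix = "naïve "  # text from the line start up to some offset

utf8 = len(prefix.encode("utf-8"))            # 7 bytes ("ï" takes two)
utf16 = len(prefix.encode("utf-16-le")) // 2  # 6 code units
utf32 = len(prefix)                           # 6 code points

# Astral-plane characters split UTF-16 and UTF-32 as well:
emoji = "😀"
assert len(emoji.encode("utf-16-le")) // 2 == 2  # two UTF-16 code units
assert len(emoji) == 1                           # one code point
```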

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_wasm"
version = "0.14.10"
version = "0.14.9"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -241,7 +241,7 @@ impl Workspace {
let range = msg.range().unwrap_or_default();
ExpandedMessage {
code: msg.secondary_code_or_id().to_string(),
message: msg.concise_message().to_string(),
message: msg.body().to_string(),
start_location: source_code
.source_location(range.start(), self.position_encoding)
.into(),

View File

@@ -42,7 +42,6 @@ wild = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_trivia = { workspace = true }
ty_module_resolver = { workspace = true }
dunce = { workspace = true }
insta = { workspace = true, features = ["filters"] }

Some files were not shown because too many files have changed in this diff.