Compare commits


1 commit

Author: Micha Reiser
SHA1: da6403b1bf
Message: [ty] Use diagnostic rendering for semantic token tests
Date: 2025-07-08 14:05:51 +02:00
1367 changed files with 27582 additions and 128071 deletions


@@ -143,12 +143,12 @@ jobs:
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
# NOTE: Do not exclude all Markdown files here, but rather use
# specific exclude patterns like 'docs/**', because tests for
# 'ty' are written in Markdown.
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
':!**/*.md' \
':crates/ty_python_semantic/resources/mdtest/**/*.md' \
':!docs/**' \
':!assets/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
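For context on the pathspec hunk above: `git diff --quiet` exits 0 when nothing matching the given pathspecs changed between the two commits, and 1 otherwise; the `:!` magic prefix marks an exclude pattern. A minimal sketch of that detection pattern, with illustrative paths (not the exact pathspec list from this workflow):

```yaml
- name: Detect relevant changes
  id: check
  env:
    MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
  run: |
    # The exit status of `git diff --quiet` drives the output:
    # 0 means no matching paths changed; ':!' excludes docs and
    # assets from the comparison entirely.
    if git diff --quiet "${MERGE_BASE}...HEAD" -- \
        ':!docs/**' \
        ':!assets/**' \
    ; then
      echo "changed=false" >> "$GITHUB_OUTPUT"
    else
      echo "changed=true" >> "$GITHUB_OUTPUT"
    fi
```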
@@ -238,13 +238,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-insta
- name: ty mdtests (GitHub annotations)
@@ -296,13 +296,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-insta
- name: "Run tests"
@@ -325,7 +325,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-nextest
- name: "Run tests"
@@ -381,7 +381,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Build"
run: cargo build --release --locked
@@ -406,12 +406,21 @@ jobs:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Build tests"
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-insta
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
MSRV: ${{ steps.msrv.outputs.value }}
run: cargo "+${MSRV}" test --no-run --all-features
run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest
cargo-fuzz-build:
name: "cargo fuzz build"
@@ -429,7 +438,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"
@@ -451,7 +460,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test
id: download-cached-binary
@@ -652,7 +661,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: Fuzz
env:
FORCE_COLOR: 1
@@ -682,7 +691,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
- uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -722,7 +731,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
@@ -765,7 +774,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@@ -897,13 +906,13 @@ jobs:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-codspeed
@@ -911,7 +920,7 @@ jobs:
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
@@ -930,13 +939,13 @@ jobs:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@f3a27926ea13d7be3ee2f4cbb925883cf9442b56 # v2.56.7
with:
tool: cargo-codspeed
@@ -944,7 +953,7 @@ jobs:
run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}


@@ -34,11 +34,11 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: Build ruff
# A debug build means the script runs slower once it gets started,


@@ -12,7 +12,6 @@ on:
- ".github/workflows/mypy_primer.yaml"
- ".github/workflows/mypy_primer_comment.yaml"
- "Cargo.lock"
- "!**.md"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
@@ -38,7 +37,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
@@ -81,7 +80,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:


@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*


@@ -1,25 +1,5 @@
name: Sync typeshed
# How this works:
#
# 1. A Linux worker:
# a. Checks out Ruff and typeshed
# b. Deletes the vendored typeshed stdlib stubs from Ruff
# c. Copies the latest versions of the stubs from typeshed
# d. Uses docstring-adder to sync all docstrings available on Linux
# e. Creates a new branch on the upstream astral-sh/ruff repository
# f. Commits the changes it's made and pushes them to the new upstream branch
# 2. Once the Linux worker is done, a Windows worker:
# a. Checks out the branch created by the Linux worker
# b. Syncs all docstrings available on Windows that are not available on Linux
# c. Commits the changes and pushes them to the same upstream branch
# 3. Once the Windows worker is done, a MacOS worker:
# a. Checks out the branch created by the Linux worker
# b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
# c. Commits the changes and pushes them to the same upstream branch
# d. Creates a PR against the `main` branch using the branch all three workers have pushed to
# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
on:
workflow_dispatch:
schedule:
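The removed comment block above describes a three-stage pipeline: a Linux worker syncs the stubs and Linux docstrings, a Windows worker adds Windows-only docstrings, and a macOS worker adds macOS-only docstrings and opens the PR. In GitHub Actions that shape is expressed with `needs:` chains; because the downstream jobs do not use `always()` in their `if:` conditions, a failure in an earlier job skips the later ones. A minimal sketch of the wiring, with illustrative job names and step bodies:

```yaml
# Sketch of the three-stage shape described in the removed comments;
# step bodies are placeholders, not the real sync commands.
jobs:
  sync:
    runs-on: ubuntu-latest
    steps:
      - run: echo "sync typeshed stubs and Linux docstrings"
  docstrings-windows:
    runs-on: windows-latest
    needs: [sync]  # skipped automatically when `sync` fails (no always())
    steps:
      - run: echo "sync Windows-only docstrings"
  docstrings-macos-and-pr:
    runs-on: macos-latest
    needs: [sync, docstrings-windows]
    steps:
      - run: echo "sync macOS-only docstrings, then open the PR"
```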
@@ -30,17 +10,7 @@ env:
FORCE_COLOR: 1
GH_TOKEN: ${{ github.token }}
# The name of the upstream branch that the first worker creates,
# and which all three workers push to.
UPSTREAM_BRANCH: typeshedbot/sync-typeshed
# The path to the directory that contains the vendored typeshed stubs,
# relative to the root of the Ruff repository.
VENDORED_TYPESHED: crates/ty_vendored/vendor/typeshed
jobs:
# Sync typeshed stubs, and sync all docstrings available on Linux.
# Push the changes to a new branch on the upstream repository.
sync:
name: Sync typeshed
runs-on: ubuntu-latest
@@ -49,6 +19,7 @@ jobs:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
@@ -65,130 +36,37 @@ jobs:
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Sync typeshed stubs
- name: Sync typeshed
id: sync
run: |
rm -rf "ruff/${VENDORED_TYPESHED}"
mkdir "ruff/${VENDORED_TYPESHED}"
cp typeshed/README.md "ruff/${VENDORED_TYPESHED}"
cp typeshed/LICENSE "ruff/${VENDORED_TYPESHED}"
# The pyproject.toml file is needed by a later job for the black configuration.
# It's deleted before creating the PR.
cp typeshed/pyproject.toml "ruff/${VENDORED_TYPESHED}"
cp -r typeshed/stdlib "ruff/${VENDORED_TYPESHED}/stdlib"
rm -rf "ruff/${VENDORED_TYPESHED}/stdlib/@tests"
git -C typeshed rev-parse HEAD > "ruff/${VENDORED_TYPESHED}/source_commit.txt"
cd ruff
git checkout -b "${UPSTREAM_BRANCH}"
git add .
git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
- name: Sync Linux docstrings
if: ${{ success() }}
run: |
cd ruff
./scripts/codemod_docstrings.sh
git commit -am "Sync Linux docstrings" --allow-empty
- name: Push the changes
id: commit
if: ${{ success() }}
run: git -C ruff push --force --set-upstream origin "${UPSTREAM_BRANCH}"
# Checkout the branch created by the sync job,
# and sync all docstrings available on Windows that are not available on Linux.
# Commit the changes and push them to the same branch.
docstrings-windows:
runs-on: windows-latest
timeout-minutes: 20
needs: [sync]
# Don't run the cron job on forks.
# The job will also be skipped if the sync job failed, because it's specified in `needs` above,
# and we haven't used `always()` in the `if` condition here
# (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- name: Sync Windows docstrings
id: docstrings
shell: bash
run: ./scripts/codemod_docstrings.sh
rm -rf ruff/crates/ty_vendored/vendor/typeshed
mkdir ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
- name: Commit the changes
if: ${{ steps.docstrings.outcome == 'success' }}
id: commit
if: ${{ steps.sync.outcome == 'success' }}
run: |
git commit -am "Sync Windows docstrings" --allow-empty
git push
# Checkout the branch created by the sync job,
# and sync all docstrings available on macOS that are not available on Linux or Windows.
# Push the changes to the same branch and create a PR against the `main` branch using that branch.
docstrings-macos-and-pr:
runs-on: macos-latest
timeout-minutes: 20
needs: [sync, docstrings-windows]
# Don't run the cron job on forks.
# The job will also be skipped if the sync or docstrings-windows jobs failed,
# because they're specified in `needs` above and we haven't used an `always()` condition in the `if` here
# (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- name: Sync macOS docstrings
run: ./scripts/codemod_docstrings.sh
- name: Commit and push the changes
if: ${{ success() }}
run: |
git commit -am "Sync macOS docstrings" --allow-empty
# Here we just reformat the codemodded stubs so that they are
# consistent with the other typeshed stubs around them.
# Typeshed formats code using black in their CI, so we just invoke
# black on the stubs the same way that typeshed does.
uvx black "${VENDORED_TYPESHED}/stdlib" --config "${VENDORED_TYPESHED}/pyproject.toml" || true
git commit -am "Format codemodded docstrings" --allow-empty
rm "${VENDORED_TYPESHED}/pyproject.toml"
git commit -am "Remove pyproject.toml file"
git push
cd ruff
git checkout -b typeshedbot/sync-typeshed
git add .
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
- name: Create a PR
if: ${{ success() }}
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
run: |
gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
cd ruff
git push --force origin typeshedbot/sync-typeshed
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"
create-issue-on-failure:
name: Create an issue if the typeshed sync failed
runs-on: ubuntu-latest
needs: [sync, docstrings-windows, docstrings-macos-and-pr]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
needs: [sync]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
permissions:
issues: write
steps:
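One detail worth noting in the PR-creation step above: `gh pr list ... --json id --jq length | grep 1 && exit 0` is an idempotence guard. With `--json id --jq length`, `gh pr list` prints the number of open PRs for the head branch, so the step bails out early when one already exists. A sketch of the same guard written more explicitly (the branch, title, and body are taken from the diff above; treat them as illustrative):

```yaml
- name: Create a PR unless one is already open
  env:
    GH_TOKEN: ${{ github.token }}
  run: |
    # `--jq length` prints the count of matching PRs; skip creating
    # a duplicate when one is already open for this head branch.
    count="$(gh pr list --repo "$GITHUB_REPOSITORY" \
      --head typeshedbot/sync-typeshed --json id --jq length)"
    if [ "$count" -gt 0 ]; then
      echo "PR already open; nothing to do"
      exit 0
    fi
    gh pr create --title "[ty] Sync vendored typeshed stubs" \
      --body "Close and reopen this PR to trigger CI" --label "ty"
```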


@@ -17,7 +17,6 @@ env:
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
REF_NAME: ${{ github.ref_name }}
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
jobs:
ty-ecosystem-analyzer:
@@ -33,7 +32,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
@@ -64,75 +63,32 @@ jobs:
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@9c34dc514ee9aef6735db1dfebb80f63acbc3440"
ecosystem-analyzer \
--repository ruff \
diff \
--projects-old ruff/projects_old.txt \
--projects-new ruff/projects_new.txt \
--old old_commit \
--new new_commit \
--output-old diagnostics-old.json \
--output-new diagnostics-new.json
analyze \
--projects ruff/projects_old.txt \
--commit old_commit \
--output diagnostics_old.json
mkdir dist
ecosystem-analyzer \
--repository ruff \
analyze \
--projects ruff/projects_new.txt \
--commit new_commit \
--output diagnostics_new.json
ecosystem-analyzer \
generate-diff \
diagnostics-old.json \
diagnostics-new.json \
diagnostics_old.json \
diagnostics_new.json \
--old-name "main (merge base)" \
--new-name "$REF_NAME" \
--output-html dist/diff.html
--output-html diff.html
ecosystem-analyzer \
generate-diff-statistics \
diagnostics-old.json \
diagnostics-new.json \
--old-name "main (merge base)" \
--new-name "$REF_NAME" \
--output diff-statistics.md
echo '## `ecosystem-analyzer` results' > comment.md
echo >> comment.md
cat diff-statistics.md >> comment.md
cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"
echo ${{ github.event.number }} > pr-number
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
id: deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages deploy dist --project-name=ty-ecosystem --branch ${{ github.head_ref }} --commit-hash ${GITHUB_SHA}
- name: "Append deployment URL"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
env:
DEPLOYMENT_URL: ${{ steps.deploy.outputs.pages-deployment-alias-url }}
run: |
echo >> comment.md
echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)**" >> comment.md
- name: Upload comment
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: comment.md
path: comment.md
- name: Upload pr-number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr-number
path: pr-number
- name: Upload diagnostics diff
- name: Upload HTML diff report
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: diff.html
path: dist/diff.html
path: diff.html


@@ -1,85 +0,0 @@
name: PR comment (ty ecosystem-analyzer)
on: # zizmor: ignore[dangerous-triggers]
workflow_run:
workflows: [ty ecosystem-analyzer]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The ty ecosystem-analyzer workflow that triggers the workflow run
required: true
jobs:
comment:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download PR number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true
- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download comment.md"
id: download-comment
if: steps.pr-number.outputs.pr-number
with:
name: comment.md
workflow: ty-ecosystem-analyzer.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/comment
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true
- name: Generate comment content
id: generate-comment
if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
run: |
# Guard against malicious ty ecosystem-analyzer results that symlink to a secret
# file on this runner
if [[ -L pr/comment/comment.md ]]
then
echo "Error: comment.md cannot be a symlink"
exit 1
fi
# Note: this identifier is used to find the comment to update on subsequent runs
echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
echo >> comment.md
cat pr/comment/comment.md >> comment.md
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.md >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"
- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.md
edit-mode: replace
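Two patterns in the deleted comment workflow above are worth calling out: the symlink check guards against a malicious artifact that points at a file on the runner, and the `comment<<EOF` lines use GitHub Actions' heredoc-style delimiter syntax for writing a multiline value to `$GITHUB_OUTPUT`. A condensed sketch of both, with illustrative file names:

```yaml
- name: Publish multiline output safely
  id: body
  run: |
    # Refuse symlinked artifact content; otherwise the `cat` below
    # could be pointed at a secret file on this runner.
    if [[ -L artifact/body.md ]]; then
      echo "Error: body.md cannot be a symlink"
      exit 1
    fi
    # Multiline step outputs require a heredoc-style delimiter.
    {
      echo 'comment<<EOF'
      cat artifact/body.md
      echo 'EOF'
    } >> "$GITHUB_OUTPUT"
```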


@@ -1,76 +0,0 @@
name: ty ecosystem-report
permissions: {}
on:
workflow_dispatch:
schedule:
# Run every Wednesday at 5:00 UTC:
- cron: 0 5 * * 3
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
jobs:
ty-ecosystem-report:
name: Create ecosystem report
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"
- name: Install Rust toolchain
run: rustup show
- name: Create report
shell: bash
run: |
cd ruff
echo "Enabling configuration overloads (see .github/mypy-primer-ty.toml)"
mkdir -p ~/.config/ty
cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
ecosystem-analyzer \
--verbose \
--repository ruff \
analyze \
--projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
--output ecosystem-diagnostics.json
mkdir dist
ecosystem-analyzer \
generate-report \
--max-diagnostics-per-project=1200 \
ecosystem-diagnostics.json \
--output dist/index.html
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
id: deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}


@@ -1,109 +0,0 @@
name: Run typing conformance
permissions: {}
on:
pull_request:
paths:
- "crates/ty*/**"
- "crates/ruff_db"
- "crates/ruff_python_ast"
- "crates/ruff_python_parser"
- ".github/workflows/typing_conformance.yaml"
- ".github/workflows/typing_conformance_comment.yaml"
- "Cargo.lock"
- "!**.md"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
jobs:
typing_conformance:
name: Compute diagnostic diff
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 10
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
repository: python/typing
ref: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
path: typing
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"
- name: Install Rust toolchain
run: rustup show
- name: Compute diagnostic diff
shell: bash
run: |
RUFF_DIR="$GITHUB_WORKSPACE/ruff"
# Build the executable for the old and new commit
(
cd ruff
echo "new commit"
git checkout -b new_commit "${{ github.event.pull_request.head.sha }}"
git rev-list --format=%s --max-count=1 new_commit
cargo build --release --bin ty
mv target/release/ty ty-new
echo "old commit (merge base)"
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
git checkout -b old_commit "$MERGE_BASE"
git rev-list --format=%s --max-count=1 old_commit
cargo build --release --bin ty
mv target/release/ty ty-old
)
(
cd typing/conformance/tests
echo "Running ty on old commit (merge base)"
"$RUFF_DIR/ty-old" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/old-output.txt" 2>&1 || true
echo "Running ty on new commit"
"$RUFF_DIR/ty-new" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/new-output.txt" 2>&1 || true
)
if ! diff -u old-output.txt new-output.txt > typing_conformance_diagnostics.diff; then
echo "Differences found between base and PR"
else
echo "No differences found"
touch typing_conformance_diagnostics.diff
fi
echo ${{ github.event.number }} > pr-number
- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: typing_conformance_diagnostics_diff
path: typing_conformance_diagnostics.diff
- name: Upload pr-number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr-number
path: pr-number
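The "Compute diagnostic diff" step above leans on `diff`'s exit status: `diff -u old new` exits 0 when the files match and 1 when they differ, so `if ! diff ...` takes the "differences found" branch. A minimal sketch of that pattern, with illustrative file names:

```yaml
- name: Compare old and new diagnostics
  shell: bash
  run: |
    # `diff -u` exits 1 when the outputs differ and 0 when they are
    # identical. The redirection creates the report file either way,
    # so the later upload step always finds it.
    if ! diff -u old-output.txt new-output.txt > diagnostics.diff; then
      echo "Differences found between base and PR"
    else
      echo "No differences found"
    fi
```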


@@ -1,97 +0,0 @@
name: PR comment (typing_conformance)
on: # zizmor: ignore[dangerous-triggers]
workflow_run:
workflows: [Run typing conformance]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The typing_conformance workflow that triggers the workflow run
required: true
jobs:
comment:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download PR number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true
- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download typing_conformance results"
id: download-typing_conformance_diff
if: steps.pr-number.outputs.pr-number
with:
name: typing_conformance_diagnostics_diff
workflow: typing_conformance.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/typing_conformance_diagnostics_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true
- name: Generate comment content
id: generate-comment
if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
run: |
# Guard against malicious typing_conformance results that symlink to a secret
# file on this runner
if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
then
echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
exit 1
fi
# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt
echo '## Diagnostic diff on typing conformance tests' >> comment.txt
if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
fi
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"
- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace

.github/zizmor.yml (vendored)

@@ -10,8 +10,6 @@ rules:
ignore:
- build-docker.yml
- publish-playground.yml
- ty-ecosystem-analyzer.yaml
- ty-ecosystem-report.yaml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...


@@ -6,7 +6,7 @@ exclude: |
crates/ty_vendored/vendor/.*|
crates/ty_project/resources/.*|
crates/ty_python_semantic/resources/corpus/.*|
crates/ty/docs/(configuration|rules|cli|environment).md|
crates/ty/docs/(configuration|rules|cli).md|
crates/ruff_benchmark/resources/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
@@ -81,10 +81,10 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.5
rev: v0.12.2
hooks:
- id: ruff-format
- id: ruff-check
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi]
require_serial: true
@@ -128,10 +128,5 @@ repos:
# but the integration only works if shellcheck is installed
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.10.0.1
hooks:
- id: shellcheck
ci:
skip: [cargo-fmt, dev-generate-all]
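For reference, the ruff-pre-commit hunk above reflects the hook rename in newer ruff-pre-commit releases: the lint hook id changed from `ruff` to `ruff-check`, while `ruff-format` is unchanged. A minimal standalone config using the newer ids (the `rev` is the one from this diff; the `args` line is optional):

```yaml
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.12.5
    hooks:
      - id: ruff-format  # formatter
      - id: ruff-check   # linter (formerly `id: ruff`)
        args: [--fix, --exit-non-zero-on-fix]
```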


@@ -1,112 +1,5 @@
# Changelog
## 0.12.7
This is a follow-up release to 0.12.6. Because of an issue in the package metadata, 0.12.6 failed to publish fully to PyPI and has been yanked. Similarly, there is no GitHub release or Git tag for 0.12.6. The contents of the 0.12.7 release are identical to 0.12.6, except for the updated metadata.
## 0.12.6
### Preview features
- \[`flake8-commas`\] Add support for trailing comma checks in type parameter lists (`COM812`, `COM819`) ([#19390](https://github.com/astral-sh/ruff/pull/19390))
- \[`pylint`\] Implement auto-fix for `missing-maxsplit-arg` (`PLC0207`) ([#19387](https://github.com/astral-sh/ruff/pull/19387))
- \[`ruff`\] Offer fixes for `RUF039` in more cases ([#19065](https://github.com/astral-sh/ruff/pull/19065))
### Bug fixes
- Support `.pyi` files in ruff analyze graph ([#19611](https://github.com/astral-sh/ruff/pull/19611))
- \[`flake8-pyi`\] Preserve inline comment in ellipsis removal (`PYI013`) ([#19399](https://github.com/astral-sh/ruff/pull/19399))
- \[`perflint`\] Ignore rule if target is `global` or `nonlocal` (`PERF401`) ([#19539](https://github.com/astral-sh/ruff/pull/19539))
- \[`pyupgrade`\] Fix `UP030` to avoid modifying double curly braces in format strings ([#19378](https://github.com/astral-sh/ruff/pull/19378))
- \[`refurb`\] Ignore decorated functions for `FURB118` ([#19339](https://github.com/astral-sh/ruff/pull/19339))
- \[`refurb`\] Mark `int` and `bool` cases for `Decimal.from_float` as safe fixes (`FURB164`) ([#19468](https://github.com/astral-sh/ruff/pull/19468))
- \[`ruff`\] Fix `RUF033` for named default expressions ([#19115](https://github.com/astral-sh/ruff/pull/19115))
### Rule changes
- \[`flake8-blind-except`\] Change `BLE001` to permit `logging.critical(..., exc_info=True)` ([#19520](https://github.com/astral-sh/ruff/pull/19520))
### Performance
- Add support for specifying minimum dots in detected string imports ([#19538](https://github.com/astral-sh/ruff/pull/19538))
## 0.12.5
### Preview features
- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))
### Bug fixes
- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))
### Rule changes
- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))
## 0.12.4
### Preview features
- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))
### Bug fixes
- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))
### Rule changes
- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))
### Documentation
- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288),[#19272](https://github.com/astral-sh/ruff/pull/19272),[#19291](https://github.com/astral-sh/ruff/pull/19291),[#19296](https://github.com/astral-sh/ruff/pull/19296),[#19292](https://github.com/astral-sh/ruff/pull/19292),[#19295](https://github.com/astral-sh/ruff/pull/19295),[#19297](https://github.com/astral-sh/ruff/pull/19297),[#19309](https://github.com/astral-sh/ruff/pull/19309))
## 0.12.3
### Preview features
- \[`flake8-bugbear`\] Support non-context-manager calls in `B017` ([#19063](https://github.com/astral-sh/ruff/pull/19063))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH100`, `PTH106`, `PTH107`, `PTH108`, `PTH110`, `PTH111`, `PTH112`, `PTH113`, `PTH114`, `PTH115`, `PTH117`, `PTH119`, `PTH120` ([#19213](https://github.com/astral-sh/ruff/pull/19213))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH203`, `PTH204`, `PTH205` ([#18922](https://github.com/astral-sh/ruff/pull/18922))
### Bug fixes
- \[`flake8-return`\] Fix false-positive for variables used inside nested functions in `RET504` ([#18433](https://github.com/astral-sh/ruff/pull/18433))
- Treat form feed as valid whitespace before a line continuation ([#19220](https://github.com/astral-sh/ruff/pull/19220))
- \[`flake8-type-checking`\] Fix syntax error introduced by fix (`TC008`) ([#19150](https://github.com/astral-sh/ruff/pull/19150))
- \[`pyupgrade`\] Keyword arguments in `super` should suppress the `UP008` fix ([#19131](https://github.com/astral-sh/ruff/pull/19131))
### Documentation
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI007`, `PYI008`) ([#19103](https://github.com/astral-sh/ruff/pull/19103))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM116`) ([#19111](https://github.com/astral-sh/ruff/pull/19111))
- \[`flake8-type-checking`\] Make example error out-of-the-box (`TC001`) ([#19151](https://github.com/astral-sh/ruff/pull/19151))
- \[`flake8-use-pathlib`\] Make example error out-of-the-box (`PTH210`) ([#19189](https://github.com/astral-sh/ruff/pull/19189))
- \[`pycodestyle`\] Make example error out-of-the-box (`E272`) ([#19191](https://github.com/astral-sh/ruff/pull/19191))
- \[`pycodestyle`\] Make example not raise unnecessary `SyntaxError` (`E114`) ([#19190](https://github.com/astral-sh/ruff/pull/19190))
- \[`pydoclint`\] Make example error out-of-the-box (`DOC501`) ([#19218](https://github.com/astral-sh/ruff/pull/19218))
- \[`pylint`, `pyupgrade`\] Fix syntax errors in examples (`PLW1501`, `UP028`) ([#19127](https://github.com/astral-sh/ruff/pull/19127))
- \[`pylint`\] Update `missing-maxsplit-arg` docs and error to suggest proper usage (`PLC0207`) ([#18949](https://github.com/astral-sh/ruff/pull/18949))
- \[`flake8-bandit`\] Make example error out-of-the-box (`S412`) ([#19241](https://github.com/astral-sh/ruff/pull/19241))
## 0.12.2
### Preview features


@@ -266,13 +266,6 @@ Finally, regenerate the documentation and generated code with `cargo dev generat
## MkDocs
> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.
To preview any changes to the documentation locally:
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

Cargo.lock (generated)

@@ -261,18 +261,6 @@ version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
[[package]]
name = "bitvec"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
dependencies = [
"funty",
"radium",
"tap",
"wyz",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@@ -408,9 +396,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.41"
version = "4.5.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
dependencies = [
"clap_builder",
"clap_derive",
@@ -418,9 +406,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.41"
version = "4.5.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
dependencies = [
"anstream",
"anstyle",
@@ -461,9 +449,9 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "4.5.41"
version = "4.5.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
dependencies = [
"heck",
"proc-macro2",
@@ -530,7 +518,7 @@ dependencies = [
"ciborium",
"clap",
"codspeed",
"criterion-plot 0.5.0",
"criterion-plot",
"is-terminal",
"itertools 0.10.5",
"num-traits",
@@ -603,7 +591,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -612,7 +600,7 @@ version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -692,6 +680,11 @@ name = "countme"
version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
dependencies = [
"dashmap 5.5.3",
"once_cell",
"rustc-hash 1.1.0",
]
[[package]]
name = "cpufeatures"
@@ -713,15 +706,15 @@ dependencies = [
[[package]]
name = "criterion"
version = "0.7.0"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1c047a62b0cc3e145fa84415a3191f628e980b194c2755aa12300a4e6cbd928"
checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679"
dependencies = [
"anes",
"cast",
"ciborium",
"clap",
"criterion-plot 0.6.0",
"criterion-plot",
"itertools 0.13.0",
"num-traits",
"oorandom",
@@ -742,16 +735,6 @@ dependencies = [
"itertools 0.10.5",
]
[[package]]
name = "criterion-plot"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b1bcc0dc7dfae599d84ad0b1a55f80cde8af3725da8313b528da95ef783e338"
dependencies = [
"cast",
"itertools 0.13.0",
]
[[package]]
name = "crossbeam"
version = "0.8.4"
@@ -869,6 +852,19 @@ dependencies = [
"syn",
]
[[package]]
name = "dashmap"
version = "5.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
"hashbrown 0.14.5",
"lock_api",
"once_cell",
"parking_lot_core",
]
[[package]]
name = "dashmap"
version = "6.1.0"
@@ -955,7 +951,7 @@ dependencies = [
"libc",
"option-ext",
"redox_users",
"windows-sys 0.60.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -1035,7 +1031,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18"
dependencies = [
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -1143,12 +1139,6 @@ dependencies = [
"libc",
]
[[package]]
name = "funty"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "generic-array"
version = "0.14.7"
@@ -1161,9 +1151,9 @@ dependencies = [
[[package]]
name = "get-size-derive2"
version = "0.6.1"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca171f9f8ed2f416ac044de2dc4acde3e356662a14ac990345639653bdc7fc28"
checksum = "1aac2af9f9a6a50e31b1e541d05b7925add83d3982c2793193fe9d4ee584323c"
dependencies = [
"attribute-derive",
"quote",
@@ -1172,9 +1162,9 @@ dependencies = [
[[package]]
name = "get-size2"
version = "0.6.1"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "965bc5c1c5fe05c5bbd398bb9b3f0f14d750261ebdd1af959f2c8a603fedb5ad"
checksum = "624a0312efd19e1c45922dfcc2d6806d3ffc4bca261f89f31fcc4f63f438d885"
dependencies = [
"compact_str",
"get-size-derive2",
@@ -1585,15 +1575,6 @@ dependencies = [
"memoffset",
]
[[package]]
name = "inventory"
version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab08d7cd2c5897f2c949e5383ea7c7db03fb19130ffcfbf7eda795137ae3cb83"
dependencies = [
"rustversion",
]
[[package]]
name = "is-docker"
version = "0.2.0"
@@ -1623,7 +1604,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi 0.5.1",
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -1687,7 +1668,7 @@ dependencies = [
"portable-atomic",
"portable-atomic-util",
"serde",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -2160,6 +2141,16 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "papaya"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
dependencies = [
"equivalent",
"seize",
]
[[package]]
name = "parking_lot"
version = "0.12.3"
@@ -2271,7 +2262,7 @@ dependencies = [
"once_cell",
"pep440_rs",
"regex",
"rustc-hash",
"rustc-hash 2.1.1",
"serde",
"smallvec",
"thiserror 1.0.69",
@@ -2492,7 +2483,7 @@ dependencies = [
"pep508_rs",
"serde",
"thiserror 2.0.12",
"toml 0.8.23",
"toml",
]
[[package]]
@@ -2576,12 +2567,6 @@ version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
[[package]]
name = "radium"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
[[package]]
name = "rand"
version = "0.8.5"
@@ -2595,9 +2580,9 @@ dependencies = [
[[package]]
name = "rand"
version = "0.9.2"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.3",
@@ -2744,7 +2729,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.12.7"
version = "0.12.2"
dependencies = [
"anyhow",
"argfile",
@@ -2787,7 +2772,7 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
"rustc-hash",
"rustc-hash 2.1.1",
"serde",
"serde_json",
"shellexpand",
@@ -2796,7 +2781,7 @@ dependencies = [
"test-case",
"thiserror 2.0.12",
"tikv-jemallocator",
"toml 0.9.2",
"toml",
"tracing",
"walkdir",
"wild",
@@ -2812,7 +2797,7 @@ dependencies = [
"ruff_annotate_snippets",
"serde",
"snapbox",
"toml 0.9.2",
"toml",
"tryfn",
"unicode-width 0.2.1",
]
@@ -2833,7 +2818,7 @@ dependencies = [
"ruff_python_formatter",
"ruff_python_parser",
"ruff_python_trivia",
"rustc-hash",
"rustc-hash 2.1.1",
"serde",
"serde_json",
"tikv-jemallocator",
@@ -2861,7 +2846,8 @@ dependencies = [
"anstyle",
"arc-swap",
"camino",
"dashmap",
"countme",
"dashmap 6.1.0",
"dunce",
"etcetera",
"filetime",
@@ -2871,7 +2857,6 @@ dependencies = [
"insta",
"matchit",
"path-slash",
"quick-junit",
"ruff_annotate_snippets",
"ruff_cache",
"ruff_diagnostics",
@@ -2881,17 +2866,14 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"salsa",
"schemars",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.12",
"tracing",
"tracing-subscriber",
"ty_static",
"unicode-width 0.2.1",
"web-time",
"zip",
]
@@ -2929,13 +2911,12 @@ dependencies = [
"similar",
"strum",
"tempfile",
"toml 0.9.2",
"toml",
"tracing",
"tracing-indicatif",
"tracing-subscriber",
"ty",
"ty_project",
"ty_static",
"url",
]
@@ -2957,7 +2938,7 @@ dependencies = [
"ruff_cache",
"ruff_macros",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"schemars",
"serde",
"static_assertions",
@@ -2971,7 +2952,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"memchr",
"ruff_cache",
"ruff_db",
"ruff_linter",
@@ -2997,7 +2977,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.12.7"
version = "0.12.2"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3022,6 +3002,7 @@ dependencies = [
"pathdiff",
"pep440_rs",
"pyproject-toml",
"quick-junit",
"regex",
"ruff_annotate_snippets",
"ruff_cache",
@@ -3039,7 +3020,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"schemars",
"serde",
"serde_json",
@@ -3050,7 +3031,7 @@ dependencies = [
"tempfile",
"test-case",
"thiserror 2.0.12",
"toml 0.9.2",
"toml",
"typed-arena",
"unicode-normalization",
"unicode-width 0.2.1",
@@ -3076,7 +3057,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"itertools 0.14.0",
"rand 0.9.2",
"rand 0.9.1",
"ruff_diagnostics",
"ruff_source_file",
"ruff_text_size",
@@ -3111,7 +3092,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"salsa",
"schemars",
"serde",
@@ -3161,7 +3142,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"salsa",
"schemars",
"serde",
@@ -3210,7 +3191,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"serde",
"serde_json",
"static_assertions",
@@ -3234,7 +3215,7 @@ dependencies = [
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"schemars",
"serde",
"smallvec",
@@ -3295,12 +3276,12 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
"rustc-hash",
"rustc-hash 2.1.1",
"serde",
"serde_json",
"shellexpand",
"thiserror 2.0.12",
"toml 0.9.2",
"toml",
"tracing",
"tracing-log",
"tracing-subscriber",
@@ -3329,7 +3310,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.12.7"
version = "0.12.2"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3384,13 +3365,13 @@ dependencies = [
"ruff_python_semantic",
"ruff_python_stdlib",
"ruff_source_file",
"rustc-hash",
"rustc-hash 2.1.1",
"schemars",
"serde",
"shellexpand",
"strum",
"tempfile",
"toml 0.9.2",
"toml",
]
[[package]]
@@ -3403,6 +3384,12 @@ dependencies = [
"serde_derive",
]
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc-hash"
version = "2.1.1"
@@ -3425,7 +3412,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -3443,7 +3430,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
dependencies = [
"boxcar",
"compact_str",
@@ -3453,11 +3440,11 @@ dependencies = [
"hashlink",
"indexmap",
"intrusive-collections",
"inventory",
"papaya",
"parking_lot",
"portable-atomic",
"rayon",
"rustc-hash",
"rustc-hash 2.1.1",
"salsa-macro-rules",
"salsa-macros",
"smallvec",
@@ -3468,12 +3455,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
[[package]]
name = "salsa-macros"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
dependencies = [
"proc-macro2",
"quote",
@@ -3526,6 +3513,16 @@ version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "seize"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "serde"
version = "1.0.219"
@@ -3570,9 +3567,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.141"
version = "1.0.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
dependencies = [
"itoa",
"memchr",
@@ -3600,15 +3597,6 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_spanned"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83"
dependencies = [
"serde",
]
[[package]]
name = "serde_test"
version = "1.0.177"
@@ -3762,22 +3750,23 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.27.2"
version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.27.2"
version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7"
checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8"
dependencies = [
"heck",
"proc-macro2",
"quote",
"rustversion",
"syn",
]
@@ -3803,12 +3792,6 @@ dependencies = [
"syn",
]
[[package]]
name = "tap"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tempfile"
version = "3.20.0"
@@ -3819,7 +3802,7 @@ dependencies = [
"getrandom 0.3.3",
"once_cell",
"rustix",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -4019,26 +4002,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
dependencies = [
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.11",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac"
dependencies = [
"indexmap",
"serde",
"serde_spanned 1.0.0",
"toml_datetime 0.7.0",
"toml_parser",
"toml_writer",
"winnow",
]
[[package]]
name = "toml_datetime"
version = "0.6.11"
@@ -4048,15 +4016,6 @@ dependencies = [
"serde",
]
[[package]]
name = "toml_datetime"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.22.27"
@@ -4065,25 +4024,17 @@ checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [
"indexmap",
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.11",
"serde_spanned",
"toml_datetime",
"toml_write",
"winnow",
]
[[package]]
name = "toml_parser"
version = "1.0.1"
name = "toml_write"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30"
dependencies = [
"winnow",
]
[[package]]
name = "toml_writer"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64"
checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
[[package]]
name = "tracing"
@@ -4191,6 +4142,7 @@ dependencies = [
"clap",
"clap_complete_command",
"colored 3.0.0",
"countme",
"crossbeam",
"ctrlc",
"dunce",
@@ -4206,14 +4158,13 @@ dependencies = [
"ruff_python_trivia",
"salsa",
"tempfile",
"toml 0.9.2",
"toml",
"tracing",
"tracing-flame",
"tracing-subscriber",
"ty_project",
"ty_python_semantic",
"ty_server",
"ty_static",
"wild",
]
@@ -4223,19 +4174,16 @@ version = "0.0.0"
dependencies = [
"bitflags 2.9.1",
"insta",
"itertools 0.14.0",
"regex",
"ruff_db",
"ruff_python_ast",
"ruff_python_parser",
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"salsa",
"smallvec",
"tracing",
"ty_project",
"ty_python_semantic",
"ty_vendored",
]
[[package]]
@@ -4262,13 +4210,14 @@ dependencies = [
"ruff_python_ast",
"ruff_python_formatter",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"salsa",
"schemars",
"serde",
"thiserror 2.0.12",
"toml 0.9.2",
"toml",
"tracing",
"ty_ide",
"ty_python_semantic",
"ty_vendored",
]
@@ -4279,10 +4228,10 @@ version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.9.1",
"bitvec",
"camino",
"colored 3.0.0",
"compact_str",
"countme",
"dir-test",
"drop_bomb",
"get-size2",
@@ -4306,7 +4255,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"salsa",
"schemars",
"serde",
@@ -4319,7 +4268,6 @@ dependencies = [
"thiserror 2.0.12",
"tracing",
"ty_python_semantic",
"ty_static",
"ty_test",
"ty_vendored",
]
@@ -4329,27 +4277,20 @@ name = "ty_server"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.9.1",
"crossbeam",
"dunce",
"insta",
"jod-thread",
"libc",
"lsp-server",
"lsp-types",
"regex",
"ruff_db",
"ruff_notebook",
"ruff_python_ast",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"salsa",
"serde",
"serde_json",
"shellexpand",
"tempfile",
"thiserror 2.0.12",
"tracing",
"tracing-subscriber",
"ty_ide",
@@ -4358,13 +4299,6 @@ dependencies = [
"ty_vendored",
]
[[package]]
name = "ty_static"
version = "0.0.1"
dependencies = [
"ruff_macros",
]
[[package]]
name = "ty_test"
version = "0.0.0"
@@ -4383,17 +4317,16 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"rustc-hash 2.1.1",
"rustc-stable-hash",
"salsa",
"serde",
"smallvec",
"tempfile",
"thiserror 2.0.12",
"toml 0.9.2",
"toml",
"tracing",
"ty_python_semantic",
"ty_static",
"ty_vendored",
]
@@ -4608,7 +4541,7 @@ checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
dependencies = [
"getrandom 0.3.3",
"js-sys",
"rand 0.9.2",
"rand 0.9.1",
"uuid-macro-internal",
"wasm-bindgen",
]
@@ -4884,7 +4817,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -5137,15 +5070,6 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "wyz"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
dependencies = [
"tap",
]
[[package]]
name = "yansi"
version = "1.0.1"


@@ -44,7 +44,6 @@ ty_ide = { path = "crates/ty_ide" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }
ty_server = { path = "crates/ty_server" }
ty_static = { path = "crates/ty_static" }
ty_test = { path = "crates/ty_test" }
ty_vendored = { path = "crates/ty_vendored" }
@@ -57,9 +56,6 @@ assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
"alloc",
] }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
@@ -73,7 +69,7 @@ console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
criterion = { version = "0.6.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
@@ -83,11 +79,11 @@ etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.6.0", features = [
get-size2 = { version = "0.5.0", features = [
"derive",
"smallvec",
"hashbrown",
"compact-str",
"compact-str"
] }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
@@ -141,7 +137,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "dba66f1a37acca014c2402f231ed5b361bd7d8fe" }
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "fc00eba89e5dcaa5edba51c41aa5f309b5cb126b" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -153,7 +149,7 @@ serde_with = { version = "3.6.0", default-features = false, features = [
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2", features = ["union", "const_generics", "const_new"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
"diff",
"term-svg",
@@ -168,7 +164,7 @@ tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.9.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.11" }
@@ -177,7 +173,7 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
"env-filter",
"fmt",
"ansi",
"smallvec",
"smallvec"
] }
tryfn = { version = "0.2.1" }
typed-arena = { version = "2.0.2" }
@@ -187,7 +183,11 @@ unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
@@ -222,8 +222,8 @@ must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
needless_raw_string_hashes = "allow"
# Disallowed restriction lints


@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.7/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.7/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.2/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.7
rev: v0.12.2
hooks:
# Run the linter.
- id: ruff-check
@@ -430,7 +430,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- Capital One ([datacompy](https://github.com/capitalone/datacompy))
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))


@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.7"
version = "0.12.2"
publish = true
authors = { workspace = true }
edition = { workspace = true }


@@ -169,9 +169,6 @@ pub struct AnalyzeGraphCommand {
/// Attempt to detect imports from string literals.
#[clap(long)]
detect_string_imports: bool,
/// The minimum number of dots in a string import to consider it a valid import.
#[clap(long)]
min_dots: Option<usize>,
/// Enable preview mode. Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
@@ -811,7 +808,6 @@ impl AnalyzeGraphCommand {
} else {
None
},
string_imports_min_dots: self.min_dots,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
target_version: self.target_version.map(ast::PythonVersion::from),
..ExplicitConfigOverrides::default()
@@ -1309,7 +1305,6 @@ struct ExplicitConfigOverrides {
show_fixes: Option<bool>,
extension: Option<Vec<ExtensionPair>>,
detect_string_imports: Option<bool>,
string_imports_min_dots: Option<usize>,
}
impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1397,9 +1392,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
if let Some(detect_string_imports) = &self.detect_string_imports {
config.analyze.detect_string_imports = Some(*detect_string_imports);
}
if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
}
config
}

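One side of the hunk above wires an optional `--min-dots` flag from clap through `ExplicitConfigOverrides` into `config.analyze.string_imports_min_dots`. A minimal sketch of that clap pattern, using a hypothetical standalone struct rather than the actual ruff CLI:

```rust
use clap::Parser;

/// Hypothetical CLI mirroring the pattern in the hunk above.
#[derive(Parser)]
struct Args {
    /// The minimum number of dots in a string import to consider it valid.
    /// `Option<usize>` keeps "flag not given" distinct from any explicit
    /// value, so the config layer only overrides when the user asked for it.
    #[clap(long)]
    min_dots: Option<usize>,
}

fn main() {
    let args = Args::parse();
    if let Some(min_dots) = args.min_dots {
        println!("overriding string_imports_min_dots = {min_dots}");
    }
}
```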

@@ -454,7 +454,7 @@ impl LintCacheData {
CacheMessage {
rule,
body: msg.body().to_string(),
suggestion: msg.first_help_text().map(ToString::to_string),
suggestion: msg.suggestion().map(ToString::to_string),
range: msg.expect_range(),
parent: msg.parent(),
fix: msg.fix().cloned(),
@@ -681,7 +681,7 @@ mod tests {
UnsafeFixes::Enabled,
)
.unwrap();
if diagnostics.inner.iter().any(Diagnostic::is_invalid_syntax) {
if diagnostics.inner.iter().any(Diagnostic::is_syntax_error) {
parse_errors.push(path.clone());
}
paths.push(path);


@@ -102,7 +102,7 @@ pub(crate) fn analyze_graph(
// Resolve the per-file settings.
let settings = resolver.resolve(path);
let string_imports = settings.analyze.string_imports;
let string_imports = settings.analyze.detect_string_imports;
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();
// Skip excluded files.


@@ -9,15 +9,15 @@ use ignore::Error;
use log::{debug, error, warn};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
use ruff_linter::message::diagnostic_from_violation;
use rustc_hash::FxHashMap;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::panic::catch_unwind;
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::{IOError, Violation, fs, warn_user_once};
use ruff_linter::{IOError, fs, warn_user_once};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::resolver::{
@@ -129,7 +129,11 @@ pub(crate) fn check(
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
Diagnostics::new(
vec![IOError { message }.into_diagnostic(TextRange::default(), &dummy)],
vec![diagnostic_from_violation(
IOError { message },
TextRange::default(),
&dummy,
)],
FxHashMap::default(),
)
} else {
@@ -162,9 +166,7 @@ pub(crate) fn check(
|a, b| (a.0 + b.0, a.1 + b.1),
);
all_diagnostics
.inner
.sort_by(Diagnostic::ruff_start_ordering);
all_diagnostics.inner.sort();
// Store the caches.
caches.persist()?;
@@ -279,7 +281,6 @@ mod test {
TextEmitter::default()
.with_show_fix_status(true)
.with_color(false)
.emit(
&mut output,
&diagnostics.inner,

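The two sort calls in this hunk differ in where the ordering logic lives: one side relies on `Diagnostic`'s `Ord` implementation, the other on the explicit `ruff_start_ordering` helper whose body appears later in this diff. A sketch of what that comparison does, assuming `diagnostics: &mut Vec<Diagnostic>` is in scope:

```rust
// Order diagnostics by (source file, start offset) of their primary span.
// Per the doc comment later in this diff, the `expect_*` accessors panic
// if a diagnostic has no primary span or range.
diagnostics.sort_by(|a, b| {
    (a.expect_ruff_source_file(), a.expect_range().start())
        .cmp(&(b.expect_ruff_source_file(), b.expect_range().start()))
});
```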

@@ -1,7 +1,6 @@
use std::path::Path;
use anyhow::Result;
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::packaging;
use ruff_linter::settings::flags;
@@ -53,8 +52,6 @@ pub(crate) fn check_stdin(
noqa,
fix_mode,
)?;
diagnostics
.inner
.sort_unstable_by(Diagnostic::ruff_start_ordering);
diagnostics.inner.sort_unstable();
Ok(diagnostics)
}


@@ -13,13 +13,13 @@ use log::{debug, warn};
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::codes::Rule;
use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
use ruff_linter::message::create_syntax_error_diagnostic;
use ruff_linter::message::{create_syntax_error_diagnostic, diagnostic_from_violation};
use ruff_linter::package::PackageRoot;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{IOError, Violation, fs};
use ruff_linter::{IOError, fs};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
@@ -62,12 +62,13 @@ impl Diagnostics {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let source_file = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![
vec![diagnostic_from_violation(
IOError {
message: err.to_string(),
}
.into_diagnostic(TextRange::default(), &source_file),
],
},
TextRange::default(),
&source_file,
)],
FxHashMap::default(),
)
} else {

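Both IO-error paths in this diff construct the same placeholder diagnostic, either via `IOError::into_diagnostic` or the free function `diagnostic_from_violation`. A sketch of the free-function form, built only from names that appear in these hunks:

```rust
use ruff_linter::IOError;
use ruff_linter::message::diagnostic_from_violation;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;

fn io_error_diagnostic(message: String) {
    // An empty dummy file gives the diagnostic a source to point at even
    // though the real file could not be read.
    let dummy = SourceFileBuilder::new("input.py", "").finish();
    let diag = diagnostic_from_violation(IOError { message }, TextRange::default(), &dummy);
    eprintln!("{}", diag.body());
}
```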

@@ -131,7 +131,6 @@ pub fn run(
}: Args,
) -> Result<ExitStatus> {
{
ruff_db::set_program_version(crate::version::version().to_string()).unwrap();
let default_panic_hook = std::panic::take_hook();
std::panic::set_hook(Box::new(move |info| {
#[expect(clippy::print_stderr)]
@@ -440,7 +439,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
if cli.statistics {
printer.write_statistics(&diagnostics, &mut summary_writer)?;
} else {
printer.write_once(&diagnostics, &mut summary_writer, preview)?;
printer.write_once(&diagnostics, &mut summary_writer)?;
}
if !cli.exit_zero {


@@ -9,13 +9,12 @@ use itertools::{Itertools, iterate};
use ruff_linter::linter::FixTable;
use serde::Serialize;
use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
};
use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, SarifEmitter,
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, RdjsonEmitter, SarifEmitter,
TextEmitter,
};
use ruff_linter::notify_user;
@@ -203,7 +202,6 @@ impl Printer {
&self,
diagnostics: &Diagnostics,
writer: &mut dyn Write,
preview: bool,
) -> Result<()> {
if matches!(self.log_level, LogLevel::Silent) {
return Ok(());
@@ -231,32 +229,16 @@ impl Printer {
match self.format {
OutputFormat::Json => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Json)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JsonEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Rdjson => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Rdjson)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
RdjsonEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::JsonLines => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::JsonLines)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JsonLinesEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Junit => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Junit)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JunitEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Concise | OutputFormat::Full => {
TextEmitter::default()
@@ -264,7 +246,6 @@ impl Printer {
.with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
.with_show_source(self.format == OutputFormat::Full)
.with_unsafe_fixes(self.unsafe_fixes)
.with_preview(preview)
.emit(writer, &diagnostics.inner, &context)?;
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
@@ -299,18 +280,10 @@ impl Printer {
GitlabEmitter::default().emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Pylint => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Pylint)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
PylintEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Azure => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Azure)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
AzureEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.inner, &context)?;

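One side of this hunk collapses the per-format emitters into a single rendering pipeline: build a `DisplayDiagnosticConfig` with the right `DiagnosticFormat` variant, wrap the diagnostics in `DisplayDiagnostics`, and `write!` the result. The repeated four-line pattern could be distilled into a helper like this hypothetical sketch (the types and builder methods are taken from the hunk; the helper itself is not part of the diff):

```rust
fn render_with(
    writer: &mut dyn Write,
    context: &EmitterContext,
    diagnostics: &[Diagnostic],
    format: DiagnosticFormat,
    preview: bool,
) -> Result<()> {
    let config = DisplayDiagnosticConfig::default()
        .format(format)
        .preview(preview);
    // All formats share the same renderer; only the variant differs.
    let value = DisplayDiagnostics::new(context, &config, diagnostics);
    write!(writer, "{value}")?;
    Ok(())
}
```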

@@ -57,40 +57,33 @@ fn dependencies() -> Result<()> {
.write_str(indoc::indoc! {r#"
def f(): pass
"#})?;
root.child("ruff")
.child("e.pyi")
.write_str(indoc::indoc! {r#"
def f() -> None: ...
"#})?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.py"
],
"ruff/d.py": [
"ruff/e.py",
"ruff/e.pyi"
],
"ruff/e.py": [],
"ruff/e.pyi": []
}
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.py"
],
"ruff/d.py": [
"ruff/e.py"
],
"ruff/e.py": []
}
----- stderr -----
"#);
----- stderr -----
"###);
});
Ok(())
@@ -204,43 +197,23 @@ fn string_detection() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [],
"ruff/c.py": []
}
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}
----- stderr -----
"#);
});
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").arg("--min-dots").arg("1").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}
----- stderr -----
"#);
----- stderr -----
"###);
});
Ok(())


@@ -120,7 +120,7 @@ fn nonexistent_config_file() {
#[test]
fn config_override_rejected_if_invalid_toml() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--config", "foo = bar", "."]), @r"
.args(["format", "--config", "foo = bar", "."]), @r#"
success: false
exit_code: 2
----- stdout -----
@@ -137,11 +137,12 @@ fn config_override_rejected_if_invalid_toml() {
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^^^
string values must be quoted, expected literal string
| ^
invalid string
expected `"`, `'`
For more information, try '--help'.
");
"#);
}
#[test]

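These snapshot updates track the error wording of the `toml` crate across the version bump recorded in the workspace manifest hunk earlier in this diff (`0.9.0` on one side, `0.8.11` on the other). A standalone way to reproduce such a message, assuming a `toml` dependency:

```rust
fn main() {
    // "foo = bar" is invalid TOML: the bare `bar` is neither a quoted
    // string nor any other recognized value, so parsing fails at line 1,
    // column 7, with wording that depends on the toml crate version.
    let err = "foo = bar".parse::<toml::Table>().unwrap_err();
    println!("{err}");
}
```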

@@ -2246,7 +2246,8 @@ fn pyproject_toml_stdin_syntax_error() {
success: false
exit_code: 1
----- stdout -----
pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: unclosed table, expected `]`
pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: invalid table header
expected `.`, `]`
|
1 | [project
| ^ RUF200


@@ -534,7 +534,7 @@ fn nonexistent_config_file() {
fn config_override_rejected_if_invalid_toml() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "foo = bar", "."]), @r"
.args(["--config", "foo = bar", "."]), @r#"
success: false
exit_code: 2
----- stdout -----
@@ -551,11 +551,12 @@ fn config_override_rejected_if_invalid_toml() {
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^^^
string values must be quoted, expected literal string
| ^
invalid string
expected `"`, `'`
For more information, try '--help'.
");
"#);
}
#[test]
@@ -732,8 +733,9 @@ select = [E501]
Cause: TOML parse error at line 3, column 11
|
3 | select = [E501]
| ^^^^
string values must be quoted, expected literal string
| ^
invalid array
expected `]`
");
});
@@ -874,7 +876,7 @@ fn each_toml_option_requires_a_new_flag_1() {
|
1 | extend-select=['F841'], line-length=90
| ^
unexpected key or value, expected newline, `#`
expected newline, `#`
For more information, try '--help'.
");
@@ -905,7 +907,7 @@ fn each_toml_option_requires_a_new_flag_2() {
|
1 | extend-select=['F841'] line-length=90
| ^
unexpected key or value, expected newline, `#`
expected newline, `#`
For more information, try '--help'.
");
@@ -993,7 +995,6 @@ fn value_given_to_table_key_is_not_inline_table_2() {
- `lint.exclude`
- `lint.preview`
- `lint.typing-extensions`
- `lint.future-annotations`
For more information, try '--help'.
");
@@ -2422,7 +2423,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -2734,7 +2735,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -3098,7 +3099,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -3478,7 +3479,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -3806,7 +3807,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -4134,7 +4135,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -4419,7 +4420,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -4757,7 +4758,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -5691,82 +5692,3 @@ class Foo:
"
);
}
#[test_case::test_case("concise")]
#[test_case::test_case("full")]
#[test_case::test_case("json")]
#[test_case::test_case("json-lines")]
#[test_case::test_case("junit")]
#[test_case::test_case("grouped")]
#[test_case::test_case("github")]
#[test_case::test_case("gitlab")]
#[test_case::test_case("pylint")]
#[test_case::test_case("rdjson")]
#[test_case::test_case("azure")]
#[test_case::test_case("sarif")]
fn output_format(output_format: &str) -> Result<()> {
const CONTENT: &str = "\
import os # F401
x = y # F821
match 42: # invalid-syntax
case _: ...
";
let tempdir = TempDir::new()?;
let input = tempdir.path().join("input.py");
fs::write(&input, CONTENT)?;
let snapshot = format!("output_format_{output_format}");
let project_dir = dunce::canonicalize(tempdir.path())?;
insta::with_settings!({
filters => vec![
(tempdir_filter(&project_dir).as_str(), "[TMP]/"),
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),
]
}, {
assert_cmd_snapshot!(
snapshot,
Command::new(get_cargo_bin(BIN_NAME))
.args([
"check",
"--no-cache",
"--output-format",
output_format,
"--select",
"F401,F821",
"--target-version",
"py39",
"input.py",
])
.current_dir(&tempdir),
);
});
Ok(())
}
#[test]
fn future_annotations_preview_warning() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "lint.future-annotations = true"])
.args(["--select", "F"])
.arg("--no-preview")
.arg("-")
.pass_stdin("1"),
@r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
warning: The `lint.future-annotations` setting will have no effect because `preview` is disabled
",
);
}


@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- azure
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y`
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;]SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr -----


@@ -1,25 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- concise
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
input.py:2:5: F821 Undefined name `y`
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors.
[*] 1 fixable with the `--fix` option.
----- stderr -----


@@ -1,49 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- full
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
|
1 | import os # F401
| ^^ F401
2 | x = y # F821
3 | match 42: # invalid-syntax
|
= help: Remove unused import: `os`
input.py:2:5: F821 Undefined name `y`
|
1 | import os # F401
2 | x = y # F821
| ^ F821
3 | match 42: # invalid-syntax
4 | case _: ...
|
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
1 | import os # F401
2 | x = y # F821
3 | match 42: # invalid-syntax
| ^^^^^
4 | case _: ...
|
Found 3 errors.
[*] 1 fixable with the `--fix` option.
----- stderr -----


@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- github
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused
::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y`
::error title=Ruff,file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr -----


@@ -1,60 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- gitlab
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"check_name": "F401",
"description": "`os` imported but unused",
"fingerprint": "4dbad37161e65c72",
"location": {
"lines": {
"begin": 1,
"end": 1
},
"path": "input.py"
},
"severity": "major"
},
{
"check_name": "F821",
"description": "Undefined name `y`",
"fingerprint": "7af59862a085230",
"location": {
"lines": {
"begin": 2,
"end": 2
},
"path": "input.py"
},
"severity": "major"
},
{
"check_name": "syntax-error",
"description": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"fingerprint": "e558cec859bb66e8",
"location": {
"lines": {
"begin": 3,
"end": 3
},
"path": "input.py"
},
"severity": "major"
}
]
----- stderr -----


@@ -1,27 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- grouped
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:
1:8 F401 [*] `os` imported but unused
2:5 F821 Undefined name `y`
3:1 SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors.
[*] 1 fixable with the `--fix` option.
----- stderr -----


@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- json-lines
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
{"cell":null,"code":null,"end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}
----- stderr -----


@@ -1,88 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- json
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"cell": null,
"code": "F401",
"end_location": {
"column": 10,
"row": 1
},
"filename": "[TMP]/input.py",
"fix": {
"applicability": "safe",
"edits": [
{
"content": "",
"end_location": {
"column": 1,
"row": 2
},
"location": {
"column": 1,
"row": 1
}
}
],
"message": "Remove unused import: `os`"
},
"location": {
"column": 8,
"row": 1
},
"message": "`os` imported but unused",
"noqa_row": 1,
"url": "https://docs.astral.sh/ruff/rules/unused-import"
},
{
"cell": null,
"code": "F821",
"end_location": {
"column": 6,
"row": 2
},
"filename": "[TMP]/input.py",
"fix": null,
"location": {
"column": 5,
"row": 2
},
"message": "Undefined name `y`",
"noqa_row": 2,
"url": "https://docs.astral.sh/ruff/rules/undefined-name"
},
{
"cell": null,
"code": null,
"end_location": {
"column": 6,
"row": 3
},
"filename": "[TMP]/input.py",
"fix": null,
"location": {
"column": 1,
"row": 3
},
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"noqa_row": null,
"url": null
}
]
----- stderr -----


@@ -1,34 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- junit
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="3" failures="3" errors="0">
<testsuite name="[TMP]/input.py" tests="3" disabled="0" errors="0" failures="3" package="org.ruff">
<testcase name="org.ruff.F401" classname="[TMP]/input" line="1" column="8">
<failure message="`os` imported but unused">line 1, col 8, `os` imported but unused</failure>
</testcase>
<testcase name="org.ruff.F821" classname="[TMP]/input" line="2" column="5">
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
</testcase>
<testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1">
<failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
</testcase>
</testsuite>
</testsuites>
----- stderr -----


@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- pylint
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1: [F401] `os` imported but unused
input.py:2: [F821] Undefined name `y`
input.py:3: [invalid-syntax] SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr -----


@@ -1,102 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- rdjson
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/unused-import",
"value": "F401"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 10,
"line": 1
},
"start": {
"column": 8,
"line": 1
}
}
},
"message": "`os` imported but unused",
"suggestions": [
{
"range": {
"end": {
"column": 1,
"line": 2
},
"start": {
"column": 1,
"line": 1
}
},
"text": ""
}
]
},
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/undefined-name",
"value": "F821"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 6,
"line": 2
},
"start": {
"column": 5,
"line": 2
}
}
},
"message": "Undefined name `y`"
},
{
"code": {
"value": "invalid-syntax"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 6,
"line": 3
},
"start": {
"column": 1,
"line": 3
}
}
},
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}
----- stderr -----


@@ -1,142 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- sarif
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
"runs": [
{
"results": [
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 10,
"endLine": 1,
"startColumn": 8,
"startLine": 1
}
}
}
],
"message": {
"text": "`os` imported but unused"
},
"ruleId": "F401"
},
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 6,
"endLine": 2,
"startColumn": 5,
"startLine": 2
}
}
}
],
"message": {
"text": "Undefined name `y`"
},
"ruleId": "F821"
},
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 6,
"endLine": 3,
"startColumn": 1,
"startLine": 3
}
}
}
],
"message": {
"text": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
},
"ruleId": null
}
],
"tool": {
"driver": {
"informationUri": "https://github.com/astral-sh/ruff",
"name": "ruff",
"rules": [
{
"fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
},
"helpUri": "https://docs.astral.sh/ruff/rules/unused-import",
"id": "F401",
"properties": {
"id": "F401",
"kind": "Pyflakes",
"name": "unused-import",
"problem.severity": "error"
},
"shortDescription": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
}
},
{
"fullDescription": {
"text": "## What it does\nChecks for uses of undefined names.\n\n## Why is this bad?\nAn undefined name is likely to raise `NameError` at runtime.\n\n## Example\n```python\ndef double():\n return n * 2 # raises `NameError` if `n` is undefined when `double` is called\n```\n\nUse instead:\n```python\ndef double(n):\n return n * 2\n```\n\n## Options\n- [`target-version`]: Can be used to configure which symbols Ruff will understand\n as being available in the `builtins` namespace.\n\n## References\n- [Python documentation: Naming and binding](https://docs.python.org/3/reference/executionmodel.html#naming-and-binding)\n"
},
"help": {
"text": "Undefined name `{name}`. {tip}"
},
"helpUri": "https://docs.astral.sh/ruff/rules/undefined-name",
"id": "F821",
"properties": {
"id": "F821",
"kind": "Pyflakes",
"name": "undefined-name",
"problem.severity": "error"
},
"shortDescription": {
"text": "Undefined name `{name}`. {tip}"
}
}
],
"version": "[VERSION]"
}
}
}
],
"version": "2.1.0"
}
----- stderr -----


@@ -392,7 +392,7 @@ formatter.docstring_code_line_width = dynamic
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.7
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}


@@ -60,7 +60,7 @@ fn config_option_ignored_but_validated() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("version")
.args(["--config", "foo = bar"]), @r"
.args(["--config", "foo = bar"]), @r#"
success: false
exit_code: 2
----- stdout -----
@@ -77,11 +77,12 @@ fn config_option_ignored_but_validated() {
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^^^
string values must be quoted, expected literal string
| ^
invalid string
expected `"`, `'`
For more information, try '--help'.
"
"#
);
});
}


@@ -2,7 +2,6 @@
use ruff_benchmark::criterion;
use ruff_benchmark::real_world_projects::{InstalledProject, RealWorldProject};
use std::fmt::Write;
use std::ops::Range;
use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
@@ -18,7 +17,7 @@ use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ChangedKind};
use ty_project::{CheckMode, Db, ProjectDatabase, ProjectMetadata};
use ty_project::{Db, ProjectDatabase, ProjectMetadata};
struct Case {
db: ProjectDatabase,
@@ -102,7 +101,6 @@ fn setup_tomllib_case() -> Case {
let re = re.unwrap();
db.set_check_mode(CheckMode::OpenFiles);
db.project().set_open_files(&mut db, tomllib_files);
let re_path = re.path(&db).as_system_path().unwrap().to_owned();
@@ -238,7 +236,6 @@ fn setup_micro_case(code: &str) -> Case {
let mut db = ProjectDatabase::new(metadata, system).unwrap();
let file = system_path_to_file(&db, SystemPathBuf::from(file_path)).unwrap();
db.set_check_mode(CheckMode::OpenFiles);
db.project()
.set_open_files(&mut db, FxHashSet::from_iter([file]));
@@ -444,37 +441,6 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
});
}
fn benchmark_many_enum_members(criterion: &mut Criterion) {
const NUM_ENUM_MEMBERS: usize = 512;
setup_rayon();
let mut code = String::new();
writeln!(&mut code, "from enum import Enum").ok();
writeln!(&mut code, "class E(Enum):").ok();
for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, " m{i} = {i}").ok();
}
writeln!(&mut code).ok();
for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, "print(E.m{i})").ok();
}
criterion.bench_function("ty_micro[many_enum_members]", |b| {
b.iter_batched_ref(
|| setup_micro_case(&code),
|case| {
let Case { db, .. } = case;
let result = db.check();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
);
});
}
struct ProjectBenchmark<'a> {
project: InstalledProject<'a>,
fs: MemoryFileSystem,
@@ -527,21 +493,14 @@ impl<'a> ProjectBenchmark<'a> {
#[track_caller]
fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
fn check_project(db: &mut ProjectDatabase, project_name: &str, max_diagnostics: usize) {
fn check_project(db: &mut ProjectDatabase, max_diagnostics: usize) {
let result = db.check();
let diagnostics = result.len();
if diagnostics > max_diagnostics {
let details = result
.into_iter()
.map(|diagnostic| diagnostic.concise_message().to_string())
.collect::<Vec<_>>()
.join("\n ");
assert!(
diagnostics <= max_diagnostics,
"{project_name}: Expected <={max_diagnostics} diagnostics but got {diagnostics}:\n {details}",
);
}
assert!(
diagnostics <= max_diagnostics,
"Expected <={max_diagnostics} diagnostics but got {diagnostics}"
);
}
setup_rayon();
@@ -551,7 +510,7 @@ fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
group.bench_function(benchmark.project.config.name, |b| {
b.iter_batched_ref(
|| benchmark.setup_iteration(),
|db| check_project(db, benchmark.project.config.name, benchmark.max_diagnostics),
|db| check_project(db, benchmark.max_diagnostics),
BatchSize::SmallInput,
);
});
@@ -619,7 +578,7 @@ fn datetype(criterion: &mut Criterion) {
max_dep_date: "2025-07-04",
python_version: PythonVersion::PY313,
},
2,
0,
);
bench_project(&benchmark, criterion);
@@ -632,7 +591,6 @@ criterion_group!(
benchmark_many_tuple_assignments,
benchmark_complex_constrained_attributes_1,
benchmark_complex_constrained_attributes_2,
benchmark_many_enum_members,
);
criterion_group!(project, anyio, attrs, hydra, datetype);
criterion_main!(check_file, micro, project);

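Every benchmark in this file shares one criterion idiom: `iter_batched_ref` rebuilds the case outside the timed closure, so project-database setup never pollutes the measurement. A reduced sketch of the pattern, with `setup_case` and `MAX_DIAGNOSTICS` standing in for the concrete helpers above:

```rust
fn bench_example(criterion: &mut Criterion) {
    criterion.bench_function("example", |b| {
        b.iter_batched_ref(
            || setup_case(), // fresh state per batch, excluded from timing
            |case| {
                // Only the check itself is measured.
                let result = case.db.check();
                assert!(result.len() <= MAX_DIAGNOSTICS);
            },
            BatchSize::SmallInput,
        );
    });
}
```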

@@ -20,11 +20,11 @@ ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true, features = ["get-size"] }
ruff_text_size = { workspace = true }
ty_static = { workspace = true }
anstyle = { workspace = true }
arc-swap = { workspace = true }
camino = { workspace = true }
countme = { workspace = true }
dashmap = { workspace = true }
dunce = { workspace = true }
filetime = { workspace = true }
@@ -33,16 +33,13 @@ glob = { workspace = true }
ignore = { workspace = true, optional = true }
matchit = { workspace = true }
path-slash = { workspace = true }
quick-junit = { workspace = true, optional = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
unicode-width = { workspace = true }
zip = { workspace = true }
[target.'cfg(target_arch="wasm32")'.dependencies]
@@ -57,13 +54,7 @@ tempfile = { workspace = true }
[features]
cache = ["ruff_cache"]
junit = ["dep:quick-junit"]
os = ["ignore", "dep:etcetera"]
serde = [
"camino/serde1",
"dep:serde",
"dep:serde_json",
"ruff_diagnostics/serde",
]
serde = ["dep:serde", "camino/serde1"]
# Exposes testing utilities.
testing = ["tracing-subscriber"]


@@ -1,14 +1,13 @@
use std::{fmt::Formatter, path::Path, sync::Arc};
use std::{fmt::Formatter, sync::Arc};
use ruff_diagnostics::{Applicability, Fix};
use render::{FileResolver, Input};
use ruff_diagnostics::Fix;
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
use ruff_annotate_snippets::Level as AnnotateLevel;
use ruff_text_size::{Ranged, TextRange, TextSize};
pub use self::render::{
DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary,
};
pub use self::render::DisplayDiagnostic;
use crate::{Db, files::File};
mod render;
@@ -84,7 +83,7 @@ impl Diagnostic {
///
/// Note that `message` is stored in the primary annotation, _not_ in the primary diagnostic
/// message.
pub fn invalid_syntax(
pub fn syntax_error(
span: impl Into<Span>,
message: impl IntoDiagnosticMessage,
range: impl Ranged,
@@ -124,14 +123,7 @@ impl Diagnostic {
/// directly. If callers want or need to avoid cloning the diagnostic
/// message, then they can also pass a `DiagnosticMessage` directly.
pub fn info<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Info, message));
}
/// Adds a "help" sub-diagnostic with the given message.
///
/// See the closely related [`Diagnostic::info`] method for more details.
pub fn help<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Help, message));
self.sub(SubDiagnostic::new(Severity::Info, message));
}
/// Adds a "sub" diagnostic to this diagnostic.
@@ -317,10 +309,6 @@ impl Diagnostic {
/// Set the fix for this diagnostic.
pub fn set_fix(&mut self, fix: Fix) {
debug_assert!(
self.primary_span().is_some(),
"Expected a source file for a diagnostic with a fix"
);
Arc::make_mut(&mut self.inner).fix = Some(fix);
}
@@ -377,7 +365,7 @@ impl Diagnostic {
}
/// Returns `true` if `self` is a syntax error message.
pub fn is_invalid_syntax(&self) -> bool {
pub fn is_syntax_error(&self) -> bool {
self.id().is_invalid_syntax()
}
@@ -386,20 +374,14 @@ impl Diagnostic {
self.primary_message()
}
/// Returns the message of the first sub-diagnostic with a `Help` severity.
///
/// Note that this is used as the fix title/suggestion for some of Ruff's output formats, but in
/// general this is not the guaranteed meaning of such a message.
pub fn first_help_text(&self) -> Option<&str> {
self.sub_diagnostics()
.iter()
.find(|sub| matches!(sub.inner.severity, SubDiagnosticSeverity::Help))
.map(|sub| sub.inner.message.as_str())
/// Returns the fix suggestion for the violation.
pub fn suggestion(&self) -> Option<&str> {
self.primary_annotation()?.get_message()
}
/// Returns the URL for the rule documentation, if it exists.
pub fn to_ruff_url(&self) -> Option<String> {
if self.is_invalid_syntax() {
pub fn to_url(&self) -> Option<String> {
if self.is_syntax_error() {
None
} else {
Some(format!(
@@ -450,9 +432,8 @@ impl Diagnostic {
/// Returns the [`SourceFile`] which the message belongs to.
///
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
pub fn expect_ruff_source_file(&self) -> &SourceFile {
self.ruff_source_file()
.expect("Expected a ruff source file")
pub fn expect_ruff_source_file(&self) -> SourceFile {
self.expect_primary_span().expect_ruff_file().clone()
}
/// Returns the [`TextRange`] for the diagnostic.
@@ -466,16 +447,20 @@ impl Diagnostic {
pub fn expect_range(&self) -> TextRange {
self.range().expect("Expected a range for the primary span")
}
}
/// Returns the ordering of diagnostics based on the start of their ranges, if they have any.
///
/// Panics if either diagnostic has no primary span, if the span has no range, or if its file is
/// not a `SourceFile`.
pub fn ruff_start_ordering(&self, other: &Self) -> std::cmp::Ordering {
(self.expect_ruff_source_file(), self.expect_range().start()).cmp(&(
other.expect_ruff_source_file(),
other.expect_range().start(),
))
impl Ord for Diagnostic {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.partial_cmp(other).unwrap_or(std::cmp::Ordering::Equal)
}
}
impl PartialOrd for Diagnostic {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(
(self.ruff_source_file()?, self.range()?.start())
.cmp(&(other.ruff_source_file()?, other.range()?.start())),
)
}
}
@@ -580,10 +565,7 @@ impl SubDiagnostic {
/// Callers can pass anything that implements `std::fmt::Display`
/// directly. If callers want or need to avoid cloning the diagnostic
/// message, then they can also pass a `DiagnosticMessage` directly.
pub fn new<'a>(
severity: SubDiagnosticSeverity,
message: impl IntoDiagnosticMessage + 'a,
) -> SubDiagnostic {
pub fn new<'a>(severity: Severity, message: impl IntoDiagnosticMessage + 'a) -> SubDiagnostic {
let inner = Box::new(SubDiagnosticInner {
severity,
message: message.into_diagnostic_message(),
@@ -661,7 +643,7 @@ impl SubDiagnostic {
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
struct SubDiagnosticInner {
severity: SubDiagnosticSeverity,
severity: Severity,
message: DiagnosticMessage,
annotations: Vec<Annotation>,
}
@@ -1030,18 +1012,6 @@ impl UnifiedFile {
}
}
/// Return the file's path relative to the current working directory.
pub fn relative_path<'a>(&'a self, resolver: &'a dyn FileResolver) -> &'a Path {
let cwd = resolver.current_directory();
let path = Path::new(self.path(resolver));
if let Ok(path) = path.strip_prefix(cwd) {
return path;
}
path
}
fn diagnostic_source(&self, resolver: &dyn FileResolver) -> DiagnosticSource {
match self {
UnifiedFile::Ty(file) => DiagnosticSource::Ty(resolver.input(*file)),
@@ -1188,32 +1158,6 @@ impl Severity {
}
}
/// Like [`Severity`] but exclusively for sub-diagnostics.
///
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
/// deleted and the two combined again.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
pub enum SubDiagnosticSeverity {
Help,
Info,
Warning,
Error,
Fatal,
}
impl SubDiagnosticSeverity {
fn to_annotate(self) -> AnnotateLevel {
match self {
SubDiagnosticSeverity::Help => AnnotateLevel::Help,
SubDiagnosticSeverity::Info => AnnotateLevel::Info,
SubDiagnosticSeverity::Warning => AnnotateLevel::Warning,
SubDiagnosticSeverity::Error => AnnotateLevel::Error,
SubDiagnosticSeverity::Fatal => AnnotateLevel::Error,
}
}
}
/// Configuration for rendering diagnostics.
#[derive(Clone, Debug)]
pub struct DisplayDiagnosticConfig {
@@ -1234,21 +1178,6 @@ pub struct DisplayDiagnosticConfig {
/// here for now as the most "sensible" place for it to live until
/// we had more concrete use cases. ---AG
context: usize,
/// Whether to use preview formatting for Ruff diagnostics.
#[allow(
dead_code,
reason = "This is currently only used for JSON but will be needed soon for other formats"
)]
preview: bool,
/// Whether to hide the real `Severity` of diagnostics.
///
/// This is intended for temporary use by Ruff, which only has a single `error` severity at the
/// moment. We should be able to remove this option when Ruff gets more severities.
hide_severity: bool,
/// Whether to show the availability of a fix in a diagnostic.
show_fix_status: bool,
/// The lowest applicability that should be shown when reporting diagnostics.
fix_applicability: Applicability,
}
impl DisplayDiagnosticConfig {
@@ -1269,43 +1198,6 @@ impl DisplayDiagnosticConfig {
..self
}
}
/// Whether to enable preview behavior or not.
pub fn preview(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
preview: yes,
..self
}
}
/// Whether to hide a diagnostic's severity or not.
pub fn hide_severity(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
hide_severity: yes,
..self
}
}
/// Whether to show a fix's availability or not.
pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
show_fix_status: yes,
..self
}
}
/// Set the lowest fix applicability that should be shown.
///
/// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
/// availability for unsafe or display-only fixes.
///
/// Note that this option is currently ignored when `hide_severity` is false.
pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
fix_applicability: applicability,
..self
}
}
}
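// Hedged sketch (not part of the diff): each builder method above consumes
// `self`, so a config is assembled by chaining. Values are illustrative and
// assume `ruff_diagnostics::Applicability` is in scope.
fn ruff_style_config() -> DisplayDiagnosticConfig {
    DisplayDiagnosticConfig::default()
        .preview(true)
        .hide_severity(true)
        .show_fix_status(true)
        .fix_applicability(Applicability::DisplayOnly)
}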
impl Default for DisplayDiagnosticConfig {
@@ -1314,10 +1206,6 @@ impl Default for DisplayDiagnosticConfig {
format: DiagnosticFormat::default(),
color: false,
context: 2,
preview: false,
hide_severity: false,
show_fix_status: false,
fix_applicability: Applicability::Safe,
}
}
}
@@ -1345,31 +1233,6 @@ pub enum DiagnosticFormat {
///
/// This may use color when printing to a `tty`.
Concise,
/// Print diagnostics in the [Azure Pipelines] format.
///
/// [Azure Pipelines]: https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning
Azure,
/// Print diagnostics in JSON format.
///
/// Unlike `json-lines`, this prints all of the diagnostics as a JSON array.
#[cfg(feature = "serde")]
Json,
/// Print diagnostics in JSON format, one per line.
///
/// This will print each diagnostic as a separate JSON object on its own line. See the `json`
/// format for an array of all diagnostics. See <https://jsonlines.org/> for more details.
#[cfg(feature = "serde")]
JsonLines,
/// Print diagnostics in the JSON format expected by [reviewdog].
///
/// [reviewdog]: https://github.com/reviewdog/reviewdog
#[cfg(feature = "serde")]
Rdjson,
/// Print diagnostics in the format emitted by Pylint.
Pylint,
/// Print diagnostics in the format expected by JUnit.
#[cfg(feature = "junit")]
Junit,
}
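// Hedged sketch (not part of the diff): mapping CLI-style names onto the
// feature-gated variants above. The names and the fallback are illustrative,
// not the actual CLI wiring.
fn parse_format(name: &str) -> Option<DiagnosticFormat> {
    Some(match name {
        "concise" => DiagnosticFormat::Concise,
        "azure" => DiagnosticFormat::Azure,
        "pylint" => DiagnosticFormat::Pylint,
        #[cfg(feature = "serde")]
        "json" => DiagnosticFormat::Json,
        #[cfg(feature = "serde")]
        "json-lines" => DiagnosticFormat::JsonLines,
        #[cfg(feature = "serde")]
        "rdjson" => DiagnosticFormat::Rdjson,
        #[cfg(feature = "junit")]
        "junit" => DiagnosticFormat::Junit,
        _ => return None,
    })
}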
/// A representation of the kinds of messages inside a diagnostic.

File diff suppressed because it is too large


@@ -1,83 +0,0 @@
use ruff_source_file::LineColumn;
use crate::diagnostic::{Diagnostic, Severity};
use super::FileResolver;
pub(super) struct AzureRenderer<'a> {
resolver: &'a dyn FileResolver,
}
impl<'a> AzureRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
}
impl AzureRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
for diag in diagnostics {
let severity = match diag.severity() {
Severity::Info | Severity::Warning => "warning",
Severity::Error | Severity::Fatal => "error",
};
write!(f, "##vso[task.logissue type={severity};")?;
if let Some(span) = diag.primary_span() {
let filename = span.file().path(self.resolver);
write!(f, "sourcepath={filename};")?;
if let Some(range) = span.range() {
let location = if self.resolver.notebook_index(span.file()).is_some() {
// We can't give a reasonable location for the structured formats,
// so we show one that's clearly a fallback
LineColumn::default()
} else {
span.file()
.diagnostic_source(self.resolver)
.as_source_code()
.line_column(range.start())
};
write!(
f,
"linenumber={line};columnnumber={col};",
line = location.line,
col = location.column,
)?;
}
}
writeln!(
f,
"{code}]{body}",
code = diag
.secondary_code()
.map_or_else(String::new, |code| format!("code={code};")),
body = diag.body(),
)?;
}
Ok(())
}
}
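// Example of one emitted logging command, assembled from the `write!` calls
// above (diagnostic values illustrative):
//
//   ##vso[task.logissue type=error;sourcepath=fib.py;linenumber=1;columnnumber=8;code=F401]`os` imported but unused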
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Azure);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Azure);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
}


@@ -1,195 +0,0 @@
use crate::diagnostic::{
Diagnostic, DisplayDiagnosticConfig, Severity,
stylesheet::{DiagnosticStylesheet, fmt_styled},
};
use super::FileResolver;
pub(super) struct ConciseRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}
impl<'a> ConciseRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};
let sep = fmt_styled(":", stylesheet.separator);
for diag in diagnostics {
if let Some(span) = diag.primary_span() {
write!(
f,
"{path}",
path = fmt_styled(
span.file().relative_path(self.resolver).to_string_lossy(),
stylesheet.emphasis
)
)?;
if let Some(range) = span.range() {
let diagnostic_source = span.file().diagnostic_source(self.resolver);
let start = diagnostic_source
.as_source_code()
.line_column(range.start());
if let Some(notebook_index) = self.resolver.notebook_index(span.file()) {
write!(
f,
"{sep}cell {cell}{sep}{line}{sep}{col}",
cell = notebook_index.cell(start.line).unwrap_or_default(),
line = notebook_index.cell_row(start.line).unwrap_or_default(),
col = start.column,
)?;
} else {
write!(
f,
"{sep}{line}{sep}{col}",
line = start.line,
col = start.column,
)?;
}
}
write!(f, "{sep} ")?;
}
if self.config.hide_severity {
if let Some(code) = diag.secondary_code() {
write!(
f,
"{code} ",
code = fmt_styled(code, stylesheet.secondary_code)
)?;
}
if self.config.show_fix_status {
if let Some(fix) = diag.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.config.fix_applicability) {
write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
}
}
}
} else {
let (severity, severity_style) = match diag.severity() {
Severity::Info => ("info", stylesheet.info),
Severity::Warning => ("warning", stylesheet.warning),
Severity::Error => ("error", stylesheet.error),
Severity::Fatal => ("fatal", stylesheet.error),
};
write!(
f,
"{severity}[{id}] ",
severity = fmt_styled(severity, severity_style),
id = fmt_styled(diag.id(), stylesheet.emphasis)
)?;
}
writeln!(f, "{message}", message = diag.concise_message())?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use ruff_diagnostics::Applicability;
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
create_syntax_error_diagnostics,
},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: error[unused-import] `os` imported but unused
fib.py:6:5: error[unused-variable] Local variable `x` is assigned to but never used
undef.py:1:4: error[undefined-name] Undefined name `a`
");
}
#[test]
fn show_fixes() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
");
}
#[test]
fn show_fixes_preview() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
env.preview(true);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
");
}
#[test]
fn show_fixes_syntax_errors() {
let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
");
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: error[invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: error[invalid-syntax] SyntaxError: Expected ')', found newline
");
}
#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
notebook.ipynb:cell 1:2:8: error[unused-import] `os` imported but unused
notebook.ipynb:cell 2:2:8: error[unused-import] `math` imported but unused
notebook.ipynb:cell 3:4:5: error[unused-variable] Local variable `x` is assigned to but never used
");
}
#[test]
fn missing_file() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Concise);
let diag = env.err().build();
insta::assert_snapshot!(
env.render(&diag),
@"error[test-diagnostic] main diagnostic message",
);
}
}


@@ -1,180 +0,0 @@
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat, Severity,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
error[unused-import]: `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`
error[unused-variable]: Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`
error[undefined-name]: Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|
"#);
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[invalid-syntax]: SyntaxError: Expected one or more symbol names after import
--> syntax_errors.py:1:15
|
1 | from os import
| ^
2 |
3 | if call(foo
|
error[invalid-syntax]: SyntaxError: Expected ')', found newline
--> syntax_errors.py:3:12
|
1 | from os import
2 |
3 | if call(foo
| ^
4 | def bar():
5 | pass
|
");
}
/// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit c9b99e4.
///
/// For example, without the fix, we get diagnostics like this:
///
/// ```
/// error[no-indented-block]: Expected an indented block
/// --> example.py:3:1
/// |
/// 2 | if False:
/// | ^
/// 3 | print()
/// |
/// ```
///
/// where the caret points to the end of the previous line instead of the start of the next.
#[test]
fn empty_span_after_line_terminator() {
let mut env = TestEnvironment::new();
env.add(
"example.py",
r#"
if False:
print()
"#,
);
env.format(DiagnosticFormat::Full);
let diagnostic = env
.builder(
"no-indented-block",
Severity::Error,
"Expected an indented block",
)
.primary("example.py", "3:0", "3:0", "")
.build();
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[no-indented-block]: Expected an indented block
--> example.py:3:1
|
2 | if False:
3 | print()
| ^
|
");
}
/// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit 2922490.
///
/// For example, without the fix, we get diagnostics like this:
///
/// ```
/// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
/// --> example.py:1:25
/// |
/// 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
/// | ^
/// |
/// ```
///
/// where the caret points to the `f` in the f-string instead of the start of the invalid
/// character (`^Z`).
#[test]
fn unprintable_characters() {
let mut env = TestEnvironment::new();
env.add("example.py", "nested_fstrings = f'{f'{f''}'}'");
env.format(DiagnosticFormat::Full);
let diagnostic = env
.builder(
"invalid-character-sub",
Severity::Error,
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
)
.primary("example.py", "1:24", "1:24", "")
.build();
insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:25
|
1 | nested_fstrings = f'␈{f'{f'␛'}'}'
| ^
|
"#);
}
#[test]
fn multiple_unprintable_characters() -> std::io::Result<()> {
let mut env = TestEnvironment::new();
env.add("example.py", "");
env.format(DiagnosticFormat::Full);
let diagnostic = env
.builder(
"invalid-character-sub",
Severity::Error,
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
)
.primary("example.py", "1:1", "1:1", "")
.build();
insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:2
|
1 | ␈
| ^
|
"#);
Ok(())
}
}


@@ -1,352 +0,0 @@
use serde::{Serialize, Serializer, ser::SerializeSeq};
use serde_json::{Value, json};
use ruff_diagnostics::{Applicability, Edit};
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed};
use ruff_text_size::Ranged;
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig, SecondaryCode};
use super::FileResolver;
pub(super) struct JsonRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}
impl<'a> JsonRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}
}
impl JsonRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
write!(
f,
"{:#}",
diagnostics_to_json_value(diagnostics, self.resolver, self.config)
)
}
}
fn diagnostics_to_json_value<'a>(
diagnostics: impl IntoIterator<Item = &'a Diagnostic>,
resolver: &dyn FileResolver,
config: &DisplayDiagnosticConfig,
) -> Value {
let values: Vec<_> = diagnostics
.into_iter()
.map(|diag| diagnostic_to_json(diag, resolver, config))
.collect();
json!(values)
}
pub(super) fn diagnostic_to_json<'a>(
diagnostic: &'a Diagnostic,
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
) -> JsonDiagnostic<'a> {
let span = diagnostic.primary_span_ref();
let filename = span.map(|span| span.file().path(resolver));
let range = span.and_then(|span| span.range());
let diagnostic_source = span.map(|span| span.file().diagnostic_source(resolver));
let source_code = diagnostic_source
.as_ref()
.map(|diagnostic_source| diagnostic_source.as_source_code());
let notebook_index = span.and_then(|span| resolver.notebook_index(span.file()));
let mut start_location = None;
let mut end_location = None;
let mut noqa_location = None;
let mut notebook_cell_index = None;
if let Some(source_code) = source_code {
noqa_location = diagnostic
.noqa_offset()
.map(|offset| source_code.line_column(offset));
if let Some(range) = range {
let mut start = source_code.line_column(range.start());
let mut end = source_code.line_column(range.end());
if let Some(notebook_index) = &notebook_index {
notebook_cell_index =
Some(notebook_index.cell(start.line).unwrap_or(OneIndexed::MIN));
start = notebook_index.translate_line_column(&start);
end = notebook_index.translate_line_column(&end);
noqa_location =
noqa_location.map(|location| notebook_index.translate_line_column(&location));
}
start_location = Some(start);
end_location = Some(end);
}
}
let fix = diagnostic.fix().map(|fix| JsonFix {
applicability: fix.applicability(),
message: diagnostic.first_help_text(),
edits: ExpandedEdits {
edits: fix.edits(),
notebook_index,
config,
diagnostic_source,
},
});
// In preview, the locations and filename can be optional.
if config.preview {
JsonDiagnostic {
code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(),
message: diagnostic.body(),
fix,
cell: notebook_cell_index,
location: start_location.map(JsonLocation::from),
end_location: end_location.map(JsonLocation::from),
filename,
noqa_row: noqa_location.map(|location| location.line),
}
} else {
JsonDiagnostic {
code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(),
message: diagnostic.body(),
fix,
cell: notebook_cell_index,
location: Some(start_location.unwrap_or_default().into()),
end_location: Some(end_location.unwrap_or_default().into()),
filename: Some(filename.unwrap_or_default()),
noqa_row: noqa_location.map(|location| location.line),
}
}
}
struct ExpandedEdits<'a> {
edits: &'a [Edit],
notebook_index: Option<NotebookIndex>,
config: &'a DisplayDiagnosticConfig,
diagnostic_source: Option<DiagnosticSource>,
}
impl Serialize for ExpandedEdits<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut s = serializer.serialize_seq(Some(self.edits.len()))?;
for edit in self.edits {
let (location, end_location) = if let Some(diagnostic_source) = &self.diagnostic_source
{
let source_code = diagnostic_source.as_source_code();
let mut location = source_code.line_column(edit.start());
let mut end_location = source_code.line_column(edit.end());
if let Some(notebook_index) = &self.notebook_index {
// There exists a newline between each cell's source code in the
// concatenated source code in Ruff. This newline doesn't actually
// exist in the JSON source field.
//
// Now, certain edits may try to remove this newline, which means
// the edit will spill over to the first character of the next cell.
// If it does, we need to translate the end location to the last
// character of the previous cell.
match (
notebook_index.cell(location.line),
notebook_index.cell(end_location.line),
) {
(Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
debug_assert_eq!(end_location.column.get(), 1);
let prev_row = end_location.line.saturating_sub(1);
end_location = LineColumn {
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
column: source_code
.line_column(source_code.line_end_exclusive(prev_row))
.column,
};
}
(Some(_), None) => {
debug_assert_eq!(end_location.column.get(), 1);
let prev_row = end_location.line.saturating_sub(1);
end_location = LineColumn {
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
column: source_code
.line_column(source_code.line_end_exclusive(prev_row))
.column,
};
}
_ => {
end_location = notebook_index.translate_line_column(&end_location);
}
}
location = notebook_index.translate_line_column(&location);
}
(Some(location), Some(end_location))
} else {
(None, None)
};
// In preview, the locations can be optional.
let value = if self.config.preview {
JsonEdit {
content: edit.content().unwrap_or_default(),
location: location.map(JsonLocation::from),
end_location: end_location.map(JsonLocation::from),
}
} else {
JsonEdit {
content: edit.content().unwrap_or_default(),
location: Some(location.unwrap_or_default().into()),
end_location: Some(end_location.unwrap_or_default().into()),
}
};
s.serialize_element(&value)?;
}
s.end()
}
}
/// A serializable version of `Diagnostic`.
///
/// The `Old` variant only exists to preserve backwards compatibility. Both this and `JsonEdit`
/// should become structs with the `New` definitions in a future Ruff release.
#[derive(Serialize)]
pub(crate) struct JsonDiagnostic<'a> {
cell: Option<OneIndexed>,
code: Option<&'a SecondaryCode>,
end_location: Option<JsonLocation>,
filename: Option<&'a str>,
fix: Option<JsonFix<'a>>,
location: Option<JsonLocation>,
message: &'a str,
noqa_row: Option<OneIndexed>,
url: Option<String>,
}
#[derive(Serialize)]
struct JsonFix<'a> {
applicability: Applicability,
edits: ExpandedEdits<'a>,
message: Option<&'a str>,
}
#[derive(Serialize)]
struct JsonLocation {
column: OneIndexed,
row: OneIndexed,
}
impl From<LineColumn> for JsonLocation {
fn from(location: LineColumn) -> Self {
JsonLocation {
row: location.line,
column: location.column,
}
}
}
#[derive(Serialize)]
struct JsonEdit<'a> {
content: &'a str,
end_location: Option<JsonLocation>,
location: Option<JsonLocation>,
}
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
create_syntax_error_diagnostics,
},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn missing_file_stable() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Json);
env.preview(false);
let diag = env.err().build();
insta::assert_snapshot!(
env.render(&diag),
@r#"
[
{
"cell": null,
"code": null,
"end_location": {
"column": 1,
"row": 1
},
"filename": "",
"fix": null,
"location": {
"column": 1,
"row": 1
},
"message": "main diagnostic message",
"noqa_row": null,
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
}
]
"#,
);
}
#[test]
fn missing_file_preview() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Json);
env.preview(true);
let diag = env.err().build();
insta::assert_snapshot!(
env.render(&diag),
@r#"
[
{
"cell": null,
"code": null,
"end_location": null,
"filename": null,
"fix": null,
"location": null,
"message": "main diagnostic message",
"noqa_row": null,
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
}
]
"#,
);
}
}


@@ -1,59 +0,0 @@
use crate::diagnostic::{Diagnostic, DisplayDiagnosticConfig, render::json::diagnostic_to_json};
use super::FileResolver;
pub(super) struct JsonLinesRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}
impl<'a> JsonLinesRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}
}
impl JsonLinesRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
for diag in diagnostics {
writeln!(
f,
"{}",
serde_json::json!(diagnostic_to_json(diag, self.resolver, self.config))
)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
create_diagnostics, create_notebook_diagnostics, create_syntax_error_diagnostics,
},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::JsonLines);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::JsonLines);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::JsonLines);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
}


@@ -1,195 +0,0 @@
use std::{collections::BTreeMap, ops::Deref, path::Path};
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};
use ruff_source_file::LineColumn;
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};
/// A renderer for diagnostics in the [JUnit] format.
///
/// See [`junit.xsd`] for the specification in the JUnit repository and an annotated [version]
/// linked from the [`quick_junit`] docs.
///
/// [JUnit]: https://junit.org/
/// [`junit.xsd`]: https://github.com/junit-team/junit-framework/blob/2870b7d8fd5bf7c1efe489d3991d3ed3900e82bb/platform-tests/src/test/resources/jenkins-junit.xsd
/// [version]: https://llg.cubic.org/docs/junit/
/// [`quick_junit`]: https://docs.rs/quick-junit/latest/quick_junit/
pub struct JunitRenderer<'a> {
resolver: &'a dyn FileResolver,
}
impl<'a> JunitRenderer<'a> {
pub fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
let mut report = Report::new("ruff");
if diagnostics.is_empty() {
let mut test_suite = TestSuite::new("ruff");
test_suite
.extra
.insert(XmlString::new("package"), XmlString::new("org.ruff"));
let mut case = TestCase::new("No errors found", TestCaseStatus::success());
case.set_classname("ruff");
test_suite.add_test_case(case);
report.add_test_suite(test_suite);
} else {
for (filename, diagnostics) in group_diagnostics_by_filename(diagnostics, self.resolver)
{
let mut test_suite = TestSuite::new(filename);
test_suite
.extra
.insert(XmlString::new("package"), XmlString::new("org.ruff"));
let classname = Path::new(filename).with_extension("");
for diagnostic in diagnostics {
let DiagnosticWithLocation {
diagnostic,
start_location: location,
} = diagnostic;
let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
status.set_message(diagnostic.body());
if let Some(location) = location {
status.set_description(format!(
"line {row}, col {col}, {body}",
row = location.line,
col = location.column,
body = diagnostic.body()
));
} else {
status.set_description(diagnostic.body());
}
let code = diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), SecondaryCode::as_str);
let mut case = TestCase::new(format!("org.ruff.{code}"), status);
case.set_classname(classname.to_str().unwrap());
if let Some(location) = location {
case.extra.insert(
XmlString::new("line"),
XmlString::new(location.line.to_string()),
);
case.extra.insert(
XmlString::new("column"),
XmlString::new(location.column.to_string()),
);
}
test_suite.add_test_case(case);
}
report.add_test_suite(test_suite);
}
}
let adapter = FmtAdapter { fmt: f };
report.serialize(adapter).map_err(|_| std::fmt::Error)
}
}
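// Illustrative shape of the emitted XML for one diagnostic (values
// hypothetical; the exact serialization is handled by `quick_junit`):
//
//   <testsuite name="fib.py" package="org.ruff" ...>
//     <testcase name="org.ruff.F401" classname="fib" line="1" column="8">
//       <failure message="`os` imported but unused">line 1, col 8, `os` imported but unused</failure>
//     </testcase>
//   </testsuite>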
// TODO(brent) this and `group_diagnostics_by_filename` will also be used by the `grouped` output
// format. I think they'd make more sense in that file, but I started here first. I'll move them to
// that module when adding the `grouped` output format.
struct DiagnosticWithLocation<'a> {
diagnostic: &'a Diagnostic,
start_location: Option<LineColumn>,
}
impl Deref for DiagnosticWithLocation<'_> {
type Target = Diagnostic;
fn deref(&self) -> &Self::Target {
self.diagnostic
}
}
fn group_diagnostics_by_filename<'a>(
diagnostics: &'a [Diagnostic],
resolver: &'a dyn FileResolver,
) -> BTreeMap<&'a str, Vec<DiagnosticWithLocation<'a>>> {
let mut grouped_diagnostics = BTreeMap::default();
for diagnostic in diagnostics {
let (filename, start_location) = diagnostic
.primary_span_ref()
.map(|span| {
let file = span.file();
let start_location =
span.range()
.filter(|_| !resolver.is_notebook(file))
.map(|range| {
file.diagnostic_source(resolver)
.as_source_code()
.line_column(range.start())
});
(span.file().path(resolver), start_location)
})
.unwrap_or_default();
grouped_diagnostics
.entry(filename)
.or_insert_with(Vec::new)
.push(DiagnosticWithLocation {
diagnostic,
start_location,
});
}
grouped_diagnostics
}
struct FmtAdapter<'a> {
fmt: &'a mut dyn std::fmt::Write,
}
impl std::io::Write for FmtAdapter<'_> {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
self.fmt
.write_str(std::str::from_utf8(buf).map_err(|_| {
std::io::Error::new(
std::io::ErrorKind::InvalidData,
"Invalid UTF-8 in JUnit report",
)
})?)
.map_err(std::io::Error::other)?;
Ok(buf.len())
}
fn flush(&mut self) -> std::io::Result<()> {
Ok(())
}
fn write_fmt(&mut self, args: std::fmt::Arguments<'_>) -> std::io::Result<()> {
self.fmt.write_fmt(args).map_err(std::io::Error::other)
}
}
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Junit);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Junit);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
}


@@ -1,97 +0,0 @@
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};
/// Generate violations in Pylint format.
///
/// The format is given by this string:
///
/// ```python
/// "%(path)s:%(row)d: [%(code)s] %(text)s"
/// ```
///
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
pub(super) struct PylintRenderer<'a> {
resolver: &'a dyn FileResolver,
}
impl<'a> PylintRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
}
impl PylintRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
for diagnostic in diagnostics {
let (filename, row) = diagnostic
.primary_span_ref()
.map(|span| {
let file = span.file();
let row = span
.range()
.filter(|_| !self.resolver.is_notebook(file))
.map(|range| {
file.diagnostic_source(self.resolver)
.as_source_code()
.line_column(range.start())
.line
});
(file.relative_path(self.resolver).to_string_lossy(), row)
})
.unwrap_or_default();
let code = diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), SecondaryCode::as_str);
let row = row.unwrap_or_default();
writeln!(
f,
"{path}:{row}: [{code}] {body}",
path = filename,
body = diagnostic.body()
)?;
}
Ok(())
}
}
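// Example line in the format described above (values illustrative):
//
//   fib.py:1: [F401] `os` imported but unused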
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Pylint);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Pylint);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn missing_file() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Pylint);
let diag = env.err().build();
insta::assert_snapshot!(
env.render(&diag),
@":1: [test-diagnostic] main diagnostic message",
);
}
}


@@ -1,235 +0,0 @@
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use ruff_diagnostics::{Edit, Fix};
use ruff_source_file::{LineColumn, SourceCode};
use ruff_text_size::Ranged;
use crate::diagnostic::Diagnostic;
use super::FileResolver;
pub struct RdjsonRenderer<'a> {
resolver: &'a dyn FileResolver,
}
impl<'a> RdjsonRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
write!(
f,
"{:#}",
serde_json::json!(RdjsonDiagnostics::new(diagnostics, self.resolver))
)
}
}
struct ExpandedDiagnostics<'a> {
resolver: &'a dyn FileResolver,
diagnostics: &'a [Diagnostic],
}
impl Serialize for ExpandedDiagnostics<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
for diagnostic in self.diagnostics {
let value = diagnostic_to_rdjson(diagnostic, self.resolver);
s.serialize_element(&value)?;
}
s.end()
}
}
fn diagnostic_to_rdjson<'a>(
diagnostic: &'a Diagnostic,
resolver: &'a dyn FileResolver,
) -> RdjsonDiagnostic<'a> {
let span = diagnostic.primary_span_ref();
let source_file = span.map(|span| {
let file = span.file();
(file.path(resolver), file.diagnostic_source(resolver))
});
let location = source_file.as_ref().map(|(path, source)| {
let range = diagnostic.range().map(|range| {
let source_code = source.as_source_code();
let start = source_code.line_column(range.start());
let end = source_code.line_column(range.end());
RdjsonRange::new(start, end)
});
RdjsonLocation { path, range }
});
let edits = diagnostic.fix().map(Fix::edits).unwrap_or_default();
RdjsonDiagnostic {
message: diagnostic.body(),
location,
code: RdjsonCode {
value: diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), |code| code.as_str()),
url: diagnostic.to_ruff_url(),
},
suggestions: rdjson_suggestions(
edits,
source_file
.as_ref()
.map(|(_, source)| source.as_source_code()),
),
}
}
fn rdjson_suggestions<'a>(
edits: &'a [Edit],
source_code: Option<SourceCode>,
) -> Vec<RdjsonSuggestion<'a>> {
if edits.is_empty() {
return Vec::new();
}
let Some(source_code) = source_code else {
debug_assert!(false, "Expected a source file for a diagnostic with a fix");
return Vec::new();
};
edits
.iter()
.map(|edit| {
let start = source_code.line_column(edit.start());
let end = source_code.line_column(edit.end());
let range = RdjsonRange::new(start, end);
RdjsonSuggestion {
range,
text: edit.content().unwrap_or_default(),
}
})
.collect()
}
#[derive(Serialize)]
struct RdjsonDiagnostics<'a> {
diagnostics: ExpandedDiagnostics<'a>,
severity: &'static str,
source: RdjsonSource,
}
impl<'a> RdjsonDiagnostics<'a> {
fn new(diagnostics: &'a [Diagnostic], resolver: &'a dyn FileResolver) -> Self {
Self {
source: RdjsonSource {
name: "ruff",
url: env!("CARGO_PKG_HOMEPAGE"),
},
severity: "WARNING",
diagnostics: ExpandedDiagnostics {
diagnostics,
resolver,
},
}
}
}
#[derive(Serialize)]
struct RdjsonSource {
name: &'static str,
url: &'static str,
}
#[derive(Serialize)]
struct RdjsonDiagnostic<'a> {
code: RdjsonCode<'a>,
#[serde(skip_serializing_if = "Option::is_none")]
location: Option<RdjsonLocation<'a>>,
message: &'a str,
#[serde(skip_serializing_if = "Vec::is_empty")]
suggestions: Vec<RdjsonSuggestion<'a>>,
}
#[derive(Serialize)]
struct RdjsonLocation<'a> {
path: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
range: Option<RdjsonRange>,
}
#[derive(Default, Serialize)]
struct RdjsonRange {
end: LineColumn,
start: LineColumn,
}
impl RdjsonRange {
fn new(start: LineColumn, end: LineColumn) -> Self {
Self { start, end }
}
}
#[derive(Serialize)]
struct RdjsonCode<'a> {
#[serde(skip_serializing_if = "Option::is_none")]
url: Option<String>,
value: &'a str,
}
#[derive(Serialize)]
struct RdjsonSuggestion<'a> {
range: RdjsonRange,
text: &'a str,
}
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Rdjson);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Rdjson);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn missing_file_stable() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Rdjson);
env.preview(false);
let diag = env.err().build();
insta::assert_snapshot!(env.render(&diag));
}
#[test]
fn missing_file_preview() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Rdjson);
env.preview(true);
let diag = env.err().build();
insta::assert_snapshot!(env.render(&diag));
}
}


@@ -1,6 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
---
syntax_errors.py:1: [invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] SyntaxError: Expected ')', found newline


@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
"value": "test-diagnostic"
},
"message": "main diagnostic message"
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}


@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
"value": "test-diagnostic"
},
"message": "main diagnostic message"
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}


@@ -41,8 +41,6 @@ pub struct DiagnosticStylesheet {
pub(crate) line_no: Style,
pub(crate) emphasis: Style,
pub(crate) none: Style,
pub(crate) separator: Style,
pub(crate) secondary_code: Style,
}
impl Default for DiagnosticStylesheet {
@@ -64,8 +62,6 @@ impl DiagnosticStylesheet {
line_no: bright_blue.effects(Effects::BOLD),
emphasis: Style::new().effects(Effects::BOLD),
none: Style::new(),
separator: AnsiColor::Cyan.on_default(),
secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
}
}
@@ -79,8 +75,6 @@ impl DiagnosticStylesheet {
line_no: Style::new(),
emphasis: Style::new(),
none: Style::new(),
separator: Style::new(),
secondary_code: Style::new(),
}
}
}


@@ -1,6 +1,7 @@
use std::fmt;
use std::sync::Arc;
use countme::Count;
use dashmap::mapref::entry::Entry;
pub use file_root::{FileRoot, FileRootKind};
pub use path::FilePath;
@@ -231,7 +232,7 @@ impl Files {
let roots = inner.roots.read().unwrap();
for root in roots.all() {
if path.starts_with(root.path(db)) {
if root.path(db).starts_with(&path) {
root.set_revision(db).to(FileRevision::now());
}
}
@@ -311,6 +312,11 @@ pub struct File {
/// the file has been deleted is to change the status to `Deleted`.
#[default]
status: FileStatus,
/// Counter that counts the number of created file instances and active file instances.
/// Only enabled in debug builds.
#[default]
count: Count<File>,
}
// The Salsa heap is tracked separately.
@@ -369,25 +375,12 @@ impl File {
}
/// Refreshes the file metadata by querying the file system if needed.
///
/// This also "touches" the file root associated with the given path.
/// This means that any Salsa queries that depend on the corresponding
/// root's revision will become invalidated.
pub fn sync_path(db: &mut dyn Db, path: &SystemPath) {
let absolute = SystemPath::absolute(path, db.system().current_directory());
Files::touch_root(db, &absolute);
Self::sync_system_path(db, &absolute, None);
}
/// Refreshes *only* the file metadata by querying the file system if needed.
///
/// This specifically does not touch any file root associated with the
/// given file path.
pub fn sync_path_only(db: &mut dyn Db, path: &SystemPath) {
let absolute = SystemPath::absolute(path, db.system().current_directory());
Self::sync_system_path(db, &absolute, None);
}
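// Hedged usage sketch (not part of the diff): `sync_path` also bumps the
// containing `FileRoot`'s revision, invalidating root-dependent queries,
// while `sync_path_only` refreshes just this file's metadata.
fn on_file_changed(db: &mut dyn Db, path: &SystemPath) {
    File::sync_path(db, path); // file + root
    // ...or, to leave the root's revision untouched:
    File::sync_path_only(db, path);
}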
/// Increments the revision for the virtual file at `path`.
pub fn sync_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath) {
if let Some(virtual_file) = db.files().try_virtual_file(path) {
@@ -493,7 +486,7 @@ impl fmt::Debug for File {
///
/// This is a wrapper around a [`File`] that provides additional methods to interact with a virtual
/// file.
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone)]
pub struct VirtualFile(File);
impl VirtualFile {


@@ -23,7 +23,7 @@ pub struct FileRoot {
pub path: SystemPathBuf,
/// The kind of the root at the time of its creation.
pub kind_at_time_of_creation: FileRootKind,
kind_at_time_of_creation: FileRootKind,
/// A revision that changes when the contents of the source root change.
///


@@ -5,7 +5,6 @@ use ruff_python_ast::PythonVersion;
use rustc_hash::FxHasher;
use std::hash::BuildHasherDefault;
use std::num::NonZeroUsize;
use ty_static::EnvVars;
pub mod diagnostic;
pub mod display;
@@ -28,21 +27,6 @@ pub use web_time::{Instant, SystemTime, SystemTimeError};
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxDashSet<K> = dashmap::DashSet<K, BuildHasherDefault<FxHasher>>;
static VERSION: std::sync::OnceLock<String> = std::sync::OnceLock::new();
/// Returns the version of the executing program if set.
pub fn program_version() -> Option<&'static str> {
VERSION.get().map(|version| version.as_str())
}
/// Sets the version of the executing program.
///
/// ## Errors
/// If the version has already been initialized (can only be set once).
pub fn set_program_version(version: String) -> Result<(), String> {
VERSION.set(version)
}
/// Most basic database that gives access to files, the host system, source code, and parsed AST.
#[salsa::db]
pub trait Db: salsa::Database {
@@ -66,8 +50,8 @@ pub trait Db: salsa::Database {
/// ty can still spawn more threads for other tasks, e.g. to wait for a Ctrl+C signal or
/// watching the files for changes.
pub fn max_parallelism() -> NonZeroUsize {
std::env::var(EnvVars::TY_MAX_PARALLELISM)
.or_else(|_| std::env::var(EnvVars::RAYON_NUM_THREADS))
std::env::var("TY_MAX_PARALLELISM")
.or_else(|_| std::env::var("RAYON_NUM_THREADS"))
.ok()
.and_then(|s| s.parse().ok())
.unwrap_or_else(|| {


@@ -1,6 +1,8 @@
use std::ops::Deref;
use std::sync::Arc;
use countme::Count;
use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
use ruff_source_file::LineIndex;
@@ -36,7 +38,11 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
};
SourceText {
inner: Arc::new(SourceTextInner { kind, read_error }),
inner: Arc::new(SourceTextInner {
kind,
read_error,
count: Count::new(),
}),
}
}
@@ -119,6 +125,8 @@ impl std::fmt::Debug for SourceText {
#[derive(Eq, PartialEq, get_size2::GetSize)]
struct SourceTextInner {
#[get_size(ignore)]
count: Count<SourceText>,
kind: SourceTextKind,
read_error: Option<SourceTextError>,
}


@@ -21,19 +21,6 @@ type LockedZipArchive<'a> = MutexGuard<'a, VendoredZipArchive>;
///
/// "Files" in the `VendoredFileSystem` are read-only and immutable.
/// Directories are supported, but symlinks and hardlinks cannot exist.
///
/// # Path separators
///
/// At time of writing (2025-07-11), this implementation always uses `/` as a
/// path separator, even in Windows environments where `\` is traditionally
/// used as a file path separator. Namely, this is only currently used with zip
/// files built by `crates/ty_vendored/build.rs`.
///
/// Callers using this may provide paths that use a `\` as a separator. It will
/// be transparently normalized to `/`.
///
/// This is particularly important because the presence of a trailing separator
/// in a zip file is conventionally used to indicate a directory entry.
#[derive(Clone)]
pub struct VendoredFileSystem {
inner: Arc<Mutex<VendoredZipArchive>>,
@@ -128,68 +115,6 @@ impl VendoredFileSystem {
read_to_string(self, path.as_ref())
}
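// Sketch of the separator normalization described in the type docs (paths
// from the test fixtures below; the `std::io::Result<String>` return type is
// assumed): both spellings resolve to the same vendored entry.
fn normalization_example(fs: &VendoredFileSystem) -> std::io::Result<()> {
    let a = fs.read_to_string(VendoredPath::new("stdlib/functools.pyi"))?;
    let b = fs.read_to_string(VendoredPath::new(r"stdlib\functools.pyi"))?;
    assert_eq!(a, b);
    Ok(())
}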
/// Read the direct children of the directory
/// identified by `path`.
///
/// If `path` is not a directory, then this will
/// return an empty `Vec`.
pub fn read_directory(&self, dir: impl AsRef<VendoredPath>) -> Vec<DirectoryEntry> {
// N.B. We specifically do not return an iterator here to avoid
// holding a lock for the lifetime of the iterator returned.
// That is, it seems like a footgun to keep the zip archive
// locked during iteration, since the unit of work for each
// item in the iterator could be arbitrarily long. Allocating
// a `Vec` up front and stuffing all entries into it is probably the
// simplest solution and what we do here. If this becomes
// a problem, there are other strategies we could pursue.
// (Amortizing allocs, using a different synchronization
// behavior or even exposing additional APIs.) ---AG
fn read_directory(fs: &VendoredFileSystem, dir: &VendoredPath) -> Vec<DirectoryEntry> {
let mut normalized = NormalizedVendoredPath::from(dir);
if !normalized.as_str().ends_with('/') {
normalized = normalized.with_trailing_slash();
}
let archive = fs.lock_archive();
let mut entries = vec![];
for name in archive.0.file_names() {
// Any entry that doesn't have the `path` (with a
// trailing slash) as a prefix cannot possibly be in
// the directory referenced by `path`.
let Some(without_dir_prefix) = name.strip_prefix(normalized.as_str()) else {
continue;
};
// Filter out an entry equivalent to the path given
// since we only want children of the directory.
if without_dir_prefix.is_empty() {
continue;
}
// We only want *direct* children. Files that are
// direct children cannot have any slashes (or else
// they are not direct children). Directories that
// are direct children can only have one slash and
// it must be at the end.
//
// (We do this manually ourselves to avoid doing a
// full file lookup and metadata retrieval via the
// `zip` crate.)
let file_type = FileType::from_zip_file_name(without_dir_prefix);
let slash_count = without_dir_prefix.matches('/').count();
match file_type {
FileType::File if slash_count > 0 => continue,
FileType::Directory if slash_count > 1 => continue,
_ => {}
}
entries.push(DirectoryEntry {
path: VendoredPathBuf::from(name),
file_type,
});
}
entries
}
read_directory(self, dir.as_ref())
}
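// Illustration of the direct-child filter above for `read_directory("stdlib")`
// (entries from the mock typeshed used in the tests below):
//
//   "stdlib/functools.pyi"     -> file, no slash after the prefix -> kept
//   "stdlib/asyncio/"          -> directory, one trailing slash   -> kept
//   "stdlib/asyncio/tasks.pyi" -> file, one interior slash        -> skipped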
/// Acquire a lock on the underlying zip archive.
/// The call will block until it is able to acquire the lock.
///
@@ -281,14 +206,6 @@ pub enum FileType {
}
impl FileType {
fn from_zip_file_name(name: &str) -> FileType {
if name.ends_with('/') {
FileType::Directory
} else {
FileType::File
}
}
pub const fn is_file(self) -> bool {
matches!(self, Self::File)
}
@@ -327,30 +244,6 @@ impl Metadata {
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct DirectoryEntry {
path: VendoredPathBuf,
file_type: FileType,
}
impl DirectoryEntry {
pub fn new(path: VendoredPathBuf, file_type: FileType) -> Self {
Self { path, file_type }
}
pub fn into_path(self) -> VendoredPathBuf {
self.path
}
pub fn path(&self) -> &VendoredPath {
&self.path
}
pub fn file_type(&self) -> FileType {
self.file_type
}
}
/// Newtype wrapper around a ZipArchive.
#[derive(Debug)]
struct VendoredZipArchive(ZipArchive<io::Cursor<Cow<'static, [u8]>>>);
@@ -605,60 +498,6 @@ pub(crate) mod tests {
test_directory("./stdlib/asyncio/../asyncio/")
}
fn readdir_snapshot(fs: &VendoredFileSystem, path: &str) -> String {
let mut paths = fs
.read_directory(VendoredPath::new(path))
.into_iter()
.map(|entry| entry.path().to_string())
.collect::<Vec<String>>();
paths.sort();
paths.join("\n")
}
#[test]
fn read_directory_stdlib() {
let mock_typeshed = mock_typeshed();
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib"), @r"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib/"), @r"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib"), @r"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib/"), @r"
vendored://stdlib/asyncio/
vendored://stdlib/functools.pyi
");
}
#[test]
fn read_directory_asyncio() {
let mock_typeshed = mock_typeshed();
assert_snapshot!(
readdir_snapshot(&mock_typeshed, "stdlib/asyncio"),
@"vendored://stdlib/asyncio/tasks.pyi",
);
assert_snapshot!(
readdir_snapshot(&mock_typeshed, "./stdlib/asyncio"),
@"vendored://stdlib/asyncio/tasks.pyi",
);
assert_snapshot!(
readdir_snapshot(&mock_typeshed, "stdlib/asyncio/"),
@"vendored://stdlib/asyncio/tasks.pyi",
);
assert_snapshot!(
readdir_snapshot(&mock_typeshed, "./stdlib/asyncio/"),
@"vendored://stdlib/asyncio/tasks.pyi",
);
}
fn test_nonexistent_path(path: &str) {
let mock_typeshed = mock_typeshed();
let path = VendoredPath::new(path);


@@ -17,10 +17,6 @@ impl VendoredPath {
unsafe { &*(path as *const Utf8Path as *const VendoredPath) }
}
pub fn file_name(&self) -> Option<&str> {
self.0.file_name()
}
pub fn to_path_buf(&self) -> VendoredPathBuf {
VendoredPathBuf(self.0.to_path_buf())
}


@@ -13,7 +13,6 @@ license = { workspace = true }
[dependencies]
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ty_static = { workspace = true }
ruff = { workspace = true }
ruff_formatter = { workspace = true }
ruff_linter = { workspace = true, features = ["schemars"] }


@@ -4,7 +4,7 @@ use anyhow::Result;
use crate::{
generate_cli_help, generate_docs, generate_json_schema, generate_ty_cli_reference,
generate_ty_env_vars_reference, generate_ty_options, generate_ty_rules, generate_ty_schema,
generate_ty_options, generate_ty_rules, generate_ty_schema,
};
pub(crate) const REGENERATE_ALL_COMMAND: &str = "cargo dev generate-all";
@@ -44,8 +44,5 @@ pub(crate) fn main(args: &Args) -> Result<()> {
generate_ty_options::main(&generate_ty_options::Args { mode: args.mode })?;
generate_ty_rules::main(&generate_ty_rules::Args { mode: args.mode })?;
generate_ty_cli_reference::main(&generate_ty_cli_reference::Args { mode: args.mode })?;
generate_ty_env_vars_reference::main(&generate_ty_env_vars_reference::Args {
mode: args.mode,
})?;
Ok(())
}


@@ -1,119 +0,0 @@
//! Generate the environment variables reference from `ty_static::EnvVars`.
use std::collections::BTreeSet;
use std::fs;
use std::path::PathBuf;
use anyhow::bail;
use pretty_assertions::StrComparison;
use ty_static::EnvVars;
use crate::generate_all::Mode;
#[derive(clap::Args)]
pub(crate) struct Args {
#[arg(long, default_value_t, value_enum)]
pub(crate) mode: Mode,
}
pub(crate) fn main(args: &Args) -> anyhow::Result<()> {
let reference_string = generate();
let filename = "environment.md";
let reference_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.parent()
.unwrap()
.parent()
.unwrap()
.join("crates")
.join("ty")
.join("docs")
.join(filename);
match args.mode {
Mode::DryRun => {
println!("{reference_string}");
}
Mode::Check => match fs::read_to_string(&reference_path) {
Ok(current) => {
if current == reference_string {
println!("Up-to-date: {filename}");
} else {
let comparison = StrComparison::new(&current, &reference_string);
bail!(
"{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{comparison}"
);
}
}
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
bail!(
"{filename} not found, please run `cargo dev generate-ty-env-vars-reference`"
);
}
Err(err) => {
bail!(
"{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{err}"
);
}
},
Mode::Write => {
// Ensure the docs directory exists
if let Some(parent) = reference_path.parent() {
fs::create_dir_all(parent)?;
}
match fs::read_to_string(&reference_path) {
Ok(current) => {
if current == reference_string {
println!("Up-to-date: {filename}");
} else {
println!("Updating: {filename}");
fs::write(&reference_path, reference_string.as_bytes())?;
}
}
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
println!("Updating: {filename}");
fs::write(&reference_path, reference_string.as_bytes())?;
}
Err(err) => {
bail!(
"{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{err}"
);
}
}
}
}
Ok(())
}
fn generate() -> String {
let mut output = String::new();
output.push_str("# Environment variables\n\n");
// Partition and sort environment variables into TY_ and external variables.
let (ty_vars, external_vars): (BTreeSet<_>, BTreeSet<_>) = EnvVars::metadata()
.iter()
.partition(|(var, _)| var.starts_with("TY_"));
output.push_str("ty defines and respects the following environment variables:\n\n");
for (var, doc) in ty_vars {
output.push_str(&render(var, doc));
}
output.push_str("## Externally-defined variables\n\n");
output.push_str("ty also reads the following externally defined environment variables:\n\n");
for (var, doc) in external_vars {
output.push_str(&render(var, doc));
}
output
}
/// Render an environment variable and its documentation.
fn render(var: &str, doc: &str) -> String {
format!("### `{var}`\n\n{doc}\n\n")
}
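// Example of one rendered entry (variable name from this codebase, doc text
// hypothetical):
//
//   ### `TY_MAX_PARALLELISM`
//
//   Limits the number of threads ty uses.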


@@ -18,7 +18,6 @@ mod generate_json_schema;
mod generate_options;
mod generate_rules_table;
mod generate_ty_cli_reference;
mod generate_ty_env_vars_reference;
mod generate_ty_options;
mod generate_ty_rules;
mod generate_ty_schema;
@@ -54,8 +53,6 @@ enum Command {
/// Generate a Markdown-compatible listing of configuration options.
GenerateOptions,
GenerateTyOptions(generate_ty_options::Args),
/// Generate environment variables reference for ty.
GenerateTyEnvVarsReference(generate_ty_env_vars_reference::Args),
/// Generate CLI help.
GenerateCliHelp(generate_cli_help::Args),
/// Generate Markdown docs.
@@ -101,7 +98,6 @@ fn main() -> Result<ExitCode> {
Command::GenerateTyRules(args) => generate_ty_rules::main(&args)?,
Command::GenerateOptions => println!("{}", generate_options::generate()),
Command::GenerateTyOptions(args) => generate_ty_options::main(&args)?,
Command::GenerateTyEnvVarsReference(args) => generate_ty_env_vars_reference::main(&args)?,
Command::GenerateCliHelp(args) => generate_cli_help::main(&args)?,
Command::GenerateDocs(args) => generate_docs::main(&args)?,
Command::PrintAST(args) => print_ast::main(&args)?,


@@ -20,7 +20,6 @@ ty_python_semantic = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true, optional = true }
memchr = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }


@@ -1,4 +1,3 @@
use crate::StringImports;
use ruff_python_ast::visitor::source_order::{
SourceOrderVisitor, walk_expr, walk_module, walk_stmt,
};
@@ -11,13 +10,13 @@ pub(crate) struct Collector<'a> {
/// The path to the current module.
module_path: Option<&'a [String]>,
/// Whether to detect imports from string literals.
string_imports: StringImports,
string_imports: bool,
/// The collected imports from the Python AST.
imports: Vec<CollectedImport>,
}
impl<'a> Collector<'a> {
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: bool) -> Self {
Self {
module_path,
string_imports,
@@ -119,7 +118,7 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
| Stmt::Continue(_)
| Stmt::IpyEscapeCommand(_) => {
// Only traverse simple statements when string imports is enabled.
if self.string_imports.enabled {
if self.string_imports {
walk_stmt(self, stmt);
}
}
@@ -127,26 +126,20 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
}
fn visit_expr(&mut self, expr: &'ast Expr) {
if self.string_imports.enabled {
if self.string_imports {
if let Expr::StringLiteral(ast::ExprStringLiteral {
value,
range: _,
node_index: _,
}) = expr
{
let value = value.to_str();
// Determine whether the string literal "looks like" an import statement: contains
// the requisite number of dots, and consists solely of valid Python identifiers.
if self.string_imports.min_dots == 0
|| memchr::memchr_iter(b'.', value.as_bytes()).count()
>= self.string_imports.min_dots
{
if let Some(module_name) = ModuleName::new(value) {
self.imports.push(CollectedImport::Import(module_name));
}
// a dot, and consists solely of valid Python identifiers.
let value = value.to_str();
if let Some(module_name) = ModuleName::new(value) {
self.imports.push(CollectedImport::Import(module_name));
}
}
walk_expr(self, expr);
}
}
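// Illustration of the `min_dots` heuristic above (module names hypothetical):
// with the default `min_dots = 2`, "os.path" (one dot) is skipped, while
// "airflow.providers.http" (two dots) is collected as a string import;
// `min_dots = 0` accepts any string that forms a valid module name.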


@@ -87,7 +87,7 @@ impl SourceDb for ModuleDb {
#[salsa::db]
impl Db for ModuleDb {
fn should_check_file(&self, file: File) -> bool {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}


@@ -9,7 +9,7 @@ use ruff_python_parser::{Mode, ParseOptions, parse};
use crate::collector::Collector;
pub use crate::db::ModuleDb;
use crate::resolver::Resolver;
pub use crate::settings::{AnalyzeSettings, Direction, StringImports};
pub use crate::settings::{AnalyzeSettings, Direction};
mod collector;
mod db;
@@ -26,7 +26,7 @@ impl ModuleImports {
db: &ModuleDb,
path: &SystemPath,
package: Option<&SystemPath>,
string_imports: StringImports,
string_imports: bool,
) -> Result<Self> {
// Read and parse the source code.
let source = std::fs::read_to_string(path)?;
@@ -42,11 +42,13 @@ impl ModuleImports {
// Resolve the imports.
let mut resolved_imports = ModuleImports::default();
for import in imports {
for resolved in Resolver::new(db).resolve(import) {
if let Some(path) = resolved.as_system_path() {
resolved_imports.insert(path.to_path_buf());
}
}
let Some(resolved) = Resolver::new(db).resolve(import) else {
continue;
};
let Some(path) = resolved.as_system_path() else {
continue;
};
resolved_imports.insert(path.to_path_buf());
}
Ok(resolved_imports)


@@ -1,5 +1,5 @@
use ruff_db::files::FilePath;
use ty_python_semantic::{ModuleName, resolve_module, resolve_real_module};
use ty_python_semantic::resolve_module;
use crate::ModuleDb;
use crate::collector::CollectedImport;
@@ -16,67 +16,24 @@ impl<'a> Resolver<'a> {
}
/// Resolve the [`CollectedImport`] into a [`FilePath`].
pub(crate) fn resolve(&self, import: CollectedImport) -> impl Iterator<Item = &'a FilePath> {
pub(crate) fn resolve(&self, import: CollectedImport) -> Option<&'a FilePath> {
match import {
CollectedImport::Import(import) => {
// Attempt to resolve the module (e.g., given `import foo`, look for `foo`).
let file = self.resolve_module(&import);
// If the file is a stub, look for the corresponding source file.
let source_file = file
.is_some_and(|file| file.extension() == Some("pyi"))
.then(|| self.resolve_real_module(&import))
.flatten();
std::iter::once(file)
.chain(std::iter::once(source_file))
.flatten()
let module = resolve_module(self.db, &import)?;
Some(module.file()?.path(self.db))
}
CollectedImport::ImportFrom(import) => {
// Attempt to resolve the member (e.g., given `from foo import bar`, look for `foo.bar`).
if let Some(file) = self.resolve_module(&import) {
// If the file is a stub, look for the corresponding source file.
let source_file = (file.extension() == Some("pyi"))
.then(|| self.resolve_real_module(&import))
.flatten();
return std::iter::once(Some(file))
.chain(std::iter::once(source_file))
.flatten();
}
// Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
let parent = import.parent();
let file = parent
.as_ref()
.and_then(|parent| self.resolve_module(parent));
// If the file is a stub, look for the corresponding source file.
let source_file = file
.is_some_and(|file| file.extension() == Some("pyi"))
.then(|| {
parent
.as_ref()
.and_then(|parent| self.resolve_real_module(parent))
})
.flatten();
let module = resolve_module(self.db, &import).or_else(|| {
// Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
std::iter::once(file)
.chain(std::iter::once(source_file))
.flatten()
resolve_module(self.db, &parent?)
})?;
Some(module.file()?.path(self.db))
}
}
}
/// Resolves a module name to a module.
fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
let module = resolve_module(self.db, module_name)?;
Some(module.file(self.db)?.path(self.db))
}
/// Resolves a module name to a module (stubs not allowed).
fn resolve_real_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
let module = resolve_real_module(self.db, module_name)?;
Some(module.file(self.db)?.path(self.db))
}
}
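
The removed iterator-returning `resolve` pairs each resolved `.pyi` stub with its non-stub source file. A self-contained sketch of that chaining pattern, with plain strings standing in for `FilePath` and for the two module lookups (all names here are illustrative):

fn chain_stub_and_source(
    file: Option<&'static str>,
    real_file: Option<&'static str>,
) -> Vec<&'static str> {
    // In the removed code the second lookup ran lazily, only for stub hits;
    // here it is passed in eagerly for simplicity.
    let source = file
        .is_some_and(|path| path.ends_with(".pyi"))
        .then_some(real_file)
        .flatten();
    std::iter::once(file)
        .chain(std::iter::once(source))
        .flatten()
        .collect()
}

fn main() {
    // Stub hit: both the stub and its source are reported.
    assert_eq!(
        chain_stub_and_source(Some("foo.pyi"), Some("foo.py")),
        vec!["foo.pyi", "foo.py"],
    );
    // Plain source hit: the real-module lookup result is ignored.
    assert_eq!(
        chain_stub_and_source(Some("foo.py"), Some("unused.py")),
        vec!["foo.py"],
    );
}

The replacement collapses this to a single `Option<&FilePath>`, dropping the stub-to-source fallback along with `resolve_real_module`.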

View File

@@ -11,7 +11,7 @@ pub struct AnalyzeSettings {
pub exclude: FilePatternSet,
pub preview: PreviewMode,
pub target_version: PythonVersion,
pub string_imports: StringImports,
pub detect_string_imports: bool,
pub include_dependencies: BTreeMap<PathBuf, (PathBuf, Vec<String>)>,
pub extension: ExtensionMapping,
}
@@ -26,7 +26,7 @@ impl fmt::Display for AnalyzeSettings {
self.exclude,
self.preview,
self.target_version,
self.string_imports,
self.detect_string_imports,
self.extension | debug,
self.include_dependencies | debug,
]
@@ -35,31 +35,6 @@ impl fmt::Display for AnalyzeSettings {
}
}
#[derive(Debug, Copy, Clone, CacheKey)]
pub struct StringImports {
pub enabled: bool,
pub min_dots: usize,
}
impl Default for StringImports {
fn default() -> Self {
Self {
enabled: false,
min_dots: 2,
}
}
}
impl fmt::Display for StringImports {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.enabled {
write!(f, "enabled (min_dots: {})", self.min_dots)
} else {
write!(f, "disabled")
}
}
}
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, CacheKey)]
#[cfg_attr(
feature = "serde",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.12.7"
version = "0.12.2"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,7 +15,7 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["junit", "serde"] }
ruff_db = { workspace = true, features = ["serde"] }
ruff_diagnostics = { workspace = true, features = ["serde"] }
ruff_notebook = { workspace = true }
ruff_macros = { workspace = true }
@@ -55,6 +55,7 @@ path-absolutize = { workspace = true, features = [
pathdiff = { workspace = true }
pep440_rs = { workspace = true }
pyproject-toml = { workspace = true }
quick-junit = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }

View File

@@ -25,5 +25,5 @@ def my_func():
# t-strings - all ok
t"0.0.0.0"
t"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
t"0.0.0.0" t"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"

View File

@@ -94,7 +94,7 @@ except Exception:
logging.error("...", exc_info=True)
from logging import critical, error, exception
from logging import error, exception
try:
pass
@@ -114,23 +114,6 @@ except Exception:
error("...", exc_info=None)
try:
pass
except Exception:
critical("...")
try:
pass
except Exception:
critical("...", exc_info=False)
try:
pass
except Exception:
critical("...", exc_info=None)
try:
pass
except Exception:
@@ -142,13 +125,6 @@ try:
except Exception:
error("...", exc_info=True)
try:
pass
except Exception:
critical("...", exc_info=True)
try:
...
except Exception as e:

View File

@@ -1,10 +1,10 @@
"""
Should emit:
B017 - on lines 24, 28, 46, 49, 52, 58, 62, 68, and 71
B017 - on lines 23 and 41
"""
import asyncio
import unittest
import pytest, contextlib
import pytest
CONSTANT = True
@@ -56,17 +56,3 @@ def test_pytest_raises():
with contextlib.nullcontext(), pytest.raises(Exception):
raise ValueError("Multiple context managers")
def test_pytest_raises_keyword():
with pytest.raises(expected_exception=Exception):
raise ValueError("Should be flagged")
def test_assert_raises_keyword():
class TestKwargs(unittest.TestCase):
def test_method(self):
with self.assertRaises(exception=Exception):
raise ValueError("Should be flagged")
with self.assertRaises(exception=BaseException):
raise ValueError("Should be flagged")

View File

@@ -1,28 +0,0 @@
"""
Should emit:
B017 - on lines 20, 21, 25, and 26
"""
import unittest
import pytest
def something_else() -> None:
for i in (1, 2, 3):
print(i)
class Foo:
pass
class Foobar(unittest.TestCase):
def call_form_raises(self) -> None:
self.assertRaises(Exception, something_else)
self.assertRaises(BaseException, something_else)
def test_pytest_call_form() -> None:
pytest.raises(Exception, something_else)
pytest.raises(BaseException, something_else)
pytest.raises(Exception, something_else, match="hello")

View File

@@ -650,17 +650,3 @@ f"""This is a test. {
if True else
"Don't add a trailing comma here ->"
}"""
type X[
T
] = T
def f[
T
](): pass
class C[
T
]: pass
type X[T,] = T
def f[T,](): pass
class C[T,]: pass

View File

@@ -181,51 +181,3 @@ class SubclassTestModel2(TestModel4):
# Subclass without __str__
class SubclassTestModel3(TestModel1):
pass
# Test cases for type-annotated abstract models - these should NOT trigger DJ008
from typing import ClassVar
from django_stubs_ext.db.models import TypedModelMeta
class TypeAnnotatedAbstractModel1(models.Model):
"""Model with type-annotated abstract = True - should not trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta(TypedModelMeta):
abstract: ClassVar[bool] = True
class TypeAnnotatedAbstractModel2(models.Model):
"""Model with type-annotated abstract = True using regular Meta - should not trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta:
abstract: ClassVar[bool] = True
class TypeAnnotatedAbstractModel3(models.Model):
"""Model with type-annotated abstract = True but without ClassVar - should not trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta:
abstract: bool = True
class TypeAnnotatedNonAbstractModel(models.Model):
"""Model with type-annotated abstract = False - should trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta:
abstract: ClassVar[bool] = False
class TypeAnnotatedAbstractModelWithStr(models.Model):
"""Model with type-annotated abstract = True and __str__ method - should not trigger DJ008"""
new_field = models.CharField(max_length=10)
class Meta(TypedModelMeta):
abstract: ClassVar[bool] = True
def __str__(self):
return self.new_field

View File

@@ -1,4 +1 @@
_(f"{'value'}")
# Don't trigger for t-strings
_(t"{'value'}")

View File

@@ -13,7 +13,3 @@ from logging import info
info(f"{name}")
info(f"{__name__}")
# Don't trigger for t-strings
info(t"{name}")
info(t"{__name__}")

View File

@@ -39,11 +39,6 @@ class NonEmptyWithInit:
pass
class NonEmptyChildWithInlineComment:
value: int
... # preserve me
class EmptyClass:
...

View File

@@ -38,10 +38,6 @@ class NonEmptyWithInit:
def __init__():
pass
class NonEmptyChildWithInlineComment:
value: int
... # preserve me
# Not violations
class EmptyClass: ...

View File

@@ -142,7 +142,3 @@ field47: typing.Optional[int] | typing.Optional[dict]
# avoid reporting twice
field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
field49: typing.Optional[complex | complex] | complex
# Regression test for https://github.com/astral-sh/ruff/issues/19403
# Should throw duplicate union member but not fix
isinstance(None, typing.Union[None, None])

View File

@@ -47,7 +47,3 @@ def test_error_match_is_empty():
with pytest.raises(ValueError, match=f""):
raise ValueError("Can't divide 1 by 0")
def test_ok_t_string_match():
with pytest.raises(ValueError, match=t""):
raise ValueError("Can't divide 1 by 0")

View File

@@ -23,9 +23,3 @@ def f():
pytest.fail(msg=f"")
pytest.fail(reason="")
pytest.fail(reason=f"")
# Skip for t-strings
def g():
pytest.fail(t"")
pytest.fail(msg=t"")
pytest.fail(reason=t"")

View File

@@ -32,7 +32,3 @@ def test_error_match_is_empty():
with pytest.warns(UserWarning, match=f""):
pass
def test_ok_match_t_string():
with pytest.warns(UserWarning, match=t""):
pass

View File

@@ -422,35 +422,6 @@ def func(a: dict[str, int]) -> list[dict[str, int]]:
services = a["services"]
return services
# See: https://github.com/astral-sh/ruff/issues/14052
def outer() -> list[object]:
@register
async def inner() -> None:
print(layout)
layout = [...]
return layout
def outer() -> list[object]:
with open("") as f:
async def inner() -> None:
print(layout)
layout = [...]
return layout
def outer() -> list[object]:
def inner():
with open("") as f:
async def inner_inner() -> None:
print(layout)
layout = [...]
return layout
# See: https://github.com/astral-sh/ruff/issues/18411
def f():
(#=

View File

@@ -1,10 +0,0 @@
from collections import Counter
from elsewhere import third_party
from . import first_party
def f(x: first_party.foo): ...
def g(x: third_party.bar): ...
def h(x: Counter): ...

View File

@@ -1,68 +0,0 @@
def f():
from . import first_party
def f(x: first_party.foo): ...
# Type parameter bounds
def g():
from . import foo
class C[T: foo.Ty]: ...
def h():
from . import foo
def f[T: foo.Ty](x: T): ...
def i():
from . import foo
type Alias[T: foo.Ty] = list[T]
# Type parameter defaults
def j():
from . import foo
class C[T = foo.Ty]: ...
def k():
from . import foo
def f[T = foo.Ty](x: T): ...
def l():
from . import foo
type Alias[T = foo.Ty] = list[T]
# non-generic type alias
def m():
from . import foo
type Alias = foo.Ty
# unions
from typing import Union
def n():
from . import foo
def f(x: Union[foo.Ty, int]): ...
def g(x: foo.Ty | int): ...
# runtime and typing usage
def o():
from . import foo
def f(x: foo.Ty):
return foo.Ty()

View File

@@ -1,6 +0,0 @@
from __future__ import annotations
from . import first_party
def f(x: first_party.foo): ...

View File

@@ -1,6 +0,0 @@
# Regression test for: https://github.com/astral-sh/ruff/issues/19175
# there is a (potentially invisible) unicode formfeed character (000C) between `TYPE_CHECKING` and the backslash
from typing import TYPE_CHECKING \
if TYPE_CHECKING: import builtins
builtins.print("!")

View File

@@ -245,14 +245,3 @@ def f(bar: str):
class C:
def __init__(self, x) -> None:
print(locals())
###
# Should trigger for t-string here
# even though the corresponding f-string
# does not trigger (since it is common in stubs)
###
class C:
def f(self, x, y):
"""Docstring."""
msg = t"{x}..."
raise NotImplementedError(msg)

View File

@@ -54,13 +54,6 @@ windows_path.with_suffix(r"s")
windows_path.with_suffix(u'' "json")
windows_path.with_suffix(suffix="js")
Path().with_suffix(".")
Path().with_suffix("py")
PosixPath().with_suffix("py")
PurePath().with_suffix("py")
PurePosixPath().with_suffix("py")
PureWindowsPath().with_suffix("py")
WindowsPath().with_suffix("py")
### No errors
path.with_suffix()

View File

@@ -1,26 +0,0 @@
from pathlib import (
Path,
PosixPath,
PurePath,
PurePosixPath,
PureWindowsPath,
WindowsPath,
)
import pathlib
path = Path()
posix_path: pathlib.PosixPath = PosixPath()
pure_path: PurePath = PurePath()
pure_posix_path = pathlib.PurePosixPath()
pure_windows_path: PureWindowsPath = pathlib.PureWindowsPath()
windows_path: pathlib.WindowsPath = pathlib.WindowsPath()
### No Errors
path.with_suffix(".")
posix_path.with_suffix(".")
pure_path.with_suffix(".")
pure_posix_path.with_suffix(".")
pure_windows_path.with_suffix(".")
windows_path.with_suffix(".")

View File

@@ -104,6 +104,3 @@ os.chmod(x)
os.replace("src", "dst", src_dir_fd=1, dst_dir_fd=2)
os.replace("src", "dst", src_dir_fd=1)
os.replace("src", "dst", dst_dir_fd=2)
os.getcwd()
os.getcwdb()

Some files were not shown because too many files have changed in this diff.