Compare commits


4 Commits

Author        SHA1        Message            Date
Micha Reiser  af975d19cf  Re-add rust-cache  2025-07-14 19:43:03 +02:00
Micha Reiser  8e8f216970  Random change      2025-07-14 19:26:17 +02:00
Micha Reiser  a248ad7e8d  Install sscache    2025-07-14 19:18:37 +02:00
Micha Reiser  5eb414dd1e  Try depot-cargo    2025-07-14 19:18:35 +02:00
1177 changed files with 37199 additions and 50392 deletions


@@ -143,12 +143,12 @@ jobs:
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
# NOTE: Do not exclude all Markdown files here, but rather use
# specific exclude patterns like 'docs/**', because tests for
# 'ty' are written in Markdown.
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
':!**/*.md' \
':crates/ty_python_semantic/resources/mdtest/**/*.md' \
':!docs/**' \
':!assets/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
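Context for the hunk above: `:!` is git's exclude pathspec magic, and `--quiet` makes `git diff` exit non-zero exactly when the remaining paths differ, which is what drives the `changed` output. A minimal standalone sketch, with `origin/main` standing in for the computed merge base:

```bash
# Sketch: skip expensive CI work when only excluded paths changed.
# `git diff --quiet` exits 0 when no differences remain after the
# ':!' exclude pathspecs are applied, and 1 otherwise.
if git diff --quiet "origin/main...HEAD" -- \
    ':!**/*.md' \
    ':!docs/**' \
    ':!assets/**' \
  ; then
  echo "changed=false"   # only excluded paths changed
else
  echo "changed=true"
fi
```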
@@ -235,16 +235,19 @@ jobs:
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: depot/setup-action@v1
- name: "Install Rust toolchain"
run: rustup show
- name: "Setup sccache"
uses: Mozilla-Actions/sccache-action@v0.0.6
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
with:
tool: cargo-insta
- name: ty mdtests (GitHub annotations)
@@ -254,22 +257,22 @@ jobs:
MDTEST_GITHUB_ANNOTATIONS_FORMAT: 1
# Ignore errors if this step fails; we want to continue to later steps in the workflow anyway.
# This step is just to get nice GitHub annotations on the PR diff in the files-changed tab.
run: cargo test -p ty_python_semantic --test mdtest || true
run: depot cargo test -p ty_python_semantic --test mdtest || true
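The `|| true` here is the whole trick: the step's exit code is discarded, so a failing mdtest run can still emit its annotations without failing the job. As a standalone sketch:

```bash
# Sketch: discard the exit code so the step always "succeeds";
# this run exists only to surface GitHub annotations on the PR diff.
cargo test -p ty_python_semantic --test mdtest || true
```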
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --all-features --unreferenced reject --test-runner nextest
run: depot cargo insta test --all-features --unreferenced reject --test-runner nextest
# Check for broken links in the documentation.
- run: cargo doc --all --no-deps
- run: depot cargo doc --all --no-deps
env:
RUSTDOCFLAGS: "-D warnings"
# Use --document-private-items so that all our doc comments are kept in
# sync, not just public items. Eventually we should do this for all
# crates; for now add crates here as they are warning-clean to prevent
# regression.
- run: cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db --document-private-items
- run: depot cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db --document-private-items
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
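Since `cargo doc --check` doesn't exist yet, denying rustdoc warnings is the stand-in; a local equivalent of the step above might look like this (crate list as in the workflow):

```bash
# Sketch: treat rustdoc warnings (e.g. broken intra-doc links) as errors,
# including private items, so doc comments stay in sync with the code.
RUSTDOCFLAGS="-D warnings" \
  cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db \
    --document-private-items
```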
@@ -293,23 +296,26 @@ jobs:
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: depot/setup-action@v1
- name: "Install Rust toolchain"
run: rustup show
- name: "Setup sccache"
uses: Mozilla-Actions/sccache-action@v0.0.6
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
with:
tool: cargo-insta
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest
run: depot cargo insta test --release --all-features --unreferenced reject --test-runner nextest
cargo-test-windows:
name: "cargo test (windows)"
@@ -322,10 +328,13 @@ jobs:
with:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: depot/setup-action@v1
- name: "Install Rust toolchain"
run: rustup show
- name: "Setup sccache"
uses: Mozilla-Actions/sccache-action@v0.0.6
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
with:
tool: cargo-nextest
- name: "Run tests"
@@ -335,8 +344,8 @@ jobs:
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
run: |
cargo nextest run --all-features --profile ci
cargo test --all-features --doc
depot cargo nextest run --all-features --profile ci
depot cargo test --all-features --doc
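The two commands are not redundant: cargo-nextest does not execute doctests, so they need their own `cargo test --doc` pass. Without the depot wrapper:

```bash
# Sketch: nextest covers unit/integration tests; doctests still go
# through the stock libtest harness.
cargo nextest run --all-features --profile ci
cargo test --all-features --doc
```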
cargo-test-wasm:
name: "cargo test (wasm)"
@@ -381,7 +390,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Build"
run: cargo build --release --locked
@@ -395,6 +404,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: depot/setup-action@v1
- uses: SebRollen/toml-action@b1b3628f55fc3a28208d4203ada8b737e9687876 # v1.2.0
id: msrv
with:
@@ -405,13 +415,15 @@ jobs:
env:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
- name: "Setup sccache"
uses: Mozilla-Actions/sccache-action@v0.0.6
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- name: "Build tests"
shell: bash
env:
MSRV: ${{ steps.msrv.outputs.value }}
run: cargo "+${MSRV}" test --no-run --all-features
run: depot cargo "+${MSRV}" test --no-run --all-features
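The MSRV comes out of `Cargo.toml` via the toml-action step; a hedged local equivalent (the `sed` extraction is illustrative, not what the workflow runs):

```bash
# Sketch: build, but don't run, the test suite on the minimum supported
# Rust version declared as `rust-version` in Cargo.toml.
MSRV="$(sed -n 's/^rust-version *= *"\(.*\)"/\1/p' Cargo.toml | head -n1)"
rustup toolchain install "$MSRV"
cargo "+$MSRV" test --no-run --all-features
```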
cargo-fuzz-build:
name: "cargo fuzz build"
@@ -429,7 +441,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"
@@ -451,7 +463,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test
id: download-cached-binary
@@ -518,6 +530,7 @@ jobs:
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download comparison Ruff binary
@@ -652,7 +665,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: Fuzz
env:
FORCE_COLOR: 1
@@ -682,7 +695,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
- uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -722,8 +735,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22
@@ -765,7 +777,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@@ -897,13 +909,13 @@ jobs:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
with:
tool: cargo-codspeed
@@ -911,7 +923,7 @@ jobs:
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
@@ -930,13 +942,13 @@ jobs:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@c07504cae06f832dc8de08911c9a9c5cddb0d2d3 # v2.56.13
with:
tool: cargo-codspeed
@@ -944,7 +956,7 @@ jobs:
run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}


@@ -34,11 +34,11 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: Build ruff
# A debug build means the script runs slower once it gets started,


@@ -38,7 +38,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
@@ -81,7 +81,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:


@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*


@@ -1,25 +1,5 @@
name: Sync typeshed
# How this works:
#
# 1. A Linux worker:
# a. Checks out Ruff and typeshed
# b. Deletes the vendored typeshed stdlib stubs from Ruff
# c. Copies the latest versions of the stubs from typeshed
# d. Uses docstring-adder to sync all docstrings available on Linux
# e. Creates a new branch on the upstream astral-sh/ruff repository
# f. Commits the changes it's made and pushes them to the new upstream branch
# 2. Once the Linux worker is done, a Windows worker:
# a. Checks out the branch created by the Linux worker
# b. Syncs all docstrings available on Windows that are not available on Linux
# c. Commits the changes and pushes them to the same upstream branch
# 3. Once the Windows worker is done, a MacOS worker:
# a. Checks out the branch created by the Linux worker
# b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
# c. Commits the changes and pushes them to the same upstream branch
# d. Creates a PR against the `main` branch using the branch all three workers have pushed to
# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
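In short, the three workers form a relay: each platform appends the docstrings only it can observe at runtime, all pushing to one shared branch. Stripped of the workflow plumbing, the handoff reduces to a few git operations (a sketch assembled from the job definitions below):

```bash
# Sketch of the per-worker handoff on the shared upstream branch.
UPSTREAM_BRANCH=typeshedbot/sync-typeshed

# Linux worker: create the branch and push the initial typeshed sync.
git checkout -b "$UPSTREAM_BRANCH"
git commit -am "Sync typeshed" --allow-empty
git push --force --set-upstream origin "$UPSTREAM_BRANCH"

# Windows, then macOS worker: check out the same branch, codemod the
# docstrings only visible on that platform, and push on top.
git checkout "$UPSTREAM_BRANCH"
./scripts/codemod_docstrings.sh
git commit -am "Sync platform docstrings" --allow-empty
git push
```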
on:
workflow_dispatch:
schedule:
@@ -30,17 +10,7 @@ env:
FORCE_COLOR: 1
GH_TOKEN: ${{ github.token }}
# The name of the upstream branch that the first worker creates,
# and which all three workers push to.
UPSTREAM_BRANCH: typeshedbot/sync-typeshed
# The path to the directory that contains the vendored typeshed stubs,
# relative to the root of the Ruff repository.
VENDORED_TYPESHED: crates/ty_vendored/vendor/typeshed
jobs:
# Sync typeshed stubs, and sync all docstrings available on Linux.
# Push the changes to a new branch on the upstream repository.
sync:
name: Sync typeshed
runs-on: ubuntu-latest
@@ -49,6 +19,7 @@ jobs:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
@@ -65,130 +36,68 @@ jobs:
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Sync typeshed stubs
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: Sync typeshed
id: sync
run: |
rm -rf "ruff/${VENDORED_TYPESHED}"
mkdir "ruff/${VENDORED_TYPESHED}"
cp typeshed/README.md "ruff/${VENDORED_TYPESHED}"
cp typeshed/LICENSE "ruff/${VENDORED_TYPESHED}"
docstring_adder="git+https://github.com/astral-sh/docstring-adder.git@6de51c5f44aea11fe8c8f2d30f9ee0683682c3d2"
# The pyproject.toml file is needed by a later job for the black configuration.
# It's deleted before creating the PR.
cp typeshed/pyproject.toml "ruff/${VENDORED_TYPESHED}"
cp -r typeshed/stdlib "ruff/${VENDORED_TYPESHED}/stdlib"
rm -rf "ruff/${VENDORED_TYPESHED}/stdlib/@tests"
git -C typeshed rev-parse HEAD > "ruff/${VENDORED_TYPESHED}/source_commit.txt"
cd ruff
git checkout -b "${UPSTREAM_BRANCH}"
git add .
git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
- name: Sync Linux docstrings
if: ${{ success() }}
run: |
cd ruff
./scripts/codemod_docstrings.sh
git commit -am "Sync Linux docstrings" --allow-empty
- name: Push the changes
id: commit
if: ${{ success() }}
run: git -C ruff push --force --set-upstream origin "${UPSTREAM_BRANCH}"
# Checkout the branch created by the sync job,
# and sync all docstrings available on Windows that are not available on Linux.
# Commit the changes and push them to the same branch.
docstrings-windows:
runs-on: windows-latest
timeout-minutes: 20
needs: [sync]
# Don't run the cron job on forks.
# The job will also be skipped if the sync job failed, because it's specified in `needs` above,
# and we haven't used `always()` in the `if` condition here
# (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH }}
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- name: Sync Windows docstrings
id: docstrings
shell: bash
run: ./scripts/codemod_docstrings.sh
- name: Commit the changes
if: ${{ steps.docstrings.outcome == 'success' }}
run: |
git commit -am "Sync Windows docstrings" --allow-empty
git push
# Checkout the branch created by the sync job,
# and sync all docstrings available on macOS that are not available on Linux or Windows.
# Push the changes to the same branch and create a PR against the `main` branch using that branch.
docstrings-macos-and-pr:
runs-on: macos-latest
timeout-minutes: 20
needs: [sync, docstrings-windows]
# Don't run the cron job on forks.
# The job will also be skipped if the sync or docstrings-windows jobs failed,
# because they're specified in `needs` above and we haven't used an `always()` condition in the `if` here
# (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH }}
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- name: Sync macOS docstrings
run: ./scripts/codemod_docstrings.sh
- name: Commit and push the changes
if: ${{ success() }}
run: |
git commit -am "Sync macOS docstrings" --allow-empty
# Run with the full matrix of Python versions supported by typeshed,
# so that we codemod in docstrings that only exist on certain versions.
#
# The codemod will only add docstrings to functions/classes that do not
# already have docstrings. We run with Python 3.14 before running with
# any other Python version so that we get the Python 3.14 version of the
# docstring for a definition that exists on all Python versions: if we
# ran with Python 3.9 first, then the later runs with Python 3.10+ would
# not modify the docstring that had already been added using the old version of Python.
#
# TODO: In order to add docstrings for platform-specific APIs, we would also
# need to run the codemod on Windows. We get the runtime docstrings by inspecting
# the docstrings at runtime, so if an API doesn't exist at runtime (because e.g.
# it's Windows-specific and we're running on Linux), then we won't add a docstring to it.
#
uvx --python=3.14 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.13 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.12 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.11 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.10 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
uvx --python=3.9 --force-reinstall --from="${docstring_adder}" add-docstrings --stdlib-path ./typeshed/stdlib
# Here we just reformat the codemodded stubs so that they are
# consistent with the other typeshed stubs around them.
# Typeshed formats code using black in their CI, so we just invoke
# black on the stubs the same way that typeshed does.
uvx black "${VENDORED_TYPESHED}/stdlib" --config "${VENDORED_TYPESHED}/pyproject.toml" || true
git commit -am "Format codemodded docstrings" --allow-empty
uvx --directory=typeshed pre-commit run -a black || true
rm "${VENDORED_TYPESHED}/pyproject.toml"
git commit -am "Remove pyproject.toml file"
git push
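Because the codemod only adds docstrings to definitions that don't yet have one, running Python 3.14 first pins the newest docstring text for APIs that exist on every version; the repeated `uvx` invocations are equivalent to a version loop (a condensed sketch):

```bash
# Sketch: newest interpreter first, so its docstring wins for shared APIs;
# older interpreters then only contribute version-specific definitions.
docstring_adder="git+https://github.com/astral-sh/docstring-adder.git@6de51c5f44aea11fe8c8f2d30f9ee0683682c3d2"
for py in 3.14 3.13 3.12 3.11 3.10 3.9; do
  uvx --python="$py" --force-reinstall --from="$docstring_adder" \
    add-docstrings --stdlib-path ./typeshed/stdlib
done
```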
- name: Create a PR
if: ${{ success() }}
rm -rf ruff/crates/ty_vendored/vendor/typeshed
mkdir ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}
run: |
gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length | grep 1 && exit 0 # exit if there is an existing PR
cd ruff
git checkout -b typeshedbot/sync-typeshed
git add .
git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
- name: Create a PR
if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
run: |
cd ruff
git push --force origin typeshedbot/sync-typeshed
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is an existing PR
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"
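The `gh pr list ... --jq length | grep 1 && exit 0` guard makes PR creation idempotent: if a sync PR is already open for the branch, the step exits successfully instead of opening a duplicate. The same pattern, spelled out:

```bash
# Sketch: only open a PR when none exists for this head branch.
BRANCH=typeshedbot/sync-typeshed
if [ "$(gh pr list --repo "$GITHUB_REPOSITORY" --head "$BRANCH" --json id --jq length)" -gt 0 ]; then
  echo "PR already open; nothing to do"
  exit 0
fi
gh pr create --title "[ty] Sync vendored typeshed stubs" \
  --body "Close and reopen this PR to trigger CI" --label "ty"
```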
create-issue-on-failure:
name: Create an issue if the typeshed sync failed
runs-on: ubuntu-latest
needs: [sync, docstrings-windows, docstrings-macos-and-pr]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
needs: [sync]
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
permissions:
issues: write
steps:


@@ -33,7 +33,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
@@ -64,7 +64,7 @@ jobs:
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@f0eec0e549684d8e1d7b8bc3e351202124b63bda"
ecosystem-analyzer \
--repository ruff \


@@ -1,76 +0,0 @@
name: ty ecosystem-report
permissions: {}
on:
workflow_dispatch:
schedule:
# Run every Wednesday at 5:00 UTC:
- cron: 0 5 * * 3
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
jobs:
ty-ecosystem-report:
name: Create ecosystem report
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"
- name: Install Rust toolchain
run: rustup show
- name: Create report
shell: bash
run: |
cd ruff
echo "Enabling configuration overloads (see .github/mypy-primer-ty.toml)"
mkdir -p ~/.config/ty
cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
ecosystem-analyzer \
--verbose \
--repository ruff \
analyze \
--projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
--output ecosystem-diagnostics.json
mkdir dist
ecosystem-analyzer \
generate-report \
--max-diagnostics-per-project=1200 \
ecosystem-diagnostics.json \
--output dist/index.html
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
id: deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}


@@ -1,109 +0,0 @@
name: Run typing conformance
permissions: {}
on:
pull_request:
paths:
- "crates/ty*/**"
- "crates/ruff_db"
- "crates/ruff_python_ast"
- "crates/ruff_python_parser"
- ".github/workflows/typing_conformance.yaml"
- ".github/workflows/typing_conformance_comment.yaml"
- "Cargo.lock"
- "!**.md"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
jobs:
typing_conformance:
name: Compute diagnostic diff
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 10
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
repository: python/typing
ref: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
path: typing
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"
- name: Install Rust toolchain
run: rustup show
- name: Compute diagnostic diff
shell: bash
run: |
RUFF_DIR="$GITHUB_WORKSPACE/ruff"
# Build the executable for the old and new commit
(
cd ruff
echo "new commit"
git checkout -b new_commit "${{ github.event.pull_request.head.sha }}"
git rev-list --format=%s --max-count=1 new_commit
cargo build --release --bin ty
mv target/release/ty ty-new
echo "old commit (merge base)"
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
git checkout -b old_commit "$MERGE_BASE"
git rev-list --format=%s --max-count=1 old_commit
cargo build --release --bin ty
mv target/release/ty ty-old
)
(
cd typing/conformance/tests
echo "Running ty on old commit (merge base)"
"$RUFF_DIR/ty-old" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/old-output.txt" 2>&1 || true
echo "Running ty on new commit"
"$RUFF_DIR/ty-new" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/new-output.txt" 2>&1 || true
)
if ! diff -u old-output.txt new-output.txt > typing_conformance_diagnostics.diff; then
echo "Differences found between base and PR"
else
echo "No differences found"
touch typing_conformance_diagnostics.diff
fi
echo ${{ github.event.number }} > pr-number
- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: typing_conformance_diagnostics_diff
path: typing_conformance_diagnostics.diff
- name: Upload pr-number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr-number
path: pr-number


@@ -1,97 +0,0 @@
name: PR comment (typing_conformance)
on: # zizmor: ignore[dangerous-triggers]
workflow_run:
workflows: [Run typing conformance]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The typing_conformance workflow that triggers the workflow run
required: true
jobs:
comment:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download PR number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true
- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download typing_conformance results"
id: download-typing_conformance_diff
if: steps.pr-number.outputs.pr-number
with:
name: typing_conformance_diagnostics_diff
workflow: typing_conformance.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/typing_conformance_diagnostics_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true
- name: Generate comment content
id: generate-comment
if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
run: |
# Guard against malicious typing_conformance results that symlink to a secret
# file on this runner
if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
then
echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
exit 1
fi
# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt
echo '## Diagnostic diff on typing conformance tests' >> comment.txt
if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
fi
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
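The `comment<<EOF` lines use GitHub Actions' delimiter syntax for multi-line step outputs, since a plain `key=value` line can't hold the whole comment body. Condensed:

```bash
# Sketch: multi-line step outputs are written to $GITHUB_OUTPUT between
# a `key<<DELIMITER` line and the matching delimiter.
{
  echo 'comment<<EOF'
  cat comment.txt
  echo 'EOF'
} >> "$GITHUB_OUTPUT"
```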
- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"
- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace

.github/zizmor.yml (vendored, 1 line changed)

@@ -11,7 +11,6 @@ rules:
- build-docker.yml
- publish-playground.yml
- ty-ecosystem-analyzer.yaml
- ty-ecosystem-report.yaml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...


@@ -81,10 +81,10 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.5
rev: v0.12.3
hooks:
- id: ruff-format
- id: ruff-check
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi]
require_serial: true
@@ -128,10 +128,5 @@ repos:
# but the integration only works if shellcheck is installed
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.10.0.1
hooks:
- id: shellcheck
ci:
skip: [cargo-fmt, dev-generate-all]


@@ -1,54 +1,5 @@
# Changelog
## 0.12.5
### Preview features
- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))
### Bug fixes
- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))
### Rule changes
- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))
## 0.12.4
### Preview features
- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))
### Bug fixes
- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))
### Rule changes
- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))
### Documentation
- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288),[#19272](https://github.com/astral-sh/ruff/pull/19272),[#19291](https://github.com/astral-sh/ruff/pull/19291),[#19296](https://github.com/astral-sh/ruff/pull/19296),[#19292](https://github.com/astral-sh/ruff/pull/19292),[#19295](https://github.com/astral-sh/ruff/pull/19295),[#19297](https://github.com/astral-sh/ruff/pull/19297),[#19309](https://github.com/astral-sh/ruff/pull/19309))
## 0.12.3
### Preview features


@@ -266,13 +266,6 @@ Finally, regenerate the documentation and generated code with `cargo dev generat
## MkDocs
> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.
To preview any changes to the documentation locally:
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

Cargo.lock (generated, 145 lines changed)

@@ -261,18 +261,6 @@ version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
[[package]]
name = "bitvec"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
dependencies = [
"funty",
"radium",
"tap",
"wyz",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@@ -530,7 +518,7 @@ dependencies = [
"ciborium",
"clap",
"codspeed",
"criterion-plot 0.5.0",
"criterion-plot",
"is-terminal",
"itertools 0.10.5",
"num-traits",
@@ -713,15 +701,15 @@ dependencies = [
[[package]]
name = "criterion"
version = "0.7.0"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1c047a62b0cc3e145fa84415a3191f628e980b194c2755aa12300a4e6cbd928"
checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679"
dependencies = [
"anes",
"cast",
"ciborium",
"clap",
"criterion-plot 0.6.0",
"criterion-plot",
"itertools 0.13.0",
"num-traits",
"oorandom",
@@ -742,16 +730,6 @@ dependencies = [
"itertools 0.10.5",
]
[[package]]
name = "criterion-plot"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b1bcc0dc7dfae599d84ad0b1a55f80cde8af3725da8313b528da95ef783e338"
dependencies = [
"cast",
"itertools 0.13.0",
]
[[package]]
name = "crossbeam"
version = "0.8.4"
@@ -1143,12 +1121,6 @@ dependencies = [
"libc",
]
[[package]]
name = "funty"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "generic-array"
version = "0.14.7"
@@ -1161,9 +1133,9 @@ dependencies = [
[[package]]
name = "get-size-derive2"
version = "0.6.1"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca171f9f8ed2f416ac044de2dc4acde3e356662a14ac990345639653bdc7fc28"
checksum = "028f3cfad7c3e3b1d8d04ef0a1c03576f2d62800803fe1301a4cd262849f2dea"
dependencies = [
"attribute-derive",
"quote",
@@ -1172,9 +1144,9 @@ dependencies = [
[[package]]
name = "get-size2"
version = "0.6.1"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "965bc5c1c5fe05c5bbd398bb9b3f0f14d750261ebdd1af959f2c8a603fedb5ad"
checksum = "3a09c2043819a3def7bfbb4927e7df96aab0da4cfd8824484b22d0c94e84458e"
dependencies = [
"compact_str",
"get-size-derive2",
@@ -1585,15 +1557,6 @@ dependencies = [
"memoffset",
]
[[package]]
name = "inventory"
version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab08d7cd2c5897f2c949e5383ea7c7db03fb19130ffcfbf7eda795137ae3cb83"
dependencies = [
"rustversion",
]
[[package]]
name = "is-docker"
version = "0.2.0"
@@ -2160,6 +2123,16 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "papaya"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
dependencies = [
"equivalent",
"seize",
]
[[package]]
name = "parking_lot"
version = "0.12.3"
@@ -2576,12 +2549,6 @@ version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
[[package]]
name = "radium"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
[[package]]
name = "rand"
version = "0.8.5"
@@ -2595,9 +2562,9 @@ dependencies = [
[[package]]
name = "rand"
version = "0.9.2"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.3",
@@ -2744,7 +2711,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.12.5"
version = "0.12.3"
dependencies = [
"anyhow",
"argfile",
@@ -2861,6 +2828,7 @@ dependencies = [
"anstyle",
"arc-swap",
"camino",
"countme",
"dashmap",
"dunce",
"etcetera",
@@ -2871,7 +2839,6 @@ dependencies = [
"insta",
"matchit",
"path-slash",
"quick-junit",
"ruff_annotate_snippets",
"ruff_cache",
"ruff_diagnostics",
@@ -2891,7 +2858,6 @@ dependencies = [
"tracing",
"tracing-subscriber",
"ty_static",
"unicode-width 0.2.1",
"web-time",
"zip",
]
@@ -2971,7 +2937,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"memchr",
"ruff_cache",
"ruff_db",
"ruff_linter",
@@ -2997,7 +2962,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.12.5"
version = "0.12.3"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3022,6 +2987,7 @@ dependencies = [
"pathdiff",
"pep440_rs",
"pyproject-toml",
"quick-junit",
"regex",
"ruff_annotate_snippets",
"ruff_cache",
@@ -3076,7 +3042,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"itertools 0.14.0",
"rand 0.9.2",
"rand 0.9.1",
"ruff_diagnostics",
"ruff_source_file",
"ruff_text_size",
@@ -3329,7 +3295,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.12.5"
version = "0.12.3"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3443,7 +3409,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
dependencies = [
"boxcar",
"compact_str",
@@ -3453,7 +3419,7 @@ dependencies = [
"hashlink",
"indexmap",
"intrusive-collections",
"inventory",
"papaya",
"parking_lot",
"portable-atomic",
"rayon",
@@ -3468,12 +3434,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
[[package]]
name = "salsa-macros"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
dependencies = [
"proc-macro2",
"quote",
@@ -3526,6 +3492,16 @@ version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "seize"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "serde"
version = "1.0.219"
@@ -3570,9 +3546,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.141"
version = "1.0.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
dependencies = [
"itoa",
"memchr",
@@ -3762,22 +3738,23 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.27.2"
version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.27.2"
version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7"
checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8"
dependencies = [
"heck",
"proc-macro2",
"quote",
"rustversion",
"syn",
]
@@ -3803,12 +3780,6 @@ dependencies = [
"syn",
]
[[package]]
name = "tap"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tempfile"
version = "3.20.0"
@@ -4223,7 +4194,6 @@ version = "0.0.0"
dependencies = [
"bitflags 2.9.1",
"insta",
"itertools 0.14.0",
"regex",
"ruff_db",
"ruff_python_ast",
@@ -4232,10 +4202,11 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"rustc-hash",
"salsa",
"smallvec",
"tracing",
"ty_project",
"ty_python_semantic",
"ty_vendored",
]
[[package]]
@@ -4269,6 +4240,7 @@ dependencies = [
"thiserror 2.0.12",
"toml 0.9.2",
"tracing",
"ty_ide",
"ty_python_semantic",
"ty_vendored",
]
@@ -4279,7 +4251,6 @@ version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.9.1",
"bitvec",
"camino",
"colored 3.0.0",
"compact_str",
@@ -4329,15 +4300,11 @@ name = "ty_server"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.9.1",
"crossbeam",
"dunce",
"insta",
"jod-thread",
"libc",
"lsp-server",
"lsp-types",
"regex",
"ruff_db",
"ruff_notebook",
"ruff_python_ast",
@@ -4348,7 +4315,6 @@ dependencies = [
"serde",
"serde_json",
"shellexpand",
"tempfile",
"thiserror 2.0.12",
"tracing",
"tracing-subscriber",
@@ -4608,7 +4574,7 @@ checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
dependencies = [
"getrandom 0.3.3",
"js-sys",
"rand 0.9.2",
"rand 0.9.1",
"uuid-macro-internal",
"wasm-bindgen",
]
@@ -5137,15 +5103,6 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "wyz"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
dependencies = [
"tap",
]
[[package]]
name = "yansi"
version = "1.0.1"


@@ -57,9 +57,6 @@ assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
"alloc",
] }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
@@ -73,7 +70,7 @@ console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
criterion = { version = "0.6.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
@@ -83,7 +80,7 @@ etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.6.0", features = [
get-size2 = { version = "0.5.0", features = [
"derive",
"smallvec",
"hashbrown",
@@ -141,7 +138,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "dba66f1a37acca014c2402f231ed5b361bd7d8fe" }
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "fc00eba89e5dcaa5edba51c41aa5f309b5cb126b" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -153,7 +150,7 @@ serde_with = { version = "3.6.0", default-features = false, features = [
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2", features = ["union", "const_generics", "const_new"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
"diff",
"term-svg",


@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.5/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.3/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.5
rev: v0.12.3
hooks:
# Run the linter.
- id: ruff-check


@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.5"
version = "0.12.3"
publish = true
authors = { workspace = true }
edition = { workspace = true }


@@ -169,9 +169,6 @@ pub struct AnalyzeGraphCommand {
/// Attempt to detect imports from string literals.
#[clap(long)]
detect_string_imports: bool,
/// The minimum number of dots in a string import to consider it a valid import.
#[clap(long)]
min_dots: Option<usize>,
/// Enable preview mode. Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
@@ -811,7 +808,6 @@ impl AnalyzeGraphCommand {
} else {
None
},
string_imports_min_dots: self.min_dots,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
target_version: self.target_version.map(ast::PythonVersion::from),
..ExplicitConfigOverrides::default()
@@ -1309,7 +1305,6 @@ struct ExplicitConfigOverrides {
show_fixes: Option<bool>,
extension: Option<Vec<ExtensionPair>>,
detect_string_imports: Option<bool>,
string_imports_min_dots: Option<usize>,
}
impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1397,9 +1392,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
if let Some(detect_string_imports) = &self.detect_string_imports {
config.analyze.detect_string_imports = Some(*detect_string_imports);
}
if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
}
config
}


@@ -454,7 +454,7 @@ impl LintCacheData {
CacheMessage {
rule,
body: msg.body().to_string(),
suggestion: msg.first_help_text().map(ToString::to_string),
suggestion: msg.suggestion().map(ToString::to_string),
range: msg.expect_range(),
parent: msg.parent(),
fix: msg.fix().cloned(),

View File

@@ -102,7 +102,7 @@ pub(crate) fn analyze_graph(
// Resolve the per-file settings.
let settings = resolver.resolve(path);
let string_imports = settings.analyze.string_imports;
let string_imports = settings.analyze.detect_string_imports;
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();
// Skip excluded files.


@@ -279,7 +279,6 @@ mod test {
TextEmitter::default()
.with_show_fix_status(true)
.with_color(false)
.emit(
&mut output,
&diagnostics.inner,


@@ -15,8 +15,8 @@ use ruff_db::diagnostic::{
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, SarifEmitter,
TextEmitter,
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, JunitEmitter,
PylintEmitter, RdjsonEmitter, SarifEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@@ -238,11 +238,7 @@ impl Printer {
write!(writer, "{value}")?;
}
OutputFormat::Rdjson => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Rdjson)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
RdjsonEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::JsonLines => {
let config = DisplayDiagnosticConfig::default()
@@ -252,11 +248,7 @@ impl Printer {
write!(writer, "{value}")?;
}
OutputFormat::Junit => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Junit)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JunitEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Concise | OutputFormat::Full => {
TextEmitter::default()
@@ -264,7 +256,6 @@ impl Printer {
.with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
.with_show_source(self.format == OutputFormat::Full)
.with_unsafe_fixes(self.unsafe_fixes)
.with_preview(preview)
.emit(writer, &diagnostics.inner, &context)?;
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
@@ -299,11 +290,7 @@ impl Printer {
GitlabEmitter::default().emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Pylint => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Pylint)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
PylintEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Azure => {
let config = DisplayDiagnosticConfig::default()


@@ -57,40 +57,33 @@ fn dependencies() -> Result<()> {
.write_str(indoc::indoc! {r#"
def f(): pass
"#})?;
root.child("ruff")
.child("e.pyi")
.write_str(indoc::indoc! {r#"
def f() -> None: ...
"#})?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.py"
],
"ruff/d.py": [
"ruff/e.py",
"ruff/e.pyi"
],
"ruff/e.py": [],
"ruff/e.pyi": []
}
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.py"
],
"ruff/d.py": [
"ruff/e.py"
],
"ruff/e.py": []
}
----- stderr -----
"#);
----- stderr -----
"###);
});
Ok(())
@@ -204,43 +197,23 @@ fn string_detection() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [],
"ruff/c.py": []
}
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}
----- stderr -----
"#);
});
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").arg("--min-dots").arg("1").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}
----- stderr -----
"#);
----- stderr -----
"###);
});
Ok(())


@@ -993,7 +993,6 @@ fn value_given_to_table_key_is_not_inline_table_2() {
- `lint.exclude`
- `lint.preview`
- `lint.typing-extensions`
- `lint.future-annotations`
For more information, try '--help'.
");
@@ -2422,7 +2421,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -2734,7 +2733,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -3098,7 +3097,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -3478,7 +3477,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -3806,7 +3805,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -4134,7 +4133,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -4419,7 +4418,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -4757,7 +4756,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
@@ -5718,11 +5717,8 @@ match 42: # invalid-syntax
let snapshot = format!("output_format_{output_format}");
let project_dir = dunce::canonicalize(tempdir.path())?;
insta::with_settings!({
filters => vec![
(tempdir_filter(&project_dir).as_str(), "[TMP]/"),
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),
@@ -5748,25 +5744,3 @@ match 42: # invalid-syntax
Ok(())
}
#[test]
fn future_annotations_preview_warning() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "lint.future-annotations = true"])
.args(["--select", "F"])
.arg("--no-preview")
.arg("-")
.pass_stdin("1"),
@r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
warning: The `lint.future-annotations` setting will have no effect because `preview` is disabled
",
);
}

View File

@@ -25,7 +25,7 @@ exit_code: 1
<testcase name="org.ruff.F821" classname="[TMP]/input" line="2" column="5">
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
</testcase>
<testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1">
<testcase name="org.ruff" classname="[TMP]/input" line="3" column="1">
<failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
</testcase>
</testsuite>

View File

@@ -18,6 +18,6 @@ exit_code: 1
----- stdout -----
input.py:1: [F401] `os` imported but unused
input.py:2: [F821] Undefined name `y`
input.py:3: [invalid-syntax] SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
input.py:3: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr -----

View File

@@ -75,7 +75,8 @@ exit_code: 1
},
{
"code": {
"value": "invalid-syntax"
"url": null,
"value": null
},
"location": {
"path": "[TMP]/input.py",
@@ -93,7 +94,7 @@ exit_code: 1
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
}
],
"severity": "WARNING",
"severity": "warning",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"

View File

@@ -392,7 +392,7 @@ formatter.docstring_code_line_width = dynamic
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.7
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

View File

@@ -2,7 +2,6 @@
use ruff_benchmark::criterion;
use ruff_benchmark::real_world_projects::{InstalledProject, RealWorldProject};
use std::fmt::Write;
use std::ops::Range;
use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
@@ -18,7 +17,7 @@ use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ChangedKind};
use ty_project::{CheckMode, Db, ProjectDatabase, ProjectMetadata};
use ty_project::{Db, ProjectDatabase, ProjectMetadata};
struct Case {
db: ProjectDatabase,
@@ -102,7 +101,6 @@ fn setup_tomllib_case() -> Case {
let re = re.unwrap();
db.set_check_mode(CheckMode::OpenFiles);
db.project().set_open_files(&mut db, tomllib_files);
let re_path = re.path(&db).as_system_path().unwrap().to_owned();
@@ -238,7 +236,6 @@ fn setup_micro_case(code: &str) -> Case {
let mut db = ProjectDatabase::new(metadata, system).unwrap();
let file = system_path_to_file(&db, SystemPathBuf::from(file_path)).unwrap();
db.set_check_mode(CheckMode::OpenFiles);
db.project()
.set_open_files(&mut db, FxHashSet::from_iter([file]));
@@ -444,37 +441,6 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
});
}
fn benchmark_many_enum_members(criterion: &mut Criterion) {
const NUM_ENUM_MEMBERS: usize = 512;
setup_rayon();
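    // The generated Python source looks like this (a sketch, for two members):
    //
    //     from enum import Enum
    //     class E(Enum):
    //         m0 = 0
    //         m1 = 1
    //
    //     print(E.m0)
    //     print(E.m1)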
let mut code = String::new();
writeln!(&mut code, "from enum import Enum").ok();
writeln!(&mut code, "class E(Enum):").ok();
for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, " m{i} = {i}").ok();
}
writeln!(&mut code).ok();
for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, "print(E.m{i})").ok();
}
criterion.bench_function("ty_micro[many_enum_members]", |b| {
b.iter_batched_ref(
|| setup_micro_case(&code),
|case| {
let Case { db, .. } = case;
let result = db.check();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
);
});
}
struct ProjectBenchmark<'a> {
project: InstalledProject<'a>,
fs: MemoryFileSystem,
@@ -527,21 +493,14 @@ impl<'a> ProjectBenchmark<'a> {
#[track_caller]
fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
fn check_project(db: &mut ProjectDatabase, project_name: &str, max_diagnostics: usize) {
fn check_project(db: &mut ProjectDatabase, max_diagnostics: usize) {
let result = db.check();
let diagnostics = result.len();
if diagnostics > max_diagnostics {
let details = result
.into_iter()
.map(|diagnostic| diagnostic.concise_message().to_string())
.collect::<Vec<_>>()
.join("\n ");
assert!(
diagnostics <= max_diagnostics,
"{project_name}: Expected <={max_diagnostics} diagnostics but got {diagnostics}:\n {details}",
);
}
assert!(
diagnostics <= max_diagnostics,
"Expected <={max_diagnostics} diagnostics but got {diagnostics}"
);
}
setup_rayon();
@@ -551,7 +510,7 @@ fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
group.bench_function(benchmark.project.config.name, |b| {
b.iter_batched_ref(
|| benchmark.setup_iteration(),
|db| check_project(db, benchmark.project.config.name, benchmark.max_diagnostics),
|db| check_project(db, benchmark.max_diagnostics),
BatchSize::SmallInput,
);
});
@@ -619,7 +578,7 @@ fn datetype(criterion: &mut Criterion) {
max_dep_date: "2025-07-04",
python_version: PythonVersion::PY313,
},
2,
0,
);
bench_project(&benchmark, criterion);
@@ -632,7 +591,6 @@ criterion_group!(
benchmark_many_tuple_assignments,
benchmark_complex_constrained_attributes_1,
benchmark_complex_constrained_attributes_2,
benchmark_many_enum_members,
);
criterion_group!(project, anyio, attrs, hydra, datetype);
criterion_main!(check_file, micro, project);

View File

@@ -25,6 +25,7 @@ ty_static = { workspace = true }
anstyle = { workspace = true }
arc-swap = { workspace = true }
camino = { workspace = true }
countme = { workspace = true }
dashmap = { workspace = true }
dunce = { workspace = true }
filetime = { workspace = true }
@@ -33,7 +34,6 @@ glob = { workspace = true }
ignore = { workspace = true, optional = true }
matchit = { workspace = true }
path-slash = { workspace = true }
quick-junit = { workspace = true, optional = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
@@ -42,7 +42,6 @@ serde_json = { workspace = true, optional = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
unicode-width = { workspace = true }
zip = { workspace = true }
[target.'cfg(target_arch="wasm32")'.dependencies]
@@ -57,13 +56,7 @@ tempfile = { workspace = true }
[features]
cache = ["ruff_cache"]
junit = ["dep:quick-junit"]
os = ["ignore", "dep:etcetera"]
serde = [
"camino/serde1",
"dep:serde",
"dep:serde_json",
"ruff_diagnostics/serde",
]
serde = ["camino/serde1", "dep:serde", "dep:serde_json", "ruff_diagnostics/serde"]
# Exposes testing utilities.
testing = ["tracing-subscriber"]

View File

@@ -1,14 +1,12 @@
use std::{fmt::Formatter, path::Path, sync::Arc};
use std::{fmt::Formatter, sync::Arc};
use ruff_diagnostics::{Applicability, Fix};
use ruff_diagnostics::Fix;
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
use ruff_annotate_snippets::Level as AnnotateLevel;
use ruff_text_size::{Ranged, TextRange, TextSize};
pub use self::render::{
DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary,
};
pub use self::render::{DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input};
use crate::{Db, files::File};
mod render;
@@ -124,14 +122,7 @@ impl Diagnostic {
/// directly. If callers want or need to avoid cloning the diagnostic
/// message, then they can also pass a `DiagnosticMessage` directly.
pub fn info<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Info, message));
}
/// Adds a "help" sub-diagnostic with the given message.
///
/// See the closely related [`Diagnostic::info`] method for more details.
pub fn help<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Help, message));
self.sub(SubDiagnostic::new(Severity::Info, message));
}
/// Adds a "sub" diagnostic to this diagnostic.
@@ -317,10 +308,6 @@ impl Diagnostic {
/// Set the fix for this diagnostic.
pub fn set_fix(&mut self, fix: Fix) {
debug_assert!(
self.primary_span().is_some(),
"Expected a source file for a diagnostic with a fix"
);
Arc::make_mut(&mut self.inner).fix = Some(fix);
}
@@ -386,15 +373,9 @@ impl Diagnostic {
self.primary_message()
}
/// Returns the message of the first sub-diagnostic with a `Help` severity.
///
/// Note that this is used as the fix title/suggestion for some of Ruff's output formats, but in
/// general this is not the guaranteed meaning of such a message.
pub fn first_help_text(&self) -> Option<&str> {
self.sub_diagnostics()
.iter()
.find(|sub| matches!(sub.inner.severity, SubDiagnosticSeverity::Help))
.map(|sub| sub.inner.message.as_str())
/// Returns the fix suggestion for the violation.
pub fn suggestion(&self) -> Option<&str> {
self.primary_annotation()?.get_message()
}
/// Returns the URL for the rule documentation, if it exists.
@@ -580,10 +561,7 @@ impl SubDiagnostic {
/// Callers can pass anything that implements `std::fmt::Display`
/// directly. If callers want or need to avoid cloning the diagnostic
/// message, then they can also pass a `DiagnosticMessage` directly.
pub fn new<'a>(
severity: SubDiagnosticSeverity,
message: impl IntoDiagnosticMessage + 'a,
) -> SubDiagnostic {
pub fn new<'a>(severity: Severity, message: impl IntoDiagnosticMessage + 'a) -> SubDiagnostic {
let inner = Box::new(SubDiagnosticInner {
severity,
message: message.into_diagnostic_message(),
@@ -661,7 +639,7 @@ impl SubDiagnostic {
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
struct SubDiagnosticInner {
severity: SubDiagnosticSeverity,
severity: Severity,
message: DiagnosticMessage,
annotations: Vec<Annotation>,
}
@@ -1030,18 +1008,6 @@ impl UnifiedFile {
}
}
/// Return the file's path relative to the current working directory.
pub fn relative_path<'a>(&'a self, resolver: &'a dyn FileResolver) -> &'a Path {
let cwd = resolver.current_directory();
let path = Path::new(self.path(resolver));
if let Ok(path) = path.strip_prefix(cwd) {
return path;
}
path
}
fn diagnostic_source(&self, resolver: &dyn FileResolver) -> DiagnosticSource {
match self {
UnifiedFile::Ty(file) => DiagnosticSource::Ty(resolver.input(*file)),
@@ -1188,32 +1154,6 @@ impl Severity {
}
}
/// Like [`Severity`] but exclusively for sub-diagnostics.
///
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
/// deleted and the two combined again.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
pub enum SubDiagnosticSeverity {
Help,
Info,
Warning,
Error,
Fatal,
}
impl SubDiagnosticSeverity {
fn to_annotate(self) -> AnnotateLevel {
match self {
SubDiagnosticSeverity::Help => AnnotateLevel::Help,
SubDiagnosticSeverity::Info => AnnotateLevel::Info,
SubDiagnosticSeverity::Warning => AnnotateLevel::Warning,
SubDiagnosticSeverity::Error => AnnotateLevel::Error,
SubDiagnosticSeverity::Fatal => AnnotateLevel::Error,
}
}
}
/// Configuration for rendering diagnostics.
#[derive(Clone, Debug)]
pub struct DisplayDiagnosticConfig {
@@ -1240,15 +1180,6 @@ pub struct DisplayDiagnosticConfig {
reason = "This is currently only used for JSON but will be needed soon for other formats"
)]
preview: bool,
/// Whether to hide the real `Severity` of diagnostics.
///
/// This is intended for temporary use by Ruff, which only has a single `error` severity at the
/// moment. We should be able to remove this option when Ruff gets more severities.
hide_severity: bool,
/// Whether to show the availability of a fix in a diagnostic.
show_fix_status: bool,
/// The lowest applicability that should be shown when reporting diagnostics.
fix_applicability: Applicability,
}
impl DisplayDiagnosticConfig {
@@ -1277,35 +1208,6 @@ impl DisplayDiagnosticConfig {
..self
}
}
/// Whether to hide a diagnostic's severity or not.
pub fn hide_severity(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
hide_severity: yes,
..self
}
}
/// Whether to show a fix's availability or not.
pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
show_fix_status: yes,
..self
}
}
/// Set the lowest fix applicability that should be shown.
///
/// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
/// availability for unsafe or display-only fixes.
///
/// Note that this option is currently ignored when `hide_severity` is false.
pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
fix_applicability: applicability,
..self
}
}
}
impl Default for DisplayDiagnosticConfig {
@@ -1315,9 +1217,6 @@ impl Default for DisplayDiagnosticConfig {
color: false,
context: 2,
preview: false,
hide_severity: false,
show_fix_status: false,
fix_applicability: Applicability::Safe,
}
}
}
@@ -1360,16 +1259,6 @@ pub enum DiagnosticFormat {
/// format for an array of all diagnostics. See <https://jsonlines.org/> for more details.
#[cfg(feature = "serde")]
JsonLines,
/// Print diagnostics in the JSON format expected by [reviewdog].
///
/// [reviewdog]: https://github.com/reviewdog/reviewdog
#[cfg(feature = "serde")]
Rdjson,
/// Print diagnostics in the format emitted by Pylint.
Pylint,
/// Print diagnostics in the format expected by JUnit.
#[cfg(feature = "junit")]
Junit,
}
/// A representation of the kinds of messages inside a diagnostic.

View File

@@ -1,6 +1,4 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::path::Path;
use ruff_annotate_snippets::{
Annotation as AnnotateAnnotation, Level as AnnotateLevel, Message as AnnotateMessage,
@@ -8,9 +6,9 @@ use ruff_annotate_snippets::{
};
use ruff_notebook::{Notebook, NotebookIndex};
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_text_size::{TextRange, TextSize};
use crate::diagnostic::stylesheet::DiagnosticStylesheet;
use crate::diagnostic::stylesheet::{DiagnosticStylesheet, fmt_styled};
use crate::{
Db,
files::File,
@@ -19,26 +17,17 @@ use crate::{
};
use super::{
Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig,
Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig, Severity,
SubDiagnostic, UnifiedFile,
};
use azure::AzureRenderer;
use concise::ConciseRenderer;
use pylint::PylintRenderer;
mod azure;
mod concise;
mod full;
#[cfg(feature = "serde")]
mod json;
#[cfg(feature = "serde")]
mod json_lines;
#[cfg(feature = "junit")]
mod junit;
mod pylint;
#[cfg(feature = "serde")]
mod rdjson;
/// A type that implements `std::fmt::Display` for diagnostic rendering.
///
@@ -108,7 +97,48 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self.config.format {
DiagnosticFormat::Concise => {
ConciseRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?;
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};
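                // One line per diagnostic, of the form (a sketch):
                // `error[unused-import] fib.py:1:8: `os` imported but unused`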
for diag in self.diagnostics {
let (severity, severity_style) = match diag.severity() {
Severity::Info => ("info", stylesheet.info),
Severity::Warning => ("warning", stylesheet.warning),
Severity::Error => ("error", stylesheet.error),
Severity::Fatal => ("fatal", stylesheet.error),
};
write!(
f,
"{severity}[{id}]",
severity = fmt_styled(severity, severity_style),
id = fmt_styled(diag.id(), stylesheet.emphasis)
)?;
if let Some(span) = diag.primary_span() {
write!(
f,
" {path}",
path = fmt_styled(span.file().path(self.resolver), stylesheet.emphasis)
)?;
if let Some(range) = span.range() {
let diagnostic_source = span.file().diagnostic_source(self.resolver);
let start = diagnostic_source
.as_source_code()
.line_column(range.start());
write!(
f,
":{line}:{col}",
line = fmt_styled(start.line, stylesheet.emphasis),
col = fmt_styled(start.column, stylesheet.emphasis),
)?;
}
write!(f, ":")?;
}
writeln!(f, " {message}", message = diag.concise_message())?;
}
}
DiagnosticFormat::Full => {
let stylesheet = if self.config.color {
@@ -121,8 +151,7 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
AnnotateRenderer::styled()
} else {
AnnotateRenderer::plain()
}
.cut_indicator("");
};
renderer = renderer
.error(stylesheet.error)
@@ -155,17 +184,6 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
json_lines::JsonLinesRenderer::new(self.resolver, self.config)
.render(f, self.diagnostics)?;
}
#[cfg(feature = "serde")]
DiagnosticFormat::Rdjson => {
rdjson::RdjsonRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
DiagnosticFormat::Pylint => {
PylintRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
#[cfg(feature = "junit")]
DiagnosticFormat::Junit => {
junit::JunitRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
}
Ok(())
@@ -219,7 +237,7 @@ impl<'a> Resolved<'a> {
/// both.)
#[derive(Debug)]
struct ResolvedDiagnostic<'a> {
level: AnnotateLevel,
severity: Severity,
id: Option<String>,
message: String,
annotations: Vec<ResolvedAnnotation<'a>>,
@@ -244,7 +262,7 @@ impl<'a> ResolvedDiagnostic<'a> {
let id = Some(diag.inner.id.to_string());
let message = diag.inner.message.as_str().to_string();
ResolvedDiagnostic {
level: diag.inner.severity.to_annotate(),
severity: diag.inner.severity,
id,
message,
annotations,
@@ -267,7 +285,7 @@ impl<'a> ResolvedDiagnostic<'a> {
})
.collect();
ResolvedDiagnostic {
level: diag.inner.severity.to_annotate(),
severity: diag.inner.severity,
id: None,
message: diag.inner.message.as_str().to_string(),
annotations,
@@ -334,7 +352,7 @@ impl<'a> ResolvedDiagnostic<'a> {
snippets_by_input
.sort_by(|snips1, snips2| snips1.has_primary.cmp(&snips2.has_primary).reverse());
RenderableDiagnostic {
level: self.level,
severity: self.severity,
id: self.id.as_deref(),
message: &self.message,
snippets_by_input,
@@ -422,7 +440,7 @@ struct Renderable<'r> {
#[derive(Debug)]
struct RenderableDiagnostic<'r> {
/// The severity of the diagnostic.
level: AnnotateLevel,
severity: Severity,
/// The ID of the diagnostic. The ID can usually be used on the CLI or in a
/// config file to change the severity of a lint.
///
@@ -441,6 +459,7 @@ struct RenderableDiagnostic<'r> {
impl RenderableDiagnostic<'_> {
/// Convert this to an "annotate" snippet.
fn to_annotate(&self) -> AnnotateMessage<'_> {
let level = self.severity.to_annotate();
let snippets = self.snippets_by_input.iter().flat_map(|snippets| {
let path = snippets.path;
snippets
@@ -448,7 +467,7 @@ impl RenderableDiagnostic<'_> {
.iter()
.map(|snippet| snippet.to_annotate(path))
});
let mut message = self.level.title(self.message);
let mut message = level.title(self.message);
if let Some(id) = self.id {
message = message.id(id);
}
@@ -521,7 +540,7 @@ impl<'r> RenderableSnippets<'r> {
#[derive(Debug)]
struct RenderableSnippet<'r> {
/// The actual snippet text.
snippet: Cow<'r, str>,
snippet: &'r str,
/// The absolute line number corresponding to where this
/// snippet begins.
line_start: OneIndexed,
@@ -581,13 +600,6 @@ impl<'r> RenderableSnippet<'r> {
.iter()
.map(|ann| RenderableAnnotation::new(snippet_start, ann))
.collect();
let EscapedSourceCode {
text: snippet,
annotations,
} = replace_whitespace_and_unprintable(snippet, annotations)
.fix_up_empty_spans_after_line_terminator();
RenderableSnippet {
snippet,
line_start,
@@ -598,7 +610,7 @@ impl<'r> RenderableSnippet<'r> {
/// Convert this to an "annotate" snippet.
fn to_annotate<'a>(&'a self, path: &'a str) -> AnnotateSnippet<'a> {
AnnotateSnippet::source(&self.snippet)
AnnotateSnippet::source(self.snippet)
.origin(path)
.line_start(self.line_start.get())
.annotations(
@@ -693,9 +705,6 @@ pub trait FileResolver {
/// Returns whether the file given is a Jupyter notebook.
fn is_notebook(&self, file: &UnifiedFile) -> bool;
/// Returns the current working directory.
fn current_directory(&self) -> &Path;
}
impl<T> FileResolver for T
@@ -731,10 +740,6 @@ where
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
}
}
fn current_directory(&self) -> &Path {
self.system().current_directory().as_std_path()
}
}
impl FileResolver for &dyn Db {
@@ -767,10 +772,6 @@ impl FileResolver for &dyn Db {
UnifiedFile::Ruff(_) => unimplemented!("Expected an interned ty file"),
}
}
fn current_directory(&self) -> &Path {
self.system().current_directory().as_std_path()
}
}
/// An abstraction over a unit of user input.
@@ -828,239 +829,12 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
path
}
/// Given some source code and annotation ranges, this routine replaces tabs
/// with ASCII whitespace, and unprintable characters with printable
/// representations of them.
///
/// The source code and annotations returned are updated to reflect changes made
/// to the source code (if any).
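///
/// For example (a sketch): with `TAB_SIZE = 4`, a tab at column 0 becomes four
/// spaces and a `\x08` byte becomes the printable `␈`, with annotation ranges
/// at or after the replacement adjusted by the change in length.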
fn replace_whitespace_and_unprintable<'r>(
source: &'r str,
mut annotations: Vec<RenderableAnnotation<'r>>,
) -> EscapedSourceCode<'r> {
// Updates the annotation ranges given by the caller whenever a single byte (at `index` in
// `source`) is replaced with `len` bytes.
//
    // When the index occurs before the start of the range, the whole range is
    // shifted right by the net growth in length (`len - 1`, since one byte is
    // replaced by `len` bytes). When the index occurs at or after the start but
    // before the end, only the end of the range is shifted.
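    //
    // For example (a sketch): replacing the 1-byte tab at index 2 with four
    // spaces (`len = 4`) grows the text by `len - 1 = 3` bytes, so a range
    // starting at index 5 shifts to start at 8, while a range spanning 0..3
    // widens to 0..6.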
let mut update_ranges = |index: usize, len: u32| {
for ann in &mut annotations {
if index < usize::from(ann.range.start()) {
ann.range += TextSize::new(len - 1);
} else if index < usize::from(ann.range.end()) {
ann.range = ann.range.add_end(TextSize::new(len - 1));
}
}
};
// If `c` is an unprintable character, then this returns a printable
// representation of it (using a fancier Unicode codepoint).
let unprintable_replacement = |c: char| -> Option<char> {
match c {
'\x07' => Some('␇'),
'\x08' => Some('␈'),
'\x1b' => Some('␛'),
'\x7f' => Some('␡'),
_ => None,
}
};
const TAB_SIZE: usize = 4;
let mut width = 0;
let mut column = 0;
let mut last_end = 0;
let mut result = String::new();
for (index, c) in source.char_indices() {
let old_width = width;
match c {
'\n' | '\r' => {
width = 0;
column = 0;
}
'\t' => {
let tab_offset = TAB_SIZE - (column % TAB_SIZE);
width += tab_offset;
column += tab_offset;
let tab_width =
u32::try_from(width - old_width).expect("small width because of tab size");
result.push_str(&source[last_end..index]);
update_ranges(result.text_len().to_usize(), tab_width);
for _ in 0..tab_width {
result.push(' ');
}
last_end = index + 1;
}
_ => {
width += unicode_width::UnicodeWidthChar::width(c).unwrap_or(0);
column += 1;
if let Some(printable) = unprintable_replacement(c) {
result.push_str(&source[last_end..index]);
let len = printable.text_len().to_u32();
update_ranges(result.text_len().to_usize(), len);
result.push(printable);
last_end = index + 1;
}
}
}
}
// No tabs or unprintable chars
if result.is_empty() {
EscapedSourceCode {
annotations,
text: Cow::Borrowed(source),
}
} else {
result.push_str(&source[last_end..]);
EscapedSourceCode {
annotations,
text: Cow::Owned(result),
}
}
}
struct EscapedSourceCode<'r> {
text: Cow<'r, str>,
annotations: Vec<RenderableAnnotation<'r>>,
}
impl<'r> EscapedSourceCode<'r> {
// This attempts to "fix up" the spans on each annotation in the case where
// it's an empty span immediately following a line terminator.
//
// At present, `annotate-snippets` (both upstream and our vendored copy)
// will render annotations of such spans to point to the space immediately
// following the previous line. But ideally, this should point to the space
// immediately preceding the next line.
//
    // After attempting to fix `annotate-snippets` and giving up after a couple
    // of hours, this routine takes a different tack: it adjusts the span to be
    // non-empty so that it covers the first codepoint of the following line.
    // This forces `annotate-snippets` to point to the right place.
//
// See also: <https://github.com/astral-sh/ruff/issues/15509> and
// `ruff_linter::message::text::SourceCode::fix_up_empty_spans_after_line_terminator`,
// from which this was adapted.
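    //
    // For example (a sketch): in "if False:\n    print()", an empty annotation
    // at the offset just past the `\n` is widened to cover the first codepoint
    // of the next line, so the caret renders under that line instead of
    // trailing the previous one.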
fn fix_up_empty_spans_after_line_terminator(mut self) -> EscapedSourceCode<'r> {
for ann in &mut self.annotations {
let range = ann.range;
if !range.is_empty()
|| range.start() == TextSize::from(0)
|| range.start() >= self.text.text_len()
{
continue;
}
if !matches!(
self.text.as_bytes()[range.start().to_usize() - 1],
b'\n' | b'\r'
) {
continue;
}
let start = range.start();
let end = ceil_char_boundary(&self.text, start + TextSize::from(1));
ann.range = TextRange::new(start, end);
}
self
}
}
/// Finds the closest [`TextSize`] not less than the given offset for which
/// `is_char_boundary` is `true`. If the offset is greater than the length of
/// the underlying contents, the length of the contents is returned.
///
/// Can be replaced with `str::ceil_char_boundary` once it's stable.
///
/// # Examples
///
/// From `std`:
///
/// ```
/// use ruff_db::diagnostic::ceil_char_boundary;
/// use ruff_text_size::{Ranged, TextLen, TextSize};
///
/// let source = "❤️🧡💛💚💙💜";
/// assert_eq!(source.text_len(), TextSize::from(26));
/// assert!(!source.is_char_boundary(13));
///
/// let closest = ceil_char_boundary(source, TextSize::from(13));
/// assert_eq!(closest, TextSize::from(14));
/// assert_eq!(&source[..closest.to_usize()], "❤️🧡💛");
/// ```
///
/// Additional examples:
///
/// ```
/// use ruff_db::diagnostic::ceil_char_boundary;
/// use ruff_text_size::{Ranged, TextRange, TextSize};
///
/// let source = "Hello";
///
/// assert_eq!(
/// ceil_char_boundary(source, TextSize::from(0)),
/// TextSize::from(0)
/// );
///
/// assert_eq!(
/// ceil_char_boundary(source, TextSize::from(5)),
/// TextSize::from(5)
/// );
///
/// assert_eq!(
/// ceil_char_boundary(source, TextSize::from(6)),
/// TextSize::from(5)
/// );
///
/// let source = "α";
///
/// assert_eq!(
/// ceil_char_boundary(source, TextSize::from(0)),
/// TextSize::from(0)
/// );
///
/// assert_eq!(
/// ceil_char_boundary(source, TextSize::from(1)),
/// TextSize::from(2)
/// );
///
/// assert_eq!(
/// ceil_char_boundary(source, TextSize::from(2)),
/// TextSize::from(2)
/// );
///
/// assert_eq!(
/// ceil_char_boundary(source, TextSize::from(3)),
/// TextSize::from(2)
/// );
/// ```
pub fn ceil_char_boundary(text: &str, offset: TextSize) -> TextSize {
let upper_bound = offset
.to_u32()
.saturating_add(4)
.min(text.text_len().to_u32());
(offset.to_u32()..upper_bound)
.map(TextSize::from)
.find(|offset| text.is_char_boundary(offset.to_usize()))
.unwrap_or_else(|| TextSize::from(upper_bound))
}
#[cfg(test)]
mod tests {
use ruff_diagnostics::{Applicability, Edit, Fix};
use ruff_diagnostics::{Edit, Fix};
use crate::diagnostic::{
Annotation, DiagnosticId, IntoDiagnosticMessage, SecondaryCode, Severity, Span,
SubDiagnosticSeverity,
};
use crate::diagnostic::{Annotation, DiagnosticId, SecondaryCode, Severity, Span};
use crate::files::system_path_to_file;
use crate::system::{DbWithWritableSystem, SystemPath};
use crate::tests::TestDb;
@@ -1744,7 +1518,7 @@ watermelon
let mut diag = env.err().primary("animals", "3", "3", "").build();
diag.sub(
env.sub_builder(SubDiagnosticSeverity::Info, "this is a helpful note")
env.sub_builder(Severity::Info, "this is a helpful note")
.build(),
);
insta::assert_snapshot!(
@@ -1773,15 +1547,15 @@ watermelon
let mut diag = env.err().primary("animals", "3", "3", "").build();
diag.sub(
env.sub_builder(SubDiagnosticSeverity::Info, "this is a helpful note")
env.sub_builder(Severity::Info, "this is a helpful note")
.build(),
);
diag.sub(
env.sub_builder(SubDiagnosticSeverity::Info, "another helpful note")
env.sub_builder(Severity::Info, "another helpful note")
.build(),
);
diag.sub(
env.sub_builder(SubDiagnosticSeverity::Info, "and another helpful note")
env.sub_builder(Severity::Info, "and another helpful note")
.build(),
);
insta::assert_snapshot!(
@@ -2503,27 +2277,6 @@ watermelon
self.config = config;
}
/// Hide diagnostic severity when rendering.
pub(super) fn hide_severity(&mut self, yes: bool) {
let mut config = std::mem::take(&mut self.config);
config = config.hide_severity(yes);
self.config = config;
}
/// Show fix availability when rendering.
pub(super) fn show_fix_status(&mut self, yes: bool) {
let mut config = std::mem::take(&mut self.config);
config = config.show_fix_status(yes);
self.config = config;
}
/// The lowest fix applicability to show when rendering.
pub(super) fn fix_applicability(&mut self, applicability: Applicability) {
let mut config = std::mem::take(&mut self.config);
config = config.fix_applicability(applicability);
self.config = config;
}
/// Add a file with the given path and contents to this environment.
pub(super) fn add(&mut self, path: &str, contents: &str) {
let path = SystemPath::new(path);
@@ -2587,11 +2340,11 @@ watermelon
    /// sub-diagnostic with "warning" severity and canned values for
/// its identifier and message.
fn sub_warn(&mut self) -> SubDiagnosticBuilder<'_> {
self.sub_builder(SubDiagnosticSeverity::Warning, "sub-diagnostic message")
self.sub_builder(Severity::Warning, "sub-diagnostic message")
}
/// Returns a builder for tersely constructing diagnostics.
pub(super) fn builder(
fn builder(
&mut self,
identifier: &'static str,
severity: Severity,
@@ -2608,11 +2361,7 @@ watermelon
}
/// Returns a builder for tersely constructing sub-diagnostics.
fn sub_builder(
&mut self,
severity: SubDiagnosticSeverity,
message: &str,
) -> SubDiagnosticBuilder<'_> {
fn sub_builder(&mut self, severity: Severity, message: &str) -> SubDiagnosticBuilder<'_> {
let subdiag = SubDiagnostic::new(severity, message);
SubDiagnosticBuilder { env: self, subdiag }
}
@@ -2658,7 +2407,7 @@ watermelon
///
/// See the docs on `TestEnvironment::span` for the meaning of
/// `path`, `line_offset_start` and `line_offset_end`.
pub(super) fn primary(
fn primary(
mut self,
path: &str,
line_offset_start: &str,
@@ -2715,12 +2464,6 @@ watermelon
self.diag.set_noqa_offset(noqa_offset);
self
}
/// Adds a "help" sub-diagnostic with the given message.
fn help(mut self, message: impl IntoDiagnosticMessage) -> DiagnosticBuilder<'e> {
self.diag.help(message);
self
}
}
/// A helper builder for tersely populating a `SubDiagnostic`.
@@ -2827,8 +2570,7 @@ def fibonacci(n):
let diagnostics = vec![
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("fib.py", "1:7", "1:9", "")
.help("Remove unused import: `os`")
.primary("fib.py", "1:7", "1:9", "Remove unused import: `os`")
.secondary_code("F401")
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(0),
@@ -2841,8 +2583,12 @@ def fibonacci(n):
Severity::Error,
"Local variable `x` is assigned to but never used",
)
.primary("fib.py", "6:4", "6:5", "")
.help("Remove assignment to unused variable `x`")
.primary(
"fib.py",
"6:4",
"6:5",
"Remove assignment to unused variable `x`",
)
.secondary_code("F841")
.fix(Fix::unsafe_edit(Edit::deletion(
TextSize::from(94),
@@ -2889,25 +2635,6 @@ if call(foo
}
/// Create Ruff-style diagnostics for testing the various output formats for a notebook.
///
/// The concatenated cells look like this:
///
/// ```python
/// # cell 1
/// import os
/// # cell 2
/// import math
///
/// print('hello world')
/// # cell 3
/// def foo():
/// print()
/// x = 1
/// ```
///
/// The first diagnostic is on the unused `os` import with location cell 1, row 2, column 8
/// (`cell 1:2:8`). The second diagnostic is the unused `math` import at `cell 2:2:8`, and the
/// third diagnostic is an unfixable unused variable at `cell 3:4:5`.
#[allow(
dead_code,
reason = "This is currently only used for JSON but will be needed soon for other formats"
@@ -2963,8 +2690,7 @@ if call(foo
let diagnostics = vec![
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("notebook.ipynb", "2:7", "2:9", "")
.help("Remove unused import: `os`")
.primary("notebook.ipynb", "2:7", "2:9", "Remove unused import: `os`")
.secondary_code("F401")
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(9),
@@ -2977,8 +2703,12 @@ if call(foo
Severity::Error,
"`math` imported but unused",
)
.primary("notebook.ipynb", "4:7", "4:11", "")
.help("Remove unused import: `math`")
.primary(
"notebook.ipynb",
"4:7",
"4:11",
"Remove unused import: `math`",
)
.secondary_code("F401")
.fix(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(28),
@@ -2991,8 +2721,12 @@ if call(foo
Severity::Error,
"Local variable `x` is assigned to but never used",
)
.primary("notebook.ipynb", "10:4", "10:5", "")
.help("Remove assignment to unused variable `x`")
.primary(
"notebook.ipynb",
"10:4",
"10:5",
"Remove assignment to unused variable `x`",
)
.secondary_code("F841")
.fix(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(94),

View File

@@ -1,195 +0,0 @@
use crate::diagnostic::{
Diagnostic, DisplayDiagnosticConfig, Severity,
stylesheet::{DiagnosticStylesheet, fmt_styled},
};
use super::FileResolver;
pub(super) struct ConciseRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}
impl<'a> ConciseRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};
let sep = fmt_styled(":", stylesheet.separator);
for diag in diagnostics {
if let Some(span) = diag.primary_span() {
write!(
f,
"{path}",
path = fmt_styled(
span.file().relative_path(self.resolver).to_string_lossy(),
stylesheet.emphasis
)
)?;
if let Some(range) = span.range() {
let diagnostic_source = span.file().diagnostic_source(self.resolver);
let start = diagnostic_source
.as_source_code()
.line_column(range.start());
if let Some(notebook_index) = self.resolver.notebook_index(span.file()) {
write!(
f,
"{sep}cell {cell}{sep}{line}{sep}{col}",
cell = notebook_index.cell(start.line).unwrap_or_default(),
line = notebook_index.cell_row(start.line).unwrap_or_default(),
col = start.column,
)?;
} else {
write!(
f,
"{sep}{line}{sep}{col}",
line = start.line,
col = start.column,
)?;
}
}
write!(f, "{sep} ")?;
}
if self.config.hide_severity {
if let Some(code) = diag.secondary_code() {
write!(
f,
"{code} ",
code = fmt_styled(code, stylesheet.secondary_code)
)?;
}
if self.config.show_fix_status {
if let Some(fix) = diag.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.config.fix_applicability) {
write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
}
}
}
} else {
let (severity, severity_style) = match diag.severity() {
Severity::Info => ("info", stylesheet.info),
Severity::Warning => ("warning", stylesheet.warning),
Severity::Error => ("error", stylesheet.error),
Severity::Fatal => ("fatal", stylesheet.error),
};
write!(
f,
"{severity}[{id}] ",
severity = fmt_styled(severity, severity_style),
id = fmt_styled(diag.id(), stylesheet.emphasis)
)?;
}
writeln!(f, "{message}", message = diag.concise_message())?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use ruff_diagnostics::Applicability;
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
create_syntax_error_diagnostics,
},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: error[unused-import] `os` imported but unused
fib.py:6:5: error[unused-variable] Local variable `x` is assigned to but never used
undef.py:1:4: error[undefined-name] Undefined name `a`
");
}
#[test]
fn show_fixes() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
");
}
#[test]
fn show_fixes_preview() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
env.preview(true);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
");
}
#[test]
fn show_fixes_syntax_errors() {
let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
");
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: error[invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: error[invalid-syntax] SyntaxError: Expected ')', found newline
");
}
#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
notebook.ipynb:cell 1:2:8: error[unused-import] `os` imported but unused
notebook.ipynb:cell 2:2:8: error[unused-import] `math` imported but unused
notebook.ipynb:cell 3:4:5: error[unused-variable] Local variable `x` is assigned to but never used
");
}
#[test]
fn missing_file() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Concise);
let diag = env.err().build();
insta::assert_snapshot!(
env.render(&diag),
@"error[test-diagnostic] main diagnostic message",
);
}
}

View File

@@ -1,180 +0,0 @@
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat, Severity,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
error[unused-import]: `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`
error[unused-variable]: Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`
error[undefined-name]: Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|
"#);
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[invalid-syntax]: SyntaxError: Expected one or more symbol names after import
--> syntax_errors.py:1:15
|
1 | from os import
| ^
2 |
3 | if call(foo
|
error[invalid-syntax]: SyntaxError: Expected ')', found newline
--> syntax_errors.py:3:12
|
1 | from os import
2 |
3 | if call(foo
| ^
4 | def bar():
5 | pass
|
");
}
/// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit c9b99e4.
///
/// For example, without the fix, we get diagnostics like this:
///
/// ```
/// error[no-indented-block]: Expected an indented block
/// --> example.py:3:1
/// |
/// 2 | if False:
/// | ^
/// 3 | print()
/// |
/// ```
///
/// where the caret points to the end of the previous line instead of the start of the next.
#[test]
fn empty_span_after_line_terminator() {
let mut env = TestEnvironment::new();
env.add(
"example.py",
r#"
if False:
print()
"#,
);
env.format(DiagnosticFormat::Full);
let diagnostic = env
.builder(
"no-indented-block",
Severity::Error,
"Expected an indented block",
)
.primary("example.py", "3:0", "3:0", "")
.build();
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[no-indented-block]: Expected an indented block
--> example.py:3:1
|
2 | if False:
3 | print()
| ^
|
");
}
/// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit 2922490.
///
/// For example, without the fix, we get diagnostics like this:
///
/// ```
/// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
/// --> example.py:1:25
/// |
/// 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
/// | ^
/// |
/// ```
///
/// where the caret points to the `f` in the f-string instead of the start of the invalid
/// character (`^Z`).
#[test]
fn unprintable_characters() {
let mut env = TestEnvironment::new();
env.add("example.py", "nested_fstrings = f'{f'{f''}'}'");
env.format(DiagnosticFormat::Full);
let diagnostic = env
.builder(
"invalid-character-sub",
Severity::Error,
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
)
.primary("example.py", "1:24", "1:24", "")
.build();
insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:25
|
1 | nested_fstrings = f'␈{f'{f'␛'}'}'
| ^
|
"#);
}
#[test]
fn multiple_unprintable_characters() -> std::io::Result<()> {
let mut env = TestEnvironment::new();
env.add("example.py", "");
env.format(DiagnosticFormat::Full);
let diagnostic = env
.builder(
"invalid-character-sub",
Severity::Error,
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
)
.primary("example.py", "1:1", "1:1", "")
.build();
insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:2
|
1 | ␈
| ^
|
"#);
Ok(())
}
}

View File

@@ -87,7 +87,7 @@ pub(super) fn diagnostic_to_json<'a>(
let fix = diagnostic.fix().map(|fix| JsonFix {
applicability: fix.applicability(),
message: diagnostic.first_help_text(),
message: diagnostic.suggestion(),
edits: ExpandedEdits {
edits: fix.edits(),
notebook_index,
@@ -262,6 +262,9 @@ struct JsonEdit<'a> {
#[cfg(test)]
mod tests {
use ruff_diagnostics::{Edit, Fix};
use ruff_text_size::TextSize;
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
@@ -294,7 +297,13 @@ mod tests {
env.format(DiagnosticFormat::Json);
env.preview(false);
let diag = env.err().build();
let diag = env
.err()
.fix(Fix::safe_edit(Edit::insertion(
"edit".to_string(),
TextSize::from(0),
)))
.build();
insta::assert_snapshot!(
env.render(&diag),
@@ -308,7 +317,23 @@ mod tests {
"row": 1
},
"filename": "",
"fix": null,
"fix": {
"applicability": "safe",
"edits": [
{
"content": "edit",
"end_location": {
"column": 1,
"row": 1
},
"location": {
"column": 1,
"row": 1
}
}
],
"message": null
},
"location": {
"column": 1,
"row": 1
@@ -328,7 +353,13 @@ mod tests {
env.format(DiagnosticFormat::Json);
env.preview(true);
let diag = env.err().build();
let diag = env
.err()
.fix(Fix::safe_edit(Edit::insertion(
"edit".to_string(),
TextSize::from(0),
)))
.build();
insta::assert_snapshot!(
env.render(&diag),
@@ -339,7 +370,17 @@ mod tests {
"code": null,
"end_location": null,
"filename": null,
"fix": null,
"fix": {
"applicability": "safe",
"edits": [
{
"content": "edit",
"end_location": null,
"location": null
}
],
"message": null
},
"location": null,
"message": "main diagnostic message",
"noqa_row": null,

View File

@@ -1,195 +0,0 @@
use std::{collections::BTreeMap, ops::Deref, path::Path};
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};
use ruff_source_file::LineColumn;
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};
/// A renderer for diagnostics in the [JUnit] format.
///
/// See [`junit.xsd`] for the specification in the JUnit repository and an annotated [version]
/// linked from the [`quick_junit`] docs.
///
/// [JUnit]: https://junit.org/
/// [`junit.xsd`]: https://github.com/junit-team/junit-framework/blob/2870b7d8fd5bf7c1efe489d3991d3ed3900e82bb/platform-tests/src/test/resources/jenkins-junit.xsd
/// [version]: https://llg.cubic.org/docs/junit/
/// [`quick_junit`]: https://docs.rs/quick-junit/latest/quick_junit/
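///
/// A single failing test case renders roughly like this (a sketch adapted from
/// this crate's snapshots):
///
/// ```xml
/// <testcase name="org.ruff.F821" classname="input" line="2" column="5">
///   <failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
/// </testcase>
/// ```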
pub struct JunitRenderer<'a> {
resolver: &'a dyn FileResolver,
}
impl<'a> JunitRenderer<'a> {
pub fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
let mut report = Report::new("ruff");
if diagnostics.is_empty() {
let mut test_suite = TestSuite::new("ruff");
test_suite
.extra
.insert(XmlString::new("package"), XmlString::new("org.ruff"));
let mut case = TestCase::new("No errors found", TestCaseStatus::success());
case.set_classname("ruff");
test_suite.add_test_case(case);
report.add_test_suite(test_suite);
} else {
for (filename, diagnostics) in group_diagnostics_by_filename(diagnostics, self.resolver)
{
let mut test_suite = TestSuite::new(filename);
test_suite
.extra
.insert(XmlString::new("package"), XmlString::new("org.ruff"));
let classname = Path::new(filename).with_extension("");
for diagnostic in diagnostics {
let DiagnosticWithLocation {
diagnostic,
start_location: location,
} = diagnostic;
let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
status.set_message(diagnostic.body());
if let Some(location) = location {
status.set_description(format!(
"line {row}, col {col}, {body}",
row = location.line,
col = location.column,
body = diagnostic.body()
));
} else {
status.set_description(diagnostic.body());
}
let code = diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), SecondaryCode::as_str);
let mut case = TestCase::new(format!("org.ruff.{code}"), status);
case.set_classname(classname.to_str().unwrap());
if let Some(location) = location {
case.extra.insert(
XmlString::new("line"),
XmlString::new(location.line.to_string()),
);
case.extra.insert(
XmlString::new("column"),
XmlString::new(location.column.to_string()),
);
}
test_suite.add_test_case(case);
}
report.add_test_suite(test_suite);
}
}
let adapter = FmtAdapter { fmt: f };
report.serialize(adapter).map_err(|_| std::fmt::Error)
}
}
// TODO(brent) this and `group_diagnostics_by_filename` will also be used by the `grouped` output
// format. I think they'd make more sense in that file, but I started here first. I'll move them to
// that module when adding the `grouped` output format.
struct DiagnosticWithLocation<'a> {
diagnostic: &'a Diagnostic,
start_location: Option<LineColumn>,
}
impl Deref for DiagnosticWithLocation<'_> {
type Target = Diagnostic;
fn deref(&self) -> &Self::Target {
self.diagnostic
}
}
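/// Groups diagnostics by the path of their primary span, pairing each with a
/// start `LineColumn` computed for non-notebook files. Diagnostics without a
/// primary span are grouped under the default (empty) path.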
fn group_diagnostics_by_filename<'a>(
diagnostics: &'a [Diagnostic],
resolver: &'a dyn FileResolver,
) -> BTreeMap<&'a str, Vec<DiagnosticWithLocation<'a>>> {
let mut grouped_diagnostics = BTreeMap::default();
for diagnostic in diagnostics {
let (filename, start_location) = diagnostic
.primary_span_ref()
.map(|span| {
let file = span.file();
let start_location =
span.range()
.filter(|_| !resolver.is_notebook(file))
.map(|range| {
file.diagnostic_source(resolver)
.as_source_code()
.line_column(range.start())
});
(span.file().path(resolver), start_location)
})
.unwrap_or_default();
grouped_diagnostics
.entry(filename)
.or_insert_with(Vec::new)
.push(DiagnosticWithLocation {
diagnostic,
start_location,
});
}
grouped_diagnostics
}
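/// Adapts a `std::fmt::Write` to the `std::io::Write` interface required by
/// `quick_junit`'s `Report::serialize`, rejecting non-UTF-8 input instead of
/// writing it.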
struct FmtAdapter<'a> {
fmt: &'a mut dyn std::fmt::Write,
}
impl std::io::Write for FmtAdapter<'_> {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
self.fmt
.write_str(std::str::from_utf8(buf).map_err(|_| {
std::io::Error::new(
std::io::ErrorKind::InvalidData,
"Invalid UTF-8 in JUnit report",
)
})?)
.map_err(std::io::Error::other)?;
Ok(buf.len())
}
fn flush(&mut self) -> std::io::Result<()> {
Ok(())
}
fn write_fmt(&mut self, args: std::fmt::Arguments<'_>) -> std::io::Result<()> {
self.fmt.write_fmt(args).map_err(std::io::Error::other)
}
}
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Junit);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Junit);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
}

View File

@@ -1,97 +0,0 @@
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};
/// Generate violations in Pylint format.
///
/// The format is given by this string:
///
/// ```python
/// "%(path)s:%(row)d: [%(code)s] %(text)s"
/// ```
///
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
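///
/// For example (a sketch from this crate's snapshots):
///
/// ```text
/// syntax_errors.py:1: [invalid-syntax] SyntaxError: Expected one or more symbol names after import
/// ```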
pub(super) struct PylintRenderer<'a> {
resolver: &'a dyn FileResolver,
}
impl<'a> PylintRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
}
impl PylintRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
for diagnostic in diagnostics {
let (filename, row) = diagnostic
.primary_span_ref()
.map(|span| {
let file = span.file();
let row = span
.range()
.filter(|_| !self.resolver.is_notebook(file))
.map(|range| {
file.diagnostic_source(self.resolver)
.as_source_code()
.line_column(range.start())
.line
});
(file.relative_path(self.resolver).to_string_lossy(), row)
})
.unwrap_or_default();
let code = diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), SecondaryCode::as_str);
let row = row.unwrap_or_default();
writeln!(
f,
"{path}:{row}: [{code}] {body}",
path = filename,
body = diagnostic.body()
)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Pylint);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Pylint);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn missing_file() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Pylint);
let diag = env.err().build();
insta::assert_snapshot!(
env.render(&diag),
@":1: [test-diagnostic] main diagnostic message",
);
}
}

View File

@@ -1,235 +0,0 @@
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use ruff_diagnostics::{Edit, Fix};
use ruff_source_file::{LineColumn, SourceCode};
use ruff_text_size::Ranged;
use crate::diagnostic::Diagnostic;
use super::FileResolver;
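/// A renderer for diagnostics in the JSON format expected by [reviewdog].
///
/// A sketch of the envelope, matching this crate's snapshots:
///
/// ```json
/// {
///   "diagnostics": [...],
///   "severity": "WARNING",
///   "source": { "name": "ruff", "url": "https://docs.astral.sh/ruff" }
/// }
/// ```
///
/// [reviewdog]: https://github.com/reviewdog/reviewdog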
pub struct RdjsonRenderer<'a> {
resolver: &'a dyn FileResolver,
}
impl<'a> RdjsonRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
write!(
f,
"{:#}",
serde_json::json!(RdjsonDiagnostics::new(diagnostics, self.resolver))
)
}
}
struct ExpandedDiagnostics<'a> {
resolver: &'a dyn FileResolver,
diagnostics: &'a [Diagnostic],
}
impl Serialize for ExpandedDiagnostics<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
for diagnostic in self.diagnostics {
let value = diagnostic_to_rdjson(diagnostic, self.resolver);
s.serialize_element(&value)?;
}
s.end()
}
}
fn diagnostic_to_rdjson<'a>(
diagnostic: &'a Diagnostic,
resolver: &'a dyn FileResolver,
) -> RdjsonDiagnostic<'a> {
let span = diagnostic.primary_span_ref();
let source_file = span.map(|span| {
let file = span.file();
(file.path(resolver), file.diagnostic_source(resolver))
});
let location = source_file.as_ref().map(|(path, source)| {
let range = diagnostic.range().map(|range| {
let source_code = source.as_source_code();
let start = source_code.line_column(range.start());
let end = source_code.line_column(range.end());
RdjsonRange::new(start, end)
});
RdjsonLocation { path, range }
});
let edits = diagnostic.fix().map(Fix::edits).unwrap_or_default();
RdjsonDiagnostic {
message: diagnostic.body(),
location,
code: RdjsonCode {
value: diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), |code| code.as_str()),
url: diagnostic.to_ruff_url(),
},
suggestions: rdjson_suggestions(
edits,
source_file
.as_ref()
.map(|(_, source)| source.as_source_code()),
),
}
}
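/// Converts a diagnostic's fix edits into RDFormat suggestions, mapping each
/// edit's byte range to line/column positions in the source. Returns an empty
/// list when there are no edits or (defensively) no source file.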
fn rdjson_suggestions<'a>(
edits: &'a [Edit],
source_code: Option<SourceCode>,
) -> Vec<RdjsonSuggestion<'a>> {
if edits.is_empty() {
return Vec::new();
}
let Some(source_code) = source_code else {
debug_assert!(false, "Expected a source file for a diagnostic with a fix");
return Vec::new();
};
edits
.iter()
.map(|edit| {
let start = source_code.line_column(edit.start());
let end = source_code.line_column(edit.end());
let range = RdjsonRange::new(start, end);
RdjsonSuggestion {
range,
text: edit.content().unwrap_or_default(),
}
})
.collect()
}
#[derive(Serialize)]
struct RdjsonDiagnostics<'a> {
diagnostics: ExpandedDiagnostics<'a>,
severity: &'static str,
source: RdjsonSource,
}
impl<'a> RdjsonDiagnostics<'a> {
fn new(diagnostics: &'a [Diagnostic], resolver: &'a dyn FileResolver) -> Self {
Self {
source: RdjsonSource {
name: "ruff",
url: env!("CARGO_PKG_HOMEPAGE"),
},
severity: "WARNING",
diagnostics: ExpandedDiagnostics {
diagnostics,
resolver,
},
}
}
}
#[derive(Serialize)]
struct RdjsonSource {
name: &'static str,
url: &'static str,
}
#[derive(Serialize)]
struct RdjsonDiagnostic<'a> {
code: RdjsonCode<'a>,
#[serde(skip_serializing_if = "Option::is_none")]
location: Option<RdjsonLocation<'a>>,
message: &'a str,
#[serde(skip_serializing_if = "Vec::is_empty")]
suggestions: Vec<RdjsonSuggestion<'a>>,
}
#[derive(Serialize)]
struct RdjsonLocation<'a> {
path: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
range: Option<RdjsonRange>,
}
#[derive(Default, Serialize)]
struct RdjsonRange {
end: LineColumn,
start: LineColumn,
}
impl RdjsonRange {
fn new(start: LineColumn, end: LineColumn) -> Self {
Self { start, end }
}
}
#[derive(Serialize)]
struct RdjsonCode<'a> {
#[serde(skip_serializing_if = "Option::is_none")]
url: Option<String>,
value: &'a str,
}
#[derive(Serialize)]
struct RdjsonSuggestion<'a> {
range: RdjsonRange,
text: &'a str,
}
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};
#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Rdjson);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Rdjson);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
#[test]
fn missing_file_stable() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Rdjson);
env.preview(false);
let diag = env.err().build();
insta::assert_snapshot!(env.render(&diag));
}
#[test]
fn missing_file_preview() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Rdjson);
env.preview(true);
let diag = env.err().build();
insta::assert_snapshot!(env.render(&diag));
}
}

View File

@@ -1,6 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
---
syntax_errors.py:1: [invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] SyntaxError: Expected ')', found newline

View File

@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
"value": "test-diagnostic"
},
"message": "main diagnostic message"
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}

View File

@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
"value": "test-diagnostic"
},
"message": "main diagnostic message"
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}

View File

@@ -41,8 +41,6 @@ pub struct DiagnosticStylesheet {
pub(crate) line_no: Style,
pub(crate) emphasis: Style,
pub(crate) none: Style,
pub(crate) separator: Style,
pub(crate) secondary_code: Style,
}
impl Default for DiagnosticStylesheet {
@@ -64,8 +62,6 @@ impl DiagnosticStylesheet {
line_no: bright_blue.effects(Effects::BOLD),
emphasis: Style::new().effects(Effects::BOLD),
none: Style::new(),
separator: AnsiColor::Cyan.on_default(),
secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
}
}
@@ -79,8 +75,6 @@ impl DiagnosticStylesheet {
line_no: Style::new(),
emphasis: Style::new(),
none: Style::new(),
separator: Style::new(),
secondary_code: Style::new(),
}
}
}

View File

@@ -1,6 +1,7 @@
use std::fmt;
use std::sync::Arc;
use countme::Count;
use dashmap::mapref::entry::Entry;
pub use file_root::{FileRoot, FileRootKind};
pub use path::FilePath;
@@ -231,7 +232,7 @@ impl Files {
let roots = inner.roots.read().unwrap();
for root in roots.all() {
if path.starts_with(root.path(db)) {
if root.path(db).starts_with(&path) {
root.set_revision(db).to(FileRevision::now());
}
}
@@ -311,6 +312,11 @@ pub struct File {
/// the file has been deleted is to change the status to `Deleted`.
#[default]
status: FileStatus,
/// Counter that counts the number of created file instances and active file instances.
/// Only enabled in debug builds.
#[default]
count: Count<File>,
}
// The Salsa heap is tracked separately.
@@ -369,25 +375,12 @@ impl File {
}
/// Refreshes the file metadata by querying the file system if needed.
///
/// This also "touches" the file root associated with the given path.
/// This means that any Salsa queries that depend on the corresponding
/// root's revision will become invalidated.
pub fn sync_path(db: &mut dyn Db, path: &SystemPath) {
let absolute = SystemPath::absolute(path, db.system().current_directory());
Files::touch_root(db, &absolute);
Self::sync_system_path(db, &absolute, None);
}
/// Refreshes *only* the file metadata by querying the file system if needed.
///
/// This specifically does not touch any file root associated with the
/// given file path.
pub fn sync_path_only(db: &mut dyn Db, path: &SystemPath) {
let absolute = SystemPath::absolute(path, db.system().current_directory());
Self::sync_system_path(db, &absolute, None);
}
/// Increments the revision for the virtual file at `path`.
pub fn sync_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath) {
if let Some(virtual_file) = db.files().try_virtual_file(path) {
@@ -493,7 +486,7 @@ impl fmt::Debug for File {
///
/// This is a wrapper around a [`File`] that provides additional methods to interact with a virtual
/// file.
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone)]
pub struct VirtualFile(File);
impl VirtualFile {

View File

@@ -23,7 +23,7 @@ pub struct FileRoot {
pub path: SystemPathBuf,
/// The kind of the root at the time of its creation.
pub kind_at_time_of_creation: FileRootKind,
kind_at_time_of_creation: FileRootKind,
/// A revision that changes when the contents of the source root change.
///

View File

@@ -1,6 +1,8 @@
use std::ops::Deref;
use std::sync::Arc;
use countme::Count;
use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
use ruff_source_file::LineIndex;
@@ -36,7 +38,11 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
};
SourceText {
inner: Arc::new(SourceTextInner { kind, read_error }),
inner: Arc::new(SourceTextInner {
kind,
read_error,
count: Count::new(),
}),
}
}
@@ -119,6 +125,8 @@ impl std::fmt::Debug for SourceText {
#[derive(Eq, PartialEq, get_size2::GetSize)]
struct SourceTextInner {
#[get_size(ignore)]
count: Count<SourceText>,
kind: SourceTextKind,
read_error: Option<SourceTextError>,
}

View File

@@ -20,7 +20,6 @@ ty_python_semantic = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true, optional = true }
memchr = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }

View File

@@ -1,4 +1,3 @@
use crate::StringImports;
use ruff_python_ast::visitor::source_order::{
SourceOrderVisitor, walk_expr, walk_module, walk_stmt,
};
@@ -11,13 +10,13 @@ pub(crate) struct Collector<'a> {
/// The path to the current module.
module_path: Option<&'a [String]>,
/// Whether to detect imports from string literals.
string_imports: StringImports,
string_imports: bool,
/// The collected imports from the Python AST.
imports: Vec<CollectedImport>,
}
impl<'a> Collector<'a> {
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: bool) -> Self {
Self {
module_path,
string_imports,
@@ -119,7 +118,7 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
| Stmt::Continue(_)
| Stmt::IpyEscapeCommand(_) => {
// Only traverse simple statements when string imports is enabled.
if self.string_imports.enabled {
if self.string_imports {
walk_stmt(self, stmt);
}
}
@@ -127,26 +126,20 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
}
fn visit_expr(&mut self, expr: &'ast Expr) {
if self.string_imports.enabled {
if self.string_imports {
if let Expr::StringLiteral(ast::ExprStringLiteral {
value,
range: _,
node_index: _,
}) = expr
{
let value = value.to_str();
// Determine whether the string literal "looks like" an import statement: contains
// the requisite number of dots, and consists solely of valid Python identifiers.
if self.string_imports.min_dots == 0
|| memchr::memchr_iter(b'.', value.as_bytes()).count()
>= self.string_imports.min_dots
{
if let Some(module_name) = ModuleName::new(value) {
self.imports.push(CollectedImport::Import(module_name));
}
// a dot, and consists solely of valid Python identifiers.
let value = value.to_str();
if let Some(module_name) = ModuleName::new(value) {
self.imports.push(CollectedImport::Import(module_name));
}
}
walk_expr(self, expr);
}
}

View File

@@ -87,7 +87,7 @@ impl SourceDb for ModuleDb {
#[salsa::db]
impl Db for ModuleDb {
fn should_check_file(&self, file: File) -> bool {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}

View File

@@ -9,7 +9,7 @@ use ruff_python_parser::{Mode, ParseOptions, parse};
use crate::collector::Collector;
pub use crate::db::ModuleDb;
use crate::resolver::Resolver;
pub use crate::settings::{AnalyzeSettings, Direction, StringImports};
pub use crate::settings::{AnalyzeSettings, Direction};
mod collector;
mod db;
@@ -26,7 +26,7 @@ impl ModuleImports {
db: &ModuleDb,
path: &SystemPath,
package: Option<&SystemPath>,
string_imports: StringImports,
string_imports: bool,
) -> Result<Self> {
// Read and parse the source code.
let source = std::fs::read_to_string(path)?;
@@ -42,11 +42,13 @@ impl ModuleImports {
// Resolve the imports.
let mut resolved_imports = ModuleImports::default();
for import in imports {
for resolved in Resolver::new(db).resolve(import) {
if let Some(path) = resolved.as_system_path() {
resolved_imports.insert(path.to_path_buf());
}
}
let Some(resolved) = Resolver::new(db).resolve(import) else {
continue;
};
let Some(path) = resolved.as_system_path() else {
continue;
};
resolved_imports.insert(path.to_path_buf());
}
Ok(resolved_imports)

View File

@@ -1,5 +1,5 @@
use ruff_db::files::FilePath;
use ty_python_semantic::{ModuleName, resolve_module, resolve_real_module};
use ty_python_semantic::resolve_module;
use crate::ModuleDb;
use crate::collector::CollectedImport;
@@ -16,67 +16,24 @@ impl<'a> Resolver<'a> {
}
/// Resolve the [`CollectedImport`] into a [`FilePath`].
pub(crate) fn resolve(&self, import: CollectedImport) -> impl Iterator<Item = &'a FilePath> {
pub(crate) fn resolve(&self, import: CollectedImport) -> Option<&'a FilePath> {
match import {
CollectedImport::Import(import) => {
// Attempt to resolve the module (e.g., given `import foo`, look for `foo`).
let file = self.resolve_module(&import);
// If the file is a stub, look for the corresponding source file.
let source_file = file
.is_some_and(|file| file.extension() == Some("pyi"))
.then(|| self.resolve_real_module(&import))
.flatten();
std::iter::once(file)
.chain(std::iter::once(source_file))
.flatten()
let module = resolve_module(self.db, &import)?;
Some(module.file()?.path(self.db))
}
CollectedImport::ImportFrom(import) => {
// Attempt to resolve the member (e.g., given `from foo import bar`, look for `foo.bar`).
if let Some(file) = self.resolve_module(&import) {
// If the file is a stub, look for the corresponding source file.
let source_file = (file.extension() == Some("pyi"))
.then(|| self.resolve_real_module(&import))
.flatten();
return std::iter::once(Some(file))
.chain(std::iter::once(source_file))
.flatten();
}
// Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
let parent = import.parent();
let file = parent
.as_ref()
.and_then(|parent| self.resolve_module(parent));
// If the file is a stub, look for the corresponding source file.
let source_file = file
.is_some_and(|file| file.extension() == Some("pyi"))
.then(|| {
parent
.as_ref()
.and_then(|parent| self.resolve_real_module(parent))
})
.flatten();
let module = resolve_module(self.db, &import).or_else(|| {
// Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
std::iter::once(file)
.chain(std::iter::once(source_file))
.flatten()
resolve_module(self.db, &parent?)
})?;
Some(module.file()?.path(self.db))
}
}
}
/// Resolves a module name to a module.
fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
let module = resolve_module(self.db, module_name)?;
Some(module.file(self.db)?.path(self.db))
}
/// Resolves a module name to a module (stubs not allowed).
fn resolve_real_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
let module = resolve_real_module(self.db, module_name)?;
Some(module.file(self.db)?.path(self.db))
}
}

View File

@@ -11,7 +11,7 @@ pub struct AnalyzeSettings {
pub exclude: FilePatternSet,
pub preview: PreviewMode,
pub target_version: PythonVersion,
pub string_imports: StringImports,
pub detect_string_imports: bool,
pub include_dependencies: BTreeMap<PathBuf, (PathBuf, Vec<String>)>,
pub extension: ExtensionMapping,
}
@@ -26,7 +26,7 @@ impl fmt::Display for AnalyzeSettings {
self.exclude,
self.preview,
self.target_version,
self.string_imports,
self.detect_string_imports,
self.extension | debug,
self.include_dependencies | debug,
]
@@ -35,31 +35,6 @@ impl fmt::Display for AnalyzeSettings {
}
}
#[derive(Debug, Copy, Clone, CacheKey)]
pub struct StringImports {
pub enabled: bool,
pub min_dots: usize,
}
impl Default for StringImports {
fn default() -> Self {
Self {
enabled: false,
min_dots: 2,
}
}
}
impl fmt::Display for StringImports {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.enabled {
write!(f, "enabled (min_dots: {})", self.min_dots)
} else {
write!(f, "disabled")
}
}
}
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, CacheKey)]
#[cfg_attr(
feature = "serde",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.12.5"
version = "0.12.3"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,7 +15,7 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["junit", "serde"] }
ruff_db = { workspace = true, features = ["serde"] }
ruff_diagnostics = { workspace = true, features = ["serde"] }
ruff_notebook = { workspace = true }
ruff_macros = { workspace = true }
@@ -55,6 +55,7 @@ path-absolutize = { workspace = true, features = [
pathdiff = { workspace = true }
pep440_rs = { workspace = true }
pyproject-toml = { workspace = true }
quick-junit = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }

View File

@@ -25,5 +25,5 @@ def my_func():
# t-strings - all ok
t"0.0.0.0"
t"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
t"0.0.0.0" t"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"

View File

@@ -94,7 +94,7 @@ except Exception:
logging.error("...", exc_info=True)
from logging import critical, error, exception
from logging import error, exception
try:
pass
@@ -114,23 +114,6 @@ except Exception:
error("...", exc_info=None)
try:
pass
except Exception:
critical("...")
try:
pass
except Exception:
critical("...", exc_info=False)
try:
pass
except Exception:
critical("...", exc_info=None)
try:
pass
except Exception:
@@ -142,13 +125,6 @@ try:
except Exception:
error("...", exc_info=True)
try:
pass
except Exception:
critical("...", exc_info=True)
try:
...
except Exception as e:

View File

@@ -650,17 +650,3 @@ f"""This is a test. {
if True else
"Don't add a trailing comma here ->"
}"""
type X[
T
] = T
def f[
T
](): pass
class C[
T
]: pass
type X[T,] = T
def f[T,](): pass
class C[T,]: pass

View File

@@ -142,7 +142,3 @@ field47: typing.Optional[int] | typing.Optional[dict]
# avoid reporting twice
field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
field49: typing.Optional[complex | complex] | complex
# Regression test for https://github.com/astral-sh/ruff/issues/19403
# Should throw duplicate union member but not fix
isinstance(None, typing.Union[None, None])

View File

@@ -1,10 +0,0 @@
from collections import Counter
from elsewhere import third_party
from . import first_party
def f(x: first_party.foo): ...
def g(x: third_party.bar): ...
def h(x: Counter): ...

View File

@@ -1,68 +0,0 @@
def f():
from . import first_party
def f(x: first_party.foo): ...
# Type parameter bounds
def g():
from . import foo
class C[T: foo.Ty]: ...
def h():
from . import foo
def f[T: foo.Ty](x: T): ...
def i():
from . import foo
type Alias[T: foo.Ty] = list[T]
# Type parameter defaults
def j():
from . import foo
class C[T = foo.Ty]: ...
def k():
from . import foo
def f[T = foo.Ty](x: T): ...
def l():
from . import foo
type Alias[T = foo.Ty] = list[T]
# non-generic type alias
def m():
from . import foo
type Alias = foo.Ty
# unions
from typing import Union
def n():
from . import foo
def f(x: Union[foo.Ty, int]): ...
def g(x: foo.Ty | int): ...
# runtime and typing usage
def o():
from . import foo
def f(x: foo.Ty):
return foo.Ty()

View File

@@ -1,6 +0,0 @@
from __future__ import annotations
from . import first_party
def f(x: first_party.foo): ...

View File

@@ -54,13 +54,6 @@ windows_path.with_suffix(r"s")
windows_path.with_suffix(u'' "json")
windows_path.with_suffix(suffix="js")
Path().with_suffix(".")
Path().with_suffix("py")
PosixPath().with_suffix("py")
PurePath().with_suffix("py")
PurePosixPath().with_suffix("py")
PureWindowsPath().with_suffix("py")
WindowsPath().with_suffix("py")
### No errors
path.with_suffix()

View File

@@ -1,26 +0,0 @@
from pathlib import (
Path,
PosixPath,
PurePath,
PurePosixPath,
PureWindowsPath,
WindowsPath,
)
import pathlib
path = Path()
posix_path: pathlib.PosixPath = PosixPath()
pure_path: PurePath = PurePath()
pure_posix_path = pathlib.PurePosixPath()
pure_windows_path: PureWindowsPath = pathlib.PureWindowsPath()
windows_path: pathlib.WindowsPath = pathlib.WindowsPath()
### No Errors
path.with_suffix(".")
posix_path.with_suffix(".")
pure_path.with_suffix(".")
pure_posix_path.with_suffix(".")
pure_windows_path.with_suffix(".")
windows_path.with_suffix(".")

View File

@@ -104,6 +104,3 @@ os.chmod(x)
os.replace("src", "dst", src_dir_fd=1, dst_dir_fd=2)
os.replace("src", "dst", src_dir_fd=1)
os.replace("src", "dst", dst_dir_fd=2)
os.getcwd()
os.getcwdb()

View File

@@ -47,19 +47,3 @@ def _():
from builtin import open
with open(p) as _: ... # No error
file = "file_1.py"
rename(file, "file_2.py")
rename(
# comment 1
file, # comment 2
"file_2.py"
,
# comment 3
)
rename(file, "file_2.py", src_dir_fd=None, dst_dir_fd=None)
rename(file, "file_2.py", src_dir_fd=1)

View File

@@ -1,5 +0,0 @@
# This is a regression test for https://github.com/astral-sh/ruff/issues/19310
# there is a (potentially invisible) unicode formfeed character (000C) between "docstring" and the semicolon
"docstring" ; print(
f"{__doc__=}",
)

View File

@@ -84,25 +84,3 @@ class MyRequestHandler(BaseHTTPRequestHandler):
def dont_GET(self):
pass
from http.server import CGIHTTPRequestHandler
class MyCGIRequestHandler(CGIHTTPRequestHandler):
def do_OPTIONS(self):
pass
def dont_OPTIONS(self):
pass
from http.server import SimpleHTTPRequestHandler
class MySimpleRequestHandler(SimpleHTTPRequestHandler):
def do_OPTIONS(self):
pass
def dont_OPTIONS(self):
pass

View File

@@ -278,30 +278,3 @@ def f():
for i in src:
if lambda: 0:
dst.append(i)
def f():
i = "xyz"
result = []
for i in range(3):
result.append(x for x in [i])
def f():
i = "xyz"
result = []
for i in range(3):
result.append((x for x in [i]))
G_INDEX = None
def f():
global G_INDEX
result = []
for G_INDEX in range(3):
result.append(G_INDEX)
def f():
NL_INDEX = None
def x():
nonlocal NL_INDEX
result = []
for NL_INDEX in range(3):
result.append(NL_INDEX)

View File

@@ -189,18 +189,3 @@ f"{ham[lower + 1 :, "columnname"]}"
#: Okay: https://github.com/astral-sh/ruff/issues/12023
f"{x = :.2f}"
f"{(x) = :.2f}"
# t-strings
t"{ {'a': 1} }"
t"{[ { {'a': 1} } ]}"
t"normal { {t"{ { [1, 2] } }" } } normal"
t"{x = :.2f}"
t"{(x) = :.2f}"
#: Okay
t"{ham[lower +1 :, "columnname"]}"
#: E203:1:13
t"{ham[lower + 1 :, "columnname"]}"

View File

@@ -142,20 +142,3 @@ class PEP696GoodWithEmptyBases[A: object="foo"[::-1], B: object =[[["foo", "bar"
class PEP696GoodWithNonEmptyBases[A: object="foo"[::-1], B: object =[[["foo", "bar"]]], C: object= bytes](object, something_dynamic[x::-1]):
pass
# E231
t"{(a,b)}"
# Okay because it's hard to differentiate between the usages of a colon in a t-string
t"{a:=1}"
t"{ {'a':1} }"
t"{a:.3f}"
t"{(a:=1)}"
t"{(lambda x:x)}"
t"normal{t"{a:.3f}"}normal"
#: Okay
snapshot.file_uri[len(t's3://{self.s3_bucket_name}/'):]
#: E231
{len(t's3://{self.s3_bucket_name}/'):1}

View File

@@ -722,10 +722,3 @@ def inconsistent_indent_byte_size():
    Returns:
"""
def line_continuation_chars():\
"""No fix should be offered for D201/D202 because of the line continuation chars."""\
...

View File

@@ -1,5 +0,0 @@
#
x = 0 \
#
+1
print(x)

View File

@@ -91,16 +91,9 @@ Path("foo.txt").write_text(text, encoding="utf-8")
Path("foo.txt").write_text(text, *args)
Path("foo.txt").write_text(text, **kwargs)
# https://github.com/astral-sh/ruff/issues/19294
# Violation but not detectable
x = Path("foo.txt")
x.open()
# https://github.com/astral-sh/ruff/issues/18107
codecs.open("plw1514.py", "r", "utf-8").close() # this is fine
# function argument annotated as Path
from pathlib import Path
def format_file(file: Path):
with file.open() as f:
contents = f.read()

View File

@@ -143,23 +143,3 @@ class NotAMethodButHardToDetect:
# without risking false positives elsewhere or introducing complex heuristics
# that users would find surprising and confusing
FOO = sorted([x for x in BAR], key=lambda x: x.baz)
# https://github.com/astral-sh/ruff/issues/19305
import pytest
@pytest.fixture
def my_fixture_with_param(request):
return request.param
@pytest.fixture()
def my_fixture_with_param2(request):
return request.param
# Decorated function (should be ignored)
def custom_decorator(func):
return func
@custom_decorator
def add(x, y):
return x + y

View File

@@ -27,24 +27,7 @@ _ = Decimal.from_float(float(" -inF\n \t"))
_ = Decimal.from_float(float(" InfinIty\n\t "))
_ = Decimal.from_float(float(" -InfinIty\n \t"))
# Cases with keyword arguments - should produce unsafe fixes
_ = Fraction.from_decimal(dec=Decimal("4.2"))
_ = Decimal.from_float(f=4.2)
# Cases with invalid argument counts - should not get fixes
_ = Fraction.from_decimal(Decimal("4.2"), 1)
_ = Decimal.from_float(4.2, None)
# Cases with wrong keyword arguments - should not get fixes
_ = Fraction.from_decimal(numerator=Decimal("4.2"))
_ = Decimal.from_float(value=4.2)
# Cases with type validation issues - should produce unsafe fixes
_ = Decimal.from_float("4.2") # Invalid type for from_float
_ = Fraction.from_decimal(4.2) # Invalid type for from_decimal
_ = Fraction.from_float("4.2") # Invalid type for from_float
# OK - should not trigger the rule
# OK
_ = Fraction(0.1)
_ = Fraction(-0.5)
_ = Fraction(5.0)
@@ -55,12 +38,3 @@ _ = Decimal(0.1)
_ = Decimal(-0.5)
_ = Decimal(5.0)
_ = decimal.Decimal(4.2)
# Cases with int and bool - should produce safe fixes
_ = Decimal.from_float(1)
_ = Decimal.from_float(True)
# Cases with non-finite floats - should produce safe fixes
_ = Decimal.from_float(float("-nan"))
_ = Decimal.from_float(float("\x2dnan"))
_ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))

View File

@@ -79,8 +79,3 @@ def in_type_def():
from typing import cast
a = 'int'
cast('f"{a}"','11')
# Regression test for parser bug
# https://github.com/astral-sh/ruff/issues/18860
def fuzz_bug():
c('{\t"i}')

View File

@@ -65,62 +65,3 @@ class Foo:
bar = "should've used attrs"
def __post_init__(self, bar: str = "ahhh", baz: str = "hmm") -> None: ...
# https://github.com/astral-sh/ruff/issues/18950
@dataclass
class Foo:
def __post_init__(self, bar: int = (x := 1)) -> None:
pass
@dataclass
class Foo:
def __post_init__(
self,
bar: int = (x := 1) # comment
,
baz: int = (y := 2), # comment
foo = (a := 1) # comment
,
faz = (b := 2), # comment
) -> None:
pass
@dataclass
class Foo:
def __post_init__(
self,
bar: int = 1, # comment
baz: int = 2, # comment
) -> None:
pass
@dataclass
class Foo:
def __post_init__(
self,
arg1: int = (1) # comment
,
arg2: int = ((1)) # comment
,
arg2: int = (i for i in range(10)) # comment
,
) -> None:
pass
# makes little sense, but is valid syntax
def fun_with_python_syntax():
@dataclass
class Foo:
def __post_init__(
self,
bar: (int) = (yield from range(5)) # comment
,
) -> None:
...
return Foo

View File

@@ -53,16 +53,3 @@ regex.subn(br"""eak your machine with rm -""", rf"""/""")
regex.splititer(both, non_literal)
regex.subf(f, lambda _: r'means', '"format"')
regex.subfn(fn, f'''a$1n't''', lambda: "'function'")
# https://github.com/astral-sh/ruff/issues/16713
re.compile("\a\f\n\r\t\u27F2\U0001F0A1\v\x41") # with unsafe fix
re.compile("\b") # without fix
re.compile("\"") # without fix
re.compile("\'") # without fix
re.compile('\"') # without fix
re.compile('\'') # without fix
re.compile("\\") # without fix
re.compile("\101") # without fix
re.compile("a\
b") # without fix

View File

@@ -91,20 +91,3 @@ regex.subf(
br''br""br''
)
regex.subfn(br'I\s\nee*d\s[O0o]me\x20\Qoffe\E, ' br'b')
# https://github.com/astral-sh/ruff/issues/16713
re.compile(
"["
"\U0001F600-\U0001F64F" # emoticons
"\U0001F300-\U0001F5FF" # symbols & pictographs
"\U0001F680-\U0001F6FF" # transport & map symbols
"\U0001F1E0-\U0001F1FF" # flags (iOS)
"\U00002702-\U000027B0"
"\U000024C2-\U0001F251"
"\u200d" # zero width joiner
"\u200c" # zero width non-joiner
"\\u200c" # must not be escaped in a raw string
"]+",
flags=re.UNICODE,
)

View File

@@ -1,3 +0,0 @@
import re
re.compile("\N{Partial Differential}") # with unsafe fix if python target is 3.8 or higher, else without fix

View File

@@ -1,24 +0,0 @@
import re
b_src = b"abc"
# Should be replaced with `b_src.replace(rb"x", b"y")`
re.sub(rb"x", b"y", b_src)
# Should be replaced with `b_src.startswith(rb"abc")`
if re.match(rb"abc", b_src):
pass
# Should be replaced with `rb"x" in b_src`
if re.search(rb"x", b_src):
pass
# Should be replaced with `b_src.split(rb"abc")`
re.split(rb"abc", b_src)
# Patterns containing metacharacters should NOT be replaced
re.sub(rb"ab[c]", b"", b_src)
re.match(rb"ab[c]", b_src)
re.search(rb"ab[c]", b_src)
re.fullmatch(rb"ab[c]", b_src)
re.split(rb"ab[c]", b_src)

View File

@@ -1,16 +0,0 @@
from itertools import starmap
import itertools
# Errors in Python 3.14+
starmap(func, zip(a, b, c, strict=True))
starmap(func, zip(a, b, c, strict=False))
starmap(func, zip(a, b, c, strict=strict))
# No errors
starmap(func)
starmap(func, zip(a, b, c, **kwargs))
starmap(func, zip(a, b, c), foo)
starmap(func, zip(a, b, c, lorem=ipsum))
starmap(func, zip(a, b, c), lorem=ipsum)

View File

@@ -71,7 +71,7 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
flake8_type_checking::helpers::is_valid_runtime_import(
binding,
&checker.semantic,
checker.settings(),
&checker.settings().flake8_type_checking,
)
})
.collect()

View File

@@ -1039,10 +1039,15 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
flake8_simplify::rules::zip_dict_keys_and_values(checker, call);
}
if checker.any_rule_enabled(&[
Rule::OsChmod,
Rule::OsMkdir,
Rule::OsMakedirs,
Rule::OsRename,
Rule::OsReplace,
Rule::OsGetcwd,
Rule::OsStat,
Rule::OsPathJoin,
Rule::OsPathSamefile,
Rule::OsPathSplitext,
Rule::BuiltinOpen,
Rule::PyPath,
@@ -1105,21 +1110,6 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
if checker.is_rule_enabled(Rule::OsReadlink) {
flake8_use_pathlib::rules::os_readlink(checker, call, segments);
}
if checker.is_rule_enabled(Rule::OsGetcwd) {
flake8_use_pathlib::rules::os_getcwd(checker, call, segments);
}
if checker.is_rule_enabled(Rule::OsChmod) {
flake8_use_pathlib::rules::os_chmod(checker, call, segments);
}
if checker.is_rule_enabled(Rule::OsRename) {
flake8_use_pathlib::rules::os_rename(checker, call, segments);
}
if checker.is_rule_enabled(Rule::OsReplace) {
flake8_use_pathlib::rules::os_replace(checker, call, segments);
}
if checker.is_rule_enabled(Rule::OsPathSamefile) {
flake8_use_pathlib::rules::os_path_samefile(checker, call, segments);
}
if checker.is_rule_enabled(Rule::PathConstructorCurrentDirectory) {
flake8_use_pathlib::rules::path_constructor_current_directory(
checker, call, segments,

View File

@@ -2770,10 +2770,11 @@ impl<'a> Checker<'a> {
self.semantic.restore(snapshot);
if self.is_rule_enabled(Rule::QuotedAnnotation) {
pyupgrade::rules::quoted_annotation(self, annotation, range);
if self.semantic.in_typing_only_annotation() {
if self.is_rule_enabled(Rule::QuotedAnnotation) {
pyupgrade::rules::quoted_annotation(self, annotation, range);
}
}
if self.source_type.is_stub() {
if self.is_rule_enabled(Rule::QuotedAnnotationInStub) {
flake8_pyi::rules::quoted_annotation_in_stub(
@@ -3216,11 +3217,6 @@ impl<'a> LintContext<'a> {
pub(crate) fn iter(&mut self) -> impl Iterator<Item = &Diagnostic> {
self.diagnostics.get_mut().iter()
}
/// The [`LinterSettings`] for the current analysis, including the enabled rules.
pub(crate) const fn settings(&self) -> &LinterSettings {
self.settings
}
}
/// An abstraction for mutating a diagnostic.

View File

@@ -16,6 +16,7 @@ use crate::rules::{
eradicate, flake8_commas, flake8_executable, flake8_fixme, flake8_implicit_str_concat,
flake8_pyi, flake8_todos, pycodestyle, pygrep_hooks, pylint, pyupgrade, ruff,
};
use crate::settings::LinterSettings;
use super::ast::LintContext;
@@ -26,6 +27,7 @@ pub(crate) fn check_tokens(
locator: &Locator,
indexer: &Indexer,
stylist: &Stylist,
settings: &LinterSettings,
source_type: PySourceType,
cell_offsets: Option<&CellOffsets>,
context: &mut LintContext,
@@ -40,8 +42,15 @@ pub(crate) fn check_tokens(
Rule::BlankLinesAfterFunctionOrClass,
Rule::BlankLinesBeforeNestedDefinition,
]) {
BlankLinesChecker::new(locator, stylist, source_type, cell_offsets, context)
.check_lines(tokens);
BlankLinesChecker::new(
locator,
stylist,
settings,
source_type,
cell_offsets,
context,
)
.check_lines(tokens);
}
if context.is_rule_enabled(Rule::BlanketTypeIgnore) {
@@ -49,17 +58,17 @@ pub(crate) fn check_tokens(
}
if context.is_rule_enabled(Rule::EmptyComment) {
pylint::rules::empty_comments(context, comment_ranges, locator, indexer);
pylint::rules::empty_comments(context, comment_ranges, locator);
}
if context.is_rule_enabled(Rule::AmbiguousUnicodeCharacterComment) {
for range in comment_ranges {
ruff::rules::ambiguous_unicode_character_comment(context, locator, range);
ruff::rules::ambiguous_unicode_character_comment(context, locator, range, settings);
}
}
if context.is_rule_enabled(Rule::CommentedOutCode) {
eradicate::rules::commented_out_code(context, locator, comment_ranges);
eradicate::rules::commented_out_code(context, locator, comment_ranges, settings);
}
if context.is_rule_enabled(Rule::UTF8EncodingDeclaration) {
@@ -101,7 +110,7 @@ pub(crate) fn check_tokens(
Rule::SingleLineImplicitStringConcatenation,
Rule::MultiLineImplicitStringConcatenation,
]) {
flake8_implicit_str_concat::rules::implicit(context, tokens, locator, indexer);
flake8_implicit_str_concat::rules::implicit(context, tokens, locator, indexer, settings);
}
if context.any_rule_enabled(&[

View File

@@ -920,15 +920,15 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
// flake8-use-pathlib
(Flake8UsePathlib, "100") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathAbspath),
(Flake8UsePathlib, "101") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsChmod),
(Flake8UsePathlib, "101") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsChmod),
(Flake8UsePathlib, "102") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsMkdir),
(Flake8UsePathlib, "103") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsMakedirs),
(Flake8UsePathlib, "104") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRename),
(Flake8UsePathlib, "105") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsReplace),
(Flake8UsePathlib, "104") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsRename),
(Flake8UsePathlib, "105") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsReplace),
(Flake8UsePathlib, "106") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRmdir),
(Flake8UsePathlib, "107") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRemove),
(Flake8UsePathlib, "108") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsUnlink),
(Flake8UsePathlib, "109") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsGetcwd),
(Flake8UsePathlib, "109") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsGetcwd),
(Flake8UsePathlib, "110") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathExists),
(Flake8UsePathlib, "111") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathExpanduser),
(Flake8UsePathlib, "112") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathIsdir),
@@ -940,7 +940,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Flake8UsePathlib, "118") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathJoin),
(Flake8UsePathlib, "119") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathBasename),
(Flake8UsePathlib, "120") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathDirname),
(Flake8UsePathlib, "121") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathSamefile),
(Flake8UsePathlib, "121") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathSamefile),
(Flake8UsePathlib, "122") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsPathSplitext),
(Flake8UsePathlib, "123") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::BuiltinOpen),
(Flake8UsePathlib, "124") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::PyPath),

View File

@@ -288,7 +288,7 @@ fn match_docstring_end(body: &[Stmt]) -> Option<TextSize> {
fn match_semicolon(s: &str) -> Option<TextSize> {
for (offset, c) in s.char_indices() {
match c {
_ if is_python_whitespace(c) => continue,
' ' | '\t' => continue,
';' => return Some(TextSize::try_from(offset).unwrap()),
_ => break,
}

View File

@@ -527,17 +527,6 @@ impl<'a> Importer<'a> {
None
}
}
/// Add a `from __future__ import annotations` import.
pub(crate) fn add_future_import(&self) -> Edit {
let import = &NameImport::ImportFrom(MemberNameImport::member(
"__future__".to_string(),
"annotations".to_string(),
));
// Note that `TextSize::default` should ensure that the import is added at the very
// beginning of the file via `Insertion::start_of_file`.
self.add_import(import, TextSize::default())
}
}
/// An edit to the top-level of a module, making it available at runtime.

View File

@@ -188,6 +188,7 @@ pub fn check_path(
locator,
indexer,
stylist,
settings,
source_type,
source_kind.as_ipy_notebook().map(Notebook::cell_offsets),
&mut context,
@@ -472,7 +473,7 @@ pub fn lint_only(
&& !is_py314_support_enabled(settings)
{
warn_user_once!(
"Support for Python 3.14 is in preview and may undergo breaking changes. Enable `preview` to remove this warning."
"Support for Python 3.14 is under development and may be unstable. Enable `preview` to remove this warning."
);
}
@@ -583,7 +584,7 @@ pub fn lint_fix<'a>(
&& !is_py314_support_enabled(settings)
{
warn_user_once!(
"Support for Python 3.14 is in preview and may undergo breaking changes. Enable `preview` to remove this warning."
"Support for Python 3.14 is under development and may be unstable. Enable `preview` to remove this warning."
);
}

View File

@@ -118,6 +118,86 @@ impl<'a> Locator<'a> {
}
}
/// Finds the closest [`TextSize`] not less than the given offset for which
/// `is_char_boundary` is `true`. If the given offset is greater than the
/// length of the underlying contents, the length of the contents is
/// returned instead.
///
/// Can be replaced with `str::ceil_char_boundary` once it's stable.
///
/// # Examples
///
/// From `std`:
///
/// ```
/// use ruff_text_size::{Ranged, TextSize};
/// use ruff_linter::Locator;
///
/// let locator = Locator::new("❤️🧡💛💚💙💜");
/// assert_eq!(locator.text_len(), TextSize::from(26));
/// assert!(!locator.contents().is_char_boundary(13));
///
/// let closest = locator.ceil_char_boundary(TextSize::from(13));
/// assert_eq!(closest, TextSize::from(14));
/// assert_eq!(&locator.contents()[..closest.to_usize()], "❤️🧡💛");
/// ```
///
/// Additional examples:
///
/// ```
/// use ruff_text_size::{Ranged, TextRange, TextSize};
/// use ruff_linter::Locator;
///
/// let locator = Locator::new("Hello");
///
/// assert_eq!(
/// locator.ceil_char_boundary(TextSize::from(0)),
/// TextSize::from(0)
/// );
///
/// assert_eq!(
/// locator.ceil_char_boundary(TextSize::from(5)),
/// TextSize::from(5)
/// );
///
/// assert_eq!(
/// locator.ceil_char_boundary(TextSize::from(6)),
/// TextSize::from(5)
/// );
///
/// let locator = Locator::new("α");
///
/// assert_eq!(
/// locator.ceil_char_boundary(TextSize::from(0)),
/// TextSize::from(0)
/// );
///
/// assert_eq!(
/// locator.ceil_char_boundary(TextSize::from(1)),
/// TextSize::from(2)
/// );
///
/// assert_eq!(
/// locator.ceil_char_boundary(TextSize::from(2)),
/// TextSize::from(2)
/// );
///
/// assert_eq!(
/// locator.ceil_char_boundary(TextSize::from(3)),
/// TextSize::from(2)
/// );
/// ```
pub fn ceil_char_boundary(&self, offset: TextSize) -> TextSize {
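// A UTF-8 code point is at most four bytes long, so a char boundary is
// guaranteed within the next four bytes (or at the end of the contents).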
let upper_bound = offset
.to_u32()
.saturating_add(4)
.min(self.text_len().to_u32());
(offset.to_u32()..upper_bound)
.map(TextSize::from)
.find(|offset| self.contents.is_char_boundary(offset.to_usize()))
.unwrap_or_else(|| TextSize::from(upper_bound))
}
/// Take the source code between the given [`TextRange`].
#[inline]
pub fn slice<T: Ranged>(&self, ranged: T) -> &'a str {

View File

@@ -0,0 +1,117 @@
use std::io::Write;
use std::path::Path;
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};
use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::LineColumn;
use crate::message::{Emitter, EmitterContext, MessageWithLocation, group_diagnostics_by_filename};
#[derive(Default)]
pub struct JunitEmitter;
impl Emitter for JunitEmitter {
fn emit(
&mut self,
writer: &mut dyn Write,
diagnostics: &[Diagnostic],
context: &EmitterContext,
) -> anyhow::Result<()> {
let mut report = Report::new("ruff");
if diagnostics.is_empty() {
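// With no diagnostics to report, emit a single synthetic passing test
// case so the report is still well-formed.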
let mut test_suite = TestSuite::new("ruff");
test_suite
.extra
.insert(XmlString::new("package"), XmlString::new("org.ruff"));
let mut case = TestCase::new("No errors found", TestCaseStatus::success());
case.set_classname("ruff");
test_suite.add_test_case(case);
report.add_test_suite(test_suite);
} else {
for (filename, messages) in group_diagnostics_by_filename(diagnostics) {
let mut test_suite = TestSuite::new(&filename);
test_suite
.extra
.insert(XmlString::new("package"), XmlString::new("org.ruff"));
for message in messages {
let MessageWithLocation {
message,
start_location,
} = message;
let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
status.set_message(message.body());
let location = if context.is_notebook(&message.expect_ruff_filename()) {
// We can't give a reasonable location for the structured formats,
// so we show one that's clearly a fallback
LineColumn::default()
} else {
start_location
};
status.set_description(format!(
"line {row}, col {col}, {body}",
row = location.line,
col = location.column,
body = message.body()
));
let mut case = TestCase::new(
if let Some(code) = message.secondary_code() {
format!("org.ruff.{code}")
} else {
"org.ruff".to_string()
},
status,
);
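// Derive the JUnit classname from the parent directory joined with the
// file stem, e.g. `src/fib.py` becomes `src/fib`.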
let file_path = Path::new(&*filename);
let file_stem = file_path.file_stem().unwrap().to_str().unwrap();
let classname = file_path.parent().unwrap().join(file_stem);
case.set_classname(classname.to_str().unwrap());
case.extra.insert(
XmlString::new("line"),
XmlString::new(location.line.to_string()),
);
case.extra.insert(
XmlString::new("column"),
XmlString::new(location.column.to_string()),
);
test_suite.add_test_case(case);
}
report.add_test_suite(test_suite);
}
}
report.serialize(writer)?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use crate::message::JunitEmitter;
use crate::message::tests::{
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
};
#[test]
fn output() {
let mut emitter = JunitEmitter;
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
assert_snapshot!(content);
}
#[test]
fn syntax_errors() {
let mut emitter = JunitEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
assert_snapshot!(content);
}
}

View File

@@ -14,6 +14,9 @@ use ruff_db::files::File;
pub use github::GithubEmitter;
pub use gitlab::GitlabEmitter;
pub use grouped::GroupedEmitter;
pub use junit::JunitEmitter;
pub use pylint::PylintEmitter;
pub use rdjson::RdjsonEmitter;
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, SourceFile};
use ruff_text_size::{Ranged, TextRange, TextSize};
@@ -27,6 +30,9 @@ mod diff;
mod github;
mod gitlab;
mod grouped;
mod junit;
mod pylint;
mod rdjson;
mod sarif;
mod text;
@@ -74,14 +80,6 @@ where
body,
);
let span = Span::from(file).with_range(range);
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);
if let Some(suggestion) = suggestion {
diagnostic.help(suggestion);
}
if let Some(fix) = fix {
diagnostic.set_fix(fix);
}
@@ -94,6 +92,13 @@ where
diagnostic.set_noqa_offset(noqa_offset);
}
let span = Span::from(file).with_range(range);
let mut annotation = Annotation::primary(span);
if let Some(suggestion) = suggestion {
annotation = annotation.message(suggestion);
}
diagnostic.annotate(annotation);
diagnostic.set_secondary_code(SecondaryCode::new(rule.noqa_code().to_string()));
diagnostic
@@ -125,10 +130,6 @@ impl FileResolver for EmitterContext<'_> {
UnifiedFile::Ruff(file) => self.notebook_indexes.get(file.name()).is_some(),
}
}
fn current_directory(&self) -> &std::path::Path {
crate::fs::get_cwd()
}
}
struct MessageWithLocation<'a> {

View File

@@ -0,0 +1,72 @@
use std::io::Write;
use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::OneIndexed;
use crate::fs::relativize_path;
use crate::message::{Emitter, EmitterContext};
/// Generate violations in Pylint format.
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
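///
/// Each line takes the form `path:row: [code] body`, e.g.
/// `fib.py:1: [F401] os imported but unused`.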
#[derive(Default)]
pub struct PylintEmitter;
impl Emitter for PylintEmitter {
fn emit(
&mut self,
writer: &mut dyn Write,
diagnostics: &[Diagnostic],
context: &EmitterContext,
) -> anyhow::Result<()> {
for diagnostic in diagnostics {
let filename = diagnostic.expect_ruff_filename();
let row = if context.is_notebook(&filename) {
// We can't give a reasonable location for the structured formats,
// so we show one that's clearly a fallback
OneIndexed::from_zero_indexed(0)
} else {
diagnostic.expect_ruff_start_location().line
};
let body = if let Some(code) = diagnostic.secondary_code() {
format!("[{code}] {body}", body = diagnostic.body())
} else {
diagnostic.body().to_string()
};
writeln!(
writer,
"{path}:{row}: {body}",
path = relativize_path(&filename),
)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use crate::message::PylintEmitter;
use crate::message::tests::{
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
};
#[test]
fn output() {
let mut emitter = PylintEmitter;
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
assert_snapshot!(content);
}
#[test]
fn syntax_errors() {
let mut emitter = PylintEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
assert_snapshot!(content);
}
}

View File

@@ -0,0 +1,143 @@
use std::io::Write;
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use serde_json::{Value, json};
use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::SourceCode;
use ruff_text_size::Ranged;
use crate::Edit;
use crate::message::{Emitter, EmitterContext, LineColumn};
#[derive(Default)]
pub struct RdjsonEmitter;
impl Emitter for RdjsonEmitter {
fn emit(
&mut self,
writer: &mut dyn Write,
diagnostics: &[Diagnostic],
_context: &EmitterContext,
) -> anyhow::Result<()> {
serde_json::to_writer_pretty(
writer,
&json!({
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff",
},
"severity": "warning",
"diagnostics": &ExpandedMessages{ diagnostics }
}),
)?;
Ok(())
}
}
struct ExpandedMessages<'a> {
diagnostics: &'a [Diagnostic],
}
impl Serialize for ExpandedMessages<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
for message in self.diagnostics {
let value = message_to_rdjson_value(message);
s.serialize_element(&value)?;
}
s.end()
}
}
fn message_to_rdjson_value(message: &Diagnostic) -> Value {
let source_file = message.expect_ruff_source_file();
let source_code = source_file.to_source_code();
let start_location = source_code.line_column(message.expect_range().start());
let end_location = source_code.line_column(message.expect_range().end());
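// The two arms below differ only in the `suggestions` array, which is
// emitted only when the diagnostic carries a fix.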
if let Some(fix) = message.fix() {
json!({
"message": message.body(),
"location": {
"path": message.expect_ruff_filename(),
"range": rdjson_range(start_location, end_location),
},
"code": {
"value": message.secondary_code(),
"url": message.to_ruff_url(),
},
"suggestions": rdjson_suggestions(fix.edits(), &source_code),
})
} else {
json!({
"message": message.body(),
"location": {
"path": message.expect_ruff_filename(),
"range": rdjson_range(start_location, end_location),
},
"code": {
"value": message.secondary_code(),
"url": message.to_ruff_url(),
},
})
}
}
fn rdjson_suggestions(edits: &[Edit], source_code: &SourceCode) -> Value {
Value::Array(
edits
.iter()
.map(|edit| {
let location = source_code.line_column(edit.start());
let end_location = source_code.line_column(edit.end());
json!({
"range": rdjson_range(location, end_location),
"text": edit.content().unwrap_or_default(),
})
})
.collect(),
)
}
fn rdjson_range(start: LineColumn, end: LineColumn) -> Value {
json!({
"start": start,
"end": end,
})
}
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use crate::message::RdjsonEmitter;
use crate::message::tests::{
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
};
#[test]
fn output() {
let mut emitter = RdjsonEmitter;
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
assert_snapshot!(content);
}
#[test]
fn syntax_errors() {
let mut emitter = RdjsonEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
assert_snapshot!(content);
}
}

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/junit.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/junit.rs
expression: content
snapshot_kind: text
---
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="3" failures="3" errors="0">

View File

@@ -1,14 +1,15 @@
---
source: crates/ruff_db/src/diagnostic/render/junit.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/junit.rs
expression: content
snapshot_kind: text
---
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="2" failures="2" errors="0">
<testsuite name="syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="1" column="15">
<testcase name="org.ruff" classname="syntax_errors" line="1" column="15">
<failure message="SyntaxError: Expected one or more symbol names after import">line 1, col 15, SyntaxError: Expected one or more symbol names after import</failure>
</testcase>
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="3" column="12">
<testcase name="org.ruff" classname="syntax_errors" line="3" column="12">
<failure message="SyntaxError: Expected &apos;)&apos;, found newline">line 3, col 12, SyntaxError: Expected &apos;)&apos;, found newline</failure>
</testcase>
</testsuite>

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/pylint.rs
expression: content
snapshot_kind: text
---
fib.py:1: [F401] `os` imported but unused
fib.py:6: [F841] Local variable `x` is assigned to but never used

View File

@@ -0,0 +1,7 @@
---
source: crates/ruff_linter/src/message/pylint.rs
expression: content
snapshot_kind: text
---
syntax_errors.py:1: SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: SyntaxError: Expected ')', found newline

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/rdjson.rs
expression: content
snapshot_kind: text
---
{
"diagnostics": [
@@ -95,7 +96,7 @@ expression: env.render_diagnostics(&diagnostics)
"message": "Undefined name `a`"
}
],
"severity": "WARNING",
"severity": "warning",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"

View File

@@ -1,12 +1,14 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/rdjson.rs
expression: content
snapshot_kind: text
---
{
"diagnostics": [
{
"code": {
"value": "invalid-syntax"
"url": null,
"value": null
},
"location": {
"path": "syntax_errors.py",
@@ -25,7 +27,8 @@ expression: env.render_diagnostics(&diagnostics)
},
{
"code": {
"value": "invalid-syntax"
"url": null,
"value": null
},
"location": {
"path": "syntax_errors.py",
@@ -43,7 +46,7 @@ expression: env.render_diagnostics(&diagnostics)
"message": "SyntaxError: Expected ')', found newline"
}
],
"severity": "WARNING",
"severity": "warning",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"

Some files were not shown because too many files have changed in this diff.