Compare commits

..

1 Commits

Author SHA1 Message Date
David Peter
a0925fad4a [ty] Experiment: update salsa in isolation 2025-08-04 12:19:56 +02:00
3301 changed files with 166705 additions and 211474 deletions

6
.github/CODEOWNERS vendored
View File

@@ -19,10 +19,6 @@
# ty # ty
/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager /crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @sharkdp @dcreager /crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager
/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager /scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager /crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager

View File

@@ -39,17 +39,17 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build sdist" - name: "Build sdist"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
command: sdist command: sdist
args: --out dist args: --out dist
@@ -68,18 +68,18 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14 runs-on: macos-14
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
architecture: x64 architecture: x64
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - x86_64" - name: "Build wheels - x86_64"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
target: x86_64 target: x86_64
args: --release --locked --out dist args: --release --locked --out dist
@@ -110,18 +110,18 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14 runs-on: macos-14
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
architecture: arm64 architecture: arm64
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - aarch64" - name: "Build wheels - aarch64"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
target: aarch64 target: aarch64
args: --release --locked --out dist args: --release --locked --out dist
@@ -166,18 +166,18 @@ jobs:
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
arch: x64 arch: x64
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }} architecture: ${{ matrix.platform.arch }}
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build wheels" - name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
target: ${{ matrix.platform.target }} target: ${{ matrix.platform.target }}
args: --release --locked --out dist args: --release --locked --out dist
@@ -219,18 +219,18 @@ jobs:
- x86_64-unknown-linux-gnu - x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu - i686-unknown-linux-gnu
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
architecture: x64 architecture: x64
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build wheels" - name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
target: ${{ matrix.target }} target: ${{ matrix.target }}
manylinux: auto manylinux: auto
@@ -292,21 +292,19 @@ jobs:
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16 maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: arm-unknown-linux-musleabihf - target: arm-unknown-linux-musleabihf
arch: arm arch: arm
- target: riscv64gc-unknown-linux-gnu
arch: riscv64
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build wheels" - name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
target: ${{ matrix.platform.target }} target: ${{ matrix.platform.target }}
manylinux: auto manylinux: auto
@@ -321,7 +319,7 @@ jobs:
githubToken: ${{ github.token }} githubToken: ${{ github.token }}
install: | install: |
apt-get update apt-get update
apt-get install -y --no-install-recommends python3 python3-pip libatomic1 apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip pip3 install -U pip
run: | run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
@@ -361,18 +359,18 @@ jobs:
- x86_64-unknown-linux-musl - x86_64-unknown-linux-musl
- i686-unknown-linux-musl - i686-unknown-linux-musl
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
architecture: x64 architecture: x64
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build wheels" - name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
target: ${{ matrix.target }} target: ${{ matrix.target }}
manylinux: musllinux_1_2 manylinux: musllinux_1_2
@@ -427,17 +425,17 @@ jobs:
arch: armv7 arch: armv7
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build wheels" - name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
target: ${{ matrix.platform.target }} target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2 manylinux: musllinux_1_2

View File

@@ -33,14 +33,14 @@ jobs:
- linux/amd64 - linux/amd64
- linux/arm64 - linux/arm64
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
submodules: recursive submodules: recursive
persist-credentials: false persist-credentials: false
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -113,7 +113,7 @@ jobs:
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps: steps:
- name: Download digests - name: Download digests
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with: with:
path: /tmp/digests path: /tmp/digests
pattern: digests-* pattern: digests-*
@@ -131,7 +131,7 @@ jobs:
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }} type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }} type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -169,7 +169,7 @@ jobs:
steps: steps:
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -256,7 +256,7 @@ jobs:
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps: steps:
- name: Download digests - name: Download digests
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with: with:
path: /tmp/digests path: /tmp/digests
pattern: digests-* pattern: digests-*
@@ -276,7 +276,7 @@ jobs:
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }} type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }} type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}

View File

@@ -38,12 +38,11 @@ jobs:
fuzz: ${{ steps.check_fuzzer.outputs.changed }} fuzz: ${{ steps.check_fuzzer.outputs.changed }}
# Flag that is set to "true" when code related to ty changes. # Flag that is set to "true" when code related to ty changes.
ty: ${{ steps.check_ty.outputs.changed }} ty: ${{ steps.check_ty.outputs.changed }}
# Flag that is set to "true" when code related to the py-fuzzer folder changes.
py-fuzzer: ${{ steps.check_py_fuzzer.outputs.changed }}
# Flag that is set to "true" when code related to the playground changes. # Flag that is set to "true" when code related to the playground changes.
playground: ${{ steps.check_playground.outputs.changed }} playground: ${{ steps.check_playground.outputs.changed }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
fetch-depth: 0 fetch-depth: 0
persist-credentials: false persist-credentials: false
@@ -69,6 +68,7 @@ jobs:
':crates/ruff_text_size/**' \ ':crates/ruff_text_size/**' \
':crates/ruff_python_ast/**' \ ':crates/ruff_python_ast/**' \
':crates/ruff_python_parser/**' \ ':crates/ruff_python_parser/**' \
':python/py-fuzzer/**' \
':.github/workflows/ci.yaml' \ ':.github/workflows/ci.yaml' \
; then ; then
echo "changed=false" >> "$GITHUB_OUTPUT" echo "changed=false" >> "$GITHUB_OUTPUT"
@@ -138,18 +138,6 @@ jobs:
echo "changed=true" >> "$GITHUB_OUTPUT" echo "changed=true" >> "$GITHUB_OUTPUT"
fi fi
- name: Check if the py-fuzzer code changed
id: check_py_fuzzer
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- 'python/py_fuzzer/**' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
- name: Check if there was any code related change - name: Check if there was any code related change
id: check_code id: check_code
env: env:
@@ -209,7 +197,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
@@ -223,7 +211,7 @@ jobs:
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
@@ -243,26 +231,22 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: "Install mold" - name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Install cargo nextest" - name: "Install cargo nextest"
uses: taiki-e/install-action@67cc679904bee382389bf22082124fa963c6f6bd # v2.61.3 uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
with: with:
tool: cargo-nextest tool: cargo-nextest
- name: "Install cargo insta" - name: "Install cargo insta"
uses: taiki-e/install-action@67cc679904bee382389bf22082124fa963c6f6bd # v2.61.3 uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
with: with:
tool: cargo-insta tool: cargo-insta
- name: "Install uv"
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
with:
enable-cache: "true"
- name: ty mdtests (GitHub annotations) - name: ty mdtests (GitHub annotations)
if: ${{ needs.determine_changes.outputs.ty == 'true' }} if: ${{ needs.determine_changes.outputs.ty == 'true' }}
env: env:
@@ -305,26 +289,22 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: "Install mold" - name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Install cargo nextest" - name: "Install cargo nextest"
uses: taiki-e/install-action@67cc679904bee382389bf22082124fa963c6f6bd # v2.61.3 uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
with: with:
tool: cargo-nextest tool: cargo-nextest
- name: "Install cargo insta" - name: "Install cargo insta"
uses: taiki-e/install-action@67cc679904bee382389bf22082124fa963c6f6bd # v2.61.3 uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
with: with:
tool: cargo-insta tool: cargo-insta
- name: "Install uv"
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
with:
enable-cache: "true"
- name: "Run tests" - name: "Run tests"
shell: bash shell: bash
env: env:
@@ -338,20 +318,16 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: "Install cargo nextest" - name: "Install cargo nextest"
uses: taiki-e/install-action@67cc679904bee382389bf22082124fa963c6f6bd # v2.61.3 uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
with: with:
tool: cargo-nextest tool: cargo-nextest
- name: "Install uv"
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
with:
enable-cache: "true"
- name: "Run tests" - name: "Run tests"
shell: bash shell: bash
env: env:
@@ -369,15 +345,15 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with: with:
node-version: 22 node-version: 20
cache: "npm" cache: "npm"
cache-dependency-path: playground/package-lock.json cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0 - uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
@@ -398,14 +374,14 @@ jobs:
if: ${{ github.ref == 'refs/heads/main' }} if: ${{ github.ref == 'refs/heads/main' }}
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: "Install mold" - name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Build" - name: "Build"
run: cargo build --release --locked run: cargo build --release --locked
@@ -416,7 +392,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: SebRollen/toml-action@b1b3628f55fc3a28208d4203ada8b737e9687876 # v1.2.0 - uses: SebRollen/toml-action@b1b3628f55fc3a28208d4203ada8b737e9687876 # v1.2.0
@@ -430,7 +406,7 @@ jobs:
MSRV: ${{ steps.msrv.outputs.value }} MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}" run: rustup default "${MSRV}"
- name: "Install mold" - name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Build tests" - name: "Build tests"
shell: bash shell: bash
env: env:
@@ -444,7 +420,7 @@ jobs:
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }} if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
@@ -453,7 +429,9 @@ jobs:
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: "Install cargo-binstall" - name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@20aa316bab4942180bbbabe93237858e8d77f1ed # v1.15.5 uses: cargo-bins/cargo-binstall@dd6a0ac24caa1243d18df0f770b941e990e8facc # v1.14.3
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz" - name: "Install cargo-fuzz"
# Download the latest version from quick install and not the github releases because github releases only has MUSL targets. # Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
@@ -465,16 +443,16 @@ jobs:
needs: needs:
- cargo-test-linux - cargo-test-linux
- determine_changes - determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.parser == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
timeout-minutes: 20 timeout-minutes: 20
env: env:
FORCE_COLOR: 1 FORCE_COLOR: 1
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test name: Download Ruff binary to test
id: download-cached-binary id: download-cached-binary
with: with:
@@ -504,7 +482,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 5 timeout-minutes: 5
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
@@ -534,14 +512,14 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download comparison Ruff binary name: Download comparison Ruff binary
id: ruff-target id: ruff-target
with: with:
@@ -655,13 +633,13 @@ jobs:
- cargo-test-linux - cargo-test-linux
- determine_changes - determine_changes
# Only runs on pull requests, since that is the only we way we can find the base version for comparison. # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.ty == 'true' }}
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download new ty binary name: Download new ty binary
id: ty-new id: ty-new
with: with:
@@ -674,7 +652,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }} branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml" workflow: "ci.yaml"
check_artifacts: true check_artifacts: true
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Fuzz - name: Fuzz
env: env:
FORCE_COLOR: 1 FORCE_COLOR: 1
@@ -701,10 +679,10 @@ jobs:
needs: determine_changes needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: cargo-bins/cargo-binstall@20aa316bab4942180bbbabe93237858e8d77f1ed # v1.15.5 - uses: cargo-bins/cargo-binstall@dd6a0ac24caa1243d18df0f770b941e990e8facc # v1.14.3
- run: cargo binstall --no-confirm cargo-shear - run: cargo binstall --no-confirm cargo-shear
- run: cargo shear - run: cargo shear
@@ -714,10 +692,10 @@ jobs:
timeout-minutes: 20 timeout-minutes: 20
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: ${{ env.PYTHON_VERSION }} python-version: ${{ env.PYTHON_VERSION }}
architecture: x64 architecture: x64
@@ -725,7 +703,7 @@ jobs:
- name: "Prep README.md" - name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi run: python scripts/transform_readme.py --target pypi
- name: "Build wheels" - name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4 uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with: with:
args: --out dist args: --out dist
- name: "Test wheel" - name: "Test wheel"
@@ -741,16 +719,16 @@ jobs:
runs-on: depot-ubuntu-22.04-16 runs-on: depot-ubuntu-22.04-16
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with: with:
node-version: 22 node-version: 22
- name: "Cache pre-commit" - name: "Cache pre-commit"
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with: with:
path: ~/.cache/pre-commit path: ~/.cache/pre-commit
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
@@ -772,10 +750,10 @@ jobs:
env: env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }} MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: "3.13" python-version: "3.13"
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
@@ -787,7 +765,7 @@ jobs:
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: Install uv - name: Install uv
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Insiders dependencies" - name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system run: uv pip install -r docs/requirements-insiders.txt --system
@@ -814,7 +792,7 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
@@ -840,18 +818,18 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: "Download ruff-lsp source" name: "Download ruff-lsp source"
with: with:
persist-credentials: false persist-credentials: false
repository: "astral-sh/ruff-lsp" repository: "astral-sh/ruff-lsp"
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
# installation fails on 3.13 and newer # installation fails on 3.13 and newer
python-version: "3.12" python-version: "3.12"
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download development ruff binary name: Download development ruff binary
id: ruff-target id: ruff-target
with: with:
@@ -882,13 +860,13 @@ jobs:
- determine_changes - determine_changes
if: ${{ (needs.determine_changes.outputs.playground == 'true') }} if: ${{ (needs.determine_changes.outputs.playground == 'true') }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown run: rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with: with:
node-version: 22 node-version: 22
cache: "npm" cache: "npm"
@@ -914,18 +892,18 @@ jobs:
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- name: "Checkout Branch" - name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: "Install codspeed" - name: "Install codspeed"
uses: taiki-e/install-action@67cc679904bee382389bf22082124fa963c6f6bd # v2.61.3 uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
with: with:
tool: cargo-codspeed tool: cargo-codspeed
@@ -933,9 +911,8 @@ jobs:
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark
- name: "Run benchmarks" - name: "Run benchmarks"
uses: CodSpeedHQ/action@653fdc30e6c40ffd9739e40c8a0576f4f4523ca1 # v4.0.1 uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
with: with:
mode: instrumentation
run: cargo codspeed run run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }} token: ${{ secrets.CODSPEED_TOKEN }}
@@ -948,18 +925,18 @@ jobs:
TY_LOG: ruff_benchmark=debug TY_LOG: ruff_benchmark=debug
steps: steps:
- name: "Checkout Branch" - name: "Checkout Branch"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: "Install codspeed" - name: "Install codspeed"
uses: taiki-e/install-action@67cc679904bee382389bf22082124fa963c6f6bd # v2.61.3 uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
with: with:
tool: cargo-codspeed tool: cargo-codspeed
@@ -967,13 +944,7 @@ jobs:
run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark
- name: "Run benchmarks" - name: "Run benchmarks"
uses: CodSpeedHQ/action@653fdc30e6c40ffd9739e40c8a0576f4f4523ca1 # v4.0.1 uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
env:
# enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't
# appear to provide much useful insight for our walltime benchmarks right now
# (see https://github.com/astral-sh/ruff/pull/20419)
CODSPEED_PERF_ENABLED: false
with: with:
mode: walltime
run: cargo codspeed run run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }} token: ${{ secrets.CODSPEED_TOKEN }}

View File

@@ -31,14 +31,14 @@ jobs:
# Don't run the cron job on forks: # Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }} if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- name: "Install mold" - name: "Install mold"
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: Build ruff - name: Build ruff
# A debug build means the script runs slower once it gets started, # A debug build means the script runs slower once it gets started,
@@ -65,7 +65,7 @@ jobs:
permissions: permissions:
issues: write issues: write
steps: steps:
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with: with:
github-token: ${{ secrets.GITHUB_TOKEN }} github-token: ${{ secrets.GITHUB_TOKEN }}
script: | script: |

View File

@@ -11,7 +11,6 @@ on:
- "crates/ruff_python_parser" - "crates/ruff_python_parser"
- ".github/workflows/mypy_primer.yaml" - ".github/workflows/mypy_primer.yaml"
- ".github/workflows/mypy_primer_comment.yaml" - ".github/workflows/mypy_primer_comment.yaml"
- "scripts/mypy_primer.sh"
- "Cargo.lock" - "Cargo.lock"
- "!**.md" - "!**.md"
@@ -32,14 +31,14 @@ jobs:
runs-on: depot-ubuntu-22.04-32 runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
path: ruff path: ruff
fetch-depth: 0 fetch-depth: 0
persist-credentials: false persist-credentials: false
- name: Install the latest version of uv - name: Install the latest version of uv
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with: with:
@@ -75,14 +74,14 @@ jobs:
runs-on: depot-ubuntu-22.04-32 runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
path: ruff path: ruff
fetch-depth: 0 fetch-depth: 0
persist-credentials: false persist-credentials: false
- name: Install the latest version of uv - name: Install the latest version of uv
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with: with:

View File

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: "Update pre-commit mirror" - name: "Update pre-commit mirror"
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with: with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }} github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: | script: |

View File

@@ -23,12 +23,12 @@ jobs:
env: env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }} MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
ref: ${{ inputs.ref }} ref: ${{ inputs.ref }}
persist-credentials: true persist-credentials: true
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with: with:
python-version: 3.12 python-version: 3.12

View File

@@ -24,12 +24,12 @@ jobs:
env: env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }} CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with: with:
node-version: 22 node-version: 22
cache: "npm" cache: "npm"

View File

@@ -22,8 +22,8 @@ jobs:
id-token: write id-token: write
steps: steps:
- name: "Install uv" - name: "Install uv"
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with: with:
pattern: wheels-* pattern: wheels-*
path: wheels path: wheels

View File

@@ -30,12 +30,12 @@ jobs:
env: env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }} CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with: with:
node-version: 22 node-version: 22
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0 - uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0

View File

@@ -29,7 +29,7 @@ jobs:
target: [web, bundler, nodejs] target: [web, bundler, nodejs]
fail-fast: false fail-fast: false
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
persist-credentials: false persist-credentials: false
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
@@ -45,9 +45,9 @@ jobs:
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with: with:
node-version: 22 node-version: 20
registry-url: "https://registry.npmjs.org" registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)" - name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }} if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}

View File

@@ -61,7 +61,7 @@ jobs:
env: env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps: steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with: with:
persist-credentials: false persist-credentials: false
submodules: recursive submodules: recursive
@@ -124,19 +124,19 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps: steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with: with:
persist-credentials: false persist-credentials: false
submodules: recursive submodules: recursive
- name: Install cached dist - name: Install cached dist
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with: with:
name: cargo-dist-cache name: cargo-dist-cache
path: ~/.cargo/bin/ path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist - run: chmod +x ~/.cargo/bin/dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums) # Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts - name: Fetch local artifacts
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with: with:
pattern: artifacts-* pattern: artifacts-*
path: target/distrib/ path: target/distrib/
@@ -175,19 +175,19 @@ jobs:
outputs: outputs:
val: ${{ steps.host.outputs.manifest }} val: ${{ steps.host.outputs.manifest }}
steps: steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with: with:
persist-credentials: false persist-credentials: false
submodules: recursive submodules: recursive
- name: Install cached dist - name: Install cached dist
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with: with:
name: cargo-dist-cache name: cargo-dist-cache
path: ~/.cargo/bin/ path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist - run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage # Fetch artifacts from scratch-storage
- name: Fetch artifacts - name: Fetch artifacts
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with: with:
pattern: artifacts-* pattern: artifacts-*
path: target/distrib/ path: target/distrib/
@@ -251,13 +251,13 @@ jobs:
env: env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps: steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with: with:
persist-credentials: false persist-credentials: false
submodules: recursive submodules: recursive
# Create a GitHub Release while uploading all files to it # Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts" - name: "Download GitHub Artifacts"
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with: with:
pattern: artifacts-* pattern: artifacts-*
path: artifacts path: artifacts

View File

@@ -50,12 +50,12 @@ jobs:
permissions: permissions:
contents: write contents: write
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff name: Checkout Ruff
with: with:
path: ruff path: ruff
persist-credentials: true persist-credentials: true
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout typeshed name: Checkout typeshed
with: with:
repository: python/typeshed repository: python/typeshed
@@ -65,7 +65,7 @@ jobs:
run: | run: |
git config --global user.name typeshedbot git config --global user.name typeshedbot
git config --global user.email '<>' git config --global user.email '<>'
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Sync typeshed stubs - name: Sync typeshed stubs
run: | run: |
rm -rf "ruff/${VENDORED_TYPESHED}" rm -rf "ruff/${VENDORED_TYPESHED}"
@@ -112,12 +112,12 @@ jobs:
permissions: permissions:
contents: write contents: write
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff name: Checkout Ruff
with: with:
persist-credentials: true persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}} ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git - name: Setup git
run: | run: |
git config --global user.name typeshedbot git config --global user.name typeshedbot
@@ -150,12 +150,12 @@ jobs:
contents: write contents: write
pull-requests: write pull-requests: write
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
name: Checkout Ruff name: Checkout Ruff
with: with:
persist-credentials: true persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}} ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git - name: Setup git
run: | run: |
git config --global user.name typeshedbot git config --global user.name typeshedbot
@@ -192,7 +192,7 @@ jobs:
permissions: permissions:
issues: write issues: write
steps: steps:
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with: with:
github-token: ${{ secrets.GITHUB_TOKEN }} github-token: ${{ secrets.GITHUB_TOKEN }}
script: | script: |

View File

@@ -26,14 +26,14 @@ jobs:
timeout-minutes: 20 timeout-minutes: 20
if: contains(github.event.label.name, 'ecosystem-analyzer') if: contains(github.event.label.name, 'ecosystem-analyzer')
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
path: ruff path: ruff
fetch-depth: 0 fetch-depth: 0
persist-credentials: false persist-credentials: false
- name: Install the latest version of uv - name: Install the latest version of uv
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with: with:
@@ -64,12 +64,11 @@ jobs:
cd .. cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@fc0f612798710b0dd69bb7528bc9b361dc60bd43" uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
ecosystem-analyzer \ ecosystem-analyzer \
--repository ruff \ --repository ruff \
diff \ diff \
--profile=release \
--projects-old ruff/projects_old.txt \ --projects-old ruff/projects_old.txt \
--projects-new ruff/projects_new.txt \ --projects-new ruff/projects_new.txt \
--old old_commit \ --old old_commit \

View File

@@ -22,14 +22,14 @@ jobs:
runs-on: depot-ubuntu-22.04-32 runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20 timeout-minutes: 20
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
path: ruff path: ruff
fetch-depth: 0 fetch-depth: 0
persist-credentials: false persist-credentials: false
- name: Install the latest version of uv - name: Install the latest version of uv
uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0 uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with: with:

View File

@@ -24,7 +24,6 @@ env:
CARGO_TERM_COLOR: always CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10 RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1 RUST_BACKTRACE: 1
CONFORMANCE_SUITE_COMMIT: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
jobs: jobs:
typing_conformance: typing_conformance:
@@ -32,19 +31,22 @@ jobs:
runs-on: depot-ubuntu-22.04-32 runs-on: depot-ubuntu-22.04-32
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
path: ruff path: ruff
fetch-depth: 0 fetch-depth: 0
persist-credentials: false persist-credentials: false
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with: with:
repository: python/typing repository: python/typing
ref: ${{ env.CONFORMANCE_SUITE_COMMIT }} ref: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
path: typing path: typing
persist-credentials: false persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with: with:
workspaces: "ruff" workspaces: "ruff"
@@ -54,9 +56,6 @@ jobs:
- name: Compute diagnostic diff - name: Compute diagnostic diff
shell: bash shell: bash
env:
# TODO: Remove this once we fixed the remaining panics in the conformance suite.
TY_MAX_PARALLELISM: 1
run: | run: |
RUFF_DIR="$GITHUB_WORKSPACE/ruff" RUFF_DIR="$GITHUB_WORKSPACE/ruff"
@@ -65,16 +64,17 @@ jobs:
cd ruff cd ruff
echo "new commit" echo "new commit"
git rev-list --format=%s --max-count=1 "$GITHUB_SHA" git checkout -b new_commit "${{ github.event.pull_request.head.sha }}"
cargo build --bin ty git rev-list --format=%s --max-count=1 new_commit
mv target/debug/ty ty-new cargo build --release --bin ty
mv target/release/ty ty-new
echo "old commit (merge base)"
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")" MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
git checkout -b old_commit "$MERGE_BASE" git checkout -b old_commit "$MERGE_BASE"
echo "old commit (merge base)"
git rev-list --format=%s --max-count=1 old_commit git rev-list --format=%s --max-count=1 old_commit
cargo build --bin ty cargo build --release --bin ty
mv target/debug/ty ty-old mv target/release/ty ty-old
) )
( (
@@ -95,7 +95,6 @@ jobs:
fi fi
echo ${{ github.event.number }} > pr-number echo ${{ github.event.number }} > pr-number
echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit
- name: Upload diff - name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -108,9 +107,3 @@ jobs:
with: with:
name: pr-number name: pr-number
path: pr-number path: pr-number
- name: Upload conformance suite commit
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: conformance-suite-commit
path: conformance-suite-commit

View File

@@ -32,14 +32,6 @@ jobs:
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT" echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi fi
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download typing conformance suite commit
with:
name: conformance-suite-commit
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download typing_conformance results" name: "Download typing_conformance results"
id: download-typing_conformance_diff id: download-typing_conformance_diff
@@ -69,14 +61,7 @@ jobs:
# subsequent runs # subsequent runs
echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt
if [[ -f conformance-suite-commit ]] echo '## Diagnostic diff on typing conformance tests' >> comment.txt
then
echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(<conformance-suite-commit)/conformance)" >> comment.txt
else
echo "conformance-suite-commit file not found"
echo "## Diagnostic diff on typing conformance tests" >> comment.txt
fi
if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
echo '<details>' >> comment.txt echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt

View File

@@ -1,44 +1,5 @@
# Breaking Changes # Breaking Changes
## 0.13.0
- **Several rules can now add `from __future__ import annotations` automatically**
`TC001`, `TC002`, `TC003`, `RUF013`, and `UP037` now add `from __future__ import annotations` as part of their fixes when the
`lint.future-annotations` setting is enabled. This allows the rules to move
more imports into `TYPE_CHECKING` blocks (`TC001`, `TC002`, and `TC003`),
use PEP 604 union syntax on Python versions before 3.10 (`RUF013`), and
unquote more annotations (`UP037`).
- **Full module paths are now used to verify first-party modules**
Ruff now checks that the full path to a module exists on disk before
categorizing it as a first-party import. This change makes first-party
import detection more accurate, helping to avoid false positives on local
directories with the same name as a third-party dependency, for example. See
the [FAQ
section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) on import categorization for more details.
- **Deprecated rules must now be selected by exact rule code**
Ruff will no longer activate deprecated rules selected by their group name
or prefix. As noted below, the two remaining deprecated rules were also
removed in this release, so this won't affect any current rules, but it will
still affect any deprecations in the future.
- **The deprecated macOS configuration directory fallback has been removed**
Ruff will no longer look for a user-level configuration file at
`~/Library/Application Support/ruff/ruff.toml` on macOS. This feature was
deprecated in v0.5 in favor of using the [XDG
specification](https://specifications.freedesktop.org/basedir-spec/latest/)
(usually resolving to `~/.config/ruff/ruff.toml`), like on Linux. The
fallback and accompanying deprecation warning have now been removed.
- **[`pandas-df-variable-name`](https://docs.astral.sh/ruff/rules/pandas-df-variable-name) (`PD901`) has been removed**
- **[`non-pep604-isinstance`](https://docs.astral.sh/ruff/rules/non-pep604-isinstance) (`UP038`) has been removed**
## 0.12.0 ## 0.12.0
- **Detection of more syntax errors** - **Detection of more syntax errors**

View File

@@ -1,119 +1,383 @@
# Changelog # Changelog
## 0.13.0 ## 0.12.7
Check out the [blog post](https://astral.sh/blog/ruff-v0.13.0) for a migration This is a follow-up release to 0.12.6. Because of an issue in the package metadata, 0.12.6 failed to publish fully to PyPI and has been yanked. Similarly, there is no GitHub release or Git tag for 0.12.6. The contents of the 0.12.7 release are identical to 0.12.6, except for the updated metadata.
## 0.12.6
### Preview features
- \[`flake8-commas`\] Add support for trailing comma checks in type parameter lists (`COM812`, `COM819`) ([#19390](https://github.com/astral-sh/ruff/pull/19390))
- \[`pylint`\] Implement auto-fix for `missing-maxsplit-arg` (`PLC0207`) ([#19387](https://github.com/astral-sh/ruff/pull/19387))
- \[`ruff`\] Offer fixes for `RUF039` in more cases ([#19065](https://github.com/astral-sh/ruff/pull/19065))
### Bug fixes
- Support `.pyi` files in ruff analyze graph ([#19611](https://github.com/astral-sh/ruff/pull/19611))
- \[`flake8-pyi`\] Preserve inline comment in ellipsis removal (`PYI013`) ([#19399](https://github.com/astral-sh/ruff/pull/19399))
- \[`perflint`\] Ignore rule if target is `global` or `nonlocal` (`PERF401`) ([#19539](https://github.com/astral-sh/ruff/pull/19539))
- \[`pyupgrade`\] Fix `UP030` to avoid modifying double curly braces in format strings ([#19378](https://github.com/astral-sh/ruff/pull/19378))
- \[`refurb`\] Ignore decorated functions for `FURB118` ([#19339](https://github.com/astral-sh/ruff/pull/19339))
- \[`refurb`\] Mark `int` and `bool` cases for `Decimal.from_float` as safe fixes (`FURB164`) ([#19468](https://github.com/astral-sh/ruff/pull/19468))
- \[`ruff`\] Fix `RUF033` for named default expressions ([#19115](https://github.com/astral-sh/ruff/pull/19115))
### Rule changes
- \[`flake8-blind-except`\] Change `BLE001` to permit `logging.critical(..., exc_info=True)` ([#19520](https://github.com/astral-sh/ruff/pull/19520))
### Performance
- Add support for specifying minimum dots in detected string imports ([#19538](https://github.com/astral-sh/ruff/pull/19538))
## 0.12.5
### Preview features
- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))
### Bug fixes
- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))
### Rule changes
- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))
## 0.12.4
### Preview features
- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))
### Bug fixes
- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))
### Rule changes
- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))
### Documentation
- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288),[#19272](https://github.com/astral-sh/ruff/pull/19272),[#19291](https://github.com/astral-sh/ruff/pull/19291),[#19296](https://github.com/astral-sh/ruff/pull/19296),[#19292](https://github.com/astral-sh/ruff/pull/19292),[#19295](https://github.com/astral-sh/ruff/pull/19295),[#19297](https://github.com/astral-sh/ruff/pull/19297),[#19309](https://github.com/astral-sh/ruff/pull/19309))
## 0.12.3
### Preview features
- \[`flake8-bugbear`\] Support non-context-manager calls in `B017` ([#19063](https://github.com/astral-sh/ruff/pull/19063))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH100`, `PTH106`, `PTH107`, `PTH108`, `PTH110`, `PTH111`, `PTH112`, `PTH113`, `PTH114`, `PTH115`, `PTH117`, `PTH119`, `PTH120` ([#19213](https://github.com/astral-sh/ruff/pull/19213))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH203`, `PTH204`, `PTH205` ([#18922](https://github.com/astral-sh/ruff/pull/18922))
### Bug fixes
- \[`flake8-return`\] Fix false-positive for variables used inside nested functions in `RET504` ([#18433](https://github.com/astral-sh/ruff/pull/18433))
- Treat form feed as valid whitespace before a line continuation ([#19220](https://github.com/astral-sh/ruff/pull/19220))
- \[`flake8-type-checking`\] Fix syntax error introduced by fix (`TC008`) ([#19150](https://github.com/astral-sh/ruff/pull/19150))
- \[`pyupgrade`\] Keyword arguments in `super` should suppress the `UP008` fix ([#19131](https://github.com/astral-sh/ruff/pull/19131))
### Documentation
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI007`, `PYI008`) ([#19103](https://github.com/astral-sh/ruff/pull/19103))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM116`) ([#19111](https://github.com/astral-sh/ruff/pull/19111))
- \[`flake8-type-checking`\] Make example error out-of-the-box (`TC001`) ([#19151](https://github.com/astral-sh/ruff/pull/19151))
- \[`flake8-use-pathlib`\] Make example error out-of-the-box (`PTH210`) ([#19189](https://github.com/astral-sh/ruff/pull/19189))
- \[`pycodestyle`\] Make example error out-of-the-box (`E272`) ([#19191](https://github.com/astral-sh/ruff/pull/19191))
- \[`pycodestyle`\] Make example not raise unnecessary `SyntaxError` (`E114`) ([#19190](https://github.com/astral-sh/ruff/pull/19190))
- \[`pydoclint`\] Make example error out-of-the-box (`DOC501`) ([#19218](https://github.com/astral-sh/ruff/pull/19218))
- \[`pylint`, `pyupgrade`\] Fix syntax errors in examples (`PLW1501`, `UP028`) ([#19127](https://github.com/astral-sh/ruff/pull/19127))
- \[`pylint`\] Update `missing-maxsplit-arg` docs and error to suggest proper usage (`PLC0207`) ([#18949](https://github.com/astral-sh/ruff/pull/18949))
- \[`flake8-bandit`\] Make example error out-of-the-box (`S412`) ([#19241](https://github.com/astral-sh/ruff/pull/19241))
## 0.12.2
### Preview features
- \[`flake8-pyi`\] Expand `Optional[A]` to `A | None` (`PYI016`) ([#18572](https://github.com/astral-sh/ruff/pull/18572))
- \[`pyupgrade`\] Mark `UP008` fix safe if no comments are in range ([#18683](https://github.com/astral-sh/ruff/pull/18683))
### Bug fixes
- \[`flake8-comprehensions`\] Fix `C420` to prepend whitespace when needed ([#18616](https://github.com/astral-sh/ruff/pull/18616))
- \[`perflint`\] Fix `PERF403` panic on attribute or subscription loop variable ([#19042](https://github.com/astral-sh/ruff/pull/19042))
- \[`pydocstyle`\] Fix `D413` infinite loop for parenthesized docstring ([#18930](https://github.com/astral-sh/ruff/pull/18930))
- \[`pylint`\] Fix `PLW0108` autofix introducing a syntax error when the lambda's body contains an assignment expression ([#18678](https://github.com/astral-sh/ruff/pull/18678))
- \[`refurb`\] Fix false positive on empty tuples (`FURB168`) ([#19058](https://github.com/astral-sh/ruff/pull/19058))
- \[`ruff`\] Allow more `field` calls from `attrs` (`RUF009`) ([#19021](https://github.com/astral-sh/ruff/pull/19021))
- \[`ruff`\] Fix syntax error introduced for an empty string followed by a u-prefixed string (`UP025`) ([#18899](https://github.com/astral-sh/ruff/pull/18899))
### Rule changes
- \[`flake8-executable`\] Allow `uvx` in shebang line (`EXE003`) ([#18967](https://github.com/astral-sh/ruff/pull/18967))
- \[`pandas`\] Avoid flagging `PD002` if `pandas` is not imported ([#18963](https://github.com/astral-sh/ruff/pull/18963))
- \[`pyupgrade`\] Avoid PEP-604 unions with `typing.NamedTuple` (`UP007`, `UP045`) ([#18682](https://github.com/astral-sh/ruff/pull/18682))
### Documentation
- Document link between `import-outside-top-level (PLC0415)` and `lint.flake8-tidy-imports.banned-module-level-imports` ([#18733](https://github.com/astral-sh/ruff/pull/18733))
- Fix description of the `format.skip-magic-trailing-comma` example ([#19095](https://github.com/astral-sh/ruff/pull/19095))
- \[`airflow`\] Make `AIR302` example error out-of-the-box ([#18988](https://github.com/astral-sh/ruff/pull/18988))
- \[`airflow`\] Make `AIR312` example error out-of-the-box ([#18989](https://github.com/astral-sh/ruff/pull/18989))
- \[`flake8-annotations`\] Make `ANN401` example error out-of-the-box ([#18974](https://github.com/astral-sh/ruff/pull/18974))
- \[`flake8-async`\] Make `ASYNC100` example error out-of-the-box ([#18993](https://github.com/astral-sh/ruff/pull/18993))
- \[`flake8-async`\] Make `ASYNC105` example error out-of-the-box ([#19002](https://github.com/astral-sh/ruff/pull/19002))
- \[`flake8-async`\] Make `ASYNC110` example error out-of-the-box ([#18975](https://github.com/astral-sh/ruff/pull/18975))
- \[`flake8-async`\] Make `ASYNC210` example error out-of-the-box ([#18977](https://github.com/astral-sh/ruff/pull/18977))
- \[`flake8-async`\] Make `ASYNC220`, `ASYNC221`, and `ASYNC222` examples error out-of-the-box ([#18978](https://github.com/astral-sh/ruff/pull/18978))
- \[`flake8-async`\] Make `ASYNC251` example error out-of-the-box ([#18990](https://github.com/astral-sh/ruff/pull/18990))
- \[`flake8-bandit`\] Make `S201` example error out-of-the-box ([#19017](https://github.com/astral-sh/ruff/pull/19017))
- \[`flake8-bandit`\] Make `S604` and `S609` examples error out-of-the-box ([#19049](https://github.com/astral-sh/ruff/pull/19049))
- \[`flake8-bugbear`\] Make `B028` example error out-of-the-box ([#19054](https://github.com/astral-sh/ruff/pull/19054))
- \[`flake8-bugbear`\] Make `B911` example error out-of-the-box ([#19051](https://github.com/astral-sh/ruff/pull/19051))
- \[`flake8-datetimez`\] Make `DTZ011` example error out-of-the-box ([#19055](https://github.com/astral-sh/ruff/pull/19055))
- \[`flake8-datetimez`\] Make `DTZ901` example error out-of-the-box ([#19056](https://github.com/astral-sh/ruff/pull/19056))
- \[`flake8-pyi`\] Make `PYI032` example error out-of-the-box ([#19061](https://github.com/astral-sh/ruff/pull/19061))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI014`, `PYI015`) ([#19097](https://github.com/astral-sh/ruff/pull/19097))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI042`) ([#19101](https://github.com/astral-sh/ruff/pull/19101))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI059`) ([#19080](https://github.com/astral-sh/ruff/pull/19080))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI062`) ([#19079](https://github.com/astral-sh/ruff/pull/19079))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT023`) ([#19104](https://github.com/astral-sh/ruff/pull/19104))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT030`) ([#19105](https://github.com/astral-sh/ruff/pull/19105))
- \[`flake8-quotes`\] Make example error out-of-the-box (`Q003`) ([#19106](https://github.com/astral-sh/ruff/pull/19106))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM110`) ([#19113](https://github.com/astral-sh/ruff/pull/19113))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM113`) ([#19109](https://github.com/astral-sh/ruff/pull/19109))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM401`) ([#19110](https://github.com/astral-sh/ruff/pull/19110))
- \[`pyflakes`\] Fix backslash in docs (`F621`) ([#19098](https://github.com/astral-sh/ruff/pull/19098))
- \[`pylint`\] Fix `PLC0415` example ([#18970](https://github.com/astral-sh/ruff/pull/18970))
## 0.12.1
### Preview features
- \[`flake8-errmsg`\] Extend `EM101` to support byte strings ([#18867](https://github.com/astral-sh/ruff/pull/18867))
- \[`flake8-use-pathlib`\] Add autofix for `PTH202` ([#18763](https://github.com/astral-sh/ruff/pull/18763))
- \[`pygrep-hooks`\] Add `AsyncMock` methods to `invalid-mock-access` (`PGH005`) ([#18547](https://github.com/astral-sh/ruff/pull/18547))
- \[`pylint`\] Ignore `__init__.py` files in (`PLC0414`) ([#18400](https://github.com/astral-sh/ruff/pull/18400))
- \[`ruff`\] Trigger `RUF037` for empty string and byte strings ([#18862](https://github.com/astral-sh/ruff/pull/18862))
- [formatter] Fix missing blank lines before decorated classes in `.pyi` files ([#18888](https://github.com/astral-sh/ruff/pull/18888))
### Bug fixes
- Avoid generating diagnostics with per-file ignores ([#18801](https://github.com/astral-sh/ruff/pull/18801))
- Handle parenthesized arguments in `remove_argument` ([#18805](https://github.com/astral-sh/ruff/pull/18805))
- \[`flake8-logging`\] Avoid false positive for `exc_info=True` outside `logger.exception` (`LOG014`) ([#18737](https://github.com/astral-sh/ruff/pull/18737))
- \[`flake8-pytest-style`\] Enforce `pytest` import for decorators ([#18779](https://github.com/astral-sh/ruff/pull/18779))
- \[`flake8-pytest-style`\] Mark autofix for `PT001` and `PT023` as unsafe if there's comments in the decorator ([#18792](https://github.com/astral-sh/ruff/pull/18792))
- \[`flake8-pytest-style`\] `PT001`/`PT023` fix makes syntax error on parenthesized decorator ([#18782](https://github.com/astral-sh/ruff/pull/18782))
- \[`flake8-raise`\] Make fix unsafe if it deletes comments (`RSE102`) ([#18788](https://github.com/astral-sh/ruff/pull/18788))
- \[`flake8-simplify`\] Fix `SIM911` autofix creating a syntax error ([#18793](https://github.com/astral-sh/ruff/pull/18793))
- \[`flake8-simplify`\] Fix false negatives for shadowed bindings (`SIM910`, `SIM911`) ([#18794](https://github.com/astral-sh/ruff/pull/18794))
- \[`flake8-simplify`\] Preserve original behavior for `except ()` and bare `except` (`SIM105`) ([#18213](https://github.com/astral-sh/ruff/pull/18213))
- \[`flake8-pyi`\] Fix `PYI041`'s fix causing `TypeError` with `None | None | ...` ([#18637](https://github.com/astral-sh/ruff/pull/18637))
- \[`perflint`\] Fix `PERF101` autofix creating a syntax error and mark autofix as unsafe if there are comments in the `list` call expr ([#18803](https://github.com/astral-sh/ruff/pull/18803))
- \[`perflint`\] Fix false negative in `PERF401` ([#18866](https://github.com/astral-sh/ruff/pull/18866))
- \[`pylint`\] Avoid flattening nested `min`/`max` when outer call has single argument (`PLW3301`) ([#16885](https://github.com/astral-sh/ruff/pull/16885))
- \[`pylint`\] Fix `PLC2801` autofix creating a syntax error ([#18857](https://github.com/astral-sh/ruff/pull/18857))
- \[`pylint`\] Mark `PLE0241` autofix as unsafe if there's comments in the base classes ([#18832](https://github.com/astral-sh/ruff/pull/18832))
- \[`pylint`\] Suppress `PLE2510`/`PLE2512`/`PLE2513`/`PLE2514`/`PLE2515` autofix if the text contains an odd number of backslashes ([#18856](https://github.com/astral-sh/ruff/pull/18856))
- \[`refurb`\] Detect more exotic float literals in `FURB164` ([#18925](https://github.com/astral-sh/ruff/pull/18925))
- \[`refurb`\] Fix `FURB163` autofix creating a syntax error for `yield` expressions ([#18756](https://github.com/astral-sh/ruff/pull/18756))
- \[`refurb`\] Mark `FURB129` autofix as unsafe if there's comments in the `readlines` call ([#18858](https://github.com/astral-sh/ruff/pull/18858))
- \[`ruff`\] Fix false positives and negatives in `RUF010` ([#18690](https://github.com/astral-sh/ruff/pull/18690))
- Fix casing of `analyze.direction` variant names ([#18892](https://github.com/astral-sh/ruff/pull/18892))
### Rule changes
- Fix f-string interpolation escaping in generated fixes ([#18882](https://github.com/astral-sh/ruff/pull/18882))
- \[`flake8-return`\] Mark `RET501` fix unsafe if comments are inside ([#18780](https://github.com/astral-sh/ruff/pull/18780))
- \[`flake8-async`\] Fix detection for large integer sleep durations in `ASYNC116` rule ([#18767](https://github.com/astral-sh/ruff/pull/18767))
- \[`flake8-async`\] Mark autofix for `ASYNC115` as unsafe if the call expression contains comments ([#18753](https://github.com/astral-sh/ruff/pull/18753))
- \[`flake8-bugbear`\] Mark autofix for `B004` as unsafe if the `hasattr` call expr contains comments ([#18755](https://github.com/astral-sh/ruff/pull/18755))
- \[`flake8-comprehension`\] Mark autofix for `C420` as unsafe if there's comments inside the dict comprehension ([#18768](https://github.com/astral-sh/ruff/pull/18768))
- \[`flake8-comprehensions`\] Handle template strings for comprehension fixes ([#18710](https://github.com/astral-sh/ruff/pull/18710))
- \[`flake8-future-annotations`\] Add autofix (`FA100`) ([#18903](https://github.com/astral-sh/ruff/pull/18903))
- \[`pyflakes`\] Mark `F504`/`F522`/`F523` autofix as unsafe if there's a call with side effect ([#18839](https://github.com/astral-sh/ruff/pull/18839))
- \[`pylint`\] Allow fix with comments and document performance implications (`PLW3301`) ([#18936](https://github.com/astral-sh/ruff/pull/18936))
- \[`pylint`\] Detect more exotic `NaN` literals in `PLW0177` ([#18630](https://github.com/astral-sh/ruff/pull/18630))
- \[`pylint`\] Fix `PLC1802` autofix creating a syntax error and mark autofix as unsafe if there's comments in the `len` call ([#18836](https://github.com/astral-sh/ruff/pull/18836))
- \[`pyupgrade`\] Extend version detection to include `sys.version_info.major` (`UP036`) ([#18633](https://github.com/astral-sh/ruff/pull/18633))
- \[`ruff`\] Add lint rule `RUF064` for calling `chmod` with non-octal integers ([#18541](https://github.com/astral-sh/ruff/pull/18541))
- \[`ruff`\] Added `cls.__dict__.get('__annotations__')` check (`RUF063`) ([#18233](https://github.com/astral-sh/ruff/pull/18233))
- \[`ruff`\] Frozen `dataclass` default should be valid (`RUF009`) ([#18735](https://github.com/astral-sh/ruff/pull/18735))
### Server
- Consider virtual path for various server actions ([#18910](https://github.com/astral-sh/ruff/pull/18910))
### Documentation
- Add fix safety sections ([#18940](https://github.com/astral-sh/ruff/pull/18940),[#18841](https://github.com/astral-sh/ruff/pull/18841),[#18802](https://github.com/astral-sh/ruff/pull/18802),[#18837](https://github.com/astral-sh/ruff/pull/18837),[#18800](https://github.com/astral-sh/ruff/pull/18800),[#18415](https://github.com/astral-sh/ruff/pull/18415),[#18853](https://github.com/astral-sh/ruff/pull/18853),[#18842](https://github.com/astral-sh/ruff/pull/18842))
- Use updated pre-commit id ([#18718](https://github.com/astral-sh/ruff/pull/18718))
- \[`perflint`\] Small docs improvement to `PERF401` ([#18786](https://github.com/astral-sh/ruff/pull/18786))
- \[`pyupgrade`\]: Use `super()`, not `__super__` in error messages (`UP008`) ([#18743](https://github.com/astral-sh/ruff/pull/18743))
- \[`flake8-pie`\] Small docs fix to `PIE794` ([#18829](https://github.com/astral-sh/ruff/pull/18829))
- \[`flake8-pyi`\] Correct `collections-named-tuple` example to use PascalCase assignment ([#16884](https://github.com/astral-sh/ruff/pull/16884))
- \[`flake8-pie`\] Add note on type checking benefits to `unnecessary-dict-kwargs` (`PIE804`) ([#18666](https://github.com/astral-sh/ruff/pull/18666))
- \[`pycodestyle`\] Clarify PEP 8 relationship to `whitespace-around-operator` rules ([#18870](https://github.com/astral-sh/ruff/pull/18870))
### Other changes
- Disallow newlines in format specifiers of single quoted f- or t-strings ([#18708](https://github.com/astral-sh/ruff/pull/18708))
- \[`flake8-logging`\] Add fix safety section to `LOG002` ([#18840](https://github.com/astral-sh/ruff/pull/18840))
- \[`pyupgrade`\] Add fix safety section to `UP010` ([#18838](https://github.com/astral-sh/ruff/pull/18838))
## 0.12.0
Check out the [blog post](https://astral.sh/blog/ruff-v0.12.0) for a migration
guide and overview of the changes! guide and overview of the changes!
### Breaking changes ### Breaking changes
- **Several rules can now add `from __future__ import annotations` automatically** - **Detection of more syntax errors**
`TC001`, `TC002`, `TC003`, `RUF013`, and `UP037` now add `from __future__ import annotations` as part of their fixes when the Ruff now detects version-related syntax errors, such as the use of the `match`
`lint.future-annotations` setting is enabled. This allows the rules to move statement on Python versions before 3.10, and syntax errors emitted by
more imports into `TYPE_CHECKING` blocks (`TC001`, `TC002`, and `TC003`), CPython's compiler, such as irrefutable `match` patterns before the final
use PEP 604 union syntax on Python versions before 3.10 (`RUF013`), and `case` arm.
unquote more annotations (`UP037`).
- **Full module paths are now used to verify first-party modules** - **New default Python version handling for syntax errors**
Ruff now checks that the full path to a module exists on disk before Ruff will default to the *latest* supported Python version (3.13) when
categorizing it as a first-party import. This change makes first-party checking for the version-related syntax errors mentioned above to prevent
import detection more accurate, helping to avoid false positives on local false positives in projects without a Python version configured. The default
directories with the same name as a third-party dependency, for example. See in all other cases, like applying lint rules, is unchanged and remains at the
the [FAQ minimum supported Python version (3.9).
section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) on import categorization for more details.
- **Deprecated rules must now be selected by exact rule code** - **Updated f-string formatting**
Ruff will no longer activate deprecated rules selected by their group name Ruff now formats multi-line f-strings with format specifiers to avoid adding a
or prefix. As noted below, the two remaining deprecated rules were also line break after the format specifier. This addresses a change to the Python
removed in this release, so this won't affect any current rules, but it will grammar in version 3.13.4 that made such a line break a syntax error.
still affect any deprecations in the future.
- **The deprecated macOS configuration directory fallback has been removed** - **`rust-toolchain.toml` is no longer included in source distributions**
Ruff will no longer look for a user-level configuration file at The `rust-toolchain.toml` is used to specify a higher Rust version than Ruff's
`~/Library/Application Support/ruff/ruff.toml` on macOS. This feature was minimum supported Rust version (MSRV) for development and building release
deprecated in v0.5 in favor of using the [XDG artifacts. However, when present in source distributions, it would also cause
specification](https://specifications.freedesktop.org/basedir-spec/latest/) downstream package maintainers to pull in the same Rust toolchain, even if
(usually resolving to `~/.config/ruff/ruff.toml`), like on Linux. The their available toolchain was MSRV-compatible.
fallback and accompanying deprecation warning have now been removed.
### Removed Rules
The following rules have been removed:
- [`pandas-df-variable-name`](https://docs.astral.sh/ruff/rules/pandas-df-variable-name) (`PD901`) - [`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/)
- [`non-pep604-isinstance`](https://docs.astral.sh/ruff/rules/non-pep604-isinstance) (`UP038`) (`S320`)
### Deprecated Rules
The following rules have been deprecated:
- [`pandas-df-variable-name`](https://docs.astral.sh/ruff/rules/pandas-df-variable-name/)
### Stabilization
The following rules have been stabilized and are no longer in preview:
- [`airflow-dag-no-schedule-argument`](https://docs.astral.sh/ruff/rules/airflow-dag-no-schedule-argument) - [`for-loop-writes`](https://docs.astral.sh/ruff/rules/for-loop-writes) (`FURB122`)
(`AIR002`) - [`check-and-remove-from-set`](https://docs.astral.sh/ruff/rules/check-and-remove-from-set) (`FURB132`)
- [`airflow3-removal`](https://docs.astral.sh/ruff/rules/airflow3-removal) (`AIR301`) - [`verbose-decimal-constructor`](https://docs.astral.sh/ruff/rules/verbose-decimal-constructor) (`FURB157`)
- [`airflow3-moved-to-provider`](https://docs.astral.sh/ruff/rules/airflow3-moved-to-provider) - [`fromisoformat-replace-z`](https://docs.astral.sh/ruff/rules/fromisoformat-replace-z) (`FURB162`)
(`AIR302`) - [`int-on-sliced-str`](https://docs.astral.sh/ruff/rules/int-on-sliced-str) (`FURB166`)
- [`airflow3-suggested-update`](https://docs.astral.sh/ruff/rules/airflow3-suggested-update) - [`exc-info-outside-except-handler`](https://docs.astral.sh/ruff/rules/exc-info-outside-except-handler) (`LOG014`)
(`AIR311`) - [`import-outside-top-level`](https://docs.astral.sh/ruff/rules/import-outside-top-level) (`PLC0415`)
- [`airflow3-suggested-to-move-to-provider`](https://docs.astral.sh/ruff/rules/airflow3-suggested-to-move-to-provider) - [`unnecessary-dict-index-lookup`](https://docs.astral.sh/ruff/rules/unnecessary-dict-index-lookup) (`PLR1733`)
(`AIR312`) - [`nan-comparison`](https://docs.astral.sh/ruff/rules/nan-comparison) (`PLW0177`)
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever) (`ASYNC116`) - [`eq-without-hash`](https://docs.astral.sh/ruff/rules/eq-without-hash) (`PLW1641`)
- [`f-string-number-format`](https://docs.astral.sh/ruff/rules/f-string-number-format) (`FURB116`) - [`pytest-parameter-with-default-argument`](https://docs.astral.sh/ruff/rules/pytest-parameter-with-default-argument) (`PT028`)
- [`os-symlink`](https://docs.astral.sh/ruff/rules/os-symlink) (`PTH211`) - [`pytest-warns-too-broad`](https://docs.astral.sh/ruff/rules/pytest-warns-too-broad) (`PT030`)
- [`generic-not-last-base-class`](https://docs.astral.sh/ruff/rules/generic-not-last-base-class) - [`pytest-warns-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-warns-with-multiple-statements) (`PT031`)
(`PYI059`) - [`invalid-formatter-suppression-comment`](https://docs.astral.sh/ruff/rules/invalid-formatter-suppression-comment) (`RUF028`)
- [`redundant-none-literal`](https://docs.astral.sh/ruff/rules/redundant-none-literal) (`PYI061`) - [`dataclass-enum`](https://docs.astral.sh/ruff/rules/dataclass-enum) (`RUF049`)
- [`pytest-raises-ambiguous-pattern`](https://docs.astral.sh/ruff/rules/pytest-raises-ambiguous-pattern) - [`class-with-mixed-type-vars`](https://docs.astral.sh/ruff/rules/class-with-mixed-type-vars) (`RUF053`)
(`RUF043`) - [`unnecessary-round`](https://docs.astral.sh/ruff/rules/unnecessary-round) (`RUF057`)
- [`unused-unpacked-variable`](https://docs.astral.sh/ruff/rules/unused-unpacked-variable) - [`starmap-zip`](https://docs.astral.sh/ruff/rules/starmap-zip) (`RUF058`)
(`RUF059`) - [`non-pep604-annotation-optional`] (`UP045`)
- [`useless-class-metaclass-type`](https://docs.astral.sh/ruff/rules/useless-class-metaclass-type) - [`non-pep695-generic-class`](https://docs.astral.sh/ruff/rules/non-pep695-generic-class) (`UP046`)
(`UP050`) - [`non-pep695-generic-function`](https://docs.astral.sh/ruff/rules/non-pep695-generic-function) (`UP047`)
- [`private-type-parameter`](https://docs.astral.sh/ruff/rules/private-type-parameter) (`UP049`)
The following behaviors have been stabilized:
- [`assert-raises-exception`](https://docs.astral.sh/ruff/rules/assert-raises-exception) (`B017`) - [`collection-literal-concatenation`] (`RUF005`) now recognizes slices, in
now checks for direct calls to `unittest.TestCase.assert_raises` and `pytest.raises` instead of addition to list literals and variables.
only the context manager forms. - The fix for [`readlines-in-for`] (`FURB129`) is now marked as always safe.
- [`missing-trailing-comma`](https://docs.astral.sh/ruff/rules/missing-trailing-comma) (`COM812`) - [`if-else-block-instead-of-if-exp`] (`SIM108`) will now further simplify
and [`prohibited-trailing-comma`](https://docs.astral.sh/ruff/rules/prohibited-trailing-comma) expressions to use `or` instead of an `if` expression, where possible.
(`COM819`) now check for trailing commas in PEP 695 type parameter lists. - [`unused-noqa`] (`RUF100`) now checks for file-level `noqa` comments as well
- [`raw-string-in-exception`](https://docs.astral.sh/ruff/rules/raw-string-in-exception) (`EM101`) as inline comments.
now also checks for byte strings in exception messages. - [`subprocess-without-shell-equals-true`] (`S603`) now accepts literal strings,
- [`invalid-mock-access`](https://docs.astral.sh/ruff/rules/invalid-mock-access) (`PGH005`) now as well as lists and tuples of literal strings, as trusted input.
checks for `AsyncMock` methods like `not_awaited` in addition to the synchronous variants. - [`boolean-type-hint-positional-argument`] (`FBT001`) now applies to types that
- [`useless-import-alias`](https://docs.astral.sh/ruff/rules/useless-import-alias) (`PLC0414`) no include `bool`, like `bool | int` or `typing.Optional[bool]`, in addition to
longer applies to `__init__.py` files, where it conflicted with one of the suggested fixes for plain `bool` annotations.
[`unused-import`](https://docs.astral.sh/ruff/rules/unused-import) (`F401`). - [`non-pep604-annotation-union`] (`UP007`) has now been split into two rules.
- [`bidirectional-unicode`](https://docs.astral.sh/ruff/rules/bidirectional-unicode) (`PLE2502`) now `UP007` now applies only to `typing.Union`, while
also checks for U+061C (Arabic Letter Mark). [`non-pep604-annotation-optional`] (`UP045`) checks for use of
- The fix for `typing.Optional`. `UP045` has also been stabilized in this release, but you
[`multiple-with-statements`](https://docs.astral.sh/ruff/rules/multiple-with-statements) may need to update existing `include`, `ignore`, or `noqa` settings to
(`SIM117`) is now marked as always safe. accommodate this change.
### Preview features
- \[`pyupgrade`\] Enable `UP043` in stub files ([#20027](https://github.com/astral-sh/ruff/pull/20027)) - \[`ruff`\] Check for non-context-manager use of `pytest.raises`, `pytest.warns`, and `pytest.deprecated_call` (`RUF061`) ([#17368](https://github.com/astral-sh/ruff/pull/17368))
- [syntax-errors] Raise unsupported syntax error for template strings prior to Python 3.14 ([#18664](https://github.com/astral-sh/ruff/pull/18664))
### Bug fixes
- \[`pyupgrade`\] Apply `UP008` only when the `__class__` cell exists ([#19424](https://github.com/astral-sh/ruff/pull/19424)) - Add syntax error when conversion flag does not immediately follow exclamation mark ([#18706](https://github.com/astral-sh/ruff/pull/18706))
- \[`ruff`\] Fix empty f-string detection in `in-empty-collection` (`RUF060`) ([#20249](https://github.com/astral-sh/ruff/pull/20249)) - Add trailing space around `readlines` ([#18542](https://github.com/astral-sh/ruff/pull/18542))
- Fix `\r` and `\r\n` handling in t- and f-string debug texts ([#18673](https://github.com/astral-sh/ruff/pull/18673))
- Hug closing `}` when f-string expression has a format specifier ([#18704](https://github.com/astral-sh/ruff/pull/18704))
- \[`flake8-pyi`\] Avoid syntax error in the case of starred and keyword arguments (`PYI059`) ([#18611](https://github.com/astral-sh/ruff/pull/18611))
- \[`flake8-return`\] Fix `RET504` autofix generating a syntax error ([#18428](https://github.com/astral-sh/ruff/pull/18428))
- \[`pep8-naming`\] Suppress fix for `N804` and `N805` if the recommended name is already used ([#18472](https://github.com/astral-sh/ruff/pull/18472))
- \[`pycodestyle`\] Avoid causing a syntax error in expressions spanning multiple lines (`E731`) ([#18479](https://github.com/astral-sh/ruff/pull/18479))
- \[`pyupgrade`\] Suppress `UP008` if `super` is shadowed ([#18688](https://github.com/astral-sh/ruff/pull/18688))
- \[`refurb`\] Parenthesize lambda and ternary expressions (`FURB122`, `FURB142`) ([#18592](https://github.com/astral-sh/ruff/pull/18592))
- \[`ruff`\] Handle extra arguments to `deque` (`RUF037`) ([#18614](https://github.com/astral-sh/ruff/pull/18614))
- \[`ruff`\] Preserve parentheses around `deque` in fix for `unnecessary-empty-iterable-within-deque-call` (`RUF037`) ([#18598](https://github.com/astral-sh/ruff/pull/18598))
- \[`ruff`\] Validate arguments before offering a fix (`RUF056`) ([#18631](https://github.com/astral-sh/ruff/pull/18631))
- \[`ruff`\] Skip fix for `RUF059` if dummy name is already bound ([#18509](https://github.com/astral-sh/ruff/pull/18509))
- \[`pylint`\] Fix `PLW0128` to check assignment targets in square brackets and after asterisks ([#18665](https://github.com/astral-sh/ruff/pull/18665))
### Rule changes
- Fix false positive on mutations in `return` statements (`B909`) ([#18408](https://github.com/astral-sh/ruff/pull/18408))
- Treat `ty:` comments as pragma comments ([#18532](https://github.com/astral-sh/ruff/pull/18532))
- \[`flake8-pyi`\] Apply `custom-typevar-for-self` to string annotations (`PYI019`) ([#18311](https://github.com/astral-sh/ruff/pull/18311))
- \[`pyupgrade`\] Don't offer a fix for `Optional[None]` (`UP007`, `UP045)` ([#18545](https://github.com/astral-sh/ruff/pull/18545))
- \[`pyupgrade`\] Fix `super(__class__, self)` detection (`UP008`) ([#18478](https://github.com/astral-sh/ruff/pull/18478))
- \[`refurb`\] Make the fix for `FURB163` unsafe for `log2`, `log10`, `*args`, and deleted comments ([#18645](https://github.com/astral-sh/ruff/pull/18645))
### Server
- Add support for using uv as an alternative formatter backend ([#19665](https://github.com/astral-sh/ruff/pull/19665)) - Support cancellation requests ([#18627](https://github.com/astral-sh/ruff/pull/18627))
### Documentation
- \[`pep8-naming`\] Fix formatting of `__all__` (`N816`) ([#20301](https://github.com/astral-sh/ruff/pull/20301)) - Drop confusing second `*` from glob pattern example for `per-file-target-version` ([#18709](https://github.com/astral-sh/ruff/pull/18709))
- Update Neovim configuration examples ([#18491](https://github.com/astral-sh/ruff/pull/18491))
## 0.12.x - \[`pylint`\] De-emphasize `__hash__ = Parent.__hash__` (`PLW1641`) ([#18613](https://github.com/astral-sh/ruff/pull/18613))
- \[`refurb`\] Add a note about float literal handling (`FURB157`) ([#18615](https://github.com/astral-sh/ruff/pull/18615))
See [changelogs/0.12.x](./changelogs/0.12.x.md)
## 0.11.x
@@ -158,3 +422,12 @@ See [changelogs/0.2.x](./changelogs/0.2.x.md)
## 0.1.x
See [changelogs/0.1.x](./changelogs/0.1.x.md)
[`boolean-type-hint-positional-argument`]: https://docs.astral.sh/ruff/rules/boolean-type-hint-positional-argument
[`collection-literal-concatenation`]: https://docs.astral.sh/ruff/rules/collection-literal-concatenation
[`if-else-block-instead-of-if-exp`]: https://docs.astral.sh/ruff/rules/if-else-block-instead-of-if-exp
[`non-pep604-annotation-optional`]: https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional
[`non-pep604-annotation-union`]: https://docs.astral.sh/ruff/rules/non-pep604-annotation-union
[`readlines-in-for`]: https://docs.astral.sh/ruff/rules/readlines-in-for
[`subprocess-without-shell-equals-true`]: https://docs.astral.sh/ruff/rules/subprocess-without-shell-equals-true
[`unused-noqa`]: https://docs.astral.sh/ruff/rules/unused-noqa

633
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.87" rust-version = "1.86"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -23,13 +23,11 @@ ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
ruff_memory_usage = { path = "crates/ruff_memory_usage" }
ruff_notebook = { path = "crates/ruff_notebook" }
ruff_options_metadata = { path = "crates/ruff_options_metadata" }
ruff_python_ast = { path = "crates/ruff_python_ast" }
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
ruff_python_formatter = { path = "crates/ruff_python_formatter" }
ruff_python_importer = { path = "crates/ruff_python_importer" }
ruff_python_index = { path = "crates/ruff_python_index" }
ruff_python_literal = { path = "crates/ruff_python_literal" }
ruff_python_parser = { path = "crates/ruff_python_parser" }
@@ -42,7 +40,6 @@ ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }
ty = { path = "crates/ty" }
ty_combine = { path = "crates/ty_combine" }
ty_ide = { path = "crates/ty_ide" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }
@@ -86,7 +83,7 @@ etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.6.2", features = [ get-size2 = { version = "0.6.0", features = [
"derive",
"smallvec",
"hashbrown",
@@ -116,7 +113,7 @@ jiff = { version = "0.2.0" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "1.0.0" }
libc = { version = "0.2.153" }
libcst = { version = "1.8.4", default-features = false } libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
@@ -144,7 +141,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "3713cd7eb30821c0c086591832dd6f59f2af7fe7", default-features = false, features = [ salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "c2f4827b512b82842dbc84b1ccc4367500e301ed", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",
@@ -216,8 +213,6 @@ unexpected_cfgs = { level = "warn", check-cfg = [
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Enabled at the crate level
disallowed_methods = "allow"
# Allowed pedantic lints
char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
@@ -252,19 +247,10 @@ rest_pat_in_fully_bound_structs = "warn"
redundant_clone = "warn"
debug_assert_with_mut_call = "warn"
unused_peekable = "warn"
# This lint sometimes flags code whose `if` and `else`
# bodies could be flipped when a `!` operator is removed.
# While perhaps sometimes a good idea, it is also often
# not a good idea due to other factors impacting
# readability. For example, if flipping the bodies results
# in the `if` being an order of magnitude bigger than the
# `else`, then some might consider that harder to read.
if_not_else = "allow"
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"
[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times

1160
LICENSE

File diff suppressed because it is too large Load Diff

View File

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.13.0/install.sh | sh curl -LsSf https://astral.sh/ruff/0.12.7/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.13.0/install.ps1 | iex" powershell -c "irm https://astral.sh/ruff/0.12.7/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
rev: v0.13.0 rev: v0.12.7
  hooks:
    # Run the linter.
    - id: ruff-check
@@ -421,7 +421,7 @@ Ruff is released under the MIT license.
Ruff is used by a number of major open-source projects and companies, including:
- [Albumentations](https://github.com/albumentations-team/AlbumentationsX) - [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- [Anki](https://apps.ankiweb.net/)
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))

View File

@@ -1,551 +0,0 @@
# Changelog 0.12.x
## 0.12.0
Check out the [blog post](https://astral.sh/blog/ruff-v0.12.0) for a migration
guide and overview of the changes!
### Breaking changes
- **Detection of more syntax errors**
Ruff now detects version-related syntax errors, such as the use of the `match`
statement on Python versions before 3.10, and syntax errors emitted by
CPython's compiler, such as irrefutable `match` patterns before the final
`case` arm.
- **New default Python version handling for syntax errors**
Ruff will default to the *latest* supported Python version (3.13) when
checking for the version-related syntax errors mentioned above to prevent
false positives in projects without a Python version configured. The default
in all other cases, like applying lint rules, is unchanged and remains at the
minimum supported Python version (3.9).
- **Updated f-string formatting**
Ruff now formats multi-line f-strings with format specifiers to avoid adding a
line break after the format specifier. This addresses a change to the Python
grammar in version 3.13.4 that made such a line break a syntax error.
- **`rust-toolchain.toml` is no longer included in source distributions**
The `rust-toolchain.toml` is used to specify a higher Rust version than Ruff's
minimum supported Rust version (MSRV) for development and building release
artifacts. However, when present in source distributions, it would also cause
downstream package maintainers to pull in the same Rust toolchain, even if
their available toolchain was MSRV-compatible.
### Removed Rules
The following rules have been removed:
- [`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/)
(`S320`)
### Deprecated Rules
The following rules have been deprecated:
- [`pandas-df-variable-name`](https://docs.astral.sh/ruff/rules/pandas-df-variable-name/)
### Stabilization
The following rules have been stabilized and are no longer in preview:
- [`for-loop-writes`](https://docs.astral.sh/ruff/rules/for-loop-writes) (`FURB122`)
- [`check-and-remove-from-set`](https://docs.astral.sh/ruff/rules/check-and-remove-from-set) (`FURB132`)
- [`verbose-decimal-constructor`](https://docs.astral.sh/ruff/rules/verbose-decimal-constructor) (`FURB157`)
- [`fromisoformat-replace-z`](https://docs.astral.sh/ruff/rules/fromisoformat-replace-z) (`FURB162`)
- [`int-on-sliced-str`](https://docs.astral.sh/ruff/rules/int-on-sliced-str) (`FURB166`)
- [`exc-info-outside-except-handler`](https://docs.astral.sh/ruff/rules/exc-info-outside-except-handler) (`LOG014`)
- [`import-outside-top-level`](https://docs.astral.sh/ruff/rules/import-outside-top-level) (`PLC0415`)
- [`unnecessary-dict-index-lookup`](https://docs.astral.sh/ruff/rules/unnecessary-dict-index-lookup) (`PLR1733`)
- [`nan-comparison`](https://docs.astral.sh/ruff/rules/nan-comparison) (`PLW0177`)
- [`eq-without-hash`](https://docs.astral.sh/ruff/rules/eq-without-hash) (`PLW1641`)
- [`pytest-parameter-with-default-argument`](https://docs.astral.sh/ruff/rules/pytest-parameter-with-default-argument) (`PT028`)
- [`pytest-warns-too-broad`](https://docs.astral.sh/ruff/rules/pytest-warns-too-broad) (`PT030`)
- [`pytest-warns-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-warns-with-multiple-statements) (`PT031`)
- [`invalid-formatter-suppression-comment`](https://docs.astral.sh/ruff/rules/invalid-formatter-suppression-comment) (`RUF028`)
- [`dataclass-enum`](https://docs.astral.sh/ruff/rules/dataclass-enum) (`RUF049`)
- [`class-with-mixed-type-vars`](https://docs.astral.sh/ruff/rules/class-with-mixed-type-vars) (`RUF053`)
- [`unnecessary-round`](https://docs.astral.sh/ruff/rules/unnecessary-round) (`RUF057`)
- [`starmap-zip`](https://docs.astral.sh/ruff/rules/starmap-zip) (`RUF058`)
- [`non-pep604-annotation-optional`](https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional) (`UP045`)
- [`non-pep695-generic-class`](https://docs.astral.sh/ruff/rules/non-pep695-generic-class) (`UP046`)
- [`non-pep695-generic-function`](https://docs.astral.sh/ruff/rules/non-pep695-generic-function) (`UP047`)
- [`private-type-parameter`](https://docs.astral.sh/ruff/rules/private-type-parameter) (`UP049`)
The following behaviors have been stabilized:
- [`collection-literal-concatenation`] (`RUF005`) now recognizes slices, in
addition to list literals and variables.
- The fix for [`readlines-in-for`] (`FURB129`) is now marked as always safe.
- [`if-else-block-instead-of-if-exp`] (`SIM108`) will now further simplify
expressions to use `or` instead of an `if` expression, where possible.
- [`unused-noqa`] (`RUF100`) now checks for file-level `noqa` comments as well
as inline comments.
- [`subprocess-without-shell-equals-true`] (`S603`) now accepts literal strings,
as well as lists and tuples of literal strings, as trusted input.
- [`boolean-type-hint-positional-argument`] (`FBT001`) now applies to types that
include `bool`, like `bool | int` or `typing.Optional[bool]`, in addition to
plain `bool` annotations.
- [`non-pep604-annotation-union`] (`UP007`) has now been split into two rules.
`UP007` now applies only to `typing.Union`, while
[`non-pep604-annotation-optional`] (`UP045`) checks for use of
`typing.Optional`. `UP045` has also been stabilized in this release, but you
may need to update existing `include`, `ignore`, or `noqa` settings to
accommodate this change.
### Preview features
- \[`ruff`\] Check for non-context-manager use of `pytest.raises`, `pytest.warns`, and `pytest.deprecated_call` (`RUF061`) ([#17368](https://github.com/astral-sh/ruff/pull/17368))
- [syntax-errors] Raise unsupported syntax error for template strings prior to Python 3.14 ([#18664](https://github.com/astral-sh/ruff/pull/18664))
### Bug fixes
- Add syntax error when conversion flag does not immediately follow exclamation mark ([#18706](https://github.com/astral-sh/ruff/pull/18706))
- Add trailing space around `readlines` ([#18542](https://github.com/astral-sh/ruff/pull/18542))
- Fix `\r` and `\r\n` handling in t- and f-string debug texts ([#18673](https://github.com/astral-sh/ruff/pull/18673))
- Hug closing `}` when f-string expression has a format specifier ([#18704](https://github.com/astral-sh/ruff/pull/18704))
- \[`flake8-pyi`\] Avoid syntax error in the case of starred and keyword arguments (`PYI059`) ([#18611](https://github.com/astral-sh/ruff/pull/18611))
- \[`flake8-return`\] Fix `RET504` autofix generating a syntax error ([#18428](https://github.com/astral-sh/ruff/pull/18428))
- \[`pep8-naming`\] Suppress fix for `N804` and `N805` if the recommended name is already used ([#18472](https://github.com/astral-sh/ruff/pull/18472))
- \[`pycodestyle`\] Avoid causing a syntax error in expressions spanning multiple lines (`E731`) ([#18479](https://github.com/astral-sh/ruff/pull/18479))
- \[`pyupgrade`\] Suppress `UP008` if `super` is shadowed ([#18688](https://github.com/astral-sh/ruff/pull/18688))
- \[`refurb`\] Parenthesize lambda and ternary expressions (`FURB122`, `FURB142`) ([#18592](https://github.com/astral-sh/ruff/pull/18592))
- \[`ruff`\] Handle extra arguments to `deque` (`RUF037`) ([#18614](https://github.com/astral-sh/ruff/pull/18614))
- \[`ruff`\] Preserve parentheses around `deque` in fix for `unnecessary-empty-iterable-within-deque-call` (`RUF037`) ([#18598](https://github.com/astral-sh/ruff/pull/18598))
- \[`ruff`\] Validate arguments before offering a fix (`RUF056`) ([#18631](https://github.com/astral-sh/ruff/pull/18631))
- \[`ruff`\] Skip fix for `RUF059` if dummy name is already bound ([#18509](https://github.com/astral-sh/ruff/pull/18509))
- \[`pylint`\] Fix `PLW0128` to check assignment targets in square brackets and after asterisks ([#18665](https://github.com/astral-sh/ruff/pull/18665))
### Rule changes
- Fix false positive on mutations in `return` statements (`B909`) ([#18408](https://github.com/astral-sh/ruff/pull/18408))
- Treat `ty:` comments as pragma comments ([#18532](https://github.com/astral-sh/ruff/pull/18532))
- \[`flake8-pyi`\] Apply `custom-typevar-for-self` to string annotations (`PYI019`) ([#18311](https://github.com/astral-sh/ruff/pull/18311))
- \[`pyupgrade`\] Don't offer a fix for `Optional[None]` (`UP007`, `UP045`) ([#18545](https://github.com/astral-sh/ruff/pull/18545))
- \[`pyupgrade`\] Fix `super(__class__, self)` detection (`UP008`) ([#18478](https://github.com/astral-sh/ruff/pull/18478))
- \[`refurb`\] Make the fix for `FURB163` unsafe for `log2`, `log10`, `*args`, and deleted comments ([#18645](https://github.com/astral-sh/ruff/pull/18645))
### Server
- Support cancellation requests ([#18627](https://github.com/astral-sh/ruff/pull/18627))
### Documentation
- Drop confusing second `*` from glob pattern example for `per-file-target-version` ([#18709](https://github.com/astral-sh/ruff/pull/18709))
- Update Neovim configuration examples ([#18491](https://github.com/astral-sh/ruff/pull/18491))
- \[`pylint`\] De-emphasize `__hash__ = Parent.__hash__` (`PLW1641`) ([#18613](https://github.com/astral-sh/ruff/pull/18613))
- \[`refurb`\] Add a note about float literal handling (`FURB157`) ([#18615](https://github.com/astral-sh/ruff/pull/18615))
## 0.12.1
### Preview features
- \[`flake8-errmsg`\] Extend `EM101` to support byte strings ([#18867](https://github.com/astral-sh/ruff/pull/18867))
- \[`flake8-use-pathlib`\] Add autofix for `PTH202` ([#18763](https://github.com/astral-sh/ruff/pull/18763))
- \[`pygrep-hooks`\] Add `AsyncMock` methods to `invalid-mock-access` (`PGH005`) ([#18547](https://github.com/astral-sh/ruff/pull/18547))
- \[`pylint`\] Ignore `__init__.py` files (`PLC0414`) ([#18400](https://github.com/astral-sh/ruff/pull/18400))
- \[`ruff`\] Trigger `RUF037` for empty string and byte strings ([#18862](https://github.com/astral-sh/ruff/pull/18862))
- [formatter] Fix missing blank lines before decorated classes in `.pyi` files ([#18888](https://github.com/astral-sh/ruff/pull/18888))
### Bug fixes
- Avoid generating diagnostics with per-file ignores ([#18801](https://github.com/astral-sh/ruff/pull/18801))
- Handle parenthesized arguments in `remove_argument` ([#18805](https://github.com/astral-sh/ruff/pull/18805))
- \[`flake8-logging`\] Avoid false positive for `exc_info=True` outside `logger.exception` (`LOG014`) ([#18737](https://github.com/astral-sh/ruff/pull/18737))
- \[`flake8-pytest-style`\] Enforce `pytest` import for decorators ([#18779](https://github.com/astral-sh/ruff/pull/18779))
- \[`flake8-pytest-style`\] Mark autofix for `PT001` and `PT023` as unsafe if there's comments in the decorator ([#18792](https://github.com/astral-sh/ruff/pull/18792))
- \[`flake8-pytest-style`\] `PT001`/`PT023` fix makes syntax error on parenthesized decorator ([#18782](https://github.com/astral-sh/ruff/pull/18782))
- \[`flake8-raise`\] Make fix unsafe if it deletes comments (`RSE102`) ([#18788](https://github.com/astral-sh/ruff/pull/18788))
- \[`flake8-simplify`\] Fix `SIM911` autofix creating a syntax error ([#18793](https://github.com/astral-sh/ruff/pull/18793))
- \[`flake8-simplify`\] Fix false negatives for shadowed bindings (`SIM910`, `SIM911`) ([#18794](https://github.com/astral-sh/ruff/pull/18794))
- \[`flake8-simplify`\] Preserve original behavior for `except ()` and bare `except` (`SIM105`) ([#18213](https://github.com/astral-sh/ruff/pull/18213))
- \[`flake8-pyi`\] Fix `PYI041`'s fix causing `TypeError` with `None | None | ...` ([#18637](https://github.com/astral-sh/ruff/pull/18637))
- \[`perflint`\] Fix `PERF101` autofix creating a syntax error and mark autofix as unsafe if there are comments in the `list` call expr ([#18803](https://github.com/astral-sh/ruff/pull/18803))
- \[`perflint`\] Fix false negative in `PERF401` ([#18866](https://github.com/astral-sh/ruff/pull/18866))
- \[`pylint`\] Avoid flattening nested `min`/`max` when outer call has single argument (`PLW3301`) ([#16885](https://github.com/astral-sh/ruff/pull/16885))
- \[`pylint`\] Fix `PLC2801` autofix creating a syntax error ([#18857](https://github.com/astral-sh/ruff/pull/18857))
- \[`pylint`\] Mark `PLE0241` autofix as unsafe if there's comments in the base classes ([#18832](https://github.com/astral-sh/ruff/pull/18832))
- \[`pylint`\] Suppress `PLE2510`/`PLE2512`/`PLE2513`/`PLE2514`/`PLE2515` autofix if the text contains an odd number of backslashes ([#18856](https://github.com/astral-sh/ruff/pull/18856))
- \[`refurb`\] Detect more exotic float literals in `FURB164` ([#18925](https://github.com/astral-sh/ruff/pull/18925))
- \[`refurb`\] Fix `FURB163` autofix creating a syntax error for `yield` expressions ([#18756](https://github.com/astral-sh/ruff/pull/18756))
- \[`refurb`\] Mark `FURB129` autofix as unsafe if there's comments in the `readlines` call ([#18858](https://github.com/astral-sh/ruff/pull/18858))
- \[`ruff`\] Fix false positives and negatives in `RUF010` ([#18690](https://github.com/astral-sh/ruff/pull/18690))
- Fix casing of `analyze.direction` variant names ([#18892](https://github.com/astral-sh/ruff/pull/18892))
### Rule changes
- Fix f-string interpolation escaping in generated fixes ([#18882](https://github.com/astral-sh/ruff/pull/18882))
- \[`flake8-return`\] Mark `RET501` fix unsafe if comments are inside ([#18780](https://github.com/astral-sh/ruff/pull/18780))
- \[`flake8-async`\] Fix detection for large integer sleep durations in `ASYNC116` rule ([#18767](https://github.com/astral-sh/ruff/pull/18767))
- \[`flake8-async`\] Mark autofix for `ASYNC115` as unsafe if the call expression contains comments ([#18753](https://github.com/astral-sh/ruff/pull/18753))
- \[`flake8-bugbear`\] Mark autofix for `B004` as unsafe if the `hasattr` call expr contains comments ([#18755](https://github.com/astral-sh/ruff/pull/18755))
- \[`flake8-comprehensions`\] Mark autofix for `C420` as unsafe if there's comments inside the dict comprehension ([#18768](https://github.com/astral-sh/ruff/pull/18768))
- \[`flake8-comprehensions`\] Handle template strings for comprehension fixes ([#18710](https://github.com/astral-sh/ruff/pull/18710))
- \[`flake8-future-annotations`\] Add autofix (`FA100`) ([#18903](https://github.com/astral-sh/ruff/pull/18903))
- \[`pyflakes`\] Mark `F504`/`F522`/`F523` autofix as unsafe if there's a call with side effect ([#18839](https://github.com/astral-sh/ruff/pull/18839))
- \[`pylint`\] Allow fix with comments and document performance implications (`PLW3301`) ([#18936](https://github.com/astral-sh/ruff/pull/18936))
- \[`pylint`\] Detect more exotic `NaN` literals in `PLW0177` ([#18630](https://github.com/astral-sh/ruff/pull/18630))
- \[`pylint`\] Fix `PLC1802` autofix creating a syntax error and mark autofix as unsafe if there's comments in the `len` call ([#18836](https://github.com/astral-sh/ruff/pull/18836))
- \[`pyupgrade`\] Extend version detection to include `sys.version_info.major` (`UP036`) ([#18633](https://github.com/astral-sh/ruff/pull/18633))
- \[`ruff`\] Add lint rule `RUF064` for calling `chmod` with non-octal integers ([#18541](https://github.com/astral-sh/ruff/pull/18541))
- \[`ruff`\] Added `cls.__dict__.get('__annotations__')` check (`RUF063`) ([#18233](https://github.com/astral-sh/ruff/pull/18233))
- \[`ruff`\] Frozen `dataclass` default should be valid (`RUF009`) ([#18735](https://github.com/astral-sh/ruff/pull/18735))
### Server
- Consider virtual path for various server actions ([#18910](https://github.com/astral-sh/ruff/pull/18910))
### Documentation
- Add fix safety sections ([#18940](https://github.com/astral-sh/ruff/pull/18940),[#18841](https://github.com/astral-sh/ruff/pull/18841),[#18802](https://github.com/astral-sh/ruff/pull/18802),[#18837](https://github.com/astral-sh/ruff/pull/18837),[#18800](https://github.com/astral-sh/ruff/pull/18800),[#18415](https://github.com/astral-sh/ruff/pull/18415),[#18853](https://github.com/astral-sh/ruff/pull/18853),[#18842](https://github.com/astral-sh/ruff/pull/18842))
- Use updated pre-commit id ([#18718](https://github.com/astral-sh/ruff/pull/18718))
- \[`perflint`\] Small docs improvement to `PERF401` ([#18786](https://github.com/astral-sh/ruff/pull/18786))
- \[`pyupgrade`\] Use `super()`, not `__super__` in error messages (`UP008`) ([#18743](https://github.com/astral-sh/ruff/pull/18743))
- \[`flake8-pie`\] Small docs fix to `PIE794` ([#18829](https://github.com/astral-sh/ruff/pull/18829))
- \[`flake8-pyi`\] Correct `collections-named-tuple` example to use PascalCase assignment ([#16884](https://github.com/astral-sh/ruff/pull/16884))
- \[`flake8-pie`\] Add note on type checking benefits to `unnecessary-dict-kwargs` (`PIE804`) ([#18666](https://github.com/astral-sh/ruff/pull/18666))
- \[`pycodestyle`\] Clarify PEP 8 relationship to `whitespace-around-operator` rules ([#18870](https://github.com/astral-sh/ruff/pull/18870))
### Other changes
- Disallow newlines in format specifiers of single quoted f- or t-strings ([#18708](https://github.com/astral-sh/ruff/pull/18708))
- \[`flake8-logging`\] Add fix safety section to `LOG002` ([#18840](https://github.com/astral-sh/ruff/pull/18840))
- \[`pyupgrade`\] Add fix safety section to `UP010` ([#18838](https://github.com/astral-sh/ruff/pull/18838))
## 0.12.2
### Preview features
- \[`flake8-pyi`\] Expand `Optional[A]` to `A | None` (`PYI016`) ([#18572](https://github.com/astral-sh/ruff/pull/18572))
- \[`pyupgrade`\] Mark `UP008` fix safe if no comments are in range ([#18683](https://github.com/astral-sh/ruff/pull/18683))
### Bug fixes
- \[`flake8-comprehensions`\] Fix `C420` to prepend whitespace when needed ([#18616](https://github.com/astral-sh/ruff/pull/18616))
- \[`perflint`\] Fix `PERF403` panic on attribute or subscription loop variable ([#19042](https://github.com/astral-sh/ruff/pull/19042))
- \[`pydocstyle`\] Fix `D413` infinite loop for parenthesized docstring ([#18930](https://github.com/astral-sh/ruff/pull/18930))
- \[`pylint`\] Fix `PLW0108` autofix introducing a syntax error when the lambda's body contains an assignment expression ([#18678](https://github.com/astral-sh/ruff/pull/18678))
- \[`refurb`\] Fix false positive on empty tuples (`FURB168`) ([#19058](https://github.com/astral-sh/ruff/pull/19058))
- \[`ruff`\] Allow more `field` calls from `attrs` (`RUF009`) ([#19021](https://github.com/astral-sh/ruff/pull/19021))
- \[`ruff`\] Fix syntax error introduced for an empty string followed by a u-prefixed string (`UP025`) ([#18899](https://github.com/astral-sh/ruff/pull/18899))
### Rule changes
- \[`flake8-executable`\] Allow `uvx` in shebang line (`EXE003`) ([#18967](https://github.com/astral-sh/ruff/pull/18967))
- \[`pandas`\] Avoid flagging `PD002` if `pandas` is not imported ([#18963](https://github.com/astral-sh/ruff/pull/18963))
- \[`pyupgrade`\] Avoid PEP-604 unions with `typing.NamedTuple` (`UP007`, `UP045`) ([#18682](https://github.com/astral-sh/ruff/pull/18682))
### Documentation
- Document link between `import-outside-top-level (PLC0415)` and `lint.flake8-tidy-imports.banned-module-level-imports` ([#18733](https://github.com/astral-sh/ruff/pull/18733))
- Fix description of the `format.skip-magic-trailing-comma` example ([#19095](https://github.com/astral-sh/ruff/pull/19095))
- \[`airflow`\] Make `AIR302` example error out-of-the-box ([#18988](https://github.com/astral-sh/ruff/pull/18988))
- \[`airflow`\] Make `AIR312` example error out-of-the-box ([#18989](https://github.com/astral-sh/ruff/pull/18989))
- \[`flake8-annotations`\] Make `ANN401` example error out-of-the-box ([#18974](https://github.com/astral-sh/ruff/pull/18974))
- \[`flake8-async`\] Make `ASYNC100` example error out-of-the-box ([#18993](https://github.com/astral-sh/ruff/pull/18993))
- \[`flake8-async`\] Make `ASYNC105` example error out-of-the-box ([#19002](https://github.com/astral-sh/ruff/pull/19002))
- \[`flake8-async`\] Make `ASYNC110` example error out-of-the-box ([#18975](https://github.com/astral-sh/ruff/pull/18975))
- \[`flake8-async`\] Make `ASYNC210` example error out-of-the-box ([#18977](https://github.com/astral-sh/ruff/pull/18977))
- \[`flake8-async`\] Make `ASYNC220`, `ASYNC221`, and `ASYNC222` examples error out-of-the-box ([#18978](https://github.com/astral-sh/ruff/pull/18978))
- \[`flake8-async`\] Make `ASYNC251` example error out-of-the-box ([#18990](https://github.com/astral-sh/ruff/pull/18990))
- \[`flake8-bandit`\] Make `S201` example error out-of-the-box ([#19017](https://github.com/astral-sh/ruff/pull/19017))
- \[`flake8-bandit`\] Make `S604` and `S609` examples error out-of-the-box ([#19049](https://github.com/astral-sh/ruff/pull/19049))
- \[`flake8-bugbear`\] Make `B028` example error out-of-the-box ([#19054](https://github.com/astral-sh/ruff/pull/19054))
- \[`flake8-bugbear`\] Make `B911` example error out-of-the-box ([#19051](https://github.com/astral-sh/ruff/pull/19051))
- \[`flake8-datetimez`\] Make `DTZ011` example error out-of-the-box ([#19055](https://github.com/astral-sh/ruff/pull/19055))
- \[`flake8-datetimez`\] Make `DTZ901` example error out-of-the-box ([#19056](https://github.com/astral-sh/ruff/pull/19056))
- \[`flake8-pyi`\] Make `PYI032` example error out-of-the-box ([#19061](https://github.com/astral-sh/ruff/pull/19061))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI014`, `PYI015`) ([#19097](https://github.com/astral-sh/ruff/pull/19097))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI042`) ([#19101](https://github.com/astral-sh/ruff/pull/19101))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI059`) ([#19080](https://github.com/astral-sh/ruff/pull/19080))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI062`) ([#19079](https://github.com/astral-sh/ruff/pull/19079))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT023`) ([#19104](https://github.com/astral-sh/ruff/pull/19104))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT030`) ([#19105](https://github.com/astral-sh/ruff/pull/19105))
- \[`flake8-quotes`\] Make example error out-of-the-box (`Q003`) ([#19106](https://github.com/astral-sh/ruff/pull/19106))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM110`) ([#19113](https://github.com/astral-sh/ruff/pull/19113))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM113`) ([#19109](https://github.com/astral-sh/ruff/pull/19109))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM401`) ([#19110](https://github.com/astral-sh/ruff/pull/19110))
- \[`pyflakes`\] Fix backslash in docs (`F621`) ([#19098](https://github.com/astral-sh/ruff/pull/19098))
- \[`pylint`\] Fix `PLC0415` example ([#18970](https://github.com/astral-sh/ruff/pull/18970))
## 0.12.3
### Preview features
- \[`flake8-bugbear`\] Support non-context-manager calls in `B017` ([#19063](https://github.com/astral-sh/ruff/pull/19063))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH100`, `PTH106`, `PTH107`, `PTH108`, `PTH110`, `PTH111`, `PTH112`, `PTH113`, `PTH114`, `PTH115`, `PTH117`, `PTH119`, `PTH120` ([#19213](https://github.com/astral-sh/ruff/pull/19213))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH203`, `PTH204`, `PTH205` ([#18922](https://github.com/astral-sh/ruff/pull/18922))
### Bug fixes
- \[`flake8-return`\] Fix false-positive for variables used inside nested functions in `RET504` ([#18433](https://github.com/astral-sh/ruff/pull/18433))
- Treat form feed as valid whitespace before a line continuation ([#19220](https://github.com/astral-sh/ruff/pull/19220))
- \[`flake8-type-checking`\] Fix syntax error introduced by fix (`TC008`) ([#19150](https://github.com/astral-sh/ruff/pull/19150))
- \[`pyupgrade`\] Keyword arguments in `super` should suppress the `UP008` fix ([#19131](https://github.com/astral-sh/ruff/pull/19131))
### Documentation
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI007`, `PYI008`) ([#19103](https://github.com/astral-sh/ruff/pull/19103))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM116`) ([#19111](https://github.com/astral-sh/ruff/pull/19111))
- \[`flake8-type-checking`\] Make example error out-of-the-box (`TC001`) ([#19151](https://github.com/astral-sh/ruff/pull/19151))
- \[`flake8-use-pathlib`\] Make example error out-of-the-box (`PTH210`) ([#19189](https://github.com/astral-sh/ruff/pull/19189))
- \[`pycodestyle`\] Make example error out-of-the-box (`E272`) ([#19191](https://github.com/astral-sh/ruff/pull/19191))
- \[`pycodestyle`\] Make example not raise unnecessary `SyntaxError` (`E114`) ([#19190](https://github.com/astral-sh/ruff/pull/19190))
- \[`pydoclint`\] Make example error out-of-the-box (`DOC501`) ([#19218](https://github.com/astral-sh/ruff/pull/19218))
- \[`pylint`, `pyupgrade`\] Fix syntax errors in examples (`PLW1501`, `UP028`) ([#19127](https://github.com/astral-sh/ruff/pull/19127))
- \[`pylint`\] Update `missing-maxsplit-arg` docs and error to suggest proper usage (`PLC0207`) ([#18949](https://github.com/astral-sh/ruff/pull/18949))
- \[`flake8-bandit`\] Make example error out-of-the-box (`S412`) ([#19241](https://github.com/astral-sh/ruff/pull/19241))
## 0.12.4
### Preview features
- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))
### Bug fixes
- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))
### Rule changes
- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))
### Documentation
- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288),[#19272](https://github.com/astral-sh/ruff/pull/19272),[#19291](https://github.com/astral-sh/ruff/pull/19291),[#19296](https://github.com/astral-sh/ruff/pull/19296),[#19292](https://github.com/astral-sh/ruff/pull/19292),[#19295](https://github.com/astral-sh/ruff/pull/19295),[#19297](https://github.com/astral-sh/ruff/pull/19297),[#19309](https://github.com/astral-sh/ruff/pull/19309))
## 0.12.5
### Preview features
- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))
### Bug fixes
- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))
### Rule changes
- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))
## 0.12.6
### Preview features
- \[`flake8-commas`\] Add support for trailing comma checks in type parameter lists (`COM812`, `COM819`) ([#19390](https://github.com/astral-sh/ruff/pull/19390))
- \[`pylint`\] Implement auto-fix for `missing-maxsplit-arg` (`PLC0207`) ([#19387](https://github.com/astral-sh/ruff/pull/19387))
- \[`ruff`\] Offer fixes for `RUF039` in more cases ([#19065](https://github.com/astral-sh/ruff/pull/19065))
### Bug fixes
- Support `.pyi` files in ruff analyze graph ([#19611](https://github.com/astral-sh/ruff/pull/19611))
- \[`flake8-pyi`\] Preserve inline comment in ellipsis removal (`PYI013`) ([#19399](https://github.com/astral-sh/ruff/pull/19399))
- \[`perflint`\] Ignore rule if target is `global` or `nonlocal` (`PERF401`) ([#19539](https://github.com/astral-sh/ruff/pull/19539))
- \[`pyupgrade`\] Fix `UP030` to avoid modifying double curly braces in format strings ([#19378](https://github.com/astral-sh/ruff/pull/19378))
- \[`refurb`\] Ignore decorated functions for `FURB118` ([#19339](https://github.com/astral-sh/ruff/pull/19339))
- \[`refurb`\] Mark `int` and `bool` cases for `Decimal.from_float` as safe fixes (`FURB164`) ([#19468](https://github.com/astral-sh/ruff/pull/19468))
- \[`ruff`\] Fix `RUF033` for named default expressions ([#19115](https://github.com/astral-sh/ruff/pull/19115))
### Rule changes
- \[`flake8-blind-except`\] Change `BLE001` to permit `logging.critical(..., exc_info=True)` ([#19520](https://github.com/astral-sh/ruff/pull/19520))
### Performance
- Add support for specifying minimum dots in detected string imports ([#19538](https://github.com/astral-sh/ruff/pull/19538))
## 0.12.7
This is a follow-up release to 0.12.6. Because of an issue in the package metadata, 0.12.6 failed to publish fully to PyPI and has been yanked. Similarly, there is no GitHub release or Git tag for 0.12.6. The contents of the 0.12.7 release are identical to 0.12.6, except for the updated metadata.
## 0.12.8
### Preview features
- \[`flake8-use-pathlib`\] Expand `PTH201` to check all `PurePath` subclasses ([#19440](https://github.com/astral-sh/ruff/pull/19440))
### Bug fixes
- \[`flake8-blind-except`\] Change `BLE001` to correctly parse exception tuples ([#19747](https://github.com/astral-sh/ruff/pull/19747))
- \[`flake8-errmsg`\] Exclude `typing.cast` from `EM101` ([#19656](https://github.com/astral-sh/ruff/pull/19656))
- \[`flake8-simplify`\] Fix raw string handling in `SIM905` for embedded quotes ([#19591](https://github.com/astral-sh/ruff/pull/19591))
- \[`flake8-import-conventions`\] Avoid false positives for NFKC-normalized `__debug__` import aliases in `ICN001` ([#19411](https://github.com/astral-sh/ruff/pull/19411))
- \[`isort`\] Fix syntax error after docstring ending with backslash (`I002`) ([#19505](https://github.com/astral-sh/ruff/pull/19505))
- \[`pylint`\] Mark `PLC0207` fixes as unsafe when `*args` unpacking is present ([#19679](https://github.com/astral-sh/ruff/pull/19679))
- \[`pyupgrade`\] Prevent infinite loop with `I002` (`UP010`, `UP035`) ([#19413](https://github.com/astral-sh/ruff/pull/19413))
- \[`ruff`\] Parenthesize generator expressions in f-strings (`RUF010`) ([#19434](https://github.com/astral-sh/ruff/pull/19434))
### Rule changes
- \[`eradicate`\] Don't flag `pyrefly` pragmas as unused code (`ERA001`) ([#19731](https://github.com/astral-sh/ruff/pull/19731))
### Documentation
- Replace "associative" with "commutative" in docs for `RUF036` ([#19706](https://github.com/astral-sh/ruff/pull/19706))
- Fix copy and line separator colors in dark mode ([#19630](https://github.com/astral-sh/ruff/pull/19630))
- Fix link to `typing` documentation ([#19648](https://github.com/astral-sh/ruff/pull/19648))
- \[`refurb`\] Make more examples error out-of-the-box ([#19695](https://github.com/astral-sh/ruff/pull/19695), [#19673](https://github.com/astral-sh/ruff/pull/19673), [#19672](https://github.com/astral-sh/ruff/pull/19672))
### Other changes
- Include column numbers in GitLab output format ([#19708](https://github.com/astral-sh/ruff/pull/19708))
- Always expand tabs to four spaces in diagnostics ([#19618](https://github.com/astral-sh/ruff/pull/19618))
- Update pre-commit's `ruff` id ([#19654](https://github.com/astral-sh/ruff/pull/19654))
## 0.12.9
### Preview features
- \[`airflow`\] Add check for `airflow.secrets.cache.SecretCache` (`AIR301`) ([#17707](https://github.com/astral-sh/ruff/pull/17707))
- \[`ruff`\] Offer a safe fix for multi-digit zeros (`RUF064`) ([#19847](https://github.com/astral-sh/ruff/pull/19847))
### Bug fixes
- \[`flake8-blind-except`\] Fix `BLE001` false-positive on `raise ... from None` ([#19755](https://github.com/astral-sh/ruff/pull/19755))
- \[`flake8-comprehensions`\] Fix false positive for `C420` with attribute, subscript, or slice assignment targets ([#19513](https://github.com/astral-sh/ruff/pull/19513))
- \[`flake8-simplify`\] Fix handling of U+001C..U+001F whitespace (`SIM905`) ([#19849](https://github.com/astral-sh/ruff/pull/19849))
### Rule changes
- \[`pylint`\] Use lowercase hex characters to match the formatter (`PLE2513`) ([#19808](https://github.com/astral-sh/ruff/pull/19808))
### Documentation
- Fix `lint.future-annotations` link ([#19876](https://github.com/astral-sh/ruff/pull/19876))
### Other changes
- Build `riscv64` binaries for release ([#19819](https://github.com/astral-sh/ruff/pull/19819))
- Add rule code to error description in GitLab output ([#19896](https://github.com/astral-sh/ruff/pull/19896))
- Improve rendering of the `full` output format ([#19415](https://github.com/astral-sh/ruff/pull/19415))
Below is an example diff for [`F401`](https://docs.astral.sh/ruff/rules/unused-import/):
```diff
-unused.py:8:19: F401 [*] `pathlib` imported but unused
+F401 [*] `pathlib` imported but unused
+ --> unused.py:8:19
|
7 | # Unused, _not_ marked as required (due to the alias).
8 | import pathlib as non_alias
- | ^^^^^^^^^ F401
+ | ^^^^^^^^^
9 |
10 | # Unused, marked as required.
|
- = help: Remove unused import: `pathlib`
+help: Remove unused import: `pathlib`
```
For now, the primary difference is the movement of the filename, line number, and column information to a second line in the header. This new representation will allow us to make further additions to Ruff's diagnostics, such as adding sub-diagnostics and multiple annotations to the same snippet.
## 0.12.10
### Preview features
- \[`flake8-simplify`\] Implement fix for `maxsplit` without separator (`SIM905`) ([#19851](https://github.com/astral-sh/ruff/pull/19851))
- \[`flake8-use-pathlib`\] Add fixes for `PTH102` and `PTH103` ([#19514](https://github.com/astral-sh/ruff/pull/19514))
### Bug fixes
- \[`isort`\] Handle multiple continuation lines after module docstring (`I002`) ([#19818](https://github.com/astral-sh/ruff/pull/19818))
- \[`pyupgrade`\] Avoid reporting `__future__` features as unnecessary when they are used (`UP010`) ([#19769](https://github.com/astral-sh/ruff/pull/19769))
- \[`pyupgrade`\] Handle nested `Optional`s (`UP045`) ([#19770](https://github.com/astral-sh/ruff/pull/19770))
### Rule changes
- \[`pycodestyle`\] Make `E731` fix unsafe instead of display-only for class assignments ([#19700](https://github.com/astral-sh/ruff/pull/19700))
- \[`pyflakes`\] Add secondary annotation showing previous definition (`F811`) ([#19900](https://github.com/astral-sh/ruff/pull/19900))
### Documentation
- Fix description of global config file discovery strategy ([#19188](https://github.com/astral-sh/ruff/pull/19188))
- Update outdated links to <https://typing.python.org/en/latest/source/stubs.html> ([#19992](https://github.com/astral-sh/ruff/pull/19992))
- \[`flake8-annotations`\] Remove unused import in example (`ANN401`) ([#20000](https://github.com/astral-sh/ruff/pull/20000))
## 0.12.11
### Preview features
- \[`airflow`\] Extend `AIR311` and `AIR312` rules ([#20082](https://github.com/astral-sh/ruff/pull/20082))
- \[`airflow`\] Replace wrong path `airflow.io.storage` with `airflow.io.store` (`AIR311`) ([#20081](https://github.com/astral-sh/ruff/pull/20081))
- \[`flake8-async`\] Implement `blocking-http-call-httpx-in-async-function` (`ASYNC212`) ([#20091](https://github.com/astral-sh/ruff/pull/20091))
- \[`flake8-logging-format`\] Add auto-fix for f-string logging calls (`G004`) ([#19303](https://github.com/astral-sh/ruff/pull/19303))
- \[`flake8-use-pathlib`\] Add autofix for `PTH211` ([#20009](https://github.com/astral-sh/ruff/pull/20009))
- \[`flake8-use-pathlib`\] Make `PTH100` fix unsafe because it can change behavior ([#20100](https://github.com/astral-sh/ruff/pull/20100))
### Bug fixes
- \[`pyflakes`, `pylint`\] Fix false positives caused by `__class__` cell handling (`F841`, `PLE0117`) ([#20048](https://github.com/astral-sh/ruff/pull/20048))
- \[`pyflakes`\] Fix `allowed-unused-imports` matching for top-level modules (`F401`) ([#20115](https://github.com/astral-sh/ruff/pull/20115))
- \[`ruff`\] Fix false positive for t-strings in `default-factory-kwarg` (`RUF026`) ([#20032](https://github.com/astral-sh/ruff/pull/20032))
- \[`ruff`\] Preserve relative whitespace in multi-line expressions (`RUF033`) ([#19647](https://github.com/astral-sh/ruff/pull/19647))
### Rule changes
- \[`ruff`\] Handle empty t-strings in `unnecessary-empty-iterable-within-deque-call` (`RUF037`) ([#20045](https://github.com/astral-sh/ruff/pull/20045))
### Documentation
- Fix incorrect `D413` links in docstrings convention FAQ ([#20089](https://github.com/astral-sh/ruff/pull/20089))
- \[`flake8-use-pathlib`\] Update links to the table showing the correspondence between `os` and `pathlib` ([#20103](https://github.com/astral-sh/ruff/pull/20103))
## 0.12.12
### Preview features
- Show fixes by default ([#19919](https://github.com/astral-sh/ruff/pull/19919))
- \[`airflow`\] Convert `DatasetOrTimeSchedule(datasets=...)` to `AssetOrTimeSchedule(assets=...)` (`AIR311`) ([#20202](https://github.com/astral-sh/ruff/pull/20202))
- \[`airflow`\] Improve the `AIR002` error message ([#20173](https://github.com/astral-sh/ruff/pull/20173))
- \[`airflow`\] Move `airflow.operators.postgres_operator.Mapping` from `AIR302` to `AIR301` ([#20172](https://github.com/astral-sh/ruff/pull/20172))
- \[`flake8-async`\] Implement `blocking-input` rule (`ASYNC250`) ([#20122](https://github.com/astral-sh/ruff/pull/20122))
- \[`flake8-use-pathlib`\] Make `PTH119` and `PTH120` fixes unsafe because they can change behavior ([#20118](https://github.com/astral-sh/ruff/pull/20118))
- \[`pylint`\] Add U+061C to `PLE2502` ([#20106](https://github.com/astral-sh/ruff/pull/20106))
- \[`ruff`\] Fix false negative for empty f-strings in `deque` calls (`RUF037`) ([#20109](https://github.com/astral-sh/ruff/pull/20109))
### Bug fixes
- Less confidently mark f-strings as empty when inferring truthiness ([#20152](https://github.com/astral-sh/ruff/pull/20152))
- \[`fastapi`\] Fix false positive for paths with spaces around parameters (`FAST003`) ([#20077](https://github.com/astral-sh/ruff/pull/20077))
- \[`flake8-comprehensions`\] Skip `C417` when lambda contains `yield`/`yield from` ([#20201](https://github.com/astral-sh/ruff/pull/20201))
- \[`perflint`\] Handle tuples in dictionary comprehensions (`PERF403`) ([#19934](https://github.com/astral-sh/ruff/pull/19934))
### Rule changes
- \[`pycodestyle`\] Preserve return type annotation for `ParamSpec` (`E731`) ([#20108](https://github.com/astral-sh/ruff/pull/20108))
### Documentation
- Add fix safety sections to docs ([#17490](https://github.com/astral-sh/ruff/pull/17490), [#17499](https://github.com/astral-sh/ruff/pull/17499))
[`boolean-type-hint-positional-argument`]: https://docs.astral.sh/ruff/rules/boolean-type-hint-positional-argument
[`collection-literal-concatenation`]: https://docs.astral.sh/ruff/rules/collection-literal-concatenation
[`if-else-block-instead-of-if-exp`]: https://docs.astral.sh/ruff/rules/if-else-block-instead-of-if-exp
[`non-pep604-annotation-optional`]: https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional
[`non-pep604-annotation-union`]: https://docs.astral.sh/ruff/rules/non-pep604-annotation-union
[`readlines-in-for`]: https://docs.astral.sh/ruff/rules/readlines-in-for
[`subprocess-without-shell-equals-true`]: https://docs.astral.sh/ruff/rules/subprocess-without-shell-equals-true
[`unused-noqa`]: https://docs.astral.sh/ruff/rules/unused-noqa

View File

@@ -24,20 +24,3 @@ ignore-interior-mutability = [
# The expression is read-only. # The expression is read-only.
"ruff_python_ast::hashable::HashableExpr", "ruff_python_ast::hashable::HashableExpr",
] ]
disallowed-methods = [
{ path = "std::env::var", reason = "Use System::env_var instead in ty crates" },
{ path = "std::env::current_dir", reason = "Use System::current_directory instead in ty crates" },
{ path = "std::fs::read_to_string", reason = "Use System::read_to_string instead in ty crates" },
{ path = "std::fs::metadata", reason = "Use System::path_metadata instead in ty crates" },
{ path = "std::fs::canonicalize", reason = "Use System::canonicalize_path instead in ty crates" },
{ path = "dunce::canonicalize", reason = "Use System::canonicalize_path instead in ty crates" },
{ path = "std::fs::read_dir", reason = "Use System::read_directory instead in ty crates" },
{ path = "std::fs::write", reason = "Use WritableSystem::write_file instead in ty crates" },
{ path = "std::fs::create_dir_all", reason = "Use WritableSystem::create_directory_all instead in ty crates" },
{ path = "std::fs::File::create_new", reason = "Use WritableSystem::create_new_file instead in ty crates" },
# Path methods that have System trait equivalents
{ path = "std::path::Path::exists", reason = "Use System::path_exists instead in ty crates" },
{ path = "std::path::Path::is_dir", reason = "Use System::is_directory instead in ty crates" },
{ path = "std::path::Path::is_file", reason = "Use System::is_file instead in ty crates" },
]

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "ruff" name = "ruff"
version = "0.13.0" version = "0.12.7"
publish = true publish = true
authors = { workspace = true } authors = { workspace = true }
edition = { workspace = true } edition = { workspace = true }
@@ -85,7 +85,7 @@ dist = true
[target.'cfg(target_os = "windows")'.dependencies] [target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true } mimalloc = { workspace = true }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies] [target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = { workspace = true } tikv-jemallocator = { workspace = true }
[lints] [lints]

View File

@@ -13,16 +13,25 @@ use itertools::Itertools;
use log::{debug, error}; use log::{debug, error};
use rayon::iter::ParallelIterator; use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge}; use rayon::iter::{IntoParallelIterator, ParallelBridge};
use ruff_linter::codes::Rule;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
use ruff_cache::{CacheKey, CacheKeyHasher}; use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_db::diagnostic::Diagnostic;
use ruff_diagnostics::Fix;
use ruff_linter::message::create_lint_diagnostic;
use ruff_linter::package::PackageRoot; use ruff_linter::package::PackageRoot;
use ruff_linter::{VERSION, warn_user}; use ruff_linter::{VERSION, warn_user};
use ruff_macros::CacheKey; use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::Settings; use ruff_workspace::Settings;
use ruff_workspace::resolver::Resolver; use ruff_workspace::resolver::Resolver;
use crate::diagnostics::Diagnostics;
/// [`Path`] that is relative to the package root in [`PackageCache`]. /// [`Path`] that is relative to the package root in [`PackageCache`].
pub(crate) type RelativePath = Path; pub(crate) type RelativePath = Path;
/// [`PathBuf`] that is relative to the package root in [`PackageCache`]. /// [`PathBuf`] that is relative to the package root in [`PackageCache`].
@@ -289,8 +298,13 @@ impl Cache {
}); });
} }
pub(crate) fn set_linted(&self, path: RelativePathBuf, key: &FileCacheKey, yes: bool) { pub(crate) fn update_lint(
self.update(path, key, ChangeData::Linted(yes)); &self,
path: RelativePathBuf,
key: &FileCacheKey,
data: LintCacheData,
) {
self.update(path, key, ChangeData::Lint(data));
} }
pub(crate) fn set_formatted(&self, path: RelativePathBuf, key: &FileCacheKey) { pub(crate) fn set_formatted(&self, path: RelativePathBuf, key: &FileCacheKey) {
@@ -325,15 +339,42 @@ pub(crate) struct FileCache {
} }
impl FileCache { impl FileCache {
/// Return whether or not the file in the cache was linted and found to have no diagnostics. /// Convert the file cache into `Diagnostics`, using `path` as file name.
pub(crate) fn linted(&self) -> bool { pub(crate) fn to_diagnostics(&self, path: &Path) -> Option<Diagnostics> {
self.data.linted self.data.lint.as_ref().map(|lint| {
let diagnostics = if lint.messages.is_empty() {
Vec::new()
} else {
let file = SourceFileBuilder::new(path.to_string_lossy(), &*lint.source).finish();
lint.messages
.iter()
.map(|msg| {
create_lint_diagnostic(
&msg.body,
msg.suggestion.as_ref(),
msg.range,
msg.fix.clone(),
msg.parent,
file.clone(),
msg.noqa_offset,
msg.rule,
)
})
.collect()
};
let notebook_indexes = if let Some(notebook_index) = lint.notebook_index.as_ref() {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
} else {
FxHashMap::default()
};
Diagnostics::new(diagnostics, notebook_indexes)
})
} }
} }
#[derive(Debug, Default, bincode::Decode, bincode::Encode)] #[derive(Debug, Default, bincode::Decode, bincode::Encode)]
struct FileCacheData { struct FileCacheData {
linted: bool, lint: Option<LintCacheData>,
formatted: bool, formatted: bool,
} }
@@ -369,6 +410,88 @@ pub(crate) fn init(path: &Path) -> Result<()> {
Ok(()) Ok(())
} }
#[derive(bincode::Decode, Debug, bincode::Encode, PartialEq)]
pub(crate) struct LintCacheData {
/// Imports made.
// pub(super) imports: ImportMap,
/// Diagnostic messages.
pub(super) messages: Vec<CacheMessage>,
/// Source code of the file.
///
/// # Notes
///
/// This will be empty if `messages` is empty.
pub(super) source: String,
/// Notebook index if this file is a Jupyter Notebook.
#[bincode(with_serde)]
pub(super) notebook_index: Option<NotebookIndex>,
}
impl LintCacheData {
pub(crate) fn from_diagnostics(
diagnostics: &[Diagnostic],
notebook_index: Option<NotebookIndex>,
) -> Self {
let source = if let Some(msg) = diagnostics.first() {
msg.expect_ruff_source_file().source_text().to_owned()
} else {
String::new() // No messages, no need to keep the source!
};
let messages = diagnostics
.iter()
// Parse the kebab-case rule name into a `Rule`. This will fail for syntax errors, so
// this also serves to filter them out, but we shouldn't be caching files with syntax
// errors anyway.
.filter_map(|msg| Some((msg.name().parse().ok()?, msg)))
.map(|(rule, msg)| {
// Make sure that all message use the same source file.
assert_eq!(
msg.expect_ruff_source_file(),
diagnostics.first().unwrap().expect_ruff_source_file(),
"message uses a different source file"
);
CacheMessage {
rule,
body: msg.body().to_string(),
suggestion: msg.first_help_text().map(ToString::to_string),
range: msg.expect_range(),
parent: msg.parent(),
fix: msg.fix().cloned(),
noqa_offset: msg.noqa_offset(),
}
})
.collect();
Self {
messages,
source,
notebook_index,
}
}
}
/// On disk representation of a diagnostic message.
#[derive(bincode::Decode, Debug, bincode::Encode, PartialEq)]
pub(super) struct CacheMessage {
/// The rule for the cached diagnostic.
#[bincode(with_serde)]
rule: Rule,
/// The message body to display to the user, to explain the diagnostic.
body: String,
/// The message to display to the user, to explain the suggested fix.
suggestion: Option<String>,
/// Range into the message's [`FileCache::source`].
#[bincode(with_serde)]
range: TextRange,
#[bincode(with_serde)]
parent: Option<TextSize>,
#[bincode(with_serde)]
fix: Option<Fix>,
#[bincode(with_serde)]
noqa_offset: Option<TextSize>,
}
pub(crate) trait PackageCaches { pub(crate) trait PackageCaches {
fn get(&self, package_root: &Path) -> Option<&Cache>; fn get(&self, package_root: &Path) -> Option<&Cache>;
@@ -456,15 +579,15 @@ struct Change {
#[derive(Debug)] #[derive(Debug)]
enum ChangeData { enum ChangeData {
Linted(bool), Lint(LintCacheData),
Formatted, Formatted,
} }
impl ChangeData { impl ChangeData {
fn apply(self, data: &mut FileCacheData) { fn apply(self, data: &mut FileCacheData) {
match self { match self {
ChangeData::Linted(yes) => { ChangeData::Lint(new_lint) => {
data.linted = yes; data.lint = Some(new_lint);
} }
ChangeData::Formatted => { ChangeData::Formatted => {
data.formatted = true; data.formatted = true;
@@ -489,6 +612,7 @@ mod tests {
use test_case::test_case; use test_case::test_case;
use ruff_cache::CACHE_DIR_NAME; use ruff_cache::CACHE_DIR_NAME;
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::package::PackageRoot; use ruff_linter::package::PackageRoot;
use ruff_linter::settings::LinterSettings; use ruff_linter::settings::LinterSettings;
use ruff_linter::settings::flags; use ruff_linter::settings::flags;
@@ -496,7 +620,7 @@ mod tests {
use ruff_python_ast::{PySourceType, PythonVersion}; use ruff_python_ast::{PySourceType, PythonVersion};
use ruff_workspace::Settings; use ruff_workspace::Settings;
use crate::cache::{self, ChangeData, FileCache, FileCacheData, FileCacheKey}; use crate::cache::{self, FileCache, FileCacheData, FileCacheKey};
use crate::cache::{Cache, RelativePathBuf}; use crate::cache::{Cache, RelativePathBuf};
use crate::commands::format::{FormatCommandError, FormatMode, FormatResult, format_path}; use crate::commands::format::{FormatCommandError, FormatMode, FormatResult, format_path};
use crate::diagnostics::{Diagnostics, lint_path}; use crate::diagnostics::{Diagnostics, lint_path};
@@ -523,7 +647,7 @@ mod tests {
assert_eq!(cache.changes.lock().unwrap().len(), 0); assert_eq!(cache.changes.lock().unwrap().len(), 0);
let mut paths = Vec::new(); let mut paths = Vec::new();
let mut paths_with_diagnostics = Vec::new(); let mut parse_errors = Vec::new();
let mut expected_diagnostics = Diagnostics::default(); let mut expected_diagnostics = Diagnostics::default();
for entry in fs::read_dir(&package_root).unwrap() { for entry in fs::read_dir(&package_root).unwrap() {
let entry = entry.unwrap(); let entry = entry.unwrap();
@@ -547,7 +671,7 @@ mod tests {
continue; continue;
} }
let mut diagnostics = lint_path( let diagnostics = lint_path(
&path, &path,
Some(PackageRoot::root(&package_root)), Some(PackageRoot::root(&package_root)),
&settings.linter, &settings.linter,
@@ -557,15 +681,8 @@ mod tests {
UnsafeFixes::Enabled, UnsafeFixes::Enabled,
) )
.unwrap(); .unwrap();
if diagnostics.inner.is_empty() { if diagnostics.inner.iter().any(Diagnostic::is_invalid_syntax) {
// We won't load a notebook index from the cache for files without diagnostics, parse_errors.push(path.clone());
// so remove them from `expected_diagnostics` too. This allows us to keep the
// full equality assertion below.
diagnostics
.notebook_indexes
.remove(&path.to_string_lossy().to_string());
} else {
paths_with_diagnostics.push(path.clone());
} }
paths.push(path); paths.push(path);
expected_diagnostics += diagnostics; expected_diagnostics += diagnostics;
@@ -578,11 +695,11 @@ mod tests {
let cache = Cache::open(package_root.clone(), &settings); let cache = Cache::open(package_root.clone(), &settings);
assert_ne!(cache.package.files.len(), 0); assert_ne!(cache.package.files.len(), 0);
paths_with_diagnostics.sort(); parse_errors.sort();
for path in &paths { for path in &paths {
if paths_with_diagnostics.binary_search(path).is_ok() { if parse_errors.binary_search(path).is_ok() {
continue; // We don't cache files with diagnostics. continue; // We don't cache parsing errors.
} }
let relative_path = cache.relative_path(path).unwrap(); let relative_path = cache.relative_path(path).unwrap();
@@ -616,7 +733,7 @@ mod tests {
#[test] #[test]
fn cache_adds_file_on_lint() { fn cache_adds_file_on_lint() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n"; let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("cache_adds_file_on_lint"); let test_cache = TestCache::new("cache_adds_file_on_lint");
let cache = test_cache.open(); let cache = test_cache.open();
@@ -640,7 +757,7 @@ mod tests {
#[test] #[test]
fn cache_adds_files_on_lint() { fn cache_adds_files_on_lint() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n"; let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("cache_adds_files_on_lint"); let test_cache = TestCache::new("cache_adds_files_on_lint");
let cache = test_cache.open(); let cache = test_cache.open();
@@ -665,40 +782,6 @@ mod tests {
cache.persist().unwrap(); cache.persist().unwrap();
} }
#[test]
fn cache_does_not_add_file_on_lint_with_diagnostic() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("cache_does_not_add_file_on_lint_with_diagnostic");
let cache = test_cache.open();
test_cache.write_source_file("source.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);
cache.persist().unwrap();
let cache = test_cache.open();
let results = test_cache
.lint_file_with_cache("source.py", &cache)
.expect("Failed to lint test file");
assert_eq!(results.inner.len(), 1, "Expected one F822 diagnostic");
assert_eq!(
cache.changes.lock().unwrap().len(),
1,
"Files with diagnostics still trigger change events"
);
assert!(
cache
.changes
.lock()
.unwrap()
.last()
.is_some_and(|change| matches!(change.new_data, ChangeData::Linted(false))),
"Files with diagnostics are marked as unlinted"
);
cache.persist().unwrap();
}
#[test] #[test]
fn cache_adds_files_on_format() { fn cache_adds_files_on_format() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n"; let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
@@ -729,7 +812,7 @@ mod tests {
#[test] #[test]
fn cache_invalidated_on_file_modified_time() { fn cache_invalidated_on_file_modified_time() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n"; let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("cache_invalidated_on_file_modified_time"); let test_cache = TestCache::new("cache_invalidated_on_file_modified_time");
let cache = test_cache.open(); let cache = test_cache.open();
@@ -786,7 +869,7 @@ mod tests {
file.set_permissions(perms) file.set_permissions(perms)
} }
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n"; let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("cache_invalidated_on_permission_change"); let test_cache = TestCache::new("cache_invalidated_on_permission_change");
let cache = test_cache.open(); let cache = test_cache.open();
@@ -839,7 +922,7 @@ mod tests {
); );
// Now actually lint a file. // Now actually lint a file.
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n"; let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
test_cache.write_source_file("new.py", source); test_cache.write_source_file("new.py", source);
let new_path_key = RelativePathBuf::from("new.py"); let new_path_key = RelativePathBuf::from("new.py");
assert_eq!(cache.changes.lock().unwrap().len(), 0); assert_eq!(cache.changes.lock().unwrap().len(), 0);
@@ -862,7 +945,7 @@ mod tests {
#[test] #[test]
fn format_updates_cache_entry() { fn format_updates_cache_entry() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n"; let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
let test_cache = TestCache::new("format_updates_cache_entry"); let test_cache = TestCache::new("format_updates_cache_entry");
let cache = test_cache.open(); let cache = test_cache.open();
@@ -896,7 +979,7 @@ mod tests {
panic!("Cache entry for `source.py` is missing."); panic!("Cache entry for `source.py` is missing.");
}; };
assert!(file_cache.data.linted); assert!(file_cache.data.lint.is_some());
assert!(file_cache.data.formatted); assert!(file_cache.data.formatted);
} }
@@ -946,7 +1029,7 @@ mod tests {
panic!("Cache entry for `source.py` is missing."); panic!("Cache entry for `source.py` is missing.");
}; };
assert!(!file_cache.data.linted); assert_eq!(file_cache.data.lint, None);
assert!(file_cache.data.formatted); assert!(file_cache.data.formatted);
} }

View File

@@ -6,14 +6,12 @@ use std::time::Instant;
use anyhow::Result; use anyhow::Result;
use colored::Colorize; use colored::Colorize;
use ignore::Error; use ignore::Error;
use log::{debug, warn}; use log::{debug, error, warn};
#[cfg(not(target_family = "wasm"))] #[cfg(not(target_family = "wasm"))]
use rayon::prelude::*; use rayon::prelude::*;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use ruff_db::diagnostic::{ use ruff_db::diagnostic::Diagnostic;
Annotation, Diagnostic, DiagnosticId, Span, SubDiagnostic, SubDiagnosticSeverity,
};
use ruff_db::panic::catch_unwind; use ruff_db::panic::catch_unwind;
use ruff_linter::package::PackageRoot; use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule; use ruff_linter::registry::Rule;
@@ -195,24 +193,21 @@ fn lint_path(
match result { match result {
Ok(inner) => inner, Ok(inner) => inner,
Err(error) => { Err(error) => {
let message = match error.payload.as_str() { let message = r"This indicates a bug in Ruff. If you could open an issue at:
Some(summary) => format!("Fatal error while linting: {summary}"),
_ => "Fatal error while linting".to_owned(), https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
};
let mut diagnostic = Diagnostic::new( ...with the relevant file contents, the `pyproject.toml` settings, and the following stack trace, we'd be very appreciative!
DiagnosticId::Panic, ";
ruff_db::diagnostic::Severity::Fatal,
message, error!(
"{}{}{} {message}\n{error}",
"Panicked while linting ".bold(),
fs::relativize_path(path).bold(),
":".bold()
); );
let span = Span::from(SourceFileBuilder::new(path.to_string_lossy(), "").finish());
let mut annotation = Annotation::primary(span); Ok(Diagnostics::default())
annotation.set_file_level(true);
diagnostic.annotate(annotation);
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!("{error}"),
));
Ok(Diagnostics::new(vec![diagnostic], FxHashMap::default()))
} }
} }
} }

View File

@@ -20,21 +20,15 @@ use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags}; use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::source_kind::{SourceError, SourceKind}; use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{IOError, Violation, fs}; use ruff_linter::{IOError, Violation, fs};
use ruff_notebook::{NotebookError, NotebookIndex}; use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType}; use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder; use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange; use ruff_text_size::TextRange;
use ruff_workspace::Settings; use ruff_workspace::Settings;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::cache::{Cache, FileCache, FileCacheKey}; use crate::cache::{Cache, FileCacheKey, LintCacheData};
/// A collection of [`Diagnostic`]s and additional information needed to render them.
///
/// Note that `notebook_indexes` may be empty if there are no diagnostics because the
/// `NotebookIndex` isn't cached in this case. This isn't a problem for any current uses as of
/// 2025-08-12, which are all related to diagnostic rendering, but could be surprising if used
/// differently in the future.
#[derive(Debug, Default, PartialEq)] #[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics { pub(crate) struct Diagnostics {
pub(crate) inner: Vec<Diagnostic>, pub(crate) inner: Vec<Diagnostic>,
@@ -199,9 +193,19 @@ pub(crate) fn lint_path(
let cache_key = FileCacheKey::from_path(path).context("Failed to create cache key")?; let cache_key = FileCacheKey::from_path(path).context("Failed to create cache key")?;
let cached_diagnostics = cache let cached_diagnostics = cache
.get(relative_path, &cache_key) .get(relative_path, &cache_key)
.is_some_and(FileCache::linted); .and_then(|entry| entry.to_diagnostics(path));
if cached_diagnostics { if let Some(diagnostics) = cached_diagnostics {
return Ok(Diagnostics::default()); // `FixMode::Generate` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
flags::FixMode::Generate => true,
flags::FixMode::Apply | flags::FixMode::Diff => {
diagnostics.inner.is_empty() && diagnostics.fixed.is_empty()
}
} {
return Ok(diagnostics);
}
} }
// Stash the file metadata for later so when we update the cache it reflects the prerun // Stash the file metadata for later so when we update the cache it reflects the prerun
@@ -318,21 +322,31 @@ pub(crate) fn lint_path(
(result, transformed, fixed) (result, transformed, fixed)
}; };
let has_error = result.has_syntax_errors();
let diagnostics = result.diagnostics; let diagnostics = result.diagnostics;
if let Some((cache, relative_path, key)) = caching { if let Some((cache, relative_path, key)) = caching {
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk, // We don't cache parsing errors.
// and writing the diff to stdout, respectively). If a file has diagnostics if !has_error {
// with fixes, we need to avoid reading from and writing to the cache in these // `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// modes. // and writing the diff to stdout, respectively). If a file has diagnostics, we
let use_fixes = match fix_mode { // need to avoid reading from and writing to the cache in these modes.
flags::FixMode::Generate => true, if match fix_mode {
flags::FixMode::Apply | flags::FixMode::Diff => fixed.is_empty(), flags::FixMode::Generate => true,
}; flags::FixMode::Apply | flags::FixMode::Diff => {
diagnostics.is_empty() && fixed.is_empty()
// We don't cache files with diagnostics. }
let linted = diagnostics.is_empty() && use_fixes; } {
cache.set_linted(relative_path.to_owned(), &key, linted); cache.update_lint(
relative_path.to_owned(),
&key,
LintCacheData::from_diagnostics(
&diagnostics,
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
),
);
}
}
} }
let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed { let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {

View File

@@ -9,11 +9,10 @@ use std::sync::mpsc::channel;
use anyhow::Result; use anyhow::Result;
use clap::CommandFactory; use clap::CommandFactory;
use colored::Colorize; use colored::Colorize;
use log::{error, warn}; use log::warn;
use notify::{RecursiveMode, Watcher, recommended_watcher}; use notify::{RecursiveMode, Watcher, recommended_watcher};
use args::{GlobalConfigArgs, ServerCommand}; use args::{GlobalConfigArgs, ServerCommand};
use ruff_db::diagnostic::{Diagnostic, Severity};
use ruff_linter::logging::{LogLevel, set_up_logging}; use ruff_linter::logging::{LogLevel, set_up_logging};
use ruff_linter::settings::flags::FixMode; use ruff_linter::settings::flags::FixMode;
use ruff_linter::settings::types::OutputFormat; use ruff_linter::settings::types::OutputFormat;
@@ -445,27 +444,6 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
} }
if !cli.exit_zero { if !cli.exit_zero {
let max_severity = diagnostics
.inner
.iter()
.map(Diagnostic::severity)
.max()
.unwrap_or(Severity::Info);
if max_severity.is_fatal() {
// When a panic/fatal error is reported, prompt the user to open an issue on github.
// Diagnostics with severity `fatal` will be sorted to the bottom, and printing the
// message here instead of attaching it to the diagnostic ensures that we only print
// it once instead of repeating it for each diagnostic. Prints to stderr to prevent
// the message from being captured by tools parsing the normal output.
let message = "Panic during linting indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the stack trace above, we'd be very appreciative!
";
error!("{message}");
return Ok(ExitStatus::Error);
}
if cli.diff { if cli.diff {
// If we're printing a diff, we always want to exit non-zero if there are // If we're printing a diff, we always want to exit non-zero if there are
// any fixable violations (since we've printed the diff, but not applied the // any fixable violations (since we've printed the diff, but not applied the

View File

@@ -19,8 +19,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
any( any(
target_arch = "x86_64", target_arch = "x86_64",
target_arch = "aarch64", target_arch = "aarch64",
target_arch = "powerpc64", target_arch = "powerpc64"
target_arch = "riscv64"
) )
))] ))]
#[global_allocator] #[global_allocator]

View File

@@ -10,12 +10,14 @@ use ruff_linter::linter::FixTable;
use serde::Serialize; use serde::Serialize;
use ruff_db::diagnostic::{ use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
DisplayGithubDiagnostics, GithubRenderer, SecondaryCode,
}; };
use ruff_linter::fs::relativize_path; use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel; use ruff_linter::logging::LogLevel;
use ruff_linter::message::{Emitter, EmitterContext, GroupedEmitter, SarifEmitter, TextEmitter}; use ruff_linter::message::{
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, SarifEmitter,
TextEmitter,
};
use ruff_linter::notify_user; use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self}; use ruff_linter::settings::flags::{self};
use ruff_linter::settings::types::{OutputFormat, UnsafeFixes}; use ruff_linter::settings::types::{OutputFormat, UnsafeFixes};
@@ -29,6 +31,8 @@ bitflags! {
const SHOW_VIOLATIONS = 1 << 0; const SHOW_VIOLATIONS = 1 << 0;
/// Whether to show a summary of the fixed violations when emitting diagnostics. /// Whether to show a summary of the fixed violations when emitting diagnostics.
const SHOW_FIX_SUMMARY = 1 << 1; const SHOW_FIX_SUMMARY = 1 << 1;
/// Whether to show a diff of each fixed violation when emitting diagnostics.
const SHOW_FIX_DIFF = 1 << 2;
} }
} }
@@ -225,35 +229,41 @@ impl Printer {
let context = EmitterContext::new(&diagnostics.notebook_indexes); let context = EmitterContext::new(&diagnostics.notebook_indexes);
let fixables = FixableStatistics::try_from(diagnostics, self.unsafe_fixes); let fixables = FixableStatistics::try_from(diagnostics, self.unsafe_fixes);
let config = DisplayDiagnosticConfig::default().preview(preview);
match self.format { match self.format {
OutputFormat::Json => { OutputFormat::Json => {
let config = config.format(DiagnosticFormat::Json); let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Json)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner); let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?; write!(writer, "{value}")?;
} }
OutputFormat::Rdjson => { OutputFormat::Rdjson => {
let config = config.format(DiagnosticFormat::Rdjson); let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Rdjson)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner); let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?; write!(writer, "{value}")?;
} }
OutputFormat::JsonLines => { OutputFormat::JsonLines => {
let config = config.format(DiagnosticFormat::JsonLines); let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::JsonLines)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner); let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?; write!(writer, "{value}")?;
} }
OutputFormat::Junit => { OutputFormat::Junit => {
let config = config.format(DiagnosticFormat::Junit); let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Junit)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner); let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?; write!(writer, "{value}")?;
} }
OutputFormat::Concise | OutputFormat::Full => { OutputFormat::Concise | OutputFormat::Full => {
TextEmitter::default() TextEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref())) .with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_show_fix_diff(self.format == OutputFormat::Full && preview) .with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
.with_show_source(self.format == OutputFormat::Full) .with_show_source(self.format == OutputFormat::Full)
.with_fix_applicability(self.unsafe_fixes.required_applicability()) .with_unsafe_fixes(self.unsafe_fixes)
.with_preview(preview) .with_preview(preview)
.emit(writer, &diagnostics.inner, &context)?; .emit(writer, &diagnostics.inner, &context)?;
@@ -283,22 +293,22 @@ impl Printer {
self.write_summary_text(writer, diagnostics)?; self.write_summary_text(writer, diagnostics)?;
} }
OutputFormat::Github => { OutputFormat::Github => {
let renderer = GithubRenderer::new(&context, "Ruff"); GithubEmitter.emit(writer, &diagnostics.inner, &context)?;
let value = DisplayGithubDiagnostics::new(&renderer, &diagnostics.inner);
write!(writer, "{value}")?;
} }
OutputFormat::Gitlab => { OutputFormat::Gitlab => {
let config = config.format(DiagnosticFormat::Gitlab); GitlabEmitter::default().emit(writer, &diagnostics.inner, &context)?;
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
} }
OutputFormat::Pylint => { OutputFormat::Pylint => {
let config = config.format(DiagnosticFormat::Pylint); let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Pylint)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner); let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?; write!(writer, "{value}")?;
} }
OutputFormat::Azure => { OutputFormat::Azure => {
let config = config.format(DiagnosticFormat::Azure); let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Azure)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner); let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?; write!(writer, "{value}")?;
} }
@@ -451,7 +461,7 @@ impl Printer {
TextEmitter::default() TextEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref())) .with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_show_source(preview) .with_show_source(preview)
.with_fix_applicability(self.unsafe_fixes.required_applicability()) .with_unsafe_fixes(self.unsafe_fixes)
.emit(writer, &diagnostics.inner, &context)?; .emit(writer, &diagnostics.inner, &context)?;
} }
writer.flush()?; writer.flush()?;

View File

@@ -246,59 +246,6 @@ fn string_detection() -> Result<()> {
Ok(()) Ok(())
} }
#[test]
fn string_detection_from_config() -> Result<()> {
let tempdir = TempDir::new()?;
let root = ChildPath::new(tempdir.path());
// Configure string import detection with a lower min-dots via ruff.toml
root.child("ruff.toml").write_str(indoc::indoc! {r#"
[analyze]
detect-string-imports = true
string-imports-min-dots = 1
"#})?;
root.child("ruff").child("__init__.py").write_str("")?;
root.child("ruff")
.child("a.py")
.write_str(indoc::indoc! {r#"
import ruff.b
"#})?;
root.child("ruff")
.child("b.py")
.write_str(indoc::indoc! {r#"
import importlib
importlib.import_module("ruff.c")
"#})?;
root.child("ruff").child("c.py").write_str("")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}
----- stderr -----
"#);
});
Ok(())
}
#[test] #[test]
fn globs() -> Result<()> { fn globs() -> Result<()> {
let tempdir = TempDir::new()?; let tempdir = TempDir::new()?;

View File

@@ -115,13 +115,12 @@ fn stdin_error() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `os` imported but unused -:1:8: F401 [*] `os` imported but unused
--> -:1:8
| |
1 | import os 1 | import os
| ^^ | ^^ F401
| |
help: Remove unused import: `os` = help: Remove unused import: `os`
Found 1 error. Found 1 error.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.
@@ -140,13 +139,12 @@ fn stdin_filename() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `os` imported but unused F401.py:1:8: F401 [*] `os` imported but unused
--> F401.py:1:8
| |
1 | import os 1 | import os
| ^^ | ^^ F401
| |
help: Remove unused import: `os` = help: Remove unused import: `os`
Found 1 error. Found 1 error.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.
@@ -176,21 +174,19 @@ import bar # unused import
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `bar` imported but unused bar.py:2:8: F401 [*] `bar` imported but unused
--> bar.py:2:8
| |
2 | import bar # unused import 2 | import bar # unused import
| ^^^ | ^^^ F401
| |
help: Remove unused import: `bar` = help: Remove unused import: `bar`
F401 [*] `foo` imported but unused foo.py:2:8: F401 [*] `foo` imported but unused
--> foo.py:2:8
| |
2 | import foo # unused import 2 | import foo # unused import
| ^^^ | ^^^ F401
| |
help: Remove unused import: `foo` = help: Remove unused import: `foo`
Found 2 errors. Found 2 errors.
[*] 2 fixable with the `--fix` option. [*] 2 fixable with the `--fix` option.
@@ -212,13 +208,12 @@ fn check_warn_stdin_filename_with_files() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `os` imported but unused F401.py:1:8: F401 [*] `os` imported but unused
--> F401.py:1:8
| |
1 | import os 1 | import os
| ^^ | ^^ F401
| |
help: Remove unused import: `os` = help: Remove unused import: `os`
Found 1 error. Found 1 error.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.
@@ -239,13 +234,12 @@ fn stdin_source_type_py() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `os` imported but unused TCH.py:1:8: F401 [*] `os` imported but unused
--> TCH.py:1:8
| |
1 | import os 1 | import os
| ^^ | ^^ F401
| |
help: Remove unused import: `os` = help: Remove unused import: `os`
Found 1 error. Found 1 error.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.
@@ -477,11 +471,10 @@ fn stdin_fix_jupyter() {
"nbformat_minor": 5 "nbformat_minor": 5
} }
----- stderr ----- ----- stderr -----
F821 Undefined name `x` Jupyter.ipynb:cell 3:1:7: F821 Undefined name `x`
--> Jupyter.ipynb:cell 3:1:7
| |
1 | print(x) 1 | print(x)
| ^ | ^ F821
| |
Found 3 errors (2 fixed, 1 remaining). Found 3 errors (2 fixed, 1 remaining).
@@ -576,21 +569,19 @@ fn stdin_override_parser_ipynb() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `os` imported but unused Jupyter.py:cell 1:1:8: F401 [*] `os` imported but unused
--> Jupyter.py:cell 1:1:8
| |
1 | import os 1 | import os
| ^^ | ^^ F401
| |
help: Remove unused import: `os` = help: Remove unused import: `os`
F401 [*] `sys` imported but unused Jupyter.py:cell 3:1:8: F401 [*] `sys` imported but unused
--> Jupyter.py:cell 3:1:8
| |
1 | import sys 1 | import sys
| ^^^ | ^^^ F401
| |
help: Remove unused import: `sys` = help: Remove unused import: `sys`
Found 2 errors. Found 2 errors.
[*] 2 fixable with the `--fix` option. [*] 2 fixable with the `--fix` option.
@@ -614,13 +605,12 @@ fn stdin_override_parser_py() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `os` imported but unused F401.ipynb:1:8: F401 [*] `os` imported but unused
--> F401.ipynb:1:8
| |
1 | import os 1 | import os
| ^^ | ^^ F401
| |
help: Remove unused import: `os` = help: Remove unused import: `os`
Found 1 error. Found 1 error.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.
@@ -643,13 +633,12 @@ fn stdin_fix_when_not_fixable_should_still_print_contents() {
print(sys.version) print(sys.version)
----- stderr ----- ----- stderr -----
F634 If test is a tuple, which is always `True` -:3:4: F634 If test is a tuple, which is always `True`
--> -:3:4
| |
1 | import sys 1 | import sys
2 | 2 |
3 | if (1, 2): 3 | if (1, 2):
| ^^^^^^ | ^^^^^^ F634
4 | print(sys.version) 4 | print(sys.version)
| |
@@ -809,8 +798,7 @@ fn stdin_parse_error() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
invalid-syntax: Expected one or more symbol names after import -:1:16: SyntaxError: Expected one or more symbol names after import
--> -:1:16
| |
1 | from foo import 1 | from foo import
| ^ | ^
@@ -830,16 +818,14 @@ fn stdin_multiple_parse_error() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
invalid-syntax: Expected one or more symbol names after import -:1:16: SyntaxError: Expected one or more symbol names after import
--> -:1:16
| |
1 | from foo import 1 | from foo import
| ^ | ^
2 | bar = 2 | bar =
| |
invalid-syntax: Expected an expression -:2:6: SyntaxError: Expected an expression
--> -:2:6
| |
1 | from foo import 1 | from foo import
2 | bar = 2 | bar =
@@ -861,8 +847,7 @@ fn parse_error_not_included() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
invalid-syntax: Expected an expression -:1:6: SyntaxError: Expected an expression
--> -:1:6
| |
1 | foo = 1 | foo =
| ^ | ^
@@ -882,11 +867,10 @@ fn full_output_preview() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
E741 Ambiguous variable name: `l` -:1:1: E741 Ambiguous variable name: `l`
--> -:1:1
| |
1 | l = 1 1 | l = 1
| ^ | ^ E741
| |
Found 1 error. Found 1 error.
@@ -911,11 +895,10 @@ preview = true
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
E741 Ambiguous variable name: `l` -:1:1: E741 Ambiguous variable name: `l`
--> -:1:1
| |
1 | l = 1 1 | l = 1
| ^ | ^ E741
| |
Found 1 error. Found 1 error.
@@ -933,11 +916,10 @@ fn full_output_format() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
E741 Ambiguous variable name: `l` -:1:1: E741 Ambiguous variable name: `l`
--> -:1:1
| |
1 | l = 1 1 | l = 1
| ^ | ^ E741
| |
Found 1 error. Found 1 error.
@@ -1424,9 +1406,7 @@ fn redirect_direct() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF950 Hey this is a test rule that was redirected from another. -:1:1: RUF950 Hey this is a test rule that was redirected from another.
--> -:1:1
Found 1 error. Found 1 error.
----- stderr ----- ----- stderr -----
@@ -1458,9 +1438,7 @@ fn redirect_prefix() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF950 Hey this is a test rule that was redirected from another. -:1:1: RUF950 Hey this is a test rule that was redirected from another.
--> -:1:1
Found 1 error. Found 1 error.
----- stderr ----- ----- stderr -----
@@ -1477,9 +1455,7 @@ fn deprecated_direct() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF920 Hey this is a deprecated test rule. -:1:1: RUF920 Hey this is a deprecated test rule.
--> -:1:1
Found 1 error. Found 1 error.
----- stderr ----- ----- stderr -----
@@ -1489,8 +1465,6 @@ fn deprecated_direct() {
#[test] #[test]
fn deprecated_multiple_direct() { fn deprecated_multiple_direct() {
// Multiple deprecated rules selected by exact code should be included
// but a warning should be displayed
let mut cmd = RuffCheck::default() let mut cmd = RuffCheck::default()
.args(["--select", "RUF920", "--select", "RUF921"]) .args(["--select", "RUF920", "--select", "RUF921"])
.build(); .build();
@@ -1498,12 +1472,8 @@ fn deprecated_multiple_direct() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF920 Hey this is a deprecated test rule. -:1:1: RUF920 Hey this is a deprecated test rule.
--> -:1:1 -:1:1: RUF921 Hey this is another deprecated test rule.
RUF921 Hey this is another deprecated test rule.
--> -:1:1
Found 2 errors. Found 2 errors.
----- stderr ----- ----- stderr -----
@@ -1518,10 +1488,12 @@ fn deprecated_indirect() {
// since it is not a "direct" selection // since it is not a "direct" selection
let mut cmd = RuffCheck::default().args(["--select", "RUF92"]).build(); let mut cmd = RuffCheck::default().args(["--select", "RUF92"]).build();
assert_cmd_snapshot!(cmd, @r" assert_cmd_snapshot!(cmd, @r"
success: true success: false
exit_code: 0 exit_code: 1
----- stdout ----- ----- stdout -----
All checks passed! -:1:1: RUF920 Hey this is a deprecated test rule.
-:1:1: RUF921 Hey this is another deprecated test rule.
Found 2 errors.
----- stderr ----- ----- stderr -----
"); ");
@@ -1666,23 +1638,22 @@ fn check_input_from_argfile() -> Result<()> {
(file_a_path.display().to_string().as_str(), "/path/to/a.py"), (file_a_path.display().to_string().as_str(), "/path/to/a.py"),
]}, { ]}, {
assert_cmd_snapshot!(cmd assert_cmd_snapshot!(cmd
.pass_stdin(""), @r" .pass_stdin(""), @r###"
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `os` imported but unused /path/to/a.py:1:8: F401 [*] `os` imported but unused
--> /path/to/a.py:1:8
| |
1 | import os 1 | import os
| ^^ | ^^ F401
| |
help: Remove unused import: `os` = help: Remove unused import: `os`
Found 1 error. Found 1 error.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.
----- stderr ----- ----- stderr -----
"); "###);
}); });
Ok(()) Ok(())
@@ -1698,12 +1669,8 @@ fn check_hints_hidden_unsafe_fixes() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF901 [*] Hey this is a stable test rule with a safe fix. -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
--> -:1:1 -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
RUF902 Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 2 errors. Found 2 errors.
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option). [*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
@@ -1720,9 +1687,7 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF902 Hey this is a stable test rule with an unsafe fix. -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 1 error. Found 1 error.
No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option). No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
@@ -1740,12 +1705,8 @@ fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF901 [*] Hey this is a stable test rule with a safe fix. -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
--> -:1:1 -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
RUF902 Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 2 errors. Found 2 errors.
[*] 1 fixable with the --fix option. [*] 1 fixable with the --fix option.
@@ -1764,9 +1725,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF902 Hey this is a stable test rule with an unsafe fix. -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 1 error. Found 1 error.
----- stderr ----- ----- stderr -----
@@ -1783,12 +1742,8 @@ fn check_shows_unsafe_fixes_with_opt_in() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF901 [*] Hey this is a stable test rule with a safe fix. -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
--> -:1:1 -:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
RUF902 [*] Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 2 errors. Found 2 errors.
[*] 2 fixable with the --fix option. [*] 2 fixable with the --fix option.
@@ -1809,9 +1764,7 @@ fn fix_applies_safe_fixes_by_default() {
# fix from stable-test-rule-safe-fix # fix from stable-test-rule-safe-fix
----- stderr ----- ----- stderr -----
RUF902 Hey this is a stable test rule with an unsafe fix. -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 2 errors (1 fixed, 1 remaining). Found 2 errors (1 fixed, 1 remaining).
No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option). No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
"); ");
@@ -1848,9 +1801,7 @@ fn fix_does_not_apply_display_only_fixes() {
----- stdout ----- ----- stdout -----
def add_to_list(item, some_list=[]): ... def add_to_list(item, some_list=[]): ...
----- stderr ----- ----- stderr -----
RUF903 Hey this is a stable test rule with a display only fix. -:1:1: RUF903 Hey this is a stable test rule with a display only fix.
--> -:1:1
Found 1 error. Found 1 error.
"); ");
} }
@@ -1868,9 +1819,7 @@ fn fix_does_not_apply_display_only_fixes_with_unsafe_fixes_enabled() {
----- stdout ----- ----- stdout -----
def add_to_list(item, some_list=[]): ... def add_to_list(item, some_list=[]): ...
----- stderr ----- ----- stderr -----
RUF903 Hey this is a stable test rule with a display only fix. -:1:1: RUF903 Hey this is a stable test rule with a display only fix.
--> -:1:1
Found 1 error. Found 1 error.
"); ");
} }
@@ -1887,9 +1836,7 @@ fn fix_only_unsafe_fixes_available() {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
RUF902 Hey this is a stable test rule with an unsafe fix. -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 1 error. Found 1 error.
No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option). No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
"); ");
@@ -2025,12 +1972,8 @@ extend-unsafe-fixes = ["RUF901"]
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF901 Hey this is a stable test rule with a safe fix. -:1:1: RUF901 Hey this is a stable test rule with a safe fix.
--> -:1:1 -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
RUF902 Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 2 errors. Found 2 errors.
No fixes available (2 hidden fixes can be enabled with the `--unsafe-fixes` option). No fixes available (2 hidden fixes can be enabled with the `--unsafe-fixes` option).
@@ -2061,12 +2004,8 @@ extend-safe-fixes = ["RUF902"]
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF901 [*] Hey this is a stable test rule with a safe fix. -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
--> -:1:1 -:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
RUF902 [*] Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 2 errors. Found 2 errors.
[*] 2 fixable with the `--fix` option. [*] 2 fixable with the `--fix` option.
@@ -2099,12 +2038,8 @@ extend-safe-fixes = ["RUF902"]
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF901 [*] Hey this is a stable test rule with a safe fix. -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
--> -:1:1 -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
RUF902 Hey this is a stable test rule with an unsafe fix.
--> -:1:1
Found 2 errors. Found 2 errors.
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option). [*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
@@ -2139,22 +2074,14 @@ extend-safe-fixes = ["RUF9"]
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF900 Hey this is a stable test rule. -:1:1: RUF900 Hey this is a stable test rule.
--> -:1:1 -:1:1: RUF901 Hey this is a stable test rule with a safe fix.
-:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
RUF901 Hey this is a stable test rule with a safe fix. -:1:1: RUF903 Hey this is a stable test rule with a display only fix.
--> -:1:1 -:1:1: RUF920 Hey this is a deprecated test rule.
-:1:1: RUF921 Hey this is another deprecated test rule.
RUF902 [*] Hey this is a stable test rule with an unsafe fix. -:1:1: RUF950 Hey this is a test rule that was redirected from another.
--> -:1:1 Found 7 errors.
RUF903 Hey this is a stable test rule with a display only fix.
--> -:1:1
RUF950 Hey this is a test rule that was redirected from another.
--> -:1:1
Found 5 errors.
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option). [*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
----- stderr ----- ----- stderr -----
@@ -2214,11 +2141,10 @@ def log(x, base) -> float:
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
D417 Missing argument description in the docstring for `log`: `base` -:2:5: D417 Missing argument description in the docstring for `log`: `base`
--> -:2:5
| |
2 | def log(x, base) -> float: 2 | def log(x, base) -> float:
| ^^^ | ^^^ D417
3 | """Calculate natural log of a value 3 | """Calculate natural log of a value
| |
@@ -2251,15 +2177,14 @@ select = ["RUF017"]
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF017 Avoid quadratic list summation -:3:1: RUF017 Avoid quadratic list summation
--> -:3:1
| |
1 | x = [1, 2, 3] 1 | x = [1, 2, 3]
2 | y = [4, 5, 6] 2 | y = [4, 5, 6]
3 | sum([x, y], []) 3 | sum([x, y], [])
| ^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^ RUF017
| |
help: Replace with `functools.reduce` = help: Replace with `functools.reduce`
Found 1 error. Found 1 error.
No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option). No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
@@ -2292,15 +2217,14 @@ unfixable = ["RUF"]
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF017 Avoid quadratic list summation -:3:1: RUF017 Avoid quadratic list summation
--> -:3:1
| |
1 | x = [1, 2, 3] 1 | x = [1, 2, 3]
2 | y = [4, 5, 6] 2 | y = [4, 5, 6]
3 | sum([x, y], []) 3 | sum([x, y], [])
| ^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^ RUF017
| |
help: Replace with `functools.reduce` = help: Replace with `functools.reduce`
Found 1 error. Found 1 error.
@@ -2322,11 +2246,10 @@ fn pyproject_toml_stdin_syntax_error() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF200 Failed to parse pyproject.toml: unclosed table, expected `]` pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: unclosed table, expected `]`
--> pyproject.toml:1:9
| |
1 | [project 1 | [project
| ^ | ^ RUF200
| |
Found 1 error. Found 1 error.
@@ -2348,12 +2271,11 @@ fn pyproject_toml_stdin_schema_error() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string pyproject.toml:2:8: RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
--> pyproject.toml:2:8
| |
1 | [project] 1 | [project]
2 | name = 1 2 | name = 1
| ^ | ^ RUF200
| |
Found 1 error. Found 1 error.
@@ -2441,12 +2363,11 @@ fn pyproject_toml_stdin_schema_error_fix() {
[project] [project]
name = 1 name = 1
----- stderr ----- ----- stderr -----
RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string pyproject.toml:2:8: RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
--> pyproject.toml:2:8
| |
1 | [project] 1 | [project]
2 | name = 1 2 | name = 1
| ^ | ^ RUF200
| |
Found 1 error. Found 1 error.

View File

@@ -4996,37 +4996,6 @@ fn flake8_import_convention_invalid_aliases_config_module_name() -> Result<()> {
Ok(()) Ok(())
} }
#[test]
fn flake8_import_convention_nfkc_normalization() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint.flake8-import-conventions.aliases]
"test.module" = "_𝘥𝘦𝘣𝘶𝘨"
"#,
)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
, @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: Invalid alias for module 'test.module': alias normalizes to '__debug__', which is not allowed.
");});
Ok(())
}
#[test] #[test]
fn flake8_import_convention_unused_aliased_import() { fn flake8_import_convention_unused_aliased_import() {
assert_cmd_snapshot!( assert_cmd_snapshot!(
@@ -5059,59 +5028,6 @@ fn flake8_import_convention_unused_aliased_import_no_conflict() {
); );
} }
// https://github.com/astral-sh/ruff/issues/19842
#[test]
fn pyupgrade_up026_respects_isort_required_import_fix() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("--isolated")
.arg("check")
.arg("-")
.args(["--select", "I002,UP026"])
.arg("--config")
.arg(r#"lint.isort.required-imports=["import mock"]"#)
.arg("--fix")
.arg("--no-cache")
.pass_stdin("1\n"),
@r"
success: true
exit_code: 0
----- stdout -----
import mock
1
----- stderr -----
Found 1 error (1 fixed, 0 remaining).
"
);
}
// https://github.com/astral-sh/ruff/issues/19842
#[test]
fn pyupgrade_up026_respects_isort_required_import_from_fix() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("--isolated")
.arg("check")
.arg("-")
.args(["--select", "I002,UP026"])
.arg("--config")
.arg(r#"lint.isort.required-imports = ["from mock import mock"]"#)
.arg("--fix")
.arg("--no-cache")
.pass_stdin("from mock import mock\n"),
@r"
success: true
exit_code: 0
----- stdout -----
from mock import mock
----- stderr -----
All checks passed!
"
);
}
// See: https://github.com/astral-sh/ruff/issues/16177 // See: https://github.com/astral-sh/ruff/issues/16177
#[test] #[test]
fn flake8_pyi_redundant_none_literal() { fn flake8_pyi_redundant_none_literal() {
@@ -5473,7 +5389,7 @@ fn walrus_before_py38() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
test.py:1:2: invalid-syntax: Cannot use named assignment expression (`:=`) on Python 3.7 (syntax was added in Python 3.8) test.py:1:2: SyntaxError: Cannot use named assignment expression (`:=`) on Python 3.7 (syntax was added in Python 3.8)
Found 1 error. Found 1 error.
----- stderr ----- ----- stderr -----
@@ -5519,15 +5435,15 @@ match 2:
print("it's one") print("it's one")
"# "#
), ),
@r" @r###"
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
test.py:2:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 1 error. Found 1 error.
----- stderr ----- ----- stderr -----
" "###
); );
// syntax error on 3.9 with preview // syntax error on 3.9 with preview
@@ -5548,7 +5464,7 @@ match 2:
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
test.py:2:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 1 error. Found 1 error.
----- stderr ----- ----- stderr -----
@@ -5576,7 +5492,7 @@ fn cache_syntax_errors() -> Result<()> {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
main.py:1:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) main.py:1:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr ----- ----- stderr -----
" "
@@ -5589,7 +5505,7 @@ fn cache_syntax_errors() -> Result<()> {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
main.py:1:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) main.py:1:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr ----- ----- stderr -----
" "
@@ -5641,15 +5557,15 @@ fn cookiecutter_globbing() -> Result<()> {
.args(STDIN_BASE_OPTIONS) .args(STDIN_BASE_OPTIONS)
.arg("--select=F811") .arg("--select=F811")
.current_dir(tempdir.path()), @r" .current_dir(tempdir.path()), @r"
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
{{cookiecutter.repo_name}}/tests/maintest.py:3:8: F811 [*] Redefinition of unused `foo` from line 1: `foo` redefined here {{cookiecutter.repo_name}}/tests/maintest.py:3:8: F811 [*] Redefinition of unused `foo` from line 1
Found 1 error. Found 1 error.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.
----- stderr ----- ----- stderr -----
"); ");
}); });
Ok(()) Ok(())
@@ -5702,7 +5618,7 @@ fn semantic_syntax_errors() -> Result<()> {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
main.py:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable main.py:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
main.py:1:20: F821 Undefined name `foo` main.py:1:20: F821 Undefined name `foo`
----- stderr ----- ----- stderr -----
@@ -5716,7 +5632,7 @@ fn semantic_syntax_errors() -> Result<()> {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
main.py:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable main.py:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
main.py:1:20: F821 Undefined name `foo` main.py:1:20: F821 Undefined name `foo`
----- stderr ----- ----- stderr -----
@@ -5735,7 +5651,7 @@ fn semantic_syntax_errors() -> Result<()> {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
-:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable -:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
Found 1 error. Found 1 error.
----- stderr ----- ----- stderr -----
@@ -5834,170 +5750,23 @@ match 42: # invalid-syntax
} }
#[test] #[test]
fn up045_nested_optional_flatten_all() { fn future_annotations_preview_warning() {
let contents = "\
from typing import Optional
nested_optional: Optional[Optional[Optional[str]]] = None
";
assert_cmd_snapshot!( assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)) Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS) .args(STDIN_BASE_OPTIONS)
.args(["--select", "UP045", "--diff", "--target-version", "py312"]) .args(["--config", "lint.future-annotations = true"])
.args(["--select", "F"])
.arg("--no-preview")
.arg("-") .arg("-")
.pass_stdin(contents), .pass_stdin("1"),
@r" @r"
success: false success: true
exit_code: 1 exit_code: 0
----- stdout ----- ----- stdout -----
@@ -1,2 +1,2 @@ All checks passed!
from typing import Optional
-nested_optional: Optional[Optional[Optional[str]]] = None
+nested_optional: str | None = None
----- stderr ----- ----- stderr -----
Would fix 1 error. warning: The `lint.future-annotations` setting will have no effect because `preview` is disabled
", ",
); );
} }
#[test]
fn show_fixes_in_full_output_with_preview_enabled() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--no-cache", "--output-format", "full"])
.args(["--select", "F401"])
.arg("--preview")
.arg("-")
.pass_stdin("import math"),
@r"
success: false
exit_code: 1
----- stdout -----
F401 [*] `math` imported but unused
--> -:1:8
|
1 | import math
| ^^^^
|
help: Remove unused import: `math`
- import math
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
",
);
}
#[test]
fn rule_panic_mixed_results_concise() -> Result<()> {
let tempdir = TempDir::new()?;
// Create python files
let file_a_path = tempdir.path().join("normal.py");
let file_b_path = tempdir.path().join("panic.py");
fs::write(&file_a_path, b"import os")?;
fs::write(&file_b_path, b"print('hello, world!')")?;
insta::with_settings!({
filters => vec![
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r"\\", r"/"),
]
}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--select", "RUF9", "--preview", "--output-format=concise", "--no-cache"])
.args([file_a_path, file_b_path]),
@r"
success: false
exit_code: 2
----- stdout -----
[TMP]/normal.py:1:1: RUF900 Hey this is a stable test rule.
[TMP]/normal.py:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
[TMP]/normal.py:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
[TMP]/normal.py:1:1: RUF903 Hey this is a stable test rule with a display only fix.
[TMP]/normal.py:1:1: RUF911 Hey this is a preview test rule.
[TMP]/normal.py:1:1: RUF950 Hey this is a test rule that was redirected from another.
[TMP]/panic.py: panic: Fatal error while linting: This is a fake panic for testing.
Found 7 errors.
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
----- stderr -----
error: Panic during linting indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the stack trace above, we'd be very appreciative!
");
});
Ok(())
}
#[test]
fn rule_panic_mixed_results_full() -> Result<()> {
let tempdir = TempDir::new()?;
// Create python files
let file_a_path = tempdir.path().join("normal.py");
let file_b_path = tempdir.path().join("panic.py");
fs::write(&file_a_path, b"import os")?;
fs::write(&file_b_path, b"print('hello, world!')")?;
insta::with_settings!({
filters => vec![
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r"\\", r"/"),
]
}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--select", "RUF9", "--preview", "--output-format=full", "--no-cache"])
.args([file_a_path, file_b_path]),
@r"
success: false
exit_code: 2
----- stdout -----
RUF900 Hey this is a stable test rule.
--> [TMP]/normal.py:1:1
RUF901 [*] Hey this is a stable test rule with a safe fix.
--> [TMP]/normal.py:1:1
1 + # fix from stable-test-rule-safe-fix
2 | import os
RUF902 Hey this is a stable test rule with an unsafe fix.
--> [TMP]/normal.py:1:1
RUF903 Hey this is a stable test rule with a display only fix.
--> [TMP]/normal.py:1:1
RUF911 Hey this is a preview test rule.
--> [TMP]/normal.py:1:1
RUF950 Hey this is a test rule that was redirected from another.
--> [TMP]/normal.py:1:1
panic: Fatal error while linting: This is a fake panic for testing.
--> [TMP]/panic.py:1:1
info: panicked at crates/ruff_linter/src/rules/ruff/rules/test_rules.rs:511:9:
This is a fake panic for testing.
run with `RUST_BACKTRACE=1` environment variable to display a backtrace
Found 7 errors.
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
----- stderr -----
error: Panic during linting indicates a bug in Ruff. If you could open an issue at:
https://github.com/astral-sh/ruff/issues/new?title=%5BLinter%20panic%5D
...with the relevant file contents, the `pyproject.toml` settings, and the stack trace above, we'd be very appreciative!
");
});
Ok(())
}

View File

@@ -55,10 +55,6 @@ either a redundant alias or, if already present in the file, an `__all__` entry.
to remove third-party and standard library imports -- the fix is unsafe because the module's to remove third-party and standard library imports -- the fix is unsafe because the module's
interface changes. interface changes.
See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)
for more details on how Ruff
determines whether an import is first or third-party.
## Example ## Example
```python ```python
@@ -87,6 +83,11 @@ else:
print("numpy is not installed") print("numpy is not installed")
``` ```
## Preview
When [preview](https://docs.astral.sh/ruff/preview/) is enabled,
the criterion for determining whether an import is first-party
is stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.
## Options ## Options
- `lint.ignore-init-module-imports` - `lint.ignore-init-module-imports`
- `lint.pyflakes.allowed-unused-imports` - `lint.pyflakes.allowed-unused-imports`

View File

@@ -18,6 +18,6 @@ exit_code: 1
----- stdout ----- ----- stdout -----
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused ##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y` ##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y`
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;code=invalid-syntax;]Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) ##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;]SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr ----- ----- stderr -----

View File

@@ -18,7 +18,7 @@ exit_code: 1
----- stdout ----- ----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused input.py:1:8: F401 [*] `os` imported but unused
input.py:2:5: F821 Undefined name `y` input.py:2:5: F821 Undefined name `y`
input.py:3:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors. Found 3 errors.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.

View File

@@ -16,28 +16,25 @@ info:
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
F401 [*] `os` imported but unused input.py:1:8: F401 [*] `os` imported but unused
--> input.py:1:8
| |
1 | import os # F401 1 | import os # F401
| ^^ | ^^ F401
2 | x = y # F821 2 | x = y # F821
3 | match 42: # invalid-syntax 3 | match 42: # invalid-syntax
| |
help: Remove unused import: `os` = help: Remove unused import: `os`
F821 Undefined name `y` input.py:2:5: F821 Undefined name `y`
--> input.py:2:5
| |
1 | import os # F401 1 | import os # F401
2 | x = y # F821 2 | x = y # F821
| ^ | ^ F821
3 | match 42: # invalid-syntax 3 | match 42: # invalid-syntax
4 | case _: ... 4 | case _: ...
| |
invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
--> input.py:3:1
| |
1 | import os # F401 1 | import os # F401
2 | x = y # F821 2 | x = y # F821

View File

@@ -18,6 +18,6 @@ exit_code: 1
----- stdout ----- ----- stdout -----
::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused ::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused
::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y` ::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y`
::error title=Ruff (invalid-syntax),file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) ::error title=Ruff,file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr ----- ----- stderr -----

View File

@@ -19,60 +19,60 @@ exit_code: 1
[ [
{ {
"check_name": "F401", "check_name": "F401",
"description": "F401: `os` imported but unused", "description": "`os` imported but unused",
"severity": "major",
"fingerprint": "4dbad37161e65c72", "fingerprint": "4dbad37161e65c72",
"location": { "location": {
"path": "input.py", "path": "input.py",
"positions": { "positions": {
"begin": { "begin": {
"line": 1, "column": 8,
"column": 8 "line": 1
}, },
"end": { "end": {
"line": 1, "column": 10,
"column": 10 "line": 1
} }
} }
} },
"severity": "major"
}, },
{ {
"check_name": "F821", "check_name": "F821",
"description": "F821: Undefined name `y`", "description": "Undefined name `y`",
"severity": "major",
"fingerprint": "7af59862a085230", "fingerprint": "7af59862a085230",
"location": { "location": {
"path": "input.py", "path": "input.py",
"positions": { "positions": {
"begin": { "begin": {
"line": 2, "column": 5,
"column": 5 "line": 2
}, },
"end": { "end": {
"line": 2, "column": 6,
"column": 6 "line": 2
} }
} }
} },
"severity": "major"
}, },
{ {
"check_name": "invalid-syntax", "check_name": "syntax-error",
"description": "invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)", "description": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"severity": "major",
"fingerprint": "e558cec859bb66e8", "fingerprint": "e558cec859bb66e8",
"location": { "location": {
"path": "input.py", "path": "input.py",
"positions": { "positions": {
"begin": { "begin": {
"line": 3, "column": 1,
"column": 1 "line": 3
}, },
"end": { "end": {
"line": 3, "column": 6,
"column": 6 "line": 3
} }
} }
} },
"severity": "major"
} }
] ]
----- stderr ----- ----- stderr -----

View File

@@ -19,7 +19,7 @@ exit_code: 1
input.py: input.py:
1:8 F401 [*] `os` imported but unused 1:8 F401 [*] `os` imported but unused
2:5 F821 Undefined name `y` 2:5 F821 Undefined name `y`
3:1 invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) 3:1 SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors. Found 3 errors.
[*] 1 fixable with the `--fix` option. [*] 1 fixable with the `--fix` option.

View File

@@ -18,6 +18,6 @@ exit_code: 1
----- stdout ----- ----- stdout -----
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"} {"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"} {"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
{"cell":null,"code":"invalid-syntax","end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null} {"cell":null,"code":null,"end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}
----- stderr ----- ----- stderr -----

View File

@@ -69,7 +69,7 @@ exit_code: 1
}, },
{ {
"cell": null, "cell": null,
"code": "invalid-syntax", "code": null,
"end_location": { "end_location": {
"column": 6, "column": 6,
"row": 3 "row": 3
@@ -80,7 +80,7 @@ exit_code: 1
"column": 1, "column": 1,
"row": 3 "row": 3
}, },
"message": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)", "message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"noqa_row": null, "noqa_row": null,
"url": null "url": null
} }

View File

@@ -26,7 +26,7 @@ exit_code: 1
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure> <failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
</testcase> </testcase>
<testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1"> <testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1">
<failure message="Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure> <failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
</testcase> </testcase>
</testsuite> </testsuite>
</testsuites> </testsuites>

View File

@@ -18,6 +18,6 @@ exit_code: 1
----- stdout ----- ----- stdout -----
input.py:1: [F401] `os` imported but unused input.py:1: [F401] `os` imported but unused
input.py:2: [F821] Undefined name `y` input.py:2: [F821] Undefined name `y`
input.py:3: [invalid-syntax] Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) input.py:3: [invalid-syntax] SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
----- stderr ----- ----- stderr -----

View File

@@ -90,7 +90,7 @@ exit_code: 1
} }
} }
}, },
"message": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)" "message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
} }
], ],
"severity": "WARNING", "severity": "WARNING",

View File

@@ -83,9 +83,9 @@ exit_code: 1
} }
], ],
"message": { "message": {
"text": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)" "text": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
}, },
"ruleId": "invalid-syntax" "ruleId": null
} }
], ],
"tool": { "tool": {
@@ -95,7 +95,7 @@ exit_code: 1
"rules": [ "rules": [
{ {
"fullDescription": { "fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. 
Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n" "text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. 
They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. 
See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
}, },
"help": { "help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability" "text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"

View File

@@ -1,5 +1,3 @@
#![expect(clippy::needless_doctest_main)]
//! A library for formatting of text or programming code snippets. //! A library for formatting of text or programming code snippets.
//! //!
//! It's primary purpose is to build an ASCII-graphical representation of the snippet //! It's primary purpose is to build an ASCII-graphical representation of the snippet

View File

@@ -193,14 +193,9 @@ impl DisplaySet<'_> {
stylesheet: &Stylesheet, stylesheet: &Stylesheet,
buffer: &mut StyledBuffer, buffer: &mut StyledBuffer,
) -> fmt::Result { ) -> fmt::Result {
let hide_severity = annotation.annotation_type.is_none();
let color = get_annotation_style(&annotation.annotation_type, stylesheet); let color = get_annotation_style(&annotation.annotation_type, stylesheet);
let formatted_len = if let Some(id) = &annotation.id { let formatted_len = if let Some(id) = &annotation.id {
if hide_severity { 2 + id.len() + annotation_type_len(&annotation.annotation_type)
id.len()
} else {
2 + id.len() + annotation_type_len(&annotation.annotation_type)
}
} else { } else {
annotation_type_len(&annotation.annotation_type) annotation_type_len(&annotation.annotation_type)
}; };
@@ -214,66 +209,18 @@ impl DisplaySet<'_> {
if formatted_len == 0 { if formatted_len == 0 {
self.format_label(line_offset, &annotation.label, stylesheet, buffer) self.format_label(line_offset, &annotation.label, stylesheet, buffer)
} else { } else {
// TODO(brent) All of this complicated checking of `hide_severity` should be reverted let id = match &annotation.id {
// once we have real severities in Ruff. This code is trying to account for two Some(id) => format!("[{id}]"),
// different cases: None => String::new(),
// };
// - main diagnostic message buffer.append(
// - subdiagnostic message line_offset,
// &format!("{}{}", annotation_type_str(&annotation.annotation_type), id),
// In the first case, signaled by `hide_severity = true`, we want to print the ID (the *color,
// noqa code for a ruff lint diagnostic, e.g. `F401`, or `invalid-syntax` for a syntax );
// error) without brackets. Instead, for subdiagnostics, we actually want to print the
// severity (usually `help`) regardless of the `hide_severity` setting. This is signaled
// by an ID of `None`.
//
// With real severities these should be reported more like in ty:
//
// ```
// error[F401]: `math` imported but unused
// error[invalid-syntax]: Cannot use `match` statement on Python 3.9...
// ```
//
// instead of the current versions intended to mimic the old Ruff output format:
//
// ```
// F401 `math` imported but unused
// invalid-syntax: Cannot use `match` statement on Python 3.9...
// ```
//
// Note that the `invalid-syntax` colon is added manually in `ruff_db`, not here. We
// could eventually add a colon to Ruff lint diagnostics (`F401:`) and then make the
// colon below unconditional again.
//
// This also applies to the hard-coded `stylesheet.error()` styling of the
// hidden-severity `id`. This should just be `*color` again later, but for now we don't
// want an unformatted `id`, which is what `get_annotation_style` returns for
// `DisplayAnnotationType::None`.
let annotation_type = annotation_type_str(&annotation.annotation_type);
if let Some(id) = annotation.id {
if hide_severity {
buffer.append(line_offset, &format!("{id} "), *stylesheet.error());
} else {
buffer.append(line_offset, &format!("{annotation_type}[{id}]"), *color);
}
} else {
buffer.append(line_offset, annotation_type, *color);
}
if annotation.is_fixable {
buffer.append(line_offset, "[", stylesheet.none);
buffer.append(line_offset, "*", stylesheet.help);
buffer.append(line_offset, "]", stylesheet.none);
// In the hide-severity case, we need a space instead of the colon and space below.
if hide_severity {
buffer.append(line_offset, " ", stylesheet.none);
}
}
if !is_annotation_empty(annotation) { if !is_annotation_empty(annotation) {
if annotation.id.is_none() || !hide_severity { buffer.append(line_offset, ": ", stylesheet.none);
buffer.append(line_offset, ": ", stylesheet.none);
}
self.format_label(line_offset, &annotation.label, stylesheet, buffer)?; self.format_label(line_offset, &annotation.label, stylesheet, buffer)?;
} }
Ok(()) Ok(())
@@ -302,15 +249,11 @@ impl DisplaySet<'_> {
let lineno_color = stylesheet.line_no(); let lineno_color = stylesheet.line_no();
buffer.puts(line_offset, lineno_width, header_sigil, *lineno_color); buffer.puts(line_offset, lineno_width, header_sigil, *lineno_color);
buffer.puts(line_offset, lineno_width + 4, path, stylesheet.none); buffer.puts(line_offset, lineno_width + 4, path, stylesheet.none);
if let Some(Position { row, col, cell }) = pos { if let Some((col, row)) = pos {
if let Some(cell) = cell {
buffer.append(line_offset, ":", stylesheet.none);
buffer.append(line_offset, &format!("cell {cell}"), stylesheet.none);
}
buffer.append(line_offset, ":", stylesheet.none);
buffer.append(line_offset, row.to_string().as_str(), stylesheet.none);
buffer.append(line_offset, ":", stylesheet.none); buffer.append(line_offset, ":", stylesheet.none);
buffer.append(line_offset, col.to_string().as_str(), stylesheet.none); buffer.append(line_offset, col.to_string().as_str(), stylesheet.none);
buffer.append(line_offset, ":", stylesheet.none);
buffer.append(line_offset, row.to_string().as_str(), stylesheet.none);
} }
Ok(()) Ok(())
} }
@@ -825,7 +768,6 @@ pub(crate) struct Annotation<'a> {
pub(crate) annotation_type: DisplayAnnotationType, pub(crate) annotation_type: DisplayAnnotationType,
pub(crate) id: Option<&'a str>, pub(crate) id: Option<&'a str>,
pub(crate) label: Vec<DisplayTextFragment<'a>>, pub(crate) label: Vec<DisplayTextFragment<'a>>,
pub(crate) is_fixable: bool,
} }
/// A single line used in `DisplayList`. /// A single line used in `DisplayList`.
@@ -891,13 +833,6 @@ impl DisplaySourceAnnotation<'_> {
} }
} }
#[derive(Debug, PartialEq)]
pub(crate) struct Position {
row: usize,
col: usize,
cell: Option<usize>,
}
/// Raw line - a line which does not have the `lineno` part and is not considered /// Raw line - a line which does not have the `lineno` part and is not considered
/// a part of the snippet. /// a part of the snippet.
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
@@ -906,7 +841,7 @@ pub(crate) enum DisplayRawLine<'a> {
/// slice in the project structure. /// slice in the project structure.
Origin { Origin {
path: &'a str, path: &'a str,
pos: Option<Position>, pos: Option<(usize, usize)>,
header_type: DisplayHeaderType, header_type: DisplayHeaderType,
}, },
@@ -985,13 +920,6 @@ pub(crate) enum DisplayAnnotationType {
Help, Help,
} }
impl DisplayAnnotationType {
#[inline]
const fn is_none(&self) -> bool {
matches!(self, Self::None)
}
}
impl From<snippet::Level> for DisplayAnnotationType { impl From<snippet::Level> for DisplayAnnotationType {
fn from(at: snippet::Level) -> Self { fn from(at: snippet::Level) -> Self {
match at { match at {
@@ -1087,12 +1015,11 @@ fn format_message<'m>(
title, title,
footer, footer,
snippets, snippets,
is_fixable,
} = message; } = message;
let mut sets = vec![]; let mut sets = vec![];
let body = if !snippets.is_empty() || primary { let body = if !snippets.is_empty() || primary {
vec![format_title(level, id, title, is_fixable)] vec![format_title(level, id, title)]
} else { } else {
format_footer(level, id, title) format_footer(level, id, title)
}; };
@@ -1133,18 +1060,12 @@ fn format_message<'m>(
sets sets
} }
fn format_title<'a>( fn format_title<'a>(level: crate::Level, id: Option<&'a str>, label: &'a str) -> DisplayLine<'a> {
level: crate::Level,
id: Option<&'a str>,
label: &'a str,
is_fixable: bool,
) -> DisplayLine<'a> {
DisplayLine::Raw(DisplayRawLine::Annotation { DisplayLine::Raw(DisplayRawLine::Annotation {
annotation: Annotation { annotation: Annotation {
annotation_type: DisplayAnnotationType::from(level), annotation_type: DisplayAnnotationType::from(level),
id, id,
label: format_label(Some(label), Some(DisplayTextStyle::Emphasis)), label: format_label(Some(label), Some(DisplayTextStyle::Emphasis)),
is_fixable,
}, },
source_aligned: false, source_aligned: false,
continuation: false, continuation: false,
@@ -1163,7 +1084,6 @@ fn format_footer<'a>(
annotation_type: DisplayAnnotationType::from(level), annotation_type: DisplayAnnotationType::from(level),
id, id,
label: format_label(Some(line), None), label: format_label(Some(line), None),
is_fixable: false,
}, },
source_aligned: true, source_aligned: true,
continuation: i != 0, continuation: i != 0,
@@ -1198,28 +1118,6 @@ fn format_snippet<'m>(
let main_range = snippet.annotations.first().map(|x| x.range.start); let main_range = snippet.annotations.first().map(|x| x.range.start);
let origin = snippet.origin; let origin = snippet.origin;
let need_empty_header = origin.is_some() || is_first; let need_empty_header = origin.is_some() || is_first;
let is_file_level = snippet.annotations.iter().any(|ann| ann.is_file_level);
if is_file_level {
// TODO(brent) enable this assertion again once we set `is_file_level` for individual rules.
// It's causing too many false positives currently when the default is to make any
// annotation with a default range file-level. See
// https://github.com/astral-sh/ruff/issues/19688.
//
// assert!(
// snippet.source.is_empty(),
// "Non-empty file-level snippet that won't be rendered: {:?}",
// snippet.source
// );
let header = format_header(origin, main_range, &[], is_first, snippet.cell_index);
return DisplaySet {
display_lines: header.map_or_else(Vec::new, |header| vec![header]),
margin: Margin::new(0, 0, 0, 0, term_width, 0),
};
}
let cell_index = snippet.cell_index;
let mut body = format_body( let mut body = format_body(
snippet, snippet,
need_empty_header, need_empty_header,
@@ -1228,13 +1126,7 @@ fn format_snippet<'m>(
anonymized_line_numbers, anonymized_line_numbers,
cut_indicator, cut_indicator,
); );
let header = format_header( let header = format_header(origin, main_range, &body.display_lines, is_first);
origin,
main_range,
&body.display_lines,
is_first,
cell_index,
);
if let Some(header) = header { if let Some(header) = header {
body.display_lines.insert(0, header); body.display_lines.insert(0, header);
@@ -1254,7 +1146,6 @@ fn format_header<'a>(
main_range: Option<usize>, main_range: Option<usize>,
body: &[DisplayLine<'_>], body: &[DisplayLine<'_>],
is_first: bool, is_first: bool,
cell_index: Option<usize>,
) -> Option<DisplayLine<'a>> { ) -> Option<DisplayLine<'a>> {
let display_header = if is_first { let display_header = if is_first {
DisplayHeaderType::Initial DisplayHeaderType::Initial
@@ -1278,31 +1169,20 @@ fn format_header<'a>(
.. ..
} = item } = item
{ {
// At the very end of the `main_range`, report the location as the first character if main_range >= range.0 && main_range < range.1 + max(*end_line as usize, 1) {
// in the next line instead of falling back to the default location of `1:1`. This
// is another divergence from upstream.
let end_of_range = range.1 + max(*end_line as usize, 1);
if main_range >= range.0 && main_range < end_of_range {
let char_column = text[0..(main_range - range.0).min(text.len())] let char_column = text[0..(main_range - range.0).min(text.len())]
.chars() .chars()
.count(); .count();
col = char_column + 1; col = char_column + 1;
line_offset = lineno.unwrap_or(1); line_offset = lineno.unwrap_or(1);
break; break;
} else if main_range == end_of_range {
line_offset = lineno.map_or(1, |line| line + 1);
break;
} }
} }
} }
return Some(DisplayLine::Raw(DisplayRawLine::Origin { return Some(DisplayLine::Raw(DisplayRawLine::Origin {
path, path,
pos: Some(Position { pos: Some((line_offset, col)),
row: line_offset,
col,
cell: cell_index,
}),
header_type: display_header, header_type: display_header,
})); }));
} }
@@ -1592,7 +1472,6 @@ fn format_body<'m>(
annotation_type, annotation_type,
id: None, id: None,
label: format_label(annotation.label, None), label: format_label(annotation.label, None),
is_fixable: false,
}, },
range, range,
annotation_type: DisplayAnnotationType::from(annotation.level), annotation_type: DisplayAnnotationType::from(annotation.level),
@@ -1632,7 +1511,6 @@ fn format_body<'m>(
annotation_type, annotation_type,
id: None, id: None,
label: vec![], label: vec![],
is_fixable: false,
}, },
range, range,
annotation_type: DisplayAnnotationType::from(annotation.level), annotation_type: DisplayAnnotationType::from(annotation.level),
@@ -1702,7 +1580,6 @@ fn format_body<'m>(
annotation_type, annotation_type,
id: None, id: None,
label: format_label(annotation.label, None), label: format_label(annotation.label, None),
is_fixable: false,
}, },
range, range,
annotation_type: DisplayAnnotationType::from(annotation.level), annotation_type: DisplayAnnotationType::from(annotation.level),

View File

@@ -22,7 +22,6 @@ pub struct Message<'a> {
pub(crate) title: &'a str, pub(crate) title: &'a str,
pub(crate) snippets: Vec<Snippet<'a>>, pub(crate) snippets: Vec<Snippet<'a>>,
pub(crate) footer: Vec<Message<'a>>, pub(crate) footer: Vec<Message<'a>>,
pub(crate) is_fixable: bool,
} }
impl<'a> Message<'a> { impl<'a> Message<'a> {
@@ -50,15 +49,6 @@ impl<'a> Message<'a> {
self.footer.extend(footer); self.footer.extend(footer);
self self
} }
/// Whether or not the diagnostic for this message is fixable.
///
/// This is rendered as a `[*]` indicator after the `id` in an annotation header, if the
/// annotation also has `Level::None`.
pub fn is_fixable(mut self, yes: bool) -> Self {
self.is_fixable = yes;
self
}
} }
/// Structure containing the slice of text to be annotated and /// Structure containing the slice of text to be annotated and
@@ -75,10 +65,6 @@ pub struct Snippet<'a> {
pub(crate) annotations: Vec<Annotation<'a>>, pub(crate) annotations: Vec<Annotation<'a>>,
pub(crate) fold: bool, pub(crate) fold: bool,
/// The optional cell index in a Jupyter notebook, used for reporting source locations along
/// with the ranges on `annotations`.
pub(crate) cell_index: Option<usize>,
} }
impl<'a> Snippet<'a> { impl<'a> Snippet<'a> {
@@ -89,7 +75,6 @@ impl<'a> Snippet<'a> {
source, source,
annotations: vec![], annotations: vec![],
fold: false, fold: false,
cell_index: None,
} }
} }
@@ -118,12 +103,6 @@ impl<'a> Snippet<'a> {
self.fold = fold; self.fold = fold;
self self
} }
/// Attach a Jupyter notebook cell index.
pub fn cell_index(mut self, index: Option<usize>) -> Self {
self.cell_index = index;
self
}
} }
/// An annotation for a [`Snippet`]. /// An annotation for a [`Snippet`].
@@ -135,7 +114,6 @@ pub struct Annotation<'a> {
pub(crate) range: Range<usize>, pub(crate) range: Range<usize>,
pub(crate) label: Option<&'a str>, pub(crate) label: Option<&'a str>,
pub(crate) level: Level, pub(crate) level: Level,
pub(crate) is_file_level: bool,
} }
impl<'a> Annotation<'a> { impl<'a> Annotation<'a> {
@@ -143,11 +121,6 @@ impl<'a> Annotation<'a> {
self.label = Some(label); self.label = Some(label);
self self
} }
pub fn is_file_level(mut self, yes: bool) -> Self {
self.is_file_level = yes;
self
}
} }
/// Types of annotations. /// Types of annotations.
@@ -172,7 +145,6 @@ impl Level {
title, title,
snippets: vec![], snippets: vec![],
footer: vec![], footer: vec![],
is_fixable: false,
} }
} }
@@ -182,7 +154,6 @@ impl Level {
range: span, range: span,
label: None, label: None,
level: self, level: self,
is_file_level: false,
} }
} }
} }

View File

@@ -86,5 +86,5 @@ walltime = ["ruff_db/os", "ty_project", "divan"]
[target.'cfg(target_os = "windows")'.dev-dependencies] [target.'cfg(target_os = "windows")'.dev-dependencies]
mimalloc = { workspace = true } mimalloc = { workspace = true }
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dev-dependencies] [target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
tikv-jemallocator = { workspace = true } tikv-jemallocator = { workspace = true }

View File

@@ -21,8 +21,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
any( any(
target_arch = "x86_64", target_arch = "x86_64",
target_arch = "aarch64", target_arch = "aarch64",
target_arch = "powerpc64", target_arch = "powerpc64"
target_arch = "riscv64"
) )
))] ))]
#[global_allocator] #[global_allocator]

View File

@@ -18,8 +18,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
any( any(
target_arch = "x86_64", target_arch = "x86_64",
target_arch = "aarch64", target_arch = "aarch64",
target_arch = "powerpc64", target_arch = "powerpc64"
target_arch = "riscv64"
) )
))] ))]
#[global_allocator] #[global_allocator]

View File

@@ -26,8 +26,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
any( any(
target_arch = "x86_64", target_arch = "x86_64",
target_arch = "aarch64", target_arch = "aarch64",
target_arch = "powerpc64", target_arch = "powerpc64"
target_arch = "riscv64"
) )
))] ))]
#[global_allocator] #[global_allocator]
@@ -43,8 +42,7 @@ static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
any( any(
target_arch = "x86_64", target_arch = "x86_64",
target_arch = "aarch64", target_arch = "aarch64",
target_arch = "powerpc64", target_arch = "powerpc64"
target_arch = "riscv64"
) )
))] ))]
#[unsafe(export_name = "_rjem_malloc_conf")] #[unsafe(export_name = "_rjem_malloc_conf")]
@@ -79,11 +77,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
b.iter_batched( b.iter_batched(
|| parsed.clone(), || parsed.clone(),
|parsed| { |parsed| {
// Assert that file contains no parse errors
assert!(parsed.has_valid_syntax());
let path = case.path(); let path = case.path();
lint_only( let result = lint_only(
&path, &path,
None, None,
settings, settings,
@@ -91,7 +86,10 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
&SourceKind::Python(case.code().to_string()), &SourceKind::Python(case.code().to_string()),
PySourceType::from(path.as_path()), PySourceType::from(path.as_path()),
ParseSource::Precomputed(parsed), ParseSource::Precomputed(parsed),
) );
// Assert that file contains no parse errors
assert!(!result.has_syntax_errors());
}, },
criterion::BatchSize::SmallInput, criterion::BatchSize::SmallInput,
); );

View File

@@ -20,8 +20,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
any( any(
target_arch = "x86_64", target_arch = "x86_64",
target_arch = "aarch64", target_arch = "aarch64",
target_arch = "powerpc64", target_arch = "powerpc64"
target_arch = "riscv64"
) )
))] ))]
#[global_allocator] #[global_allocator]

View File

@@ -450,6 +450,9 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
r#" r#"
class C: class C:
def f(self: "C"): def f(self: "C"):
self.a = ""
self.b = ""
if isinstance(self.a, str): if isinstance(self.a, str):
return return
@@ -463,56 +466,6 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
return return
if isinstance(self.b, str): if isinstance(self.b, str):
return return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
self.a = ""
self.b = ""
"#,
)
},
|case| {
let Case { db, .. } = case;
let result = db.check();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
);
});
}
fn benchmark_complex_constrained_attributes_3(criterion: &mut Criterion) {
setup_rayon();
criterion.bench_function("ty_micro[complex_constrained_attributes_3]", |b| {
b.iter_batched_ref(
|| {
// This is a regression test for https://github.com/astral-sh/ty/issues/758
setup_micro_case(
r#"
class GridOut:
def __init__(self: "GridOut") -> None:
self._buffer = b""
def _read_size_or_line(self: "GridOut", size: int = -1):
if size > self._position:
size = self._position
pass
if size == 0:
return bytes()
while size > 0:
if self._buffer:
buf = self._buffer
self._buffer = b""
else:
buf = b""
if len(buf) > size:
self._buffer = buf
self._position -= len(self._buffer)
"#, "#,
) )
}, },
@@ -715,7 +668,6 @@ criterion_group!(
benchmark_tuple_implicit_instance_attributes, benchmark_tuple_implicit_instance_attributes,
benchmark_complex_constrained_attributes_1, benchmark_complex_constrained_attributes_1,
benchmark_complex_constrained_attributes_2, benchmark_complex_constrained_attributes_2,
benchmark_complex_constrained_attributes_3,
benchmark_many_enum_members, benchmark_many_enum_members,
); );
criterion_group!(project, anyio, attrs, hydra, datetype); criterion_group!(project, anyio, attrs, hydra, datetype);

View File

@@ -218,24 +218,6 @@ static TANJUN: std::sync::LazyLock<Benchmark<'static>> = std::sync::LazyLock::ne
) )
}); });
static STATIC_FRAME: std::sync::LazyLock<Benchmark<'static>> = std::sync::LazyLock::new(|| {
Benchmark::new(
RealWorldProject {
name: "static-frame",
repository: "https://github.com/static-frame/static-frame",
commit: "34962b41baca5e7f98f5a758d530bff02748a421",
paths: vec![SystemPath::new("static_frame")],
// N.B. `arraykit` is installed as a dependency during mypy_primer runs,
// but it takes much longer to be installed in a Codspeed run than it does in a mypy_primer run
// (seems to be built from source on the Codspeed CI runners for some reason).
dependencies: vec!["numpy"],
max_dep_date: "2025-08-09",
python_version: PythonVersion::PY311,
},
500,
)
});
#[track_caller] #[track_caller]
fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) { fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
bencher bencher
@@ -250,7 +232,7 @@ fn small(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark); run_single_threaded(bencher, benchmark);
} }
#[bench(args=[&*COLOUR_SCIENCE, &*PANDAS, &*STATIC_FRAME], sample_size=1, sample_count=3)] #[bench(args=[&*COLOUR_SCIENCE, &*PANDAS], sample_size=1, sample_count=3)]
fn medium(bencher: Bencher, benchmark: &Benchmark) { fn medium(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark); run_single_threaded(bencher, benchmark);
} }

View File

@@ -14,7 +14,6 @@ license = { workspace = true }
ruff_annotate_snippets = { workspace = true } ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true, optional = true } ruff_cache = { workspace = true, optional = true }
ruff_diagnostics = { workspace = true } ruff_diagnostics = { workspace = true }
ruff_memory_usage = { workspace = true }
ruff_notebook = { workspace = true } ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true, features = ["get-size"] } ruff_python_ast = { workspace = true, features = ["get-size"] }
ruff_python_parser = { workspace = true } ruff_python_parser = { workspace = true }
@@ -34,14 +33,12 @@ glob = { workspace = true }
ignore = { workspace = true, optional = true } ignore = { workspace = true, optional = true }
matchit = { workspace = true } matchit = { workspace = true }
path-slash = { workspace = true } path-slash = { workspace = true }
pathdiff = { workspace = true }
quick-junit = { workspace = true, optional = true } quick-junit = { workspace = true, optional = true }
rustc-hash = { workspace = true } rustc-hash = { workspace = true }
salsa = { workspace = true } salsa = { workspace = true }
schemars = { workspace = true, optional = true } schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true } serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true } serde_json = { workspace = true, optional = true }
similar = { workspace = true }
thiserror = { workspace = true } thiserror = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true } tracing-subscriber = { workspace = true, optional = true }
@@ -54,7 +51,7 @@ web-time = { version = "1.1.0" }
etcetera = { workspace = true, optional = true } etcetera = { workspace = true, optional = true }
[dev-dependencies] [dev-dependencies]
insta = { workspace = true, features = ["filters"] } insta = { workspace = true }
tempfile = { workspace = true } tempfile = { workspace = true }
[features] [features]

View File

@@ -8,7 +8,6 @@ use ruff_text_size::{Ranged, TextRange, TextSize};
pub use self::render::{ pub use self::render::{
DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary, DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary,
github::{DisplayGithubDiagnostics, GithubRenderer},
}; };
use crate::{Db, files::File}; use crate::{Db, files::File};
@@ -213,7 +212,7 @@ impl Diagnostic {
/// The type returned implements the `std::fmt::Display` trait. In most /// The type returned implements the `std::fmt::Display` trait. In most
/// cases, just converting it to a string (or printing it) will do what /// cases, just converting it to a string (or printing it) will do what
/// you want. /// you want.
pub fn concise_message(&self) -> ConciseMessage<'_> { pub fn concise_message(&self) -> ConciseMessage {
let main = self.inner.message.as_str(); let main = self.inner.message.as_str();
let annotation = self let annotation = self
.primary_annotation() .primary_annotation()
@@ -255,11 +254,6 @@ impl Diagnostic {
.find(|ann| ann.is_primary) .find(|ann| ann.is_primary)
} }
/// Returns a mutable borrow of all annotations of this diagnostic.
pub fn annotations_mut(&mut self) -> impl Iterator<Item = &mut Annotation> {
Arc::make_mut(&mut self.inner).annotations.iter_mut()
}
/// Returns the "primary" span of this diagnostic if one exists. /// Returns the "primary" span of this diagnostic if one exists.
/// ///
/// When there are multiple primary spans, then the first one that was /// When there are multiple primary spans, then the first one that was
@@ -316,21 +310,11 @@ impl Diagnostic {
&self.inner.subs &self.inner.subs
} }
/// Returns a mutable borrow of the sub-diagnostics of this diagnostic.
pub fn sub_diagnostics_mut(&mut self) -> impl Iterator<Item = &mut SubDiagnostic> {
Arc::make_mut(&mut self.inner).subs.iter_mut()
}
/// Returns the fix for this diagnostic if it exists. /// Returns the fix for this diagnostic if it exists.
pub fn fix(&self) -> Option<&Fix> { pub fn fix(&self) -> Option<&Fix> {
self.inner.fix.as_ref() self.inner.fix.as_ref()
} }
#[cfg(test)]
pub(crate) fn fix_mut(&mut self) -> Option<&mut Fix> {
Arc::make_mut(&mut self.inner).fix.as_mut()
}
/// Set the fix for this diagnostic. /// Set the fix for this diagnostic.
pub fn set_fix(&mut self, fix: Fix) { pub fn set_fix(&mut self, fix: Fix) {
debug_assert!( debug_assert!(
@@ -350,13 +334,6 @@ impl Diagnostic {
self.fix().is_some() self.fix().is_some()
} }
/// Returns `true` if the diagnostic is [`fixable`](Diagnostic::fixable) and applies at the
/// configured applicability level.
pub fn has_applicable_fix(&self, config: &DisplayDiagnosticConfig) -> bool {
self.fix()
.is_some_and(|fix| fix.applies(config.fix_applicability))
}
/// Returns the offset of the parent statement for this diagnostic if it exists. /// Returns the offset of the parent statement for this diagnostic if it exists.
/// ///
/// This is primarily used for checking noqa/secondary code suppressions. /// This is primarily used for checking noqa/secondary code suppressions.
@@ -389,16 +366,6 @@ impl Diagnostic {
self.inner.secondary_code.as_ref() self.inner.secondary_code.as_ref()
} }
/// Returns the secondary code for the diagnostic if it exists, or the lint name otherwise.
///
/// This is a common pattern for Ruff diagnostics, which want to use the noqa code in general,
/// but fall back on the `invalid-syntax` identifier for syntax errors, which don't have
/// secondary codes.
pub fn secondary_code_or_id(&self) -> &str {
self.secondary_code()
.map_or_else(|| self.inner.id.as_str(), SecondaryCode::as_str)
}
/// Set the secondary code for this diagnostic. /// Set the secondary code for this diagnostic.
pub fn set_secondary_code(&mut self, code: SecondaryCode) { pub fn set_secondary_code(&mut self, code: SecondaryCode) {
Arc::make_mut(&mut self.inner).secondary_code = Some(code); Arc::make_mut(&mut self.inner).secondary_code = Some(code);
@@ -455,26 +422,24 @@ impl Diagnostic {
/// Computes the start source location for the message. /// Computes the start source location for the message.
/// ///
/// Returns None if the diagnostic has no primary span, if its file is not a `SourceFile`, /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
/// or if the span has no range. /// span has no range.
pub fn ruff_start_location(&self) -> Option<LineColumn> { pub fn expect_ruff_start_location(&self) -> LineColumn {
Some( self.expect_primary_span()
self.ruff_source_file()? .expect_ruff_file()
.to_source_code() .to_source_code()
.line_column(self.range()?.start()), .line_column(self.expect_range().start())
)
} }
/// Computes the end source location for the message. /// Computes the end source location for the message.
/// ///
/// Returns None if the diagnostic has no primary span, if its file is not a `SourceFile`, /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
/// or if the span has no range. /// span has no range.
pub fn ruff_end_location(&self) -> Option<LineColumn> { pub fn expect_ruff_end_location(&self) -> LineColumn {
Some( self.expect_primary_span()
self.ruff_source_file()? .expect_ruff_file()
.to_source_code() .to_source_code()
.line_column(self.range()?.end()), .line_column(self.expect_range().end())
)
} }
/// Returns the [`SourceFile`] which the message belongs to. /// Returns the [`SourceFile`] which the message belongs to.
@@ -495,22 +460,22 @@ impl Diagnostic {
self.primary_span()?.range() self.primary_span()?.range()
} }
/// Returns the [`TextRange`] for the diagnostic.
///
/// Panics if the diagnostic has no primary span or if the span has no range.
pub fn expect_range(&self) -> TextRange {
self.range().expect("Expected a range for the primary span")
}
/// Returns the ordering of diagnostics based on the start of their ranges, if they have any. /// Returns the ordering of diagnostics based on the start of their ranges, if they have any.
/// ///
/// Panics if either diagnostic has no primary span, or if its file is not a `SourceFile`. /// Panics if either diagnostic has no primary span, if the span has no range, or if its file is
/// not a `SourceFile`.
pub fn ruff_start_ordering(&self, other: &Self) -> std::cmp::Ordering { pub fn ruff_start_ordering(&self, other: &Self) -> std::cmp::Ordering {
let a = ( (self.expect_ruff_source_file(), self.expect_range().start()).cmp(&(
self.severity().is_fatal(),
self.expect_ruff_source_file(),
self.range().map(|r| r.start()),
);
let b = (
other.severity().is_fatal(),
other.expect_ruff_source_file(), other.expect_ruff_source_file(),
other.range().map(|r| r.start()), other.expect_range().start(),
); ))
a.cmp(&b)
} }
} }
@@ -646,11 +611,6 @@ impl SubDiagnostic {
&self.inner.annotations &self.inner.annotations
} }
/// Returns a mutable borrow of the annotations of this sub-diagnostic.
pub fn annotations_mut(&mut self) -> impl Iterator<Item = &mut Annotation> {
self.inner.annotations.iter_mut()
}
/// Returns a shared borrow of the "primary" annotation of this diagnostic /// Returns a shared borrow of the "primary" annotation of this diagnostic
/// if one exists. /// if one exists.
/// ///
@@ -684,7 +644,7 @@ impl SubDiagnostic {
/// The type returned implements the `std::fmt::Display` trait. In most /// The type returned implements the `std::fmt::Display` trait. In most
/// cases, just converting it to a string (or printing it) will do what /// cases, just converting it to a string (or printing it) will do what
/// you want. /// you want.
pub fn concise_message(&self) -> ConciseMessage<'_> { pub fn concise_message(&self) -> ConciseMessage {
let main = self.inner.message.as_str(); let main = self.inner.message.as_str();
let annotation = self let annotation = self
.primary_annotation() .primary_annotation()
@@ -742,11 +702,6 @@ pub struct Annotation {
is_primary: bool, is_primary: bool,
/// The diagnostic tags associated with this annotation. /// The diagnostic tags associated with this annotation.
tags: Vec<DiagnosticTag>, tags: Vec<DiagnosticTag>,
/// Whether this annotation is a file-level or full-file annotation.
///
/// When set, rendering will only include the file's name and (optional) range. Everything else
/// is omitted, including any file snippet or message.
is_file_level: bool,
} }
impl Annotation { impl Annotation {
@@ -765,7 +720,6 @@ impl Annotation {
message: None, message: None,
is_primary: true, is_primary: true,
tags: Vec::new(), tags: Vec::new(),
is_file_level: false,
} }
} }
@@ -782,7 +736,6 @@ impl Annotation {
message: None, message: None,
is_primary: false, is_primary: false,
tags: Vec::new(), tags: Vec::new(),
is_file_level: false,
} }
} }
@@ -848,21 +801,6 @@ impl Annotation {
pub fn push_tag(&mut self, tag: DiagnosticTag) { pub fn push_tag(&mut self, tag: DiagnosticTag) {
self.tags.push(tag); self.tags.push(tag);
} }
/// Set whether or not this annotation is file-level.
///
/// File-level annotations are only rendered with their file name and range, if available. This
/// is intended for backwards compatibility with Ruff diagnostics, which historically used
/// `TextRange::default` to indicate a file-level diagnostic. In the new diagnostic model, a
/// [`Span`] with a range of `None` should be used instead, as mentioned in the `Span`
/// documentation.
///
/// TODO(brent) update this usage in Ruff and remove `is_file_level` entirely. See
/// <https://github.com/astral-sh/ruff/issues/19688>, especially my first comment, for more
/// details.
pub fn set_file_level(&mut self, yes: bool) {
self.is_file_level = yes;
}
} }
/// Tags that can be associated with an annotation. /// Tags that can be associated with an annotation.
@@ -1129,7 +1067,7 @@ enum DiagnosticSource {
impl DiagnosticSource { impl DiagnosticSource {
/// Returns this input as a `SourceCode` for convenient querying. /// Returns this input as a `SourceCode` for convenient querying.
fn as_source_code(&self) -> SourceCode<'_, '_> { fn as_source_code(&self) -> SourceCode {
match self { match self {
DiagnosticSource::Ty(input) => SourceCode::new(input.text.as_str(), &input.line_index), DiagnosticSource::Ty(input) => SourceCode::new(input.text.as_str(), &input.line_index),
DiagnosticSource::Ruff(source) => SourceCode::new(source.source_text(), source.index()), DiagnosticSource::Ruff(source) => SourceCode::new(source.source_text(), source.index()),
@@ -1309,10 +1247,6 @@ pub struct DisplayDiagnosticConfig {
hide_severity: bool, hide_severity: bool,
/// Whether to show the availability of a fix in a diagnostic. /// Whether to show the availability of a fix in a diagnostic.
show_fix_status: bool, show_fix_status: bool,
/// Whether to show the diff for an available fix after the main diagnostic.
///
/// This currently only applies to `DiagnosticFormat::Full`.
show_fix_diff: bool,
/// The lowest applicability that should be shown when reporting diagnostics. /// The lowest applicability that should be shown when reporting diagnostics.
fix_applicability: Applicability, fix_applicability: Applicability,
} }
@@ -1360,14 +1294,6 @@ impl DisplayDiagnosticConfig {
} }
} }
/// Whether to show a diff for an available fix after the main diagnostic.
pub fn show_fix_diff(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
show_fix_diff: yes,
..self
}
}
/// Set the lowest fix applicability that should be shown. /// Set the lowest fix applicability that should be shown.
/// ///
/// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix /// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
@@ -1391,7 +1317,6 @@ impl Default for DisplayDiagnosticConfig {
preview: false, preview: false,
hide_severity: false, hide_severity: false,
show_fix_status: false, show_fix_status: false,
show_fix_diff: false,
fix_applicability: Applicability::Safe, fix_applicability: Applicability::Safe,
} }
} }
@@ -1445,16 +1370,6 @@ pub enum DiagnosticFormat {
/// Print diagnostics in the format expected by JUnit. /// Print diagnostics in the format expected by JUnit.
#[cfg(feature = "junit")] #[cfg(feature = "junit")]
Junit, Junit,
/// Print diagnostics in the JSON format used by GitLab [Code Quality] reports.
///
/// [Code Quality]: https://docs.gitlab.com/ci/testing/code_quality/#code-quality-report-format
#[cfg(feature = "serde")]
Gitlab,
/// Print diagnostics in the format used by [GitHub Actions] workflow error annotations.
///
/// [GitHub Actions]: https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-commands#setting-an-error-message
Github,
} }
/// A representation of the kinds of messages inside a diagnostic. /// A representation of the kinds of messages inside a diagnostic.

View File

@@ -2,15 +2,15 @@ use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::path::Path; use std::path::Path;
use full::FullRenderer;
use ruff_annotate_snippets::{ use ruff_annotate_snippets::{
Annotation as AnnotateAnnotation, Level as AnnotateLevel, Message as AnnotateMessage, Annotation as AnnotateAnnotation, Level as AnnotateLevel, Message as AnnotateMessage,
Snippet as AnnotateSnippet, Renderer as AnnotateRenderer, Snippet as AnnotateSnippet,
}; };
use ruff_notebook::{Notebook, NotebookIndex}; use ruff_notebook::{Notebook, NotebookIndex};
use ruff_source_file::{LineIndex, OneIndexed, SourceCode}; use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{TextLen, TextRange, TextSize}; use ruff_text_size::{TextLen, TextRange, TextSize};
use crate::diagnostic::stylesheet::DiagnosticStylesheet;
use crate::{ use crate::{
Db, Db,
files::File, files::File,
@@ -25,15 +25,11 @@ use super::{
use azure::AzureRenderer; use azure::AzureRenderer;
use concise::ConciseRenderer; use concise::ConciseRenderer;
use github::GithubRenderer;
use pylint::PylintRenderer; use pylint::PylintRenderer;
mod azure; mod azure;
mod concise; mod concise;
mod full; mod full;
pub mod github;
#[cfg(feature = "serde")]
mod gitlab;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
mod json; mod json;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
@@ -115,7 +111,37 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
ConciseRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?; ConciseRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?;
} }
DiagnosticFormat::Full => { DiagnosticFormat::Full => {
FullRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?; let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};
let mut renderer = if self.config.color {
AnnotateRenderer::styled()
} else {
AnnotateRenderer::plain()
}
.cut_indicator("");
renderer = renderer
.error(stylesheet.error)
.warning(stylesheet.warning)
.info(stylesheet.info)
.note(stylesheet.note)
.help(stylesheet.help)
.line_no(stylesheet.line_no)
.emphasis(stylesheet.emphasis)
.none(stylesheet.none);
for diag in self.diagnostics {
let resolved = Resolved::new(self.resolver, diag);
let renderable = resolved.to_renderable(self.config.context);
for diag in renderable.diagnostics.iter() {
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
}
writeln!(f)?;
}
} }
DiagnosticFormat::Azure => { DiagnosticFormat::Azure => {
AzureRenderer::new(self.resolver).render(f, self.diagnostics)?; AzureRenderer::new(self.resolver).render(f, self.diagnostics)?;
@@ -140,13 +166,6 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
DiagnosticFormat::Junit => { DiagnosticFormat::Junit => {
junit::JunitRenderer::new(self.resolver).render(f, self.diagnostics)?; junit::JunitRenderer::new(self.resolver).render(f, self.diagnostics)?;
} }
#[cfg(feature = "serde")]
DiagnosticFormat::Gitlab => {
gitlab::GitlabRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
DiagnosticFormat::Github => {
GithubRenderer::new(self.resolver, "ty").render(f, self.diagnostics)?;
}
} }
Ok(()) Ok(())
@@ -172,13 +191,9 @@ struct Resolved<'a> {
impl<'a> Resolved<'a> { impl<'a> Resolved<'a> {
/// Creates a new resolved set of diagnostics. /// Creates a new resolved set of diagnostics.
fn new( fn new(resolver: &'a dyn FileResolver, diag: &'a Diagnostic) -> Resolved<'a> {
resolver: &'a dyn FileResolver,
diag: &'a Diagnostic,
config: &DisplayDiagnosticConfig,
) -> Resolved<'a> {
let mut diagnostics = vec![]; let mut diagnostics = vec![];
diagnostics.push(ResolvedDiagnostic::from_diagnostic(resolver, config, diag)); diagnostics.push(ResolvedDiagnostic::from_diagnostic(resolver, diag));
for sub in &diag.inner.subs { for sub in &diag.inner.subs {
diagnostics.push(ResolvedDiagnostic::from_sub_diagnostic(resolver, sub)); diagnostics.push(ResolvedDiagnostic::from_sub_diagnostic(resolver, sub));
} }
@@ -208,14 +223,12 @@ struct ResolvedDiagnostic<'a> {
id: Option<String>, id: Option<String>,
message: String, message: String,
annotations: Vec<ResolvedAnnotation<'a>>, annotations: Vec<ResolvedAnnotation<'a>>,
is_fixable: bool,
} }
impl<'a> ResolvedDiagnostic<'a> { impl<'a> ResolvedDiagnostic<'a> {
/// Resolve a single diagnostic. /// Resolve a single diagnostic.
fn from_diagnostic( fn from_diagnostic(
resolver: &'a dyn FileResolver, resolver: &'a dyn FileResolver,
config: &DisplayDiagnosticConfig,
diag: &'a Diagnostic, diag: &'a Diagnostic,
) -> ResolvedDiagnostic<'a> { ) -> ResolvedDiagnostic<'a> {
let annotations: Vec<_> = diag let annotations: Vec<_> = diag
@@ -223,43 +236,18 @@ impl<'a> ResolvedDiagnostic<'a> {
.annotations .annotations
.iter() .iter()
.filter_map(|ann| { .filter_map(|ann| {
let path = ann let path = ann.span.file.path(resolver);
.span
.file
.relative_path(resolver)
.to_str()
.unwrap_or_else(|| ann.span.file.path(resolver));
let diagnostic_source = ann.span.file.diagnostic_source(resolver); let diagnostic_source = ann.span.file.diagnostic_source(resolver);
ResolvedAnnotation::new(path, &diagnostic_source, ann, resolver) ResolvedAnnotation::new(path, &diagnostic_source, ann)
}) })
.collect(); .collect();
let id = Some(diag.inner.id.to_string());
let id = if config.hide_severity { let message = diag.inner.message.as_str().to_string();
// Either the rule code alone (e.g. `F401`), or the lint id with a colon (e.g.
// `invalid-syntax:`). When Ruff gets real severities, we should put the colon back in
// `DisplaySet::format_annotation` for both cases, but this is a small hack to improve
// the formatting of syntax errors for now. This should also be kept consistent with the
// concise formatting.
Some(diag.secondary_code().map_or_else(
|| format!("{id}:", id = diag.inner.id),
|code| code.to_string(),
))
} else {
Some(diag.inner.id.to_string())
};
let level = if config.hide_severity {
AnnotateLevel::None
} else {
diag.inner.severity.to_annotate()
};
ResolvedDiagnostic { ResolvedDiagnostic {
level, level: diag.inner.severity.to_annotate(),
id, id,
message: diag.inner.message.as_str().to_string(), message,
annotations, annotations,
is_fixable: diag.has_applicable_fix(config),
} }
} }
@@ -273,14 +261,9 @@ impl<'a> ResolvedDiagnostic<'a> {
.annotations .annotations
.iter() .iter()
.filter_map(|ann| { .filter_map(|ann| {
let path = ann let path = ann.span.file.path(resolver);
.span
.file
.relative_path(resolver)
.to_str()
.unwrap_or_else(|| ann.span.file.path(resolver));
let diagnostic_source = ann.span.file.diagnostic_source(resolver); let diagnostic_source = ann.span.file.diagnostic_source(resolver);
ResolvedAnnotation::new(path, &diagnostic_source, ann, resolver) ResolvedAnnotation::new(path, &diagnostic_source, ann)
}) })
.collect(); .collect();
ResolvedDiagnostic { ResolvedDiagnostic {
@@ -288,7 +271,6 @@ impl<'a> ResolvedDiagnostic<'a> {
id: None, id: None,
message: diag.inner.message.as_str().to_string(), message: diag.inner.message.as_str().to_string(),
annotations, annotations,
is_fixable: false,
} }
} }
@@ -319,49 +301,20 @@ impl<'a> ResolvedDiagnostic<'a> {
&prev.diagnostic_source.as_source_code(), &prev.diagnostic_source.as_source_code(),
context, context,
prev.line_end, prev.line_end,
prev.notebook_index.as_ref(),
) )
.get(); .get();
let this_context_begins = context_before( let this_context_begins = context_before(
&ann.diagnostic_source.as_source_code(), &ann.diagnostic_source.as_source_code(),
context, context,
ann.line_start, ann.line_start,
ann.notebook_index.as_ref(),
) )
.get(); .get();
// For notebooks, check whether the end of the
// previous annotation and the start of the current
// annotation are in different cells.
let prev_cell_index = prev.notebook_index.as_ref().map(|notebook_index| {
let prev_end = prev
.diagnostic_source
.as_source_code()
.line_column(prev.range.end());
notebook_index.cell(prev_end.line).unwrap_or_default().get()
});
let this_cell_index = ann.notebook_index.as_ref().map(|notebook_index| {
let this_start = ann
.diagnostic_source
.as_source_code()
.line_column(ann.range.start());
notebook_index
.cell(this_start.line)
.unwrap_or_default()
.get()
});
let in_different_cells = prev_cell_index != this_cell_index;
// The boundary case here is when `prev_context_ends` // The boundary case here is when `prev_context_ends`
// is exactly one less than `this_context_begins`. In // is exactly one less than `this_context_begins`. In
// that case, the context windows are adjacent and we // that case, the context windows are adjacent and we
// should fall through below to add this annotation to // should fall through below to add this annotation to
// the existing snippet. // the existing snippet.
// if this_context_begins.saturating_sub(prev_context_ends) > 1 {
// For notebooks, also check that the context windows
// are in the same cell. Windows from different cells
// should never be considered adjacent.
if in_different_cells || this_context_begins.saturating_sub(prev_context_ends) > 1 {
snippet_by_path snippet_by_path
.entry(path) .entry(path)
.or_default() .or_default()
@@ -385,7 +338,6 @@ impl<'a> ResolvedDiagnostic<'a> {
id: self.id.as_deref(), id: self.id.as_deref(),
message: &self.message, message: &self.message,
snippets_by_input, snippets_by_input,
is_fixable: self.is_fixable,
} }
} }
} }
@@ -405,8 +357,6 @@ struct ResolvedAnnotation<'a> {
line_end: OneIndexed, line_end: OneIndexed,
message: Option<&'a str>, message: Option<&'a str>,
is_primary: bool, is_primary: bool,
is_file_level: bool,
notebook_index: Option<NotebookIndex>,
} }
impl<'a> ResolvedAnnotation<'a> { impl<'a> ResolvedAnnotation<'a> {
@@ -419,7 +369,6 @@ impl<'a> ResolvedAnnotation<'a> {
path: &'a str, path: &'a str,
diagnostic_source: &DiagnosticSource, diagnostic_source: &DiagnosticSource,
ann: &'a Annotation, ann: &'a Annotation,
resolver: &'a dyn FileResolver,
) -> Option<ResolvedAnnotation<'a>> { ) -> Option<ResolvedAnnotation<'a>> {
let source = diagnostic_source.as_source_code(); let source = diagnostic_source.as_source_code();
let (range, line_start, line_end) = match (ann.span.range(), ann.message.is_some()) { let (range, line_start, line_end) = match (ann.span.range(), ann.message.is_some()) {
@@ -453,8 +402,6 @@ impl<'a> ResolvedAnnotation<'a> {
line_end, line_end,
message: ann.get_message(), message: ann.get_message(),
is_primary: ann.is_primary, is_primary: ann.is_primary,
is_file_level: ann.is_file_level,
notebook_index: resolver.notebook_index(&ann.span.file),
}) })
} }
} }
@@ -489,10 +436,6 @@ struct RenderableDiagnostic<'r> {
/// should be from the same file, and none of the snippets inside of a /// should be from the same file, and none of the snippets inside of a
/// collection should overlap with one another or be directly adjacent. /// collection should overlap with one another or be directly adjacent.
snippets_by_input: Vec<RenderableSnippets<'r>>, snippets_by_input: Vec<RenderableSnippets<'r>>,
/// Whether or not the diagnostic is fixable.
///
/// This is rendered as a `[*]` indicator after the diagnostic ID.
is_fixable: bool,
} }
impl RenderableDiagnostic<'_> { impl RenderableDiagnostic<'_> {
@@ -505,7 +448,7 @@ impl RenderableDiagnostic<'_> {
.iter() .iter()
.map(|snippet| snippet.to_annotate(path)) .map(|snippet| snippet.to_annotate(path))
}); });
let mut message = self.level.title(self.message).is_fixable(self.is_fixable); let mut message = self.level.title(self.message);
if let Some(id) = self.id { if let Some(id) = self.id {
message = message.id(id); message = message.id(id);
} }
@@ -587,27 +530,17 @@ struct RenderableSnippet<'r> {
/// Whether this snippet contains at least one primary /// Whether this snippet contains at least one primary
/// annotation. /// annotation.
has_primary: bool, has_primary: bool,
/// The cell index in a Jupyter notebook, if this snippet refers to a notebook.
///
/// This is used for rendering annotations with offsets like `cell 1:2:3` instead of simple row
/// and column numbers.
cell_index: Option<usize>,
} }
impl<'r> RenderableSnippet<'r> { impl<'r> RenderableSnippet<'r> {
/// Creates a new snippet with one or more annotations that is ready to be /// Creates a new snippet with one or more annotations that is ready to be
/// rendered. /// renderer.
/// ///
/// The first line of the snippet is the smallest line number on which one /// The first line of the snippet is the smallest line number on which one
/// of the annotations begins, minus the context window size. The last line /// of the annotations begins, minus the context window size. The last line
/// is the largest line number on which one of the annotations ends, plus /// is the largest line number on which one of the annotations ends, plus
/// the context window size. /// the context window size.
/// ///
/// For Jupyter notebooks, the context window may also be truncated at cell
/// boundaries. If multiple annotations are present, and they point to
/// different cells, these will have already been split into separate
/// snippets by `ResolvedDiagnostic::to_renderable`.
///
/// Callers should guarantee that the `input` on every `ResolvedAnnotation` /// Callers should guarantee that the `input` on every `ResolvedAnnotation`
/// given is identical. /// given is identical.
/// ///
@@ -624,19 +557,19 @@ impl<'r> RenderableSnippet<'r> {
"creating a renderable snippet requires a non-zero number of annotations", "creating a renderable snippet requires a non-zero number of annotations",
); );
let diagnostic_source = &anns[0].diagnostic_source; let diagnostic_source = &anns[0].diagnostic_source;
let notebook_index = anns[0].notebook_index.as_ref();
let source = diagnostic_source.as_source_code(); let source = diagnostic_source.as_source_code();
let has_primary = anns.iter().any(|ann| ann.is_primary); let has_primary = anns.iter().any(|ann| ann.is_primary);
let content_start_index = anns.iter().map(|ann| ann.line_start).min().unwrap(); let line_start = context_before(
let line_start = context_before(&source, context, content_start_index, notebook_index); &source,
context,
let start = source.line_column(anns[0].range.start()); anns.iter().map(|ann| ann.line_start).min().unwrap(),
let cell_index = notebook_index );
.map(|notebook_index| notebook_index.cell(start.line).unwrap_or_default().get()); let line_end = context_after(
&source,
let content_end_index = anns.iter().map(|ann| ann.line_end).max().unwrap(); context,
let line_end = context_after(&source, context, content_end_index, notebook_index); anns.iter().map(|ann| ann.line_end).max().unwrap(),
);
let snippet_start = source.line_start(line_start); let snippet_start = source.line_start(line_start);
let snippet_end = source.line_end(line_end); let snippet_end = source.line_end(line_end);
@@ -644,22 +577,6 @@ impl<'r> RenderableSnippet<'r> {
.as_source_code() .as_source_code()
.slice(TextRange::new(snippet_start, snippet_end)); .slice(TextRange::new(snippet_start, snippet_end));
// Strip the BOM from the beginning of the snippet, if present. Doing this here saves us the
// trouble of updating the annotation ranges in `replace_unprintable`, and also allows us to
// check that the BOM is at the very beginning of the file, not just the beginning of the
// snippet.
const BOM: char = '\u{feff}';
let bom_len = BOM.text_len();
let (snippet, snippet_start) =
if snippet_start == TextSize::ZERO && snippet.starts_with(BOM) {
(
&snippet[bom_len.to_usize()..],
snippet_start + TextSize::new(bom_len.to_u32()),
)
} else {
(snippet, snippet_start)
};
let annotations = anns let annotations = anns
.iter() .iter()
.map(|ann| RenderableAnnotation::new(snippet_start, ann)) .map(|ann| RenderableAnnotation::new(snippet_start, ann))
@@ -670,18 +587,11 @@ impl<'r> RenderableSnippet<'r> {
annotations, annotations,
} = replace_unprintable(snippet, annotations).fix_up_empty_spans_after_line_terminator(); } = replace_unprintable(snippet, annotations).fix_up_empty_spans_after_line_terminator();
let line_start = notebook_index.map_or(line_start, |notebook_index| {
notebook_index
.cell_row(line_start)
.unwrap_or(OneIndexed::MIN)
});
RenderableSnippet { RenderableSnippet {
snippet, snippet,
line_start, line_start,
annotations, annotations,
has_primary, has_primary,
cell_index,
} }
} }
@@ -695,7 +605,6 @@ impl<'r> RenderableSnippet<'r> {
.iter() .iter()
.map(RenderableAnnotation::to_annotate), .map(RenderableAnnotation::to_annotate),
) )
.cell_index(self.cell_index)
} }
} }
@@ -710,8 +619,6 @@ struct RenderableAnnotation<'r> {
message: Option<&'r str>, message: Option<&'r str>,
/// Whether this annotation is considered "primary" or not. /// Whether this annotation is considered "primary" or not.
is_primary: bool, is_primary: bool,
/// Whether this annotation applies to an entire file, rather than a snippet within it.
is_file_level: bool,
} }
impl<'r> RenderableAnnotation<'r> { impl<'r> RenderableAnnotation<'r> {
@@ -724,16 +631,11 @@ impl<'r> RenderableAnnotation<'r> {
/// lifetime parameter here refers to the lifetime of the resolver that /// lifetime parameter here refers to the lifetime of the resolver that
/// created the given `ResolvedAnnotation`. /// created the given `ResolvedAnnotation`.
fn new(snippet_start: TextSize, ann: &'_ ResolvedAnnotation<'r>) -> RenderableAnnotation<'r> { fn new(snippet_start: TextSize, ann: &'_ ResolvedAnnotation<'r>) -> RenderableAnnotation<'r> {
// This should only ever saturate if a BOM is present _and_ the annotation range points let range = ann.range - snippet_start;
// before the BOM (i.e. at offset 0). In Ruff this typically results from the use of
// `TextRange::default()` for a diagnostic range instead of a range relative to file
// contents.
let range = ann.range.checked_sub(snippet_start).unwrap_or(ann.range);
RenderableAnnotation { RenderableAnnotation {
range, range,
message: ann.message, message: ann.message,
is_primary: ann.is_primary, is_primary: ann.is_primary,
is_file_level: ann.is_file_level,
} }
} }
@@ -759,7 +661,7 @@ impl<'r> RenderableAnnotation<'r> {
if let Some(message) = self.message { if let Some(message) = self.message {
ann = ann.label(message); ann = ann.label(message);
} }
ann.is_file_level(self.is_file_level) ann
} }
} }
@@ -886,15 +788,7 @@ pub struct Input {
/// ///
/// The line number returned is guaranteed to be less than /// The line number returned is guaranteed to be less than
/// or equal to `start`. /// or equal to `start`.
/// fn context_before(source: &SourceCode<'_, '_>, len: usize, start: OneIndexed) -> OneIndexed {
/// In Jupyter notebooks, lines outside the cell containing
/// `start` will be omitted.
fn context_before(
source: &SourceCode<'_, '_>,
len: usize,
start: OneIndexed,
notebook_index: Option<&NotebookIndex>,
) -> OneIndexed {
let mut line = start.saturating_sub(len); let mut line = start.saturating_sub(len);
// Trim leading empty lines. // Trim leading empty lines.
while line < start { while line < start {
@@ -903,17 +797,6 @@ fn context_before(
} }
line = line.saturating_add(1); line = line.saturating_add(1);
} }
if let Some(index) = notebook_index {
let content_start_cell = index.cell(start).unwrap_or(OneIndexed::MIN);
while line < start {
if index.cell(line).unwrap_or(OneIndexed::MIN) == content_start_cell {
break;
}
line = line.saturating_add(1);
}
}
line line
} }
@@ -923,15 +806,7 @@ fn context_before(
/// The line number returned is guaranteed to be greater /// The line number returned is guaranteed to be greater
/// than or equal to `start` and no greater than the /// than or equal to `start` and no greater than the
/// number of lines in `source`. /// number of lines in `source`.
/// fn context_after(source: &SourceCode<'_, '_>, len: usize, start: OneIndexed) -> OneIndexed {
/// In Jupyter notebooks, lines outside the cell containing
/// `start` will be omitted.
fn context_after(
source: &SourceCode<'_, '_>,
len: usize,
start: OneIndexed,
notebook_index: Option<&NotebookIndex>,
) -> OneIndexed {
let max_lines = OneIndexed::from_zero_indexed(source.line_count()); let max_lines = OneIndexed::from_zero_indexed(source.line_count());
let mut line = start.saturating_add(len).min(max_lines); let mut line = start.saturating_add(len).min(max_lines);
// Trim trailing empty lines. // Trim trailing empty lines.
@@ -941,17 +816,6 @@ fn context_after(
} }
line = line.saturating_sub(1); line = line.saturating_sub(1);
} }
if let Some(index) = notebook_index {
let content_end_cell = index.cell(start).unwrap_or(OneIndexed::MIN);
while line > start {
if index.cell(line).unwrap_or(OneIndexed::MIN) == content_end_cell {
break;
}
line = line.saturating_sub(1);
}
}
line line
} }
@@ -1009,12 +873,7 @@ fn replace_unprintable<'r>(
let mut last_end = 0; let mut last_end = 0;
let mut result = String::new(); let mut result = String::new();
for (index, c) in source.char_indices() { for (index, c) in source.char_indices() {
// normalize `\r` line endings but don't double `\r\n` if let Some(printable) = unprintable_replacement(c) {
if c == '\r' && !source[index + 1..].starts_with("\n") {
result.push_str(&source[last_end..index]);
result.push('\n');
last_end = index + 1;
} else if let Some(printable) = unprintable_replacement(c) {
result.push_str(&source[last_end..index]); result.push_str(&source[last_end..index]);
let len = printable.text_len().to_u32(); let len = printable.text_len().to_u32();
@@ -2631,13 +2490,6 @@ watermelon
self.config = config; self.config = config;
} }
/// Show a diff for the fix when rendering.
pub(super) fn show_fix_diff(&mut self, yes: bool) {
let mut config = std::mem::take(&mut self.config);
config = config.show_fix_diff(yes);
self.config = config;
}
/// The lowest fix applicability to show when rendering. /// The lowest fix applicability to show when rendering.
pub(super) fn fix_applicability(&mut self, applicability: Applicability) { pub(super) fn fix_applicability(&mut self, applicability: Applicability) {
let mut config = std::mem::take(&mut self.config); let mut config = std::mem::take(&mut self.config);
@@ -2665,12 +2517,7 @@ watermelon
/// of the corresponding line minus one. (The "minus one" is because /// of the corresponding line minus one. (The "minus one" is because
/// otherwise, the span will end where the next line begins, and this /// otherwise, the span will end where the next line begins, and this
/// confuses `ruff_annotate_snippets` as of 2025-03-13.) /// confuses `ruff_annotate_snippets` as of 2025-03-13.)
pub(super) fn span( fn span(&self, path: &str, line_offset_start: &str, line_offset_end: &str) -> Span {
&self,
path: &str,
line_offset_start: &str,
line_offset_end: &str,
) -> Span {
let span = self.path(path); let span = self.path(path);
let file = span.expect_ty_file(); let file = span.expect_ty_file();
@@ -2693,7 +2540,7 @@ watermelon
} }
/// Like `span`, but only attaches a file path. /// Like `span`, but only attaches a file path.
pub(super) fn path(&self, path: &str) -> Span { fn path(&self, path: &str) -> Span {
let file = system_path_to_file(&self.db, path).unwrap(); let file = system_path_to_file(&self.db, path).unwrap();
Span::from(file) Span::from(file)
} }
@@ -2807,7 +2654,7 @@ watermelon
/// ///
/// See the docs on `TestEnvironment::span` for the meaning of /// See the docs on `TestEnvironment::span` for the meaning of
/// `path`, `line_offset_start` and `line_offset_end`. /// `path`, `line_offset_start` and `line_offset_end`.
pub(super) fn secondary( fn secondary(
mut self, mut self,
path: &str, path: &str,
line_offset_start: &str, line_offset_start: &str,
@@ -2843,7 +2690,7 @@ watermelon
} }
/// Adds a "help" sub-diagnostic with the given message. /// Adds a "help" sub-diagnostic with the given message.
pub(super) fn help(mut self, message: impl IntoDiagnosticMessage) -> DiagnosticBuilder<'e> { fn help(mut self, message: impl IntoDiagnosticMessage) -> DiagnosticBuilder<'e> {
self.diag.help(message); self.diag.help(message);
self self
} }
@@ -3003,10 +2850,10 @@ if call(foo
env.format(format); env.format(format);
let diagnostics = vec![ let diagnostics = vec![
env.invalid_syntax("Expected one or more symbol names after import") env.invalid_syntax("SyntaxError: Expected one or more symbol names after import")
.primary("syntax_errors.py", "1:14", "1:15", "") .primary("syntax_errors.py", "1:14", "1:15", "")
.build(), .build(),
env.invalid_syntax("Expected ')', found newline") env.invalid_syntax("SyntaxError: Expected ')', found newline")
.primary("syntax_errors.py", "3:11", "3:12", "") .primary("syntax_errors.py", "3:11", "3:12", "")
.build(), .build(),
]; ];
@@ -3014,8 +2861,7 @@ if call(foo
(env, diagnostics) (env, diagnostics)
} }
/// A Jupyter notebook for testing diagnostics. /// Create Ruff-style diagnostics for testing the various output formats for a notebook.
///
/// ///
/// The concatenated cells look like this: /// The concatenated cells look like this:
/// ///
@@ -3035,7 +2881,17 @@ if call(foo
/// The first diagnostic is on the unused `os` import with location cell 1, row 2, column 8 /// The first diagnostic is on the unused `os` import with location cell 1, row 2, column 8
/// (`cell 1:2:8`). The second diagnostic is the unused `math` import at `cell 2:2:8`, and the /// (`cell 1:2:8`). The second diagnostic is the unused `math` import at `cell 2:2:8`, and the
/// third diagnostic is an unfixable unused variable at `cell 3:4:5`. /// third diagnostic is an unfixable unused variable at `cell 3:4:5`.
pub(super) static NOTEBOOK: &str = r##" #[allow(
dead_code,
reason = "This is currently only used for JSON but will be needed soon for other formats"
)]
pub(crate) fn create_notebook_diagnostics(
format: DiagnosticFormat,
) -> (TestEnvironment, Vec<Diagnostic>) {
let mut env = TestEnvironment::new();
env.add(
"notebook.ipynb",
r##"
{ {
"cells": [ "cells": [
{ {
@@ -3074,14 +2930,8 @@ if call(foo
"nbformat": 4, "nbformat": 4,
"nbformat_minor": 5 "nbformat_minor": 5
} }
"##; "##,
);
/// Create Ruff-style diagnostics for testing the various output formats for a notebook.
pub(crate) fn create_notebook_diagnostics(
format: DiagnosticFormat,
) -> (TestEnvironment, Vec<Diagnostic>) {
let mut env = TestEnvironment::new();
env.add("notebook.ipynb", NOTEBOOK);
env.format(format); env.format(format);
let diagnostics = vec![ let diagnostics = vec![

View File

@@ -50,8 +50,10 @@ impl AzureRenderer<'_> {
} }
writeln!( writeln!(
f, f,
"code={code};]{body}", "{code}]{body}",
code = diag.secondary_code_or_id(), code = diag
.secondary_code()
.map_or_else(String::new, |code| format!("code={code};")),
body = diag.body(), body = diag.body(),
)?; )?;
} }

View File

@@ -69,17 +69,13 @@ impl<'a> ConciseRenderer<'a> {
"{code} ", "{code} ",
code = fmt_styled(code, stylesheet.secondary_code) code = fmt_styled(code, stylesheet.secondary_code)
)?; )?;
} else {
write!(
f,
"{id}: ",
id = fmt_styled(diag.inner.id.as_str(), stylesheet.secondary_code)
)?;
} }
if self.config.show_fix_status { if self.config.show_fix_status {
// Do not display an indicator for inapplicable fixes if let Some(fix) = diag.fix() {
if diag.has_applicable_fix(self.config) { // Do not display an indicator for inapplicable fixes
write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?; if fix.applies(self.config.fix_applicability) {
write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
}
} }
} }
} else { } else {
@@ -160,8 +156,8 @@ mod tests {
env.show_fix_status(true); env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly); env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r" insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
"); ");
} }
@@ -169,8 +165,8 @@ mod tests {
fn syntax_errors() { fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise); let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r" insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: error[invalid-syntax] Expected one or more symbol names after import syntax_errors.py:1:15: error[invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: error[invalid-syntax] Expected ')', found newline syntax_errors.py:3:12: error[invalid-syntax] SyntaxError: Expected ')', found newline
"); ");
} }

View File

@@ -1,305 +1,8 @@
use std::borrow::Cow;
use std::num::NonZeroUsize;
use similar::{ChangeTag, TextDiff};
use ruff_annotate_snippets::Renderer as AnnotateRenderer;
use ruff_diagnostics::{Applicability, Fix};
use ruff_notebook::NotebookIndex;
use ruff_source_file::OneIndexed;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::diagnostic::render::{FileResolver, Resolved};
use crate::diagnostic::stylesheet::{DiagnosticStylesheet, fmt_styled};
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig};
pub(super) struct FullRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}
impl<'a> FullRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};
let mut renderer = if self.config.color {
AnnotateRenderer::styled()
} else {
AnnotateRenderer::plain()
}
.cut_indicator("");
renderer = renderer
.error(stylesheet.error)
.warning(stylesheet.warning)
.info(stylesheet.info)
.note(stylesheet.note)
.help(stylesheet.help)
.line_no(stylesheet.line_no)
.emphasis(stylesheet.emphasis)
.none(stylesheet.none);
for diag in diagnostics {
let resolved = Resolved::new(self.resolver, diag, self.config);
let renderable = resolved.to_renderable(self.config.context);
for diag in renderable.diagnostics.iter() {
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
}
if self.config.show_fix_diff && diag.has_applicable_fix(self.config) {
if let Some(diff) = Diff::from_diagnostic(diag, &stylesheet, self.resolver) {
write!(f, "{diff}")?;
}
}
writeln!(f)?;
}
Ok(())
}
}
/// Renders a diff that shows the code fixes.
///
/// The implementation isn't fully fledged out and only used by tests. Before using in production, try
/// * Improve layout
/// * Replace tabs with spaces for a consistent experience across terminals
/// * Replace zero-width whitespaces
/// * Print a simpler diff if only a single line has changed
/// * Compute the diff from the `Edit` because diff calculation is expensive.
struct Diff<'a> {
fix: &'a Fix,
diagnostic_source: DiagnosticSource,
notebook_index: Option<NotebookIndex>,
stylesheet: &'a DiagnosticStylesheet,
}
impl<'a> Diff<'a> {
fn from_diagnostic(
diagnostic: &'a Diagnostic,
stylesheet: &'a DiagnosticStylesheet,
resolver: &'a dyn FileResolver,
) -> Option<Diff<'a>> {
let file = &diagnostic.primary_span_ref()?.file;
Some(Diff {
fix: diagnostic.fix()?,
diagnostic_source: file.diagnostic_source(resolver),
notebook_index: resolver.notebook_index(file),
stylesheet,
})
}
}
impl std::fmt::Display for Diff<'_> {
    /// Renders the fix as a per-cell, line-based diff with a line-number
    /// gutter, followed by a note for unsafe or display-only fixes.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let source_code = self.diagnostic_source.as_source_code();
        let source_text = source_code.text();
        // Partition the source code into end offsets for each cell. If `self.notebook_index` is
        // `None`, indicating a regular script file, all the lines will be in one "cell" under the
        // `None` key.
        let cells = if let Some(notebook_index) = &self.notebook_index {
            let mut last_cell = OneIndexed::MIN;
            let mut cells: Vec<(Option<OneIndexed>, TextSize)> = Vec::new();
            for (row, cell) in notebook_index.iter() {
                // A change of cell id marks the start of a new cell; record
                // the end offset of the previous one.
                if cell != last_cell {
                    let offset = source_code.line_start(row);
                    cells.push((Some(last_cell), offset));
                    last_cell = cell;
                }
            }
            // The final cell extends to the end of the concatenated source.
            cells.push((Some(last_cell), source_text.text_len()));
            cells
        } else {
            vec![(None, source_text.text_len())]
        };
        // Diff each cell independently so gutter line numbers stay
        // cell-relative.
        let mut last_end = TextSize::ZERO;
        for (cell, offset) in cells {
            let range = TextRange::new(last_end, offset);
            last_end = offset;
            let input = source_code.slice(range);
            // Build the cell's "after" text by applying only the edits that
            // fall entirely within this cell. (This inner `last_end` shadows
            // the outer one and tracks progress within the cell.)
            let mut output = String::with_capacity(input.len());
            let mut last_end = range.start();
            let mut applied = 0;
            for edit in self.fix.edits() {
                if range.contains_range(edit.range()) {
                    output.push_str(source_code.slice(TextRange::new(last_end, edit.start())));
                    output.push_str(edit.content().unwrap_or_default());
                    last_end = edit.end();
                    applied += 1;
                }
            }
            // No edits were applied, so there's no need to diff.
            if applied == 0 {
                continue;
            }
            // Append the unedited remainder of the cell.
            output.push_str(&source_text[usize::from(last_end)..usize::from(range.end())]);
            let diff = TextDiff::from_lines(input, &output);
            // Group changes into hunks with up to 3 lines of context.
            let grouped_ops = diff.grouped_ops(3);
            // Find the new line number with the largest number of digits to align all of the line
            // number separators.
            let last_op = grouped_ops.last().and_then(|group| group.last());
            let largest_new = last_op.map(|op| op.new_range().end).unwrap_or_default();
            let digit_with = OneIndexed::new(largest_new).unwrap_or_default().digits();
            if let Some(cell) = cell {
                // Room for 1 digit, 1 space, 1 `|`, and 1 more following space. This centers the
                // three colons on the pipe.
                writeln!(f, "{:>1$} cell {cell}", ":::", digit_with.get() + 3)?;
            }
            for (idx, group) in grouped_ops.iter().enumerate() {
                // Separate non-adjacent hunks with a dashed divider line.
                if idx > 0 {
                    writeln!(f, "{:-^1$}", "-", 80)?;
                }
                for op in group {
                    for change in diff.iter_inline_changes(op) {
                        // Pick the gutter sign, text style, and (new-file)
                        // line number for this change kind. Deletions have no
                        // line number in the new file, hence `None`.
                        let (sign, style, line_no_style, index) = match change.tag() {
                            ChangeTag::Delete => (
                                "-",
                                self.stylesheet.deletion,
                                self.stylesheet.deletion_line_no,
                                None,
                            ),
                            ChangeTag::Insert => (
                                "+",
                                self.stylesheet.insertion,
                                self.stylesheet.insertion_line_no,
                                change.new_index(),
                            ),
                            ChangeTag::Equal => (
                                "|",
                                self.stylesheet.none,
                                self.stylesheet.line_no,
                                change.new_index(),
                            ),
                        };
                        let line = Line {
                            index: index.map(OneIndexed::from_zero_indexed),
                            width: digit_with,
                        };
                        write!(
                            f,
                            "{line} {sign} ",
                            line = fmt_styled(line, self.stylesheet.line_no),
                            sign = fmt_styled(sign, line_no_style),
                        )?;
                        // Emphasized sub-spans are the intra-line differences
                        // reported by the inline differ.
                        for (emphasized, value) in change.iter_strings_lossy() {
                            let value = show_nonprinting(&value);
                            let styled = fmt_styled(value, style);
                            if emphasized {
                                write!(f, "{}", fmt_styled(styled, self.stylesheet.emphasis))?;
                            } else {
                                write!(f, "{styled}")?;
                            }
                        }
                        // NOTE(review): this emits a newline when the change
                        // *lacks* one, keeping the output line-structured —
                        // presumably changes that do end in `\n` carry it in
                        // their text already; confirm against `similar` docs.
                        if change.missing_newline() {
                            writeln!(f)?;
                        }
                    }
                }
            }
        }
        // Warn about fixes that aren't safe to apply automatically.
        match self.fix.applicability() {
            Applicability::Safe => {}
            Applicability::Unsafe => {
                writeln!(
                    f,
                    "{note}: {msg}",
                    note = fmt_styled("note", self.stylesheet.warning),
                    msg = fmt_styled(
                        "This is an unsafe fix and may change runtime behavior",
                        self.stylesheet.emphasis
                    )
                )?;
            }
            Applicability::DisplayOnly => {
                // Note that this is still only used in tests. There's no `--display-only-fixes`
                // analog to `--unsafe-fixes` for users to activate this or see the styling.
                writeln!(
                    f,
                    "{note}: {msg}",
                    note = fmt_styled("note", self.stylesheet.error),
                    msg = fmt_styled(
                        "This is a display-only fix and is likely to be incorrect",
                        self.stylesheet.emphasis
                    )
                )?;
            }
        }
        Ok(())
    }
}
/// A single line-number gutter entry in a rendered fix diff.
struct Line {
    /// The one-based line number in the new file, or `None` for deleted
    /// lines, which have no number there.
    index: Option<OneIndexed>,
    /// Field width used to align the gutter across the whole diff.
    width: NonZeroUsize,
}
impl std::fmt::Display for Line {
    /// Writes the line number left-aligned in a `self.width`-column field,
    /// or that many spaces when there is no line number.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let width = self.width.get();
        match self.index {
            Some(number) => write!(f, "{number:<width$}"),
            None => write!(f, "{:width$}", ""),
        }
    }
}
/// Handles the BEL, BS, ESC, and DEL control characters in `s`.
///
/// Borrows the input unchanged when none of them occur, allocating a new
/// string only when a replacement is actually needed.
///
/// NOTE(review): the replacement strings here are empty, so these characters
/// are stripped rather than made visible — confirm that matches the intended
/// "show" semantics (the originals may have been control-picture glyphs).
fn show_nonprinting(s: &str) -> Cow<'_, str> {
    const CONTROLS: [char; 4] = ['\x07', '\x08', '\x1b', '\x7f'];
    if s.contains(CONTROLS) {
        let cleaned = CONTROLS
            .iter()
            .fold(s.to_owned(), |text, &c| text.replace(c, ""));
        Cow::Owned(cleaned)
    } else {
        Cow::Borrowed(s)
    }
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use ruff_diagnostics::{Applicability, Edit, Fix};
use ruff_text_size::{TextLen, TextRange, TextSize};
use crate::diagnostic::{ use crate::diagnostic::{
Annotation, DiagnosticFormat, Severity, DiagnosticFormat, Severity,
render::tests::{ render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
NOTEBOOK, TestEnvironment, create_diagnostics, create_notebook_diagnostics,
create_syntax_error_diagnostics,
},
}; };
#[test] #[test]
@@ -339,7 +42,7 @@ mod tests {
fn syntax_errors() { fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full); let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r" insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[invalid-syntax]: Expected one or more symbol names after import error[invalid-syntax]: SyntaxError: Expected one or more symbol names after import
--> syntax_errors.py:1:15 --> syntax_errors.py:1:15
| |
1 | from os import 1 | from os import
@@ -348,71 +51,7 @@ mod tests {
3 | if call(foo 3 | if call(foo
| |
error[invalid-syntax]: Expected ')', found newline error[invalid-syntax]: SyntaxError: Expected ')', found newline
--> syntax_errors.py:3:12
|
1 | from os import
2 |
3 | if call(foo
| ^
4 | def bar():
5 | pass
|
");
}
#[test]
fn hide_severity_output() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
env.hide_severity(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
F401 [*] `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`
F841 [*] Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`
F821 Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|
"#);
}
#[test]
fn hide_severity_syntax_errors() {
let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
env.hide_severity(true);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
invalid-syntax: Expected one or more symbol names after import
--> syntax_errors.py:1:15
|
1 | from os import
| ^
2 |
3 | if call(foo
|
invalid-syntax: Expected ')', found newline
--> syntax_errors.py:3:12 --> syntax_errors.py:3:12
| |
1 | from os import 1 | from os import
@@ -477,7 +116,7 @@ print()
/// For example, without the fix, we get diagnostics like this: /// For example, without the fix, we get diagnostics like this:
/// ///
/// ``` /// ```
/// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead /// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
/// --> example.py:1:25 /// --> example.py:1:25
/// | /// |
/// 1 | nested_fstrings = f'␈{f'{f'␛'}'}' /// 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
@@ -497,13 +136,13 @@ print()
.builder( .builder(
"invalid-character-sub", "invalid-character-sub",
Severity::Error, Severity::Error,
r#"Invalid unescaped character SUB, use "\x1a" instead"#, r#"Invalid unescaped character SUB, use "\x1A" instead"#,
) )
.primary("example.py", "1:24", "1:24", "") .primary("example.py", "1:24", "1:24", "")
.build(); .build();
insta::assert_snapshot!(env.render(&diagnostic), @r#" insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:25 --> example.py:1:25
| |
1 | nested_fstrings = f'␈{f'{f'␛'}'}' 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
@@ -522,13 +161,13 @@ print()
.builder( .builder(
"invalid-character-sub", "invalid-character-sub",
Severity::Error, Severity::Error,
r#"Invalid unescaped character SUB, use "\x1a" instead"#, r#"Invalid unescaped character SUB, use "\x1A" instead"#,
) )
.primary("example.py", "1:1", "1:1", "") .primary("example.py", "1:1", "1:1", "")
.build(); .build();
insta::assert_snapshot!(env.render(&diagnostic), @r#" insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:2 --> example.py:1:2
| |
1 | ␈ 1 | ␈
@@ -559,407 +198,4 @@ print()
| |
"); ");
} }
/// For file-level diagnostics, we expect to see the header line with the diagnostic information
/// and the `-->` line with the file information but no lines of source code.
#[test]
fn file_level() {
let mut env = TestEnvironment::new();
env.add("example.py", "");
env.format(DiagnosticFormat::Full);
let mut diagnostic = env.err().build();
let span = env.path("example.py").with_range(TextRange::default());
let mut annotation = Annotation::primary(span);
// Marking the annotation file-level is what suppresses the snippet below.
annotation.set_file_level(true);
diagnostic.annotate(annotation);
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:1:1
");
}
/// Check that ranges in notebooks are remapped relative to the cells.
#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
// Each header below uses cell-relative coordinates (`cell N:row:col`),
// not offsets into the concatenated notebook source.
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[unused-import][*]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
help: Remove unused import: `os`
error[unused-import][*]: `math` imported but unused
--> notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ^^^^
3 |
4 | print('hello world')
|
help: Remove unused import: `math`
error[unused-variable]: Local variable `x` is assigned to but never used
--> notebook.ipynb:cell 3:4:5
|
2 | def foo():
3 | print()
4 | x = 1
| ^
|
help: Remove assignment to unused variable `x`
");
}
/// Check notebook handling for multiple annotations in a single diagnostic that span cells.
#[test]
fn notebook_output_multiple_annotations() {
let mut env = TestEnvironment::new();
env.add("notebook.ipynb", NOTEBOOK);
let diagnostics = vec![
// adjacent context windows
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("notebook.ipynb", "2:7", "2:9", "")
.secondary("notebook.ipynb", "4:7", "4:11", "second cell")
.help("Remove unused import: `os`")
.build(),
// non-adjacent context windows
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("notebook.ipynb", "2:7", "2:9", "")
.secondary("notebook.ipynb", "10:4", "10:5", "second cell")
.help("Remove unused import: `os`")
.build(),
// adjacent context windows in the same cell
env.err()
.primary("notebook.ipynb", "4:7", "4:11", "second cell")
.secondary("notebook.ipynb", "6:0", "6:5", "print statement")
.help("Remove `print` statement")
.build(),
];
// Secondary annotations in a different cell get a `:::` continuation
// header instead of a second `-->` header.
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[unused-import]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
::: notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ---- second cell
3 |
4 | print('hello world')
|
help: Remove unused import: `os`
error[unused-import]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
::: notebook.ipynb:cell 3:4:5
|
2 | def foo():
3 | print()
4 | x = 1
| - second cell
|
help: Remove unused import: `os`
error[test-diagnostic]: main diagnostic message
--> notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ^^^^ second cell
3 |
4 | print('hello world')
| ----- print statement
|
help: Remove `print` statement
");
}
/// Test that we remap notebook cell line numbers in the diff as well as the main diagnostic.
#[test]
fn notebook_output_with_diff() {
let (mut env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
env.show_fix_diff(true);
env.fix_applicability(Applicability::DisplayOnly);
// Each fix diff is introduced by a `::: cell N` header and uses
// cell-relative line numbers in its gutter.
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[unused-import][*]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
help: Remove unused import: `os`
::: cell 1
1 | # cell 1
- import os
error[unused-import][*]: `math` imported but unused
--> notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ^^^^
3 |
4 | print('hello world')
|
help: Remove unused import: `math`
::: cell 2
1 | # cell 2
- import math
2 |
3 | print('hello world')
error[unused-variable][*]: Local variable `x` is assigned to but never used
--> notebook.ipynb:cell 3:4:5
|
2 | def foo():
3 | print()
4 | x = 1
| ^
|
help: Remove assignment to unused variable `x`
::: cell 3
1 | # cell 3
2 | def foo():
3 | print()
- x = 1
4 |
note: This is an unsafe fix and may change runtime behavior
");
}
/// A single diagnostic whose fix has edits in several cells should emit one
/// `::: cell N` diff section per affected cell.
#[test]
fn notebook_output_with_diff_spanning_cells() {
let (mut env, mut diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
env.show_fix_diff(true);
env.fix_applicability(Applicability::DisplayOnly);
// Move all of the edits from the later diagnostics to the first diagnostic to simulate a
// single diagnostic with edits in different cells.
let mut diagnostic = diagnostics.swap_remove(0);
let fix = diagnostic.fix_mut().unwrap();
let mut edits = fix.edits().to_vec();
for diag in diagnostics {
edits.extend_from_slice(diag.fix().unwrap().edits());
}
*fix = Fix::unsafe_edits(edits.remove(0), edits);
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[unused-import][*]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
help: Remove unused import: `os`
::: cell 1
1 | # cell 1
- import os
::: cell 2
1 | # cell 2
- import math
2 |
3 | print('hello world')
::: cell 3
1 | # cell 3
2 | def foo():
3 | print()
- x = 1
4 |
note: This is an unsafe fix and may change runtime behavior
");
}
/// Carriage return (`\r`) is a valid line-ending in Python, so we should normalize this to a
/// line feed (`\n`) for rendering. Otherwise we report a single long line for this case.
#[test]
fn normalize_carriage_return() {
let mut env = TestEnvironment::new();
env.add(
"example.py",
"# Keep parenthesis around preserved CR\rint(-\r 1)\rint(+\r 1)",
);
env.format(DiagnosticFormat::Full);
let mut diagnostic = env.err().build();
// Offset 39 is the character right after the first `\r`, i.e. what should
// render as the start of line 2.
let span = env
.path("example.py")
.with_range(TextRange::at(TextSize::new(39), TextSize::new(0)));
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:2:1
|
1 | # Keep parenthesis around preserved CR
2 | int(-
| ^
3 | 1)
4 | int(+
|
");
}
/// Without stripping the BOM, we report an error in column 2, unlike Ruff.
#[test]
fn strip_bom() {
let mut env = TestEnvironment::new();
env.add("example.py", "\u{feff}import foo");
env.format(DiagnosticFormat::Full);
let mut diagnostic = env.err().build();
// Offset 3 is the first character after the 3-byte UTF-8 BOM.
let span = env
.path("example.py")
.with_range(TextRange::at(TextSize::new(3), TextSize::new(0)));
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:1:1
|
1 | import foo
| ^
|
");
}
/// A default (empty) range points into the BOM itself; the rendered location
/// should still be 1:1 rather than a position inside the BOM.
#[test]
fn bom_with_default_range() {
let mut env = TestEnvironment::new();
env.add("example.py", "\u{feff}import foo");
env.format(DiagnosticFormat::Full);
let mut diagnostic = env.err().build();
let span = env.path("example.py").with_range(TextRange::default());
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:1:1
|
1 | import foo
| ^
|
");
}
/// We previously rendered this correctly, but the header was falling back to 1:1 for ranges
/// pointing to the final newline in a file. Like Ruff, we now use the offset of the first
/// character in the nonexistent final line in the header.
#[test]
fn end_of_file() {
let mut env = TestEnvironment::new();
let contents = "unexpected eof\n";
env.add("example.py", contents);
env.format(DiagnosticFormat::Full);
let mut diagnostic = env.err().build();
// An empty range exactly at `text_len()`, just past the trailing newline.
let span = env
.path("example.py")
.with_range(TextRange::at(contents.text_len(), TextSize::new(0)));
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:2:1
|
1 | unexpected eof
| ^
|
");
}
/// Test that we handle the width calculation for the line number correctly even for context
/// lines at the end of a diff. For example, we want it to render like this:
///
/// ```
/// 8 |
/// 9 |
/// 10 |
/// ```
///
/// and not like this:
///
/// ```
/// 8 |
/// 9 |
/// 10 |
/// ```
#[test]
fn longer_line_number_end_of_context() {
let mut env = TestEnvironment::new();
let contents = "\
line 1
line 2
line 3
line 4
line 5
line 6
line 7
line 8
line 9
line 10
";
env.add("example.py", contents);
env.format(DiagnosticFormat::Full);
env.show_fix_diff(true);
env.fix_applicability(Applicability::DisplayOnly);
let mut diagnostic = env.err().primary("example.py", "3", "3", "label").build();
diagnostic.help("Start of diff:");
let target = "line 7";
// NOTE(review): despite its name, `line9` holds the offset of `line 7`,
// the replacement target.
let line9 = contents.find(target).unwrap();
let range = TextRange::at(TextSize::try_from(line9).unwrap(), target.text_len());
diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement(
format!("fixed {target}"),
range,
)));
insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic][*]: main diagnostic message
--> example.py:3:1
|
1 | line 1
2 | line 2
3 | line 3
| ^^^^^^ label
4 | line 4
5 | line 5
|
help: Start of diff:
4 | line 4
5 | line 5
6 | line 6
- line 7
7 + fixed line 7
8 | line 8
9 | line 9
10 | line 10
note: This is an unsafe fix and may change runtime behavior
");
}
} }

View File

@@ -1,136 +0,0 @@
use crate::diagnostic::{Diagnostic, FileResolver, Severity};
/// Renders diagnostics as GitHub Actions workflow commands
/// (`::error ...`/`::warning ...`/`::notice ...`).
pub struct GithubRenderer<'a> {
    /// Resolves file paths and source content for diagnostics.
    resolver: &'a dyn FileResolver,
    /// Program name included in the annotation title (the tests use `ty`).
    program: &'a str,
}
impl<'a> GithubRenderer<'a> {
    /// Creates a renderer that resolves paths via `resolver` and reports
    /// `program` in the annotation title.
    pub fn new(resolver: &'a dyn FileResolver, program: &'a str) -> Self {
        Self { resolver, program }
    }

    /// Writes one `::{severity} <properties>::<message>` workflow command
    /// line per diagnostic.
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diagnostic in diagnostics {
            // Map our severities onto GitHub's three annotation levels.
            let severity = match diagnostic.severity() {
                Severity::Info => "notice",
                Severity::Warning => "warning",
                Severity::Error | Severity::Fatal => "error",
            };
            write!(
                f,
                "::{severity} title={program} ({code})",
                program = self.program,
                code = diagnostic.secondary_code_or_id()
            )?;
            if let Some(span) = diagnostic.primary_span() {
                let file = span.file();
                write!(f, ",file={file}", file = file.path(self.resolver))?;
                let (start_location, end_location) = if self.resolver.is_notebook(file) {
                    // We can't give a reasonable location for the structured formats,
                    // so we show one that's clearly a fallback
                    None
                } else {
                    let diagnostic_source = file.diagnostic_source(self.resolver);
                    let source_code = diagnostic_source.as_source_code();
                    span.range().map(|range| {
                        (
                            source_code.line_column(range.start()),
                            source_code.line_column(range.end()),
                        )
                    })
                }
                .unwrap_or_default();
                // The trailing `::` terminates the command's properties;
                // everything after it is the message shown in the log.
                write!(
                    f,
                    ",line={row},col={column},endLine={end_row},endColumn={end_column}::",
                    row = start_location.line,
                    column = start_location.column,
                    end_row = end_location.line,
                    end_column = end_location.column,
                )?;
                // Repeat `path:row:col` at the start of the message so it is
                // visible in plain log output too.
                write!(
                    f,
                    "{path}:{row}:{column}: ",
                    path = file.relative_path(self.resolver).display(),
                    row = start_location.line,
                    column = start_location.column,
                )?;
            } else {
                // No span: terminate the property-less command directly.
                write!(f, "::")?;
            }
            // Prefix the message with the secondary code (e.g. `F401`) or,
            // failing that, the diagnostic id followed by a colon.
            if let Some(code) = diagnostic.secondary_code() {
                write!(f, "{code}")?;
            } else {
                write!(f, "{id}:", id = diagnostic.id())?;
            }
            writeln!(f, " {}", diagnostic.body())?;
        }
        Ok(())
    }
}
/// `Display` adapter that renders a slice of diagnostics with a
/// [`GithubRenderer`].
pub struct DisplayGithubDiagnostics<'a> {
    renderer: &'a GithubRenderer<'a>,
    diagnostics: &'a [Diagnostic],
}
impl<'a> DisplayGithubDiagnostics<'a> {
pub fn new(renderer: &'a GithubRenderer<'a>, diagnostics: &'a [Diagnostic]) -> Self {
Self {
renderer,
diagnostics,
}
}
}
impl std::fmt::Display for DisplayGithubDiagnostics<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.renderer.render(f, self.diagnostics)
}
}
#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
    };

    /// Standard lint diagnostics in GitHub format (snapshot test).
    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Github);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    /// Syntax-error diagnostics in GitHub format (snapshot test).
    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Github);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    /// A diagnostic without a primary span should still produce a valid
    /// workflow command, just without `file`/`line` properties.
    #[test]
    fn missing_file() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Github);
        let diag = env.err().build();
        insta::assert_snapshot!(
            env.render(&diag),
            @"::error title=ty (test-diagnostic)::test-diagnostic: main diagnostic message",
        );
    }
}

View File

@@ -1,205 +0,0 @@
use std::{
collections::HashSet,
hash::{DefaultHasher, Hash, Hasher},
path::Path,
};
use ruff_source_file::LineColumn;
use serde::{Serialize, Serializer, ser::SerializeSeq};
use crate::diagnostic::{Diagnostic, Severity};
use super::FileResolver;
/// Renders diagnostics as a GitLab Code Quality (CodeClimate) JSON report.
pub(super) struct GitlabRenderer<'a> {
    /// Resolves file paths and source content for diagnostics.
    resolver: &'a dyn FileResolver,
}
impl<'a> GitlabRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
}
impl GitlabRenderer<'_> {
    /// Serializes `diagnostics` as a pretty-printed GitLab Code Quality
    /// (CodeClimate) JSON array.
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        write!(
            f,
            "{}",
            serde_json::to_string_pretty(&SerializedMessages {
                diagnostics,
                resolver: self.resolver,
                #[expect(
                    clippy::disallowed_methods,
                    reason = "We don't have access to a `System` here, \
                        and this is only intended for use by GitLab CI, \
                        which runs on a real `System`."
                )]
                project_dir: std::env::var("CI_PROJECT_DIR").ok().as_deref(),
            })
            .unwrap()
        )
    }
}
/// Serialization wrapper that turns a slice of diagnostics into the GitLab
/// Code Quality JSON array.
struct SerializedMessages<'a> {
    diagnostics: &'a [Diagnostic],
    resolver: &'a dyn FileResolver,
    /// Value of `CI_PROJECT_DIR`, used to relativize absolute paths;
    /// `None` outside GitLab CI.
    project_dir: Option<&'a str>,
}
impl Serialize for SerializedMessages<'_> {
    /// Serializes each diagnostic as one CodeClimate issue, deduplicating
    /// fingerprints so identical violations stay distinguishable.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
        let mut fingerprints = HashSet::<u64>::with_capacity(self.diagnostics.len());
        for diagnostic in self.diagnostics {
            // The spec requires a location, so fall back to a default path
            // and positions when the diagnostic has no primary span.
            let location = diagnostic
                .primary_span()
                .map(|span| {
                    let file = span.file();
                    let positions = if self.resolver.is_notebook(file) {
                        // We can't give a reasonable location for the structured formats,
                        // so we show one that's clearly a fallback
                        Default::default()
                    } else {
                        let diagnostic_source = file.diagnostic_source(self.resolver);
                        let source_code = diagnostic_source.as_source_code();
                        span.range()
                            .map(|range| Positions {
                                begin: source_code.line_column(range.start()),
                                end: source_code.line_column(range.end()),
                            })
                            .unwrap_or_default()
                    };
                    // Prefer a path relative to `CI_PROJECT_DIR` when it is set.
                    let path = self.project_dir.as_ref().map_or_else(
                        || file.relative_path(self.resolver).display().to_string(),
                        |project_dir| relativize_path_to(file.path(self.resolver), project_dir),
                    );
                    Location { path, positions }
                })
                .unwrap_or_default();
            let mut message_fingerprint = fingerprint(diagnostic, &location.path, 0);
            // Make sure that we do not get a fingerprint that is already in use
            // by adding in the previously generated one.
            while fingerprints.contains(&message_fingerprint) {
                message_fingerprint = fingerprint(diagnostic, &location.path, message_fingerprint);
            }
            fingerprints.insert(message_fingerprint);
            let description = diagnostic.body();
            let check_name = diagnostic.secondary_code_or_id();
            // Map our severities onto the CodeClimate severity levels.
            let severity = match diagnostic.severity() {
                Severity::Info => "info",
                Severity::Warning => "minor",
                Severity::Error => "major",
                // Another option here is `blocker`
                Severity::Fatal => "critical",
            };
            let value = Message {
                check_name,
                // GitLab doesn't display the separate `check_name` field in a Code Quality report,
                // so prepend it to the description too.
                description: format!("{check_name}: {description}"),
                severity,
                fingerprint: format!("{:x}", message_fingerprint),
                location,
            };
            s.serialize_element(&value)?;
        }
        s.end()
    }
}
/// A single CodeClimate issue entry.
#[derive(Serialize)]
struct Message<'a> {
    /// Rule code or diagnostic id, e.g. `F401` or `invalid-syntax`.
    check_name: &'a str,
    /// `check_name` prepended to the diagnostic body.
    description: String,
    /// One of `info`, `minor`, `major`, or `critical`.
    severity: &'static str,
    /// Lowercase-hex hash identifying this violation.
    fingerprint: String,
    location: Location,
}
/// The place in the source code where the issue was discovered.
///
/// According to the CodeClimate report format [specification] linked from the GitLab [docs], this
/// field is required, so we fall back on a default `path` and position if the diagnostic doesn't
/// have a primary span.
///
/// [specification]: https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types
/// [docs]: https://docs.gitlab.com/ci/testing/code_quality/#code-quality-report-format
#[derive(Default, Serialize)]
struct Location {
    /// Path relative to the project dir (or the resolver's relative path).
    path: String,
    positions: Positions,
}
/// Begin/end line-column positions of an issue within its file.
#[derive(Default, Serialize)]
struct Positions {
    begin: LineColumn,
    end: LineColumn,
}
/// Generate a unique fingerprint to identify a violation.
///
/// Hashing the `(salt, name, path)` triple is equivalent to feeding the three
/// values to the hasher one after another — the tuple `Hash` impl hashes its
/// fields in order — so the resulting value is unchanged.
fn fingerprint(diagnostic: &Diagnostic, project_path: &str, salt: u64) -> u64 {
    let mut state = DefaultHasher::new();
    (salt, diagnostic.name(), project_path).hash(&mut state);
    state.finish()
}
/// Convert an absolute path to be relative to the specified project root.
///
/// Panics when no relative path between the two can be computed.
fn relativize_path_to<P: AsRef<Path>, R: AsRef<Path>>(path: P, project_root: R) -> String {
    let relative = pathdiff::diff_paths(&path, project_root).expect("Could not diff paths");
    relative.display().to_string()
}
#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    /// Fingerprints depend on the hasher's output, so redact them to keep the
    /// snapshots stable.
    const FINGERPRINT_FILTERS: [(&str, &str); 1] = [(
        r#""fingerprint": "[a-z0-9]+","#,
        r#""fingerprint": "<redacted>","#,
    )];

    /// Standard lint diagnostics in GitLab format (snapshot test).
    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Gitlab);
        insta::with_settings!({filters => FINGERPRINT_FILTERS}, {
            insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
        });
    }

    /// Syntax-error diagnostics in GitLab format (snapshot test).
    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Gitlab);
        insta::with_settings!({filters => FINGERPRINT_FILTERS}, {
            insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
        });
    }
}

View File

@@ -6,7 +6,7 @@ use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed}; use ruff_source_file::{LineColumn, OneIndexed};
use ruff_text_size::Ranged; use ruff_text_size::Ranged;
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig}; use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig, SecondaryCode};
use super::FileResolver; use super::FileResolver;
@@ -99,7 +99,7 @@ pub(super) fn diagnostic_to_json<'a>(
// In preview, the locations and filename can be optional. // In preview, the locations and filename can be optional.
if config.preview { if config.preview {
JsonDiagnostic { JsonDiagnostic {
code: diagnostic.secondary_code_or_id(), code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(), url: diagnostic.to_ruff_url(),
message: diagnostic.body(), message: diagnostic.body(),
fix, fix,
@@ -111,7 +111,7 @@ pub(super) fn diagnostic_to_json<'a>(
} }
} else { } else {
JsonDiagnostic { JsonDiagnostic {
code: diagnostic.secondary_code_or_id(), code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(), url: diagnostic.to_ruff_url(),
message: diagnostic.body(), message: diagnostic.body(),
fix, fix,
@@ -221,7 +221,7 @@ impl Serialize for ExpandedEdits<'_> {
#[derive(Serialize)] #[derive(Serialize)]
pub(crate) struct JsonDiagnostic<'a> { pub(crate) struct JsonDiagnostic<'a> {
cell: Option<OneIndexed>, cell: Option<OneIndexed>,
code: &'a str, code: Option<&'a SecondaryCode>,
end_location: Option<JsonLocation>, end_location: Option<JsonLocation>,
filename: Option<&'a str>, filename: Option<&'a str>,
fix: Option<JsonFix<'a>>, fix: Option<JsonFix<'a>>,
@@ -302,7 +302,7 @@ mod tests {
[ [
{ {
"cell": null, "cell": null,
"code": "test-diagnostic", "code": null,
"end_location": { "end_location": {
"column": 1, "column": 1,
"row": 1 "row": 1
@@ -336,7 +336,7 @@ mod tests {
[ [
{ {
"cell": null, "cell": null,
"code": "test-diagnostic", "code": null,
"end_location": null, "end_location": null,
"filename": null, "filename": null,
"fix": null, "fix": null,

View File

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/azure.rs source: crates/ruff_db/src/diagnostic/render/azure.rs
expression: env.render_diagnostics(&diagnostics) expression: env.render_diagnostics(&diagnostics)
--- ---
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;code=invalid-syntax;]Expected one or more symbol names after import ##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;]SyntaxError: Expected one or more symbol names after import
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;code=invalid-syntax;]Expected ')', found newline ##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;]SyntaxError: Expected ')', found newline

View File

@@ -1,7 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/github.rs
expression: env.render_diagnostics(&diagnostics)
---
::error title=ty (F401),file=fib.py,line=1,col=8,endLine=1,endColumn=10::fib.py:1:8: F401 `os` imported but unused
::error title=ty (F841),file=fib.py,line=6,col=5,endLine=6,endColumn=6::fib.py:6:5: F841 Local variable `x` is assigned to but never used
::error title=ty (F821),file=undef.py,line=1,col=4,endLine=1,endColumn=5::undef.py:1:4: F821 Undefined name `a`

View File

@@ -1,6 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/github.rs
expression: env.render_diagnostics(&diagnostics)
---
::error title=ty (invalid-syntax),file=syntax_errors.py,line=1,col=15,endLine=2,endColumn=1::syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import
::error title=ty (invalid-syntax),file=syntax_errors.py,line=3,col=12,endLine=4,endColumn=1::syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline

View File

@@ -1,63 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/gitlab.rs
expression: env.render_diagnostics(&diagnostics)
---
[
{
"check_name": "F401",
"description": "F401: `os` imported but unused",
"severity": "major",
"fingerprint": "<redacted>",
"location": {
"path": "fib.py",
"positions": {
"begin": {
"line": 1,
"column": 8
},
"end": {
"line": 1,
"column": 10
}
}
}
},
{
"check_name": "F841",
"description": "F841: Local variable `x` is assigned to but never used",
"severity": "major",
"fingerprint": "<redacted>",
"location": {
"path": "fib.py",
"positions": {
"begin": {
"line": 6,
"column": 5
},
"end": {
"line": 6,
"column": 6
}
}
}
},
{
"check_name": "F821",
"description": "F821: Undefined name `a`",
"severity": "major",
"fingerprint": "<redacted>",
"location": {
"path": "undef.py",
"positions": {
"begin": {
"line": 1,
"column": 4
},
"end": {
"line": 1,
"column": 5
}
}
}
}
]

View File

@@ -1,44 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/gitlab.rs
expression: env.render_diagnostics(&diagnostics)
---
[
{
"check_name": "invalid-syntax",
"description": "invalid-syntax: Expected one or more symbol names after import",
"severity": "major",
"fingerprint": "<redacted>",
"location": {
"path": "syntax_errors.py",
"positions": {
"begin": {
"line": 1,
"column": 15
},
"end": {
"line": 2,
"column": 1
}
}
}
},
{
"check_name": "invalid-syntax",
"description": "invalid-syntax: Expected ')', found newline",
"severity": "major",
"fingerprint": "<redacted>",
"location": {
"path": "syntax_errors.py",
"positions": {
"begin": {
"line": 3,
"column": 12
},
"end": {
"line": 4,
"column": 1
}
}
}
}
]

View File

@@ -5,7 +5,7 @@ expression: env.render_diagnostics(&diagnostics)
[ [
{ {
"cell": null, "cell": null,
"code": "invalid-syntax", "code": null,
"end_location": { "end_location": {
"column": 1, "column": 1,
"row": 2 "row": 2
@@ -16,13 +16,13 @@ expression: env.render_diagnostics(&diagnostics)
"column": 15, "column": 15,
"row": 1 "row": 1
}, },
"message": "Expected one or more symbol names after import", "message": "SyntaxError: Expected one or more symbol names after import",
"noqa_row": null, "noqa_row": null,
"url": null "url": null
}, },
{ {
"cell": null, "cell": null,
"code": "invalid-syntax", "code": null,
"end_location": { "end_location": {
"column": 1, "column": 1,
"row": 4 "row": 4
@@ -33,7 +33,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 12, "column": 12,
"row": 3 "row": 3
}, },
"message": "Expected ')', found newline", "message": "SyntaxError: Expected ')', found newline",
"noqa_row": null, "noqa_row": null,
"url": null "url": null
} }

View File

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/json_lines.rs source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics) expression: env.render_diagnostics(&diagnostics)
--- ---
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"Expected one or more symbol names after import","noqa_row":null,"url":null} {"cell":null,"code":null,"end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"SyntaxError: Expected one or more symbol names after import","noqa_row":null,"url":null}
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"Expected ')', found newline","noqa_row":null,"url":null} {"cell":null,"code":null,"end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"SyntaxError: Expected ')', found newline","noqa_row":null,"url":null}

View File

@@ -6,10 +6,10 @@ expression: env.render_diagnostics(&diagnostics)
<testsuites name="ruff" tests="2" failures="2" errors="0"> <testsuites name="ruff" tests="2" failures="2" errors="0">
<testsuite name="syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff"> <testsuite name="syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="1" column="15"> <testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="1" column="15">
<failure message="Expected one or more symbol names after import">line 1, col 15, Expected one or more symbol names after import</failure> <failure message="SyntaxError: Expected one or more symbol names after import">line 1, col 15, SyntaxError: Expected one or more symbol names after import</failure>
</testcase> </testcase>
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="3" column="12"> <testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="3" column="12">
<failure message="Expected &apos;)&apos;, found newline">line 3, col 12, Expected &apos;)&apos;, found newline</failure> <failure message="SyntaxError: Expected &apos;)&apos;, found newline">line 3, col 12, SyntaxError: Expected &apos;)&apos;, found newline</failure>
</testcase> </testcase>
</testsuite> </testsuite>
</testsuites> </testsuites>

View File

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/pylint.rs source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics) expression: env.render_diagnostics(&diagnostics)
--- ---
syntax_errors.py:1: [invalid-syntax] Expected one or more symbol names after import syntax_errors.py:1: [invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] Expected ')', found newline syntax_errors.py:3: [invalid-syntax] SyntaxError: Expected ')', found newline

View File

@@ -21,7 +21,7 @@ expression: env.render_diagnostics(&diagnostics)
} }
} }
}, },
"message": "Expected one or more symbol names after import" "message": "SyntaxError: Expected one or more symbol names after import"
}, },
{ {
"code": { "code": {
@@ -40,7 +40,7 @@ expression: env.render_diagnostics(&diagnostics)
} }
} }
}, },
"message": "Expected ')', found newline" "message": "SyntaxError: Expected ')', found newline"
} }
], ],
"severity": "WARNING", "severity": "WARNING",

View File

@@ -43,10 +43,6 @@ pub struct DiagnosticStylesheet {
pub(crate) none: Style, pub(crate) none: Style,
pub(crate) separator: Style, pub(crate) separator: Style,
pub(crate) secondary_code: Style, pub(crate) secondary_code: Style,
pub(crate) insertion: Style,
pub(crate) deletion: Style,
pub(crate) insertion_line_no: Style,
pub(crate) deletion_line_no: Style,
} }
impl Default for DiagnosticStylesheet { impl Default for DiagnosticStylesheet {
@@ -70,10 +66,6 @@ impl DiagnosticStylesheet {
none: Style::new(), none: Style::new(),
separator: AnsiColor::Cyan.on_default(), separator: AnsiColor::Cyan.on_default(),
secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD), secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
insertion: AnsiColor::Green.on_default(),
deletion: AnsiColor::Red.on_default(),
insertion_line_no: AnsiColor::Green.on_default().effects(Effects::BOLD),
deletion_line_no: AnsiColor::Red.on_default().effects(Effects::BOLD),
} }
} }
@@ -89,10 +81,6 @@ impl DiagnosticStylesheet {
none: Style::new(), none: Style::new(),
separator: Style::new(), separator: Style::new(),
secondary_code: Style::new(), secondary_code: Style::new(),
insertion: Style::new(),
deletion: Style::new(),
insertion_line_no: Style::new(),
deletion_line_no: Style::new(),
} }
} }
} }

View File

@@ -9,7 +9,7 @@ use crate::system::file_time_now;
/// * The last modification time of the file. /// * The last modification time of the file.
/// * The hash of the file's content. /// * The hash of the file's content.
/// * The revision as it comes from an external system, for example the LSP. /// * The revision as it comes from an external system, for example the LSP.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default, get_size2::GetSize)] #[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
pub struct FileRevision(u128); pub struct FileRevision(u128);
impl FileRevision { impl FileRevision {

View File

@@ -87,12 +87,11 @@ impl Files {
.system_by_path .system_by_path
.entry(absolute.clone()) .entry(absolute.clone())
.or_insert_with(|| { .or_insert_with(|| {
tracing::trace!("Adding file '{path}'");
let metadata = db.system().path_metadata(path); let metadata = db.system().path_metadata(path);
tracing::trace!("Adding file '{absolute}'");
let durability = self let durability = self
.root(db, &absolute) .root(db, path)
.map_or(Durability::default(), |root| root.durability(db)); .map_or(Durability::default(), |root| root.durability(db));
let builder = File::builder(FilePath::System(absolute)) let builder = File::builder(FilePath::System(absolute))
@@ -290,7 +289,7 @@ impl std::panic::RefUnwindSafe for Files {}
/// # Ordering /// # Ordering
/// Ordering is based on the file's salsa-assigned id and not on its values. /// Ordering is based on the file's salsa-assigned id and not on its values.
/// The id may change between runs. /// The id may change between runs.
#[salsa::input(heap_size=ruff_memory_usage::heap_size)] #[salsa::input]
#[derive(PartialOrd, Ord)] #[derive(PartialOrd, Ord)]
pub struct File { pub struct File {
/// The path of the file (immutable). /// The path of the file (immutable).
@@ -459,12 +458,6 @@ impl File {
self.source_type(db).is_stub() self.source_type(db).is_stub()
} }
/// Returns `true` if the file is an `__init__.py(i)`
pub fn is_init(self, db: &dyn Db) -> bool {
let path = self.path(db).as_str();
path.ends_with("__init__.py") || path.ends_with("__init__.pyi")
}
pub fn source_type(self, db: &dyn Db) -> PySourceType { pub fn source_type(self, db: &dyn Db) -> PySourceType {
match self.path(db) { match self.path(db) {
FilePath::System(path) => path FilePath::System(path) => path
@@ -528,7 +521,7 @@ impl VirtualFile {
// The types in here need to be public because they're salsa ingredients but we // The types in here need to be public because they're salsa ingredients but we
// don't want them to be publicly accessible. That's why we put them into a private module. // don't want them to be publicly accessible. That's why we put them into a private module.
mod private { mod private {
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default, get_size2::GetSize)] #[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
pub enum FileStatus { pub enum FileStatus {
/// The file exists. /// The file exists.
#[default] #[default]

View File

@@ -16,7 +16,7 @@ use crate::system::{SystemPath, SystemPathBuf};
/// The main usage of file roots is to determine a file's durability. But it can also be used /// The main usage of file roots is to determine a file's durability. But it can also be used
/// to make a salsa query dependent on whether a file in a root has changed without writing any /// to make a salsa query dependent on whether a file in a root has changed without writing any
/// manual invalidation logic. /// manual invalidation logic.
#[salsa::input(debug, heap_size=ruff_memory_usage::heap_size)] #[salsa::input(debug)]
pub struct FileRoot { pub struct FileRoot {
/// The path of a root is guaranteed to never change. /// The path of a root is guaranteed to never change.
#[returns(deref)] #[returns(deref)]
@@ -37,7 +37,7 @@ impl FileRoot {
} }
} }
#[derive(Copy, Clone, Debug, Eq, PartialEq, get_size2::GetSize)] #[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileRootKind { pub enum FileRootKind {
/// The root of a project. /// The root of a project.
Project, Project,

View File

@@ -11,7 +11,7 @@ use std::fmt::{Display, Formatter};
/// * a file stored on the [host system](crate::system::System). /// * a file stored on the [host system](crate::system::System).
/// * a virtual file stored on the [host system](crate::system::System). /// * a virtual file stored on the [host system](crate::system::System).
/// * a vendored file stored in the [vendored file system](crate::vendored::VendoredFileSystem). /// * a vendored file stored in the [vendored file system](crate::vendored::VendoredFileSystem).
#[derive(Clone, Debug, Eq, PartialEq, Hash, get_size2::GetSize)] #[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum FilePath { pub enum FilePath {
/// Path to a file on the [host system](crate::system::System). /// Path to a file on the [host system](crate::system::System).
System(SystemPathBuf), System(SystemPathBuf),

View File

@@ -1,8 +1,3 @@
#![warn(
clippy::disallowed_methods,
reason = "Prefer System trait methods over std methods"
)]
use crate::files::Files; use crate::files::Files;
use crate::system::System; use crate::system::System;
use crate::vendored::VendoredFileSystem; use crate::vendored::VendoredFileSystem;
@@ -70,10 +65,6 @@ pub trait Db: salsa::Database {
/// to process work in parallel. For example, to index a directory or checking the files of a project. /// to process work in parallel. For example, to index a directory or checking the files of a project.
/// ty can still spawn more threads for other tasks, e.g. to wait for a Ctrl+C signal or /// ty can still spawn more threads for other tasks, e.g. to wait for a Ctrl+C signal or
/// watching the files for changes. /// watching the files for changes.
#[expect(
clippy::disallowed_methods,
reason = "We don't have access to System here, but this is also only used by the CLI and the server which always run on a real system."
)]
pub fn max_parallelism() -> NonZeroUsize { pub fn max_parallelism() -> NonZeroUsize {
std::env::var(EnvVars::TY_MAX_PARALLELISM) std::env::var(EnvVars::TY_MAX_PARALLELISM)
.or_else(|_| std::env::var(EnvVars::RAYON_NUM_THREADS)) .or_else(|_| std::env::var(EnvVars::RAYON_NUM_THREADS))

View File

@@ -21,7 +21,7 @@ use crate::source::source_text;
/// reflected in the changed AST offsets. /// reflected in the changed AST offsets.
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires /// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
/// for determining if a query result is unchanged. /// for determining if a query result is unchanged.
#[salsa::tracked(returns(ref), no_eq, heap_size=ruff_memory_usage::heap_size)] #[salsa::tracked(returns(ref), no_eq, heap_size=get_size2::heap_size)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule { pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
let _span = tracing::trace_span!("parsed_module", ?file).entered(); let _span = tracing::trace_span!("parsed_module", ?file).entered();
@@ -92,14 +92,14 @@ impl ParsedModule {
self.inner.store(None); self.inner.store(None);
} }
/// Returns the pointer address of this [`ParsedModule`]. /// Returns a pointer for this [`ParsedModule`].
/// ///
/// The pointer uniquely identifies the module within the current Salsa revision, /// The pointer uniquely identifies the module within the current Salsa revision,
/// regardless of whether particular [`ParsedModuleRef`] instances are garbage collected. /// regardless of whether particular [`ParsedModuleRef`] instances are garbage collected.
pub fn addr(&self) -> usize { pub fn as_ptr(&self) -> *const () {
// Note that the outer `Arc` in `inner` is stable across garbage collection, while the inner // Note that the outer `Arc` in `inner` is stable across garbage collection, while the inner
// `Arc` within the `ArcSwap` may change. // `Arc` within the `ArcSwap` may change.
Arc::as_ptr(&self.inner).addr() Arc::as_ptr(&self.inner).cast()
} }
} }
@@ -202,13 +202,9 @@ mod indexed {
/// Returns the node at the given index. /// Returns the node at the given index.
pub fn get_by_index<'ast>(&'ast self, index: NodeIndex) -> AnyRootNodeRef<'ast> { pub fn get_by_index<'ast>(&'ast self, index: NodeIndex) -> AnyRootNodeRef<'ast> {
let index = index
.as_u32()
.expect("attempted to access uninitialized `NodeIndex`");
// Note that this method restores the correct lifetime: the nodes are valid for as // Note that this method restores the correct lifetime: the nodes are valid for as
// long as the reference to `IndexedModule` is alive. // long as the reference to `IndexedModule` is alive.
self.index[index as usize] self.index[index.as_usize()]
} }
} }
@@ -224,7 +220,7 @@ mod indexed {
T: HasNodeIndex + std::fmt::Debug, T: HasNodeIndex + std::fmt::Debug,
AnyRootNodeRef<'a>: From<&'a T>, AnyRootNodeRef<'a>: From<&'a T>,
{ {
node.node_index().set(NodeIndex::from(self.index)); node.node_index().set(self.index);
self.nodes.push(AnyRootNodeRef::from(node)); self.nodes.push(AnyRootNodeRef::from(node));
self.index += 1; self.index += 1;
} }

View File

@@ -9,7 +9,7 @@ use crate::Db;
use crate::files::{File, FilePath}; use crate::files::{File, FilePath};
/// Reads the source text of a python text file (must be valid UTF8) or notebook. /// Reads the source text of a python text file (must be valid UTF8) or notebook.
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] #[salsa::tracked(heap_size=get_size2::heap_size)]
pub fn source_text(db: &dyn Db, file: File) -> SourceText { pub fn source_text(db: &dyn Db, file: File) -> SourceText {
let path = file.path(db); let path = file.path(db);
let _span = tracing::trace_span!("source_text", file = %path).entered(); let _span = tracing::trace_span!("source_text", file = %path).entered();
@@ -157,7 +157,7 @@ pub enum SourceTextError {
} }
/// Computes the [`LineIndex`] for `file`. /// Computes the [`LineIndex`] for `file`.
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] #[salsa::tracked(heap_size=get_size2::heap_size)]
pub fn line_index(db: &dyn Db, file: File) -> LineIndex { pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
let _span = tracing::trace_span!("line_index", ?file).entered(); let _span = tracing::trace_span!("line_index", ?file).entered();

View File

@@ -46,7 +46,7 @@ pub type Result<T> = std::io::Result<T>;
/// * File watching isn't supported. /// * File watching isn't supported.
/// ///
/// Abstracting the system also enables tests to use a more efficient in-memory file system. /// Abstracting the system also enables tests to use a more efficient in-memory file system.
pub trait System: Debug + Sync + Send { pub trait System: Debug {
/// Reads the metadata of the file or directory at `path`. /// Reads the metadata of the file or directory at `path`.
/// ///
/// This function will traverse symbolic links to query information about the destination file. /// This function will traverse symbolic links to query information about the destination file.
@@ -197,8 +197,6 @@ pub trait System: Debug + Sync + Send {
fn as_any(&self) -> &dyn std::any::Any; fn as_any(&self) -> &dyn std::any::Any;
fn as_any_mut(&mut self) -> &mut dyn std::any::Any; fn as_any_mut(&mut self) -> &mut dyn std::any::Any;
fn dyn_clone(&self) -> Box<dyn System>;
} }
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)] #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]

View File

@@ -1,5 +1,3 @@
#![allow(clippy::disallowed_methods)]
use super::walk_directory::{ use super::walk_directory::{
self, DirectoryWalker, WalkDirectoryBuilder, WalkDirectoryConfiguration, self, DirectoryWalker, WalkDirectoryBuilder, WalkDirectoryConfiguration,
WalkDirectoryVisitorBuilder, WalkState, WalkDirectoryVisitorBuilder, WalkState,
@@ -257,10 +255,6 @@ impl System for OsSystem {
fn env_var(&self, name: &str) -> std::result::Result<String, std::env::VarError> { fn env_var(&self, name: &str) -> std::result::Result<String, std::env::VarError> {
std::env::var(name) std::env::var(name)
} }
fn dyn_clone(&self) -> Box<dyn System> {
Box::new(self.clone())
}
} }
impl OsSystem { impl OsSystem {

View File

@@ -236,7 +236,7 @@ impl SystemPath {
/// ///
/// [`CurDir`]: camino::Utf8Component::CurDir /// [`CurDir`]: camino::Utf8Component::CurDir
#[inline] #[inline]
pub fn components(&self) -> camino::Utf8Components<'_> { pub fn components(&self) -> camino::Utf8Components {
self.0.components() self.0.components()
} }
@@ -762,7 +762,7 @@ impl SystemVirtualPath {
} }
/// An owned, virtual path on [`System`](`super::System`) (akin to [`String`]). /// An owned, virtual path on [`System`](`super::System`) (akin to [`String`]).
#[derive(Eq, PartialEq, Clone, Hash, PartialOrd, Ord, get_size2::GetSize)] #[derive(Eq, PartialEq, Clone, Hash, PartialOrd, Ord)]
pub struct SystemVirtualPathBuf(String); pub struct SystemVirtualPathBuf(String);
impl SystemVirtualPathBuf { impl SystemVirtualPathBuf {

View File

@@ -146,10 +146,6 @@ impl System for TestSystem {
fn case_sensitivity(&self) -> CaseSensitivity { fn case_sensitivity(&self) -> CaseSensitivity {
self.system().case_sensitivity() self.system().case_sensitivity()
} }
fn dyn_clone(&self) -> Box<dyn System> {
Box::new(self.clone())
}
} }
impl Default for TestSystem { impl Default for TestSystem {
@@ -398,13 +394,6 @@ impl System for InMemorySystem {
fn case_sensitivity(&self) -> CaseSensitivity { fn case_sensitivity(&self) -> CaseSensitivity {
CaseSensitivity::CaseSensitive CaseSensitivity::CaseSensitive
} }
fn dyn_clone(&self) -> Box<dyn System> {
Box::new(Self {
user_config_directory: Mutex::new(self.user_config_directory.lock().unwrap().clone()),
memory_fs: self.memory_fs.clone(),
})
}
} }
impl WritableSystem for InMemorySystem { impl WritableSystem for InMemorySystem {

View File

@@ -195,7 +195,7 @@ impl VendoredFileSystem {
/// ///
/// ## Panics: /// ## Panics:
/// If the current thread already holds the lock. /// If the current thread already holds the lock.
fn lock_archive(&self) -> LockedZipArchive<'_> { fn lock_archive(&self) -> LockedZipArchive {
self.inner.lock().unwrap() self.inner.lock().unwrap()
} }
} }
@@ -360,7 +360,7 @@ impl VendoredZipArchive {
Ok(Self(ZipArchive::new(io::Cursor::new(data))?)) Ok(Self(ZipArchive::new(io::Cursor::new(data))?))
} }
fn lookup_path(&mut self, path: &NormalizedVendoredPath) -> Result<ZipFile<'_>> { fn lookup_path(&mut self, path: &NormalizedVendoredPath) -> Result<ZipFile> {
Ok(self.0.by_name(path.as_str())?) Ok(self.0.by_name(path.as_str())?)
} }

View File

@@ -37,7 +37,7 @@ impl VendoredPath {
self.0.as_std_path() self.0.as_std_path()
} }
pub fn components(&self) -> Utf8Components<'_> { pub fn components(&self) -> Utf8Components {
self.0.components() self.0.components()
} }

View File

@@ -13,7 +13,6 @@ license = { workspace = true }
[dependencies] [dependencies]
ty = { workspace = true } ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] } ty_project = { workspace = true, features = ["schemars"] }
ty_python_semantic = { workspace = true }
ty_static = { workspace = true } ty_static = { workspace = true }
ruff = { workspace = true } ruff = { workspace = true }
ruff_formatter = { workspace = true } ruff_formatter = { workspace = true }

View File

@@ -348,7 +348,7 @@ fn format_dev_multi_project(
debug!(parent: None, "Starting {}", project_path.display()); debug!(parent: None, "Starting {}", project_path.display());
match format_dev_project( match format_dev_project(
std::slice::from_ref(&project_path), &[project_path.clone()],
args.stability_check, args.stability_check,
args.write, args.write,
args.preview, args.preview,
@@ -628,7 +628,7 @@ struct CheckRepoResult {
} }
impl CheckRepoResult { impl CheckRepoResult {
fn display(&self, format: Format) -> DisplayCheckRepoResult<'_> { fn display(&self, format: Format) -> DisplayCheckRepoResult {
DisplayCheckRepoResult { DisplayCheckRepoResult {
result: self, result: self,
format, format,
@@ -665,7 +665,7 @@ struct Diagnostic {
} }
impl Diagnostic { impl Diagnostic {
fn display(&self, format: Format) -> DisplayDiagnostic<'_> { fn display(&self, format: Format) -> DisplayDiagnostic {
DisplayDiagnostic { DisplayDiagnostic {
diagnostic: self, diagnostic: self,
format, format,

View File

@@ -52,7 +52,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
} }
fn generate_markdown() -> String { fn generate_markdown() -> String {
let registry = ty_python_semantic::default_lint_registry(); let registry = &*ty_project::DEFAULT_LINT_REGISTRY;
let mut output = String::new(); let mut output = String::new();

View File

@@ -14,11 +14,8 @@ license = { workspace = true }
doctest = false doctest = false
[dependencies] [dependencies]
ruff_text_size = { workspace = true, features = ["get-size"] } ruff_text_size = { workspace = true }
get-size2 = { workspace = true } get-size2 = { workspace = true }
is-macro = { workspace = true } is-macro = { workspace = true }
serde = { workspace = true, optional = true, features = [] } serde = { workspace = true, optional = true, features = [] }
[features]
serde = ["dep:serde", "ruff_text_size/serde"]

View File

@@ -562,7 +562,7 @@ struct RemoveSoftLinebreaksSnapshot {
pub trait BufferExtensions: Buffer + Sized { pub trait BufferExtensions: Buffer + Sized {
/// Returns a new buffer that calls the passed inspector for every element that gets written to the output /// Returns a new buffer that calls the passed inspector for every element that gets written to the output
#[must_use] #[must_use]
fn inspect<F>(&mut self, inspector: F) -> Inspect<'_, Self::Context, F> fn inspect<F>(&mut self, inspector: F) -> Inspect<Self::Context, F>
where where
F: FnMut(&FormatElement), F: FnMut(&FormatElement),
{ {
@@ -607,7 +607,7 @@ pub trait BufferExtensions: Buffer + Sized {
/// # } /// # }
/// ``` /// ```
#[must_use] #[must_use]
fn start_recording(&mut self) -> Recording<'_, Self> { fn start_recording(&mut self) -> Recording<Self> {
Recording::new(self) Recording::new(self)
} }

View File

@@ -340,7 +340,7 @@ impl<Context> Format<Context> for SourcePosition {
/// Creates a text from a dynamic string. /// Creates a text from a dynamic string.
/// ///
/// This is done by allocating a new string internally. /// This is done by allocating a new string internally.
pub fn text(text: &str) -> Text<'_> { pub fn text(text: &str) -> Text {
debug_assert_no_newlines(text); debug_assert_no_newlines(text);
Text { text } Text { text }
@@ -459,10 +459,7 @@ fn debug_assert_no_newlines(text: &str) {
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn line_suffix<Content, Context>( pub fn line_suffix<Content, Context>(inner: &Content, reserved_width: u32) -> LineSuffix<Context>
inner: &Content,
reserved_width: u32,
) -> LineSuffix<'_, Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -600,10 +597,7 @@ impl<Context> Format<Context> for LineSuffixBoundary {
/// Use `Memoized.inspect(f)?.has_label(LabelId::of::<SomeLabelId>()` if you need to know if some content breaks that should /// Use `Memoized.inspect(f)?.has_label(LabelId::of::<SomeLabelId>()` if you need to know if some content breaks that should
/// only be written later. /// only be written later.
#[inline] #[inline]
pub fn labelled<Content, Context>( pub fn labelled<Content, Context>(label_id: LabelId, content: &Content) -> FormatLabelled<Context>
label_id: LabelId,
content: &Content,
) -> FormatLabelled<'_, Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -706,7 +700,7 @@ impl<Context> Format<Context> for Space {
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn indent<Content, Context>(content: &Content) -> Indent<'_, Context> pub fn indent<Content, Context>(content: &Content) -> Indent<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -777,7 +771,7 @@ impl<Context> std::fmt::Debug for Indent<'_, Context> {
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn dedent<Content, Context>(content: &Content) -> Dedent<'_, Context> pub fn dedent<Content, Context>(content: &Content) -> Dedent<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -852,7 +846,7 @@ impl<Context> std::fmt::Debug for Dedent<'_, Context> {
/// ///
/// This resembles the behaviour of Prettier's `align(Number.NEGATIVE_INFINITY, content)` IR element. /// This resembles the behaviour of Prettier's `align(Number.NEGATIVE_INFINITY, content)` IR element.
#[inline] #[inline]
pub fn dedent_to_root<Content, Context>(content: &Content) -> Dedent<'_, Context> pub fn dedent_to_root<Content, Context>(content: &Content) -> Dedent<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -966,7 +960,7 @@ where
/// ///
/// - tab indentation: Printer indents the expression with two tabs because the `align` increases the indentation level. /// - tab indentation: Printer indents the expression with two tabs because the `align` increases the indentation level.
/// - space indentation: Printer indents the expression by 4 spaces (one indentation level) **and** 2 spaces for the align. /// - space indentation: Printer indents the expression by 4 spaces (one indentation level) **and** 2 spaces for the align.
pub fn align<Content, Context>(count: u8, content: &Content) -> Align<'_, Context> pub fn align<Content, Context>(count: u8, content: &Content) -> Align<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -1036,7 +1030,7 @@ impl<Context> std::fmt::Debug for Align<'_, Context> {
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<'_, Context> { pub fn block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<Context> {
BlockIndent { BlockIndent {
content: Argument::new(content), content: Argument::new(content),
mode: IndentMode::Block, mode: IndentMode::Block,
@@ -1107,7 +1101,7 @@ pub fn block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<'_,
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn soft_block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<'_, Context> { pub fn soft_block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<Context> {
BlockIndent { BlockIndent {
content: Argument::new(content), content: Argument::new(content),
mode: IndentMode::Soft, mode: IndentMode::Soft,
@@ -1181,9 +1175,7 @@ pub fn soft_block_indent<Context>(content: &impl Format<Context>) -> BlockIndent
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn soft_line_indent_or_space<Context>( pub fn soft_line_indent_or_space<Context>(content: &impl Format<Context>) -> BlockIndent<Context> {
content: &impl Format<Context>,
) -> BlockIndent<'_, Context> {
BlockIndent { BlockIndent {
content: Argument::new(content), content: Argument::new(content),
mode: IndentMode::SoftLineOrSpace, mode: IndentMode::SoftLineOrSpace,
@@ -1316,9 +1308,7 @@ impl<Context> std::fmt::Debug for BlockIndent<'_, Context> {
/// # Ok(()) /// # Ok(())
/// # } /// # }
/// ``` /// ```
pub fn soft_space_or_block_indent<Context>( pub fn soft_space_or_block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<Context> {
content: &impl Format<Context>,
) -> BlockIndent<'_, Context> {
BlockIndent { BlockIndent {
content: Argument::new(content), content: Argument::new(content),
mode: IndentMode::SoftSpace, mode: IndentMode::SoftSpace,
@@ -1398,7 +1388,7 @@ pub fn soft_space_or_block_indent<Context>(
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn group<Context>(content: &impl Format<Context>) -> Group<'_, Context> { pub fn group<Context>(content: &impl Format<Context>) -> Group<Context> {
Group { Group {
content: Argument::new(content), content: Argument::new(content),
id: None, id: None,
@@ -1561,7 +1551,7 @@ impl<Context> std::fmt::Debug for Group<'_, Context> {
#[inline] #[inline]
pub fn best_fit_parenthesize<Context>( pub fn best_fit_parenthesize<Context>(
content: &impl Format<Context>, content: &impl Format<Context>,
) -> BestFitParenthesize<'_, Context> { ) -> BestFitParenthesize<Context> {
BestFitParenthesize { BestFitParenthesize {
content: Argument::new(content), content: Argument::new(content),
group_id: None, group_id: None,
@@ -1701,7 +1691,7 @@ impl<Context> std::fmt::Debug for BestFitParenthesize<'_, Context> {
pub fn conditional_group<Content, Context>( pub fn conditional_group<Content, Context>(
content: &Content, content: &Content,
condition: Condition, condition: Condition,
) -> ConditionalGroup<'_, Context> ) -> ConditionalGroup<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -1862,7 +1852,7 @@ impl<Context> Format<Context> for ExpandParent {
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn if_group_breaks<Content, Context>(content: &Content) -> IfGroupBreaks<'_, Context> pub fn if_group_breaks<Content, Context>(content: &Content) -> IfGroupBreaks<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -1943,7 +1933,7 @@ where
/// # } /// # }
/// ``` /// ```
#[inline] #[inline]
pub fn if_group_fits_on_line<Content, Context>(flat_content: &Content) -> IfGroupBreaks<'_, Context> pub fn if_group_fits_on_line<Content, Context>(flat_content: &Content) -> IfGroupBreaks<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -2132,7 +2122,7 @@ impl<Context> std::fmt::Debug for IfGroupBreaks<'_, Context> {
pub fn indent_if_group_breaks<Content, Context>( pub fn indent_if_group_breaks<Content, Context>(
content: &Content, content: &Content,
group_id: GroupId, group_id: GroupId,
) -> IndentIfGroupBreaks<'_, Context> ) -> IndentIfGroupBreaks<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {
@@ -2215,7 +2205,7 @@ impl<Context> std::fmt::Debug for IndentIfGroupBreaks<'_, Context> {
/// # Ok(()) /// # Ok(())
/// # } /// # }
/// ``` /// ```
pub fn fits_expanded<Content, Context>(content: &Content) -> FitsExpanded<'_, Context> pub fn fits_expanded<Content, Context>(content: &Content) -> FitsExpanded<Context>
where where
Content: Format<Context>, Content: Format<Context>,
{ {

View File

@@ -197,7 +197,7 @@ pub const LINE_TERMINATORS: [char; 3] = ['\r', LINE_SEPARATOR, PARAGRAPH_SEPARAT
/// Replace the line terminators matching the provided list with "\n" /// Replace the line terminators matching the provided list with "\n"
/// since its the only line break type supported by the printer /// since its the only line break type supported by the printer
pub fn normalize_newlines<const N: usize>(text: &str, terminators: [char; N]) -> Cow<'_, str> { pub fn normalize_newlines<const N: usize>(text: &str, terminators: [char; N]) -> Cow<str> {
let mut result = String::new(); let mut result = String::new();
let mut last_end = 0; let mut last_end = 0;

View File

@@ -222,7 +222,7 @@ impl FormatContext for IrFormatContext<'_> {
&IrFormatOptions &IrFormatOptions
} }
fn source_code(&self) -> SourceCode<'_> { fn source_code(&self) -> SourceCode {
self.source_code self.source_code
} }
} }

Some files were not shown because too many files have changed in this diff Show More