Compare commits: alex/newty...david/enum (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | e935bc5578 |  |
.github/CODEOWNERS (vendored): 6 changed lines

@@ -19,10 +19,6 @@
# ty
/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
.github/workflows/build-binaries.yml (vendored): 20 changed lines

@@ -49,7 +49,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build sdist"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
command: sdist
args: --out dist

@@ -79,7 +79,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: x86_64
args: --release --locked --out dist

@@ -121,7 +121,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - aarch64"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: aarch64
args: --release --locked --out dist

@@ -177,7 +177,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist

@@ -230,7 +230,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.target }}
manylinux: auto

@@ -292,8 +292,6 @@ jobs:
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: arm-unknown-linux-musleabihf
arch: arm
- target: riscv64gc-unknown-linux-gnu
arch: riscv64

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

@@ -306,7 +304,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.platform.target }}
manylinux: auto

@@ -321,7 +319,7 @@ jobs:
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip libatomic1
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall

@@ -372,7 +370,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2

@@ -437,7 +435,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
.github/workflows/build-docker.yml (vendored): 16 changed lines

@@ -40,7 +40,7 @@ jobs:
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -63,7 +63,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name

@@ -123,7 +123,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version

@@ -131,7 +131,7 @@ jobs:
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -169,7 +169,7 @@ jobs:
steps:
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -219,7 +219,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index

@@ -266,7 +266,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:

@@ -276,7 +276,7 @@ jobs:
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
.github/workflows/ci.yaml (vendored): 62 changed lines

@@ -38,8 +38,7 @@ jobs:
fuzz: ${{ steps.check_fuzzer.outputs.changed }}
# Flag that is set to "true" when code related to ty changes.
ty: ${{ steps.check_ty.outputs.changed }}
# Flag that is set to "true" when code related to the py-fuzzer folder changes.
py-fuzzer: ${{ steps.check_py_fuzzer.outputs.changed }}
# Flag that is set to "true" when code related to the playground changes.
playground: ${{ steps.check_playground.outputs.changed }}
steps:

@@ -69,6 +68,7 @@ jobs:
':crates/ruff_text_size/**' \
':crates/ruff_python_ast/**' \
':crates/ruff_python_parser/**' \
':python/py-fuzzer/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"

@@ -138,18 +138,6 @@ jobs:
echo "changed=true" >> "$GITHUB_OUTPUT"
fi

- name: Check if the py-fuzzer code changed
id: check_py_fuzzer
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- 'python/py_fuzzer/**' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi

- name: Check if there was any code related change
id: check_code
env:

@@ -250,13 +238,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-insta
- name: ty mdtests (GitHub annotations)

@@ -308,13 +296,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-insta
- name: "Run tests"

@@ -337,7 +325,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-nextest
- name: "Run tests"

@@ -393,7 +381,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Build"
run: cargo build --release --locked

@@ -418,7 +406,7 @@ jobs:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- name: "Build tests"
shell: bash
env:

@@ -441,7 +429,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@0dca8cf8dfb40cb77a29cece06933ce674674523 # v1.15.1
uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"

@@ -455,7 +443,7 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.parser == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
timeout-minutes: 20
env:
FORCE_COLOR: 1

@@ -463,7 +451,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test
id: download-cached-binary

@@ -645,7 +633,7 @@ jobs:
- cargo-test-linux
- determine_changes
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.ty == 'true' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

@@ -664,7 +652,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Fuzz
env:
FORCE_COLOR: 1

@@ -694,7 +682,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@0dca8cf8dfb40cb77a29cece06933ce674674523 # v1.15.1
- uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear

@@ -715,7 +703,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
args: --out dist
- name: "Test wheel"

@@ -734,13 +722,13 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22
- name: "Cache pre-commit"
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
path: ~/.cache/pre-commit
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}

@@ -777,7 +765,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system

@@ -909,13 +897,13 @@ jobs:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-codspeed

@@ -942,13 +930,13 @@ jobs:
persist-credentials: false
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
with:
tool: cargo-codspeed
.github/workflows/daily_fuzz.yaml (vendored): 4 changed lines

@@ -34,11 +34,11 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: Build ruff
# A debug build means the script runs slower once it gets started,
.github/workflows/mypy_primer.yaml (vendored): 7 changed lines

@@ -11,7 +11,6 @@ on:
- "crates/ruff_python_parser"
- ".github/workflows/mypy_primer.yaml"
- ".github/workflows/mypy_primer_comment.yaml"
- "scripts/mypy_primer.sh"
- "Cargo.lock"
- "!**.md"

@@ -39,7 +38,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:

@@ -82,9 +81,9 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: "ruff"
.github/workflows/publish-pypi.yml (vendored): 2 changed lines

@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*
.github/workflows/release.yml (vendored): 18 changed lines

@@ -61,7 +61,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with:
persist-credentials: false
submodules: recursive

@@ -124,19 +124,19 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
pattern: artifacts-*
path: target/distrib/

@@ -175,19 +175,19 @@ jobs:
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
pattern: artifacts-*
path: target/distrib/

@@ -251,13 +251,13 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with:
persist-credentials: false
submodules: recursive
# Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts"
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
pattern: artifacts-*
path: artifacts
.github/workflows/sync_typeshed.yaml (vendored): 6 changed lines

@@ -65,7 +65,7 @@ jobs:
run: |
git config --global user.name typeshedbot
git config --global user.email '<>'
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Sync typeshed stubs
run: |
rm -rf "ruff/${VENDORED_TYPESHED}"

@@ -117,7 +117,7 @@ jobs:
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git
run: |
git config --global user.name typeshedbot

@@ -155,7 +155,7 @@ jobs:
with:
persist-credentials: true
ref: ${{ env.UPSTREAM_BRANCH}}
- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: Setup git
run: |
git config --global user.name typeshedbot
.github/workflows/ty-ecosystem-analyzer.yaml (vendored): 2 changed lines

@@ -33,7 +33,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
.github/workflows/ty-ecosystem-report.yaml (vendored): 2 changed lines

@@ -29,7 +29,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
.github/workflows/typing_conformance.yaml (vendored): 29 changed lines

@@ -24,7 +24,6 @@ env:
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
CONFORMANCE_SUITE_COMMIT: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc

jobs:
typing_conformance:

@@ -41,10 +40,13 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
repository: python/typing
ref: ${{ env.CONFORMANCE_SUITE_COMMIT }}
ref: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
path: typing
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"

@@ -54,9 +56,6 @@ jobs:
- name: Compute diagnostic diff
shell: bash
env:
# TODO: Remove this once we fixed the remaining panics in the conformance suite.
TY_MAX_PARALLELISM: 1
run: |
RUFF_DIR="$GITHUB_WORKSPACE/ruff"

@@ -65,16 +64,17 @@ jobs:
cd ruff
echo "new commit"
git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
cargo build --bin ty
mv target/debug/ty ty-new
git checkout -b new_commit "${{ github.event.pull_request.head.sha }}"
git rev-list --format=%s --max-count=1 new_commit
cargo build --release --bin ty
mv target/release/ty ty-new
echo "old commit (merge base)"
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
git checkout -b old_commit "$MERGE_BASE"
echo "old commit (merge base)"
git rev-list --format=%s --max-count=1 old_commit
cargo build --bin ty
mv target/debug/ty ty-old
cargo build --release --bin ty
mv target/release/ty ty-old
)

(

@@ -95,7 +95,6 @@ jobs:
fi
echo ${{ github.event.number }} > pr-number
echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit

- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2

@@ -108,9 +107,3 @@ jobs:
with:
name: pr-number
path: pr-number

- name: Upload conformance suite commit
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: conformance-suite-commit
path: conformance-suite-commit

@@ -32,14 +32,6 @@ jobs:
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download typing conformance suite commit
with:
name: conformance-suite-commit
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download typing_conformance results"
id: download-typing_conformance_diff

@@ -69,14 +61,7 @@ jobs:
# subsequent runs
echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt

if [[ -f conformance-suite-commit ]]
then
echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(<conformance-suite-commit)/conformance)" >> comment.txt
else
echo "conformance-suite-commit file not found"
echo "## Diagnostic diff on typing conformance tests" >> comment.txt
fi

echo '## Diagnostic diff on typing conformance tests' >> comment.txt
if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt

@@ -81,7 +81,7 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.7
rev: v0.12.5
hooks:
- id: ruff-format
- id: ruff-check
CHANGELOG.md: 137 changed lines

@@ -1,142 +1,5 @@
# Changelog

## 0.12.10

### Preview features

- \[`flake8-simplify`\] Implement fix for `maxsplit` without separator (`SIM905`) ([#19851](https://github.com/astral-sh/ruff/pull/19851))
- \[`flake8-use-pathlib`\] Add fixes for `PTH102` and `PTH103` ([#19514](https://github.com/astral-sh/ruff/pull/19514))

### Bug fixes

- \[`isort`\] Handle multiple continuation lines after module docstring (`I002`) ([#19818](https://github.com/astral-sh/ruff/pull/19818))
- \[`pyupgrade`\] Avoid reporting `__future__` features as unnecessary when they are used (`UP010`) ([#19769](https://github.com/astral-sh/ruff/pull/19769))
- \[`pyupgrade`\] Handle nested `Optional`s (`UP045`) ([#19770](https://github.com/astral-sh/ruff/pull/19770))

### Rule changes

- \[`pycodestyle`\] Make `E731` fix unsafe instead of display-only for class assignments ([#19700](https://github.com/astral-sh/ruff/pull/19700))
- \[`pyflakes`\] Add secondary annotation showing previous definition (`F811`) ([#19900](https://github.com/astral-sh/ruff/pull/19900))

### Documentation

- Fix description of global config file discovery strategy ([#19188](https://github.com/astral-sh/ruff/pull/19188))
- Update outdated links to <https://typing.python.org/en/latest/source/stubs.html> ([#19992](https://github.com/astral-sh/ruff/pull/19992))
- \[`flake8-annotations`\] Remove unused import in example (`ANN401`) ([#20000](https://github.com/astral-sh/ruff/pull/20000))

## 0.12.9

### Preview features

- \[`airflow`\] Add check for `airflow.secrets.cache.SecretCache` (`AIR301`) ([#17707](https://github.com/astral-sh/ruff/pull/17707))
- \[`ruff`\] Offer a safe fix for multi-digit zeros (`RUF064`) ([#19847](https://github.com/astral-sh/ruff/pull/19847))

### Bug fixes

- \[`flake8-blind-except`\] Fix `BLE001` false-positive on `raise ... from None` ([#19755](https://github.com/astral-sh/ruff/pull/19755))
- \[`flake8-comprehensions`\] Fix false positive for `C420` with attribute, subscript, or slice assignment targets ([#19513](https://github.com/astral-sh/ruff/pull/19513))
- \[`flake8-simplify`\] Fix handling of U+001C..U+001F whitespace (`SIM905`) ([#19849](https://github.com/astral-sh/ruff/pull/19849))

### Rule changes

- \[`pylint`\] Use lowercase hex characters to match the formatter (`PLE2513`) ([#19808](https://github.com/astral-sh/ruff/pull/19808))

### Documentation

- Fix `lint.future-annotations` link ([#19876](https://github.com/astral-sh/ruff/pull/19876))

### Other changes

- Build `riscv64` binaries for release ([#19819](https://github.com/astral-sh/ruff/pull/19819))

- Add rule code to error description in GitLab output ([#19896](https://github.com/astral-sh/ruff/pull/19896))

- Improve rendering of the `full` output format ([#19415](https://github.com/astral-sh/ruff/pull/19415))

Below is an example diff for [`F401`](https://docs.astral.sh/ruff/rules/unused-import/):

```diff
-unused.py:8:19: F401 [*] `pathlib` imported but unused
+F401 [*] `pathlib` imported but unused
+ --> unused.py:8:19
|
7 | # Unused, _not_ marked as required (due to the alias).
8 | import pathlib as non_alias
- | ^^^^^^^^^ F401
+ | ^^^^^^^^^
9 |
10 | # Unused, marked as required.
|
- = help: Remove unused import: `pathlib`
+help: Remove unused import: `pathlib`
```

For now, the primary difference is the movement of the filename, line number, and column information to a second line in the header. This new representation will allow us to make further additions to Ruff's diagnostics, such as adding sub-diagnostics and multiple annotations to the same snippet.

## 0.12.8

### Preview features

- \[`flake8-use-pathlib`\] Expand `PTH201` to check all `PurePath` subclasses ([#19440](https://github.com/astral-sh/ruff/pull/19440))

### Bug fixes

- \[`flake8-blind-except`\] Change `BLE001` to correctly parse exception tuples ([#19747](https://github.com/astral-sh/ruff/pull/19747))
- \[`flake8-errmsg`\] Exclude `typing.cast` from `EM101` ([#19656](https://github.com/astral-sh/ruff/pull/19656))
- \[`flake8-simplify`\] Fix raw string handling in `SIM905` for embedded quotes ([#19591](https://github.com/astral-sh/ruff/pull/19591))
- \[`flake8-import-conventions`\] Avoid false positives for NFKC-normalized `__debug__` import aliases in `ICN001` ([#19411](https://github.com/astral-sh/ruff/pull/19411))
- \[`isort`\] Fix syntax error after docstring ending with backslash (`I002`) ([#19505](https://github.com/astral-sh/ruff/pull/19505))
- \[`pylint`\] Mark `PLC0207` fixes as unsafe when `*args` unpacking is present ([#19679](https://github.com/astral-sh/ruff/pull/19679))
- \[`pyupgrade`\] Prevent infinite loop with `I002` (`UP010`, `UP035`) ([#19413](https://github.com/astral-sh/ruff/pull/19413))
- \[`ruff`\] Parenthesize generator expressions in f-strings (`RUF010`) ([#19434](https://github.com/astral-sh/ruff/pull/19434))

### Rule changes

- \[`eradicate`\] Don't flag `pyrefly` pragmas as unused code (`ERA001`) ([#19731](https://github.com/astral-sh/ruff/pull/19731))

### Documentation

- Replace "associative" with "commutative" in docs for `RUF036` ([#19706](https://github.com/astral-sh/ruff/pull/19706))
- Fix copy and line separator colors in dark mode ([#19630](https://github.com/astral-sh/ruff/pull/19630))
- Fix link to `typing` documentation ([#19648](https://github.com/astral-sh/ruff/pull/19648))
- \[`refurb`\] Make more examples error out-of-the-box ([#19695](https://github.com/astral-sh/ruff/pull/19695),[#19673](https://github.com/astral-sh/ruff/pull/19673),[#19672](https://github.com/astral-sh/ruff/pull/19672))

### Other changes

- Include column numbers in GitLab output format ([#19708](https://github.com/astral-sh/ruff/pull/19708))
- Always expand tabs to four spaces in diagnostics ([#19618](https://github.com/astral-sh/ruff/pull/19618))
- Update pre-commit's `ruff` id ([#19654](https://github.com/astral-sh/ruff/pull/19654))

## 0.12.7

This is a follow-up release to 0.12.6. Because of an issue in the package metadata, 0.12.6 failed to publish fully to PyPI and has been yanked. Similarly, there is no GitHub release or Git tag for 0.12.6. The contents of the 0.12.7 release are identical to 0.12.6, except for the updated metadata.

## 0.12.6

### Preview features

- \[`flake8-commas`\] Add support for trailing comma checks in type parameter lists (`COM812`, `COM819`) ([#19390](https://github.com/astral-sh/ruff/pull/19390))
- \[`pylint`\] Implement auto-fix for `missing-maxsplit-arg` (`PLC0207`) ([#19387](https://github.com/astral-sh/ruff/pull/19387))
- \[`ruff`\] Offer fixes for `RUF039` in more cases ([#19065](https://github.com/astral-sh/ruff/pull/19065))

### Bug fixes

- Support `.pyi` files in ruff analyze graph ([#19611](https://github.com/astral-sh/ruff/pull/19611))
- \[`flake8-pyi`\] Preserve inline comment in ellipsis removal (`PYI013`) ([#19399](https://github.com/astral-sh/ruff/pull/19399))
- \[`perflint`\] Ignore rule if target is `global` or `nonlocal` (`PERF401`) ([#19539](https://github.com/astral-sh/ruff/pull/19539))
- \[`pyupgrade`\] Fix `UP030` to avoid modifying double curly braces in format strings ([#19378](https://github.com/astral-sh/ruff/pull/19378))
- \[`refurb`\] Ignore decorated functions for `FURB118` ([#19339](https://github.com/astral-sh/ruff/pull/19339))
- \[`refurb`\] Mark `int` and `bool` cases for `Decimal.from_float` as safe fixes (`FURB164`) ([#19468](https://github.com/astral-sh/ruff/pull/19468))
- \[`ruff`\] Fix `RUF033` for named default expressions ([#19115](https://github.com/astral-sh/ruff/pull/19115))

### Rule changes

- \[`flake8-blind-except`\] Change `BLE001` to permit `logging.critical(..., exc_info=True)` ([#19520](https://github.com/astral-sh/ruff/pull/19520))

### Performance

- Add support for specifying minimum dots in detected string imports ([#19538](https://github.com/astral-sh/ruff/pull/19538))

## 0.12.5

### Preview features
Cargo.lock (generated): 594 changed lines
File diff suppressed because it is too large.
Cargo.toml: 16 changed lines

@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.87"
rust-version = "1.86"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"

@@ -23,7 +23,6 @@ ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
ruff_memory_usage = { path = "crates/ruff_memory_usage" }
ruff_notebook = { path = "crates/ruff_notebook" }
ruff_options_metadata = { path = "crates/ruff_options_metadata" }
ruff_python_ast = { path = "crates/ruff_python_ast" }

@@ -41,7 +40,6 @@ ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }

ty = { path = "crates/ty" }
ty_combine = { path = "crates/ty_combine" }
ty_ide = { path = "crates/ty_ide" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }

@@ -85,7 +83,7 @@ etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.6.2", features = [
get-size2 = { version = "0.6.0", features = [
"derive",
"smallvec",
"hashbrown",

@@ -143,12 +141,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a3ffa22cb26756473d56f867aedec3fd907c4dd9", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",
"inventory",
] }
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "dba66f1a37acca014c2402f231ed5b361bd7d8fe" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }

@@ -215,8 +208,6 @@ unexpected_cfgs = { level = "warn", check-cfg = [
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Enabled at the crate level
disallowed_methods = "allow"
# Allowed pedantic lints
char_lit_as_u8 = "allow"
collapsible_else_if = "allow"

@@ -255,7 +246,6 @@ unused_peekable = "warn"
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"

[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times
@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.10/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.10/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.5/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),

@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.10
rev: v0.12.5
hooks:
# Run the linter.
- id: ruff-check
clippy.toml: 17 changed lines

@@ -24,20 +24,3 @@ ignore-interior-mutability = [
# The expression is read-only.
"ruff_python_ast::hashable::HashableExpr",
]

disallowed-methods = [
{ path = "std::env::var", reason = "Use System::env_var instead in ty crates" },
{ path = "std::env::current_dir", reason = "Use System::current_directory instead in ty crates" },
{ path = "std::fs::read_to_string", reason = "Use System::read_to_string instead in ty crates" },
{ path = "std::fs::metadata", reason = "Use System::path_metadata instead in ty crates" },
{ path = "std::fs::canonicalize", reason = "Use System::canonicalize_path instead in ty crates" },
{ path = "dunce::canonicalize", reason = "Use System::canonicalize_path instead in ty crates" },
{ path = "std::fs::read_dir", reason = "Use System::read_directory instead in ty crates" },
{ path = "std::fs::write", reason = "Use WritableSystem::write_file instead in ty crates" },
{ path = "std::fs::create_dir_all", reason = "Use WritableSystem::create_directory_all instead in ty crates" },
{ path = "std::fs::File::create_new", reason = "Use WritableSystem::create_new_file instead in ty crates" },
# Path methods that have System trait equivalents
{ path = "std::path::Path::exists", reason = "Use System::path_exists instead in ty crates" },
{ path = "std::path::Path::is_dir", reason = "Use System::is_directory instead in ty crates" },
{ path = "std::path::Path::is_file", reason = "Use System::is_file instead in ty crates" },
]
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.10"
version = "0.12.5"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -85,7 +85,7 @@ dist = true
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }

[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = { workspace = true }

[lints]
@@ -13,16 +13,25 @@ use itertools::Itertools;
|
||||
use log::{debug, error};
|
||||
use rayon::iter::ParallelIterator;
|
||||
use rayon::iter::{IntoParallelIterator, ParallelBridge};
|
||||
use ruff_linter::codes::Rule;
|
||||
use rustc_hash::FxHashMap;
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
use ruff_cache::{CacheKey, CacheKeyHasher};
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_diagnostics::Fix;
|
||||
use ruff_linter::message::create_lint_diagnostic;
|
||||
use ruff_linter::package::PackageRoot;
|
||||
use ruff_linter::{VERSION, warn_user};
|
||||
use ruff_macros::CacheKey;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::SourceFileBuilder;
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use ruff_workspace::Settings;
|
||||
use ruff_workspace::resolver::Resolver;
|
||||
|
||||
use crate::diagnostics::Diagnostics;
|
||||
|
||||
/// [`Path`] that is relative to the package root in [`PackageCache`].
|
||||
pub(crate) type RelativePath = Path;
|
||||
/// [`PathBuf`] that is relative to the package root in [`PackageCache`].
|
||||
@@ -289,8 +298,13 @@ impl Cache {
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) fn set_linted(&self, path: RelativePathBuf, key: &FileCacheKey, yes: bool) {
|
||||
self.update(path, key, ChangeData::Linted(yes));
|
||||
pub(crate) fn update_lint(
|
||||
&self,
|
||||
path: RelativePathBuf,
|
||||
key: &FileCacheKey,
|
||||
data: LintCacheData,
|
||||
) {
|
||||
self.update(path, key, ChangeData::Lint(data));
|
||||
}
|
||||
|
||||
pub(crate) fn set_formatted(&self, path: RelativePathBuf, key: &FileCacheKey) {
|
||||
@@ -325,15 +339,42 @@ pub(crate) struct FileCache {
|
||||
}
|
||||
|
||||
impl FileCache {
|
||||
/// Return whether or not the file in the cache was linted and found to have no diagnostics.
|
||||
pub(crate) fn linted(&self) -> bool {
|
||||
self.data.linted
|
||||
/// Convert the file cache into `Diagnostics`, using `path` as file name.
|
||||
pub(crate) fn to_diagnostics(&self, path: &Path) -> Option<Diagnostics> {
|
||||
self.data.lint.as_ref().map(|lint| {
|
||||
let diagnostics = if lint.messages.is_empty() {
|
||||
Vec::new()
|
||||
} else {
|
||||
let file = SourceFileBuilder::new(path.to_string_lossy(), &*lint.source).finish();
|
||||
lint.messages
|
||||
.iter()
|
||||
.map(|msg| {
|
||||
create_lint_diagnostic(
|
||||
&msg.body,
|
||||
msg.suggestion.as_ref(),
|
||||
msg.range,
|
||||
msg.fix.clone(),
|
||||
msg.parent,
|
||||
file.clone(),
|
||||
msg.noqa_offset,
|
||||
msg.rule,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
let notebook_indexes = if let Some(notebook_index) = lint.notebook_index.as_ref() {
|
||||
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
|
||||
} else {
|
||||
FxHashMap::default()
|
||||
};
|
||||
Diagnostics::new(diagnostics, notebook_indexes)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, bincode::Decode, bincode::Encode)]
|
||||
struct FileCacheData {
|
||||
linted: bool,
|
||||
lint: Option<LintCacheData>,
|
||||
formatted: bool,
|
||||
}
|
||||
|
||||
@@ -369,6 +410,88 @@ pub(crate) fn init(path: &Path) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(bincode::Decode, Debug, bincode::Encode, PartialEq)]
|
||||
pub(crate) struct LintCacheData {
|
||||
/// Imports made.
|
||||
// pub(super) imports: ImportMap,
|
||||
/// Diagnostic messages.
|
||||
pub(super) messages: Vec<CacheMessage>,
|
||||
/// Source code of the file.
|
||||
///
|
||||
/// # Notes
|
||||
///
|
||||
/// This will be empty if `messages` is empty.
|
||||
pub(super) source: String,
|
||||
/// Notebook index if this file is a Jupyter Notebook.
|
||||
#[bincode(with_serde)]
|
||||
pub(super) notebook_index: Option<NotebookIndex>,
|
||||
}
|
||||
|
||||
impl LintCacheData {
|
||||
pub(crate) fn from_diagnostics(
|
||||
diagnostics: &[Diagnostic],
|
||||
notebook_index: Option<NotebookIndex>,
|
||||
) -> Self {
|
||||
let source = if let Some(msg) = diagnostics.first() {
|
||||
msg.expect_ruff_source_file().source_text().to_owned()
|
||||
} else {
|
||||
String::new() // No messages, no need to keep the source!
|
||||
};
|
||||
|
||||
let messages = diagnostics
|
||||
.iter()
|
||||
// Parse the kebab-case rule name into a `Rule`. This will fail for syntax errors, so
|
||||
// this also serves to filter them out, but we shouldn't be caching files with syntax
|
||||
// errors anyway.
|
||||
.filter_map(|msg| Some((msg.name().parse().ok()?, msg)))
|
||||
.map(|(rule, msg)| {
|
||||
// Make sure that all message use the same source file.
|
||||
assert_eq!(
|
||||
msg.expect_ruff_source_file(),
|
||||
diagnostics.first().unwrap().expect_ruff_source_file(),
|
||||
"message uses a different source file"
|
||||
);
|
||||
CacheMessage {
|
||||
rule,
|
||||
body: msg.body().to_string(),
|
||||
suggestion: msg.first_help_text().map(ToString::to_string),
|
||||
range: msg.expect_range(),
|
||||
parent: msg.parent(),
|
||||
fix: msg.fix().cloned(),
|
||||
noqa_offset: msg.noqa_offset(),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self {
|
||||
messages,
|
||||
source,
|
||||
notebook_index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// On disk representation of a diagnostic message.
|
||||
#[derive(bincode::Decode, Debug, bincode::Encode, PartialEq)]
|
||||
pub(super) struct CacheMessage {
|
||||
/// The rule for the cached diagnostic.
|
||||
#[bincode(with_serde)]
|
||||
rule: Rule,
|
||||
/// The message body to display to the user, to explain the diagnostic.
|
||||
body: String,
|
||||
/// The message to display to the user, to explain the suggested fix.
|
||||
suggestion: Option<String>,
|
||||
/// Range into the message's [`FileCache::source`].
|
||||
#[bincode(with_serde)]
|
||||
range: TextRange,
|
||||
#[bincode(with_serde)]
|
||||
parent: Option<TextSize>,
|
||||
#[bincode(with_serde)]
|
||||
fix: Option<Fix>,
|
||||
#[bincode(with_serde)]
|
||||
noqa_offset: Option<TextSize>,
|
||||
}
|
||||
|
||||
pub(crate) trait PackageCaches {
|
||||
fn get(&self, package_root: &Path) -> Option<&Cache>;
|
||||
|
||||
@@ -456,15 +579,15 @@ struct Change {
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ChangeData {
|
||||
Linted(bool),
|
||||
Lint(LintCacheData),
|
||||
Formatted,
|
||||
}
|
||||
|
||||
impl ChangeData {
|
||||
fn apply(self, data: &mut FileCacheData) {
|
||||
match self {
|
||||
ChangeData::Linted(yes) => {
|
||||
data.linted = yes;
|
||||
ChangeData::Lint(new_lint) => {
|
||||
data.lint = Some(new_lint);
|
||||
}
|
||||
ChangeData::Formatted => {
|
||||
data.formatted = true;
|
||||
@@ -489,6 +612,7 @@ mod tests {
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_cache::CACHE_DIR_NAME;
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_linter::package::PackageRoot;
|
||||
use ruff_linter::settings::LinterSettings;
|
||||
use ruff_linter::settings::flags;
|
||||
@@ -496,7 +620,7 @@ mod tests {
|
||||
use ruff_python_ast::{PySourceType, PythonVersion};
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
use crate::cache::{self, ChangeData, FileCache, FileCacheData, FileCacheKey};
|
||||
use crate::cache::{self, FileCache, FileCacheData, FileCacheKey};
|
||||
use crate::cache::{Cache, RelativePathBuf};
|
||||
use crate::commands::format::{FormatCommandError, FormatMode, FormatResult, format_path};
|
||||
use crate::diagnostics::{Diagnostics, lint_path};
|
||||
@@ -523,7 +647,7 @@ mod tests {
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
|
||||
let mut paths = Vec::new();
|
||||
let mut paths_with_diagnostics = Vec::new();
|
||||
let mut parse_errors = Vec::new();
|
||||
let mut expected_diagnostics = Diagnostics::default();
|
||||
for entry in fs::read_dir(&package_root).unwrap() {
|
||||
let entry = entry.unwrap();
|
||||
@@ -547,7 +671,7 @@ mod tests {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut diagnostics = lint_path(
|
||||
let diagnostics = lint_path(
|
||||
&path,
|
||||
Some(PackageRoot::root(&package_root)),
|
||||
&settings.linter,
|
||||
@@ -557,15 +681,8 @@ mod tests {
|
||||
UnsafeFixes::Enabled,
|
||||
)
|
||||
.unwrap();
|
||||
if diagnostics.inner.is_empty() {
|
||||
// We won't load a notebook index from the cache for files without diagnostics,
|
||||
// so remove them from `expected_diagnostics` too. This allows us to keep the
|
||||
// full equality assertion below.
|
||||
diagnostics
|
||||
.notebook_indexes
|
||||
.remove(&path.to_string_lossy().to_string());
|
||||
} else {
|
||||
paths_with_diagnostics.push(path.clone());
|
||||
if diagnostics.inner.iter().any(Diagnostic::is_invalid_syntax) {
|
||||
parse_errors.push(path.clone());
|
||||
}
|
||||
paths.push(path);
|
||||
expected_diagnostics += diagnostics;
|
||||
@@ -578,11 +695,11 @@ mod tests {
|
||||
let cache = Cache::open(package_root.clone(), &settings);
|
||||
assert_ne!(cache.package.files.len(), 0);
|
||||
|
||||
paths_with_diagnostics.sort();
|
||||
parse_errors.sort();
|
||||
|
||||
for path in &paths {
|
||||
if paths_with_diagnostics.binary_search(path).is_ok() {
|
||||
continue; // We don't cache files with diagnostics.
|
||||
if parse_errors.binary_search(path).is_ok() {
|
||||
continue; // We don't cache parsing errors.
|
||||
}
|
||||
|
||||
let relative_path = cache.relative_path(path).unwrap();
|
||||
@@ -616,7 +733,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn cache_adds_file_on_lint() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_adds_file_on_lint");
|
||||
let cache = test_cache.open();
|
||||
@@ -640,7 +757,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn cache_adds_files_on_lint() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_adds_files_on_lint");
|
||||
let cache = test_cache.open();
|
||||
@@ -665,40 +782,6 @@ mod tests {
cache.persist().unwrap();
}

#[test]
fn cache_does_not_add_file_on_lint_with_diagnostic() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";

let test_cache = TestCache::new("cache_does_not_add_file_on_lint_with_diagnostic");
let cache = test_cache.open();
test_cache.write_source_file("source.py", source);
assert_eq!(cache.changes.lock().unwrap().len(), 0);

cache.persist().unwrap();
let cache = test_cache.open();

let results = test_cache
.lint_file_with_cache("source.py", &cache)
.expect("Failed to lint test file");
assert_eq!(results.inner.len(), 1, "Expected one F822 diagnostic");
assert_eq!(
cache.changes.lock().unwrap().len(),
1,
"Files with diagnostics still trigger change events"
);
assert!(
cache
.changes
.lock()
.unwrap()
.last()
.is_some_and(|change| matches!(change.new_data, ChangeData::Linted(false))),
"Files with diagnostics are marked as unlinted"
);

cache.persist().unwrap();
}

#[test]
fn cache_adds_files_on_format() {
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
@@ -729,7 +812,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn cache_invalidated_on_file_modified_time() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_invalidated_on_file_modified_time");
|
||||
let cache = test_cache.open();
|
||||
@@ -786,7 +869,7 @@ mod tests {
|
||||
file.set_permissions(perms)
|
||||
}
|
||||
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_invalidated_on_permission_change");
|
||||
let cache = test_cache.open();
|
||||
@@ -839,7 +922,7 @@ mod tests {
|
||||
);
|
||||
|
||||
// Now actually lint a file.
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
test_cache.write_source_file("new.py", source);
|
||||
let new_path_key = RelativePathBuf::from("new.py");
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
@@ -862,7 +945,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_updates_cache_entry() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("format_updates_cache_entry");
|
||||
let cache = test_cache.open();
|
||||
@@ -896,7 +979,7 @@ mod tests {
panic!("Cache entry for `source.py` is missing.");
};

assert!(file_cache.data.linted);
assert!(file_cache.data.lint.is_some());
assert!(file_cache.data.formatted);
}

@@ -946,7 +1029,7 @@ mod tests {
panic!("Cache entry for `source.py` is missing.");
};

assert!(!file_cache.data.linted);
assert_eq!(file_cache.data.lint, None);
assert!(file_cache.data.formatted);
}

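The assertions above imply how the cached per-file data changes shape in this commit: the old `linted: bool` flag is replaced by an optional lint payload. A minimal sketch of that idea, with hypothetical field layouts inferred only from these test assertions (the real definitions live in the crate's cache module; only `LintCacheData` is named in the diff itself):

// Sketch only -- shapes are inferred from the test assertions above, not
// copied from the actual source.
struct FileCacheDataBefore {
    linted: bool,    // "was this file linted and found clean?"
    formatted: bool,
}

struct FileCacheDataAfter {
    lint: Option<LintCacheData>, // `None` when lint results were not cacheable
    formatted: bool,
}

struct LintCacheData {
    // cached messages/fixes and, for notebooks, the notebook index
}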
@@ -20,21 +20,15 @@ use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{IOError, Violation, fs};
use ruff_notebook::{NotebookError, NotebookIndex};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::Settings;
use rustc_hash::FxHashMap;

use crate::cache::{Cache, FileCache, FileCacheKey};
use crate::cache::{Cache, FileCacheKey, LintCacheData};

/// A collection of [`Diagnostic`]s and additional information needed to render them.
///
/// Note that `notebook_indexes` may be empty if there are no diagnostics because the
/// `NotebookIndex` isn't cached in this case. This isn't a problem for any current uses as of
/// 2025-08-12, which are all related to diagnostic rendering, but could be surprising if used
/// differently in the future.
#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
pub(crate) inner: Vec<Diagnostic>,
@@ -199,9 +193,19 @@ pub(crate) fn lint_path(
let cache_key = FileCacheKey::from_path(path).context("Failed to create cache key")?;
let cached_diagnostics = cache
.get(relative_path, &cache_key)
.is_some_and(FileCache::linted);
if cached_diagnostics {
return Ok(Diagnostics::default());
.and_then(|entry| entry.to_diagnostics(path));
if let Some(diagnostics) = cached_diagnostics {
// `FixMode::Generate` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
flags::FixMode::Generate => true,
flags::FixMode::Apply | flags::FixMode::Diff => {
diagnostics.inner.is_empty() && diagnostics.fixed.is_empty()
}
} {
return Ok(diagnostics);
}
}

// Stash the file metadata for later so when we update the cache it reflects the prerun
@@ -318,21 +322,31 @@ pub(crate) fn lint_path(
(result, transformed, fixed)
};

let has_error = result.has_syntax_errors();
let diagnostics = result.diagnostics;

if let Some((cache, relative_path, key)) = caching {
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics
// with fixes, we need to avoid reading from and writing to the cache in these
// modes.
let use_fixes = match fix_mode {
flags::FixMode::Generate => true,
flags::FixMode::Apply | flags::FixMode::Diff => fixed.is_empty(),
};

// We don't cache files with diagnostics.
let linted = diagnostics.is_empty() && use_fixes;
cache.set_linted(relative_path.to_owned(), &key, linted);
// We don't cache parsing errors.
if !has_error {
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
if match fix_mode {
flags::FixMode::Generate => true,
flags::FixMode::Apply | flags::FixMode::Diff => {
diagnostics.is_empty() && fixed.is_empty()
}
} {
cache.update_lint(
relative_path.to_owned(),
&key,
LintCacheData::from_diagnostics(
&diagnostics,
transformed.as_ipy_notebook().map(Notebook::index).cloned(),
),
);
}
}
}

let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {

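Taken together, the two hunks above replace the old boolean "linted" bookkeeping with cached diagnostics, gated on the fix mode. A minimal sketch of that gating rule as a standalone helper (assumed for illustration; the real code inlines the `match` expressions shown above):

// Sketch of the cache-gating rule described in the comments above: the cache
// may only be consulted or updated when doing so cannot skip a required
// side effect (applying a fix to disk or printing a diff).
fn cache_is_safe(fix_mode: flags::FixMode, has_diagnostics: bool, has_fixes: bool) -> bool {
    match fix_mode {
        // Fixes are only reported, never applied or printed as a diff, so
        // cached results can always be reused.
        flags::FixMode::Generate => true,
        // `--fix` writes to disk and `--diff` writes to stdout; the cache is
        // only safe when there is nothing to apply or print.
        flags::FixMode::Apply | flags::FixMode::Diff => !has_diagnostics && !has_fixes,
    }
}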
@@ -19,8 +19,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
any(
target_arch = "x86_64",
target_arch = "aarch64",
target_arch = "powerpc64",
target_arch = "riscv64"
target_arch = "powerpc64"
)
))]
#[global_allocator]

@@ -115,13 +115,12 @@ fn stdin_error() {
success: false
exit_code: 1
----- stdout -----
F401 [*] `os` imported but unused
--> -:1:8
-:1:8: F401 [*] `os` imported but unused
|
1 | import os
| ^^
| ^^ F401
|
help: Remove unused import: `os`
= help: Remove unused import: `os`

Found 1 error.
[*] 1 fixable with the `--fix` option.
@@ -140,13 +139,12 @@ fn stdin_filename() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
F401 [*] `os` imported but unused
|
||||
--> F401.py:1:8
|
||||
F401.py:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
| ^^ F401
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
@@ -176,21 +174,19 @@ import bar # unused import
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
F401 [*] `bar` imported but unused
|
||||
--> bar.py:2:8
|
||||
bar.py:2:8: F401 [*] `bar` imported but unused
|
||||
|
|
||||
2 | import bar # unused import
|
||||
| ^^^
|
||||
| ^^^ F401
|
||||
|
|
||||
help: Remove unused import: `bar`
|
||||
= help: Remove unused import: `bar`
|
||||
|
||||
F401 [*] `foo` imported but unused
|
||||
--> foo.py:2:8
|
||||
foo.py:2:8: F401 [*] `foo` imported but unused
|
||||
|
|
||||
2 | import foo # unused import
|
||||
| ^^^
|
||||
| ^^^ F401
|
||||
|
|
||||
help: Remove unused import: `foo`
|
||||
= help: Remove unused import: `foo`
|
||||
|
||||
Found 2 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
@@ -212,13 +208,12 @@ fn check_warn_stdin_filename_with_files() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
F401 [*] `os` imported but unused
|
||||
--> F401.py:1:8
|
||||
F401.py:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
| ^^ F401
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
@@ -239,13 +234,12 @@ fn stdin_source_type_py() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
F401 [*] `os` imported but unused
|
||||
--> TCH.py:1:8
|
||||
TCH.py:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
| ^^ F401
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
@@ -477,11 +471,10 @@ fn stdin_fix_jupyter() {
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
----- stderr -----
|
||||
F821 Undefined name `x`
|
||||
--> Jupyter.ipynb:cell 3:1:7
|
||||
Jupyter.ipynb:cell 3:1:7: F821 Undefined name `x`
|
||||
|
|
||||
1 | print(x)
|
||||
| ^
|
||||
| ^ F821
|
||||
|
|
||||
|
||||
Found 3 errors (2 fixed, 1 remaining).
|
||||
@@ -576,21 +569,19 @@ fn stdin_override_parser_ipynb() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
F401 [*] `os` imported but unused
|
||||
--> Jupyter.py:cell 1:1:8
|
||||
Jupyter.py:cell 1:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
| ^^ F401
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
F401 [*] `sys` imported but unused
|
||||
--> Jupyter.py:cell 3:1:8
|
||||
Jupyter.py:cell 3:1:8: F401 [*] `sys` imported but unused
|
||||
|
|
||||
1 | import sys
|
||||
| ^^^
|
||||
| ^^^ F401
|
||||
|
|
||||
help: Remove unused import: `sys`
|
||||
= help: Remove unused import: `sys`
|
||||
|
||||
Found 2 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
@@ -614,13 +605,12 @@ fn stdin_override_parser_py() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
F401 [*] `os` imported but unused
|
||||
--> F401.ipynb:1:8
|
||||
F401.ipynb:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
| ^^ F401
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
@@ -643,13 +633,12 @@ fn stdin_fix_when_not_fixable_should_still_print_contents() {
|
||||
print(sys.version)
|
||||
|
||||
----- stderr -----
|
||||
F634 If test is a tuple, which is always `True`
|
||||
--> -:3:4
|
||||
-:3:4: F634 If test is a tuple, which is always `True`
|
||||
|
|
||||
1 | import sys
|
||||
2 |
|
||||
3 | if (1, 2):
|
||||
| ^^^^^^
|
||||
| ^^^^^^ F634
|
||||
4 | print(sys.version)
|
||||
|
|
||||
|
||||
@@ -809,8 +798,7 @@ fn stdin_parse_error() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
invalid-syntax: Expected one or more symbol names after import
|
||||
--> -:1:16
|
||||
-:1:16: SyntaxError: Expected one or more symbol names after import
|
||||
|
|
||||
1 | from foo import
|
||||
| ^
|
||||
@@ -830,16 +818,14 @@ fn stdin_multiple_parse_error() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
invalid-syntax: Expected one or more symbol names after import
|
||||
--> -:1:16
|
||||
-:1:16: SyntaxError: Expected one or more symbol names after import
|
||||
|
|
||||
1 | from foo import
|
||||
| ^
|
||||
2 | bar =
|
||||
|
|
||||
|
||||
invalid-syntax: Expected an expression
|
||||
--> -:2:6
|
||||
-:2:6: SyntaxError: Expected an expression
|
||||
|
|
||||
1 | from foo import
|
||||
2 | bar =
|
||||
@@ -861,8 +847,7 @@ fn parse_error_not_included() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
invalid-syntax: Expected an expression
|
||||
--> -:1:6
|
||||
-:1:6: SyntaxError: Expected an expression
|
||||
|
|
||||
1 | foo =
|
||||
| ^
|
||||
@@ -882,11 +867,10 @@ fn full_output_preview() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
E741 Ambiguous variable name: `l`
|
||||
--> -:1:1
|
||||
-:1:1: E741 Ambiguous variable name: `l`
|
||||
|
|
||||
1 | l = 1
|
||||
| ^
|
||||
| ^ E741
|
||||
|
|
||||
|
||||
Found 1 error.
|
||||
@@ -911,11 +895,10 @@ preview = true
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
E741 Ambiguous variable name: `l`
|
||||
--> -:1:1
|
||||
-:1:1: E741 Ambiguous variable name: `l`
|
||||
|
|
||||
1 | l = 1
|
||||
| ^
|
||||
| ^ E741
|
||||
|
|
||||
|
||||
Found 1 error.
|
||||
@@ -933,11 +916,10 @@ fn full_output_format() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
E741 Ambiguous variable name: `l`
|
||||
--> -:1:1
|
||||
-:1:1: E741 Ambiguous variable name: `l`
|
||||
|
|
||||
1 | l = 1
|
||||
| ^
|
||||
| ^ E741
|
||||
|
|
||||
|
||||
Found 1 error.
|
||||
@@ -1424,9 +1406,7 @@ fn redirect_direct() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF950 Hey this is a test rule that was redirected from another.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF950 Hey this is a test rule that was redirected from another.
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -1458,9 +1438,7 @@ fn redirect_prefix() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF950 Hey this is a test rule that was redirected from another.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF950 Hey this is a test rule that was redirected from another.
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -1477,9 +1455,7 @@ fn deprecated_direct() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF920 Hey this is a deprecated test rule.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF920 Hey this is a deprecated test rule.
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -1496,12 +1472,8 @@ fn deprecated_multiple_direct() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF920 Hey this is a deprecated test rule.
|
||||
--> -:1:1
|
||||
|
||||
RUF921 Hey this is another deprecated test rule.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF920 Hey this is a deprecated test rule.
|
||||
-:1:1: RUF921 Hey this is another deprecated test rule.
|
||||
Found 2 errors.
|
||||
|
||||
----- stderr -----
|
||||
@@ -1519,12 +1491,8 @@ fn deprecated_indirect() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF920 Hey this is a deprecated test rule.
|
||||
--> -:1:1
|
||||
|
||||
RUF921 Hey this is another deprecated test rule.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF920 Hey this is a deprecated test rule.
|
||||
-:1:1: RUF921 Hey this is another deprecated test rule.
|
||||
Found 2 errors.
|
||||
|
||||
----- stderr -----
|
||||
@@ -1670,23 +1638,22 @@ fn check_input_from_argfile() -> Result<()> {
|
||||
(file_a_path.display().to_string().as_str(), "/path/to/a.py"),
|
||||
]}, {
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin(""), @r"
|
||||
.pass_stdin(""), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
F401 [*] `os` imported but unused
|
||||
--> /path/to/a.py:1:8
|
||||
/path/to/a.py:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
| ^^ F401
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
"###);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -1702,12 +1669,8 @@ fn check_hints_hidden_unsafe_fixes() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
-:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
|
||||
|
||||
@@ -1724,9 +1687,7 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
Found 1 error.
|
||||
No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
|
||||
|
||||
@@ -1744,12 +1705,8 @@ fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
-:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the --fix option.
|
||||
|
||||
@@ -1768,9 +1725,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -1787,12 +1742,8 @@ fn check_shows_unsafe_fixes_with_opt_in() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF902 [*] Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
-:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
|
||||
Found 2 errors.
|
||||
[*] 2 fixable with the --fix option.
|
||||
|
||||
@@ -1813,9 +1764,7 @@ fn fix_applies_safe_fixes_by_default() {
|
||||
# fix from stable-test-rule-safe-fix
|
||||
|
||||
----- stderr -----
|
||||
RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
Found 2 errors (1 fixed, 1 remaining).
|
||||
No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
|
||||
");
|
||||
@@ -1852,9 +1801,7 @@ fn fix_does_not_apply_display_only_fixes() {
|
||||
----- stdout -----
|
||||
def add_to_list(item, some_list=[]): ...
|
||||
----- stderr -----
|
||||
RUF903 Hey this is a stable test rule with a display only fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF903 Hey this is a stable test rule with a display only fix.
|
||||
Found 1 error.
|
||||
");
|
||||
}
|
||||
@@ -1872,9 +1819,7 @@ fn fix_does_not_apply_display_only_fixes_with_unsafe_fixes_enabled() {
|
||||
----- stdout -----
|
||||
def add_to_list(item, some_list=[]): ...
|
||||
----- stderr -----
|
||||
RUF903 Hey this is a stable test rule with a display only fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF903 Hey this is a stable test rule with a display only fix.
|
||||
Found 1 error.
|
||||
");
|
||||
}
|
||||
@@ -1891,9 +1836,7 @@ fn fix_only_unsafe_fixes_available() {
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
Found 1 error.
|
||||
No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
|
||||
");
|
||||
@@ -2029,12 +1972,8 @@ extend-unsafe-fixes = ["RUF901"]
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF901 Hey this is a stable test rule with a safe fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF901 Hey this is a stable test rule with a safe fix.
|
||||
-:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
Found 2 errors.
|
||||
No fixes available (2 hidden fixes can be enabled with the `--unsafe-fixes` option).
|
||||
|
||||
@@ -2065,12 +2004,8 @@ extend-safe-fixes = ["RUF902"]
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF902 [*] Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
-:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
|
||||
Found 2 errors.
|
||||
[*] 2 fixable with the `--fix` option.
|
||||
|
||||
@@ -2103,12 +2038,8 @@ extend-safe-fixes = ["RUF902"]
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
|
||||
-:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
|
||||
|
||||
@@ -2143,27 +2074,13 @@ extend-safe-fixes = ["RUF9"]
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF900 Hey this is a stable test rule.
|
||||
--> -:1:1
|
||||
|
||||
RUF901 Hey this is a stable test rule with a safe fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF902 [*] Hey this is a stable test rule with an unsafe fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF903 Hey this is a stable test rule with a display only fix.
|
||||
--> -:1:1
|
||||
|
||||
RUF920 Hey this is a deprecated test rule.
|
||||
--> -:1:1
|
||||
|
||||
RUF921 Hey this is another deprecated test rule.
|
||||
--> -:1:1
|
||||
|
||||
RUF950 Hey this is a test rule that was redirected from another.
|
||||
--> -:1:1
|
||||
|
||||
-:1:1: RUF900 Hey this is a stable test rule.
|
||||
-:1:1: RUF901 Hey this is a stable test rule with a safe fix.
|
||||
-:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
|
||||
-:1:1: RUF903 Hey this is a stable test rule with a display only fix.
|
||||
-:1:1: RUF920 Hey this is a deprecated test rule.
|
||||
-:1:1: RUF921 Hey this is another deprecated test rule.
|
||||
-:1:1: RUF950 Hey this is a test rule that was redirected from another.
|
||||
Found 7 errors.
|
||||
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
|
||||
|
||||
@@ -2224,11 +2141,10 @@ def log(x, base) -> float:
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
D417 Missing argument description in the docstring for `log`: `base`
|
||||
--> -:2:5
|
||||
-:2:5: D417 Missing argument description in the docstring for `log`: `base`
|
||||
|
|
||||
2 | def log(x, base) -> float:
|
||||
| ^^^
|
||||
| ^^^ D417
|
||||
3 | """Calculate natural log of a value
|
||||
|
|
||||
|
||||
@@ -2261,15 +2177,14 @@ select = ["RUF017"]
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF017 Avoid quadratic list summation
|
||||
--> -:3:1
|
||||
-:3:1: RUF017 Avoid quadratic list summation
|
||||
|
|
||||
1 | x = [1, 2, 3]
|
||||
2 | y = [4, 5, 6]
|
||||
3 | sum([x, y], [])
|
||||
| ^^^^^^^^^^^^^^^
|
||||
| ^^^^^^^^^^^^^^^ RUF017
|
||||
|
|
||||
help: Replace with `functools.reduce`
|
||||
= help: Replace with `functools.reduce`
|
||||
|
||||
Found 1 error.
|
||||
No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
|
||||
@@ -2302,15 +2217,14 @@ unfixable = ["RUF"]
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF017 Avoid quadratic list summation
|
||||
--> -:3:1
|
||||
-:3:1: RUF017 Avoid quadratic list summation
|
||||
|
|
||||
1 | x = [1, 2, 3]
|
||||
2 | y = [4, 5, 6]
|
||||
3 | sum([x, y], [])
|
||||
| ^^^^^^^^^^^^^^^
|
||||
| ^^^^^^^^^^^^^^^ RUF017
|
||||
|
|
||||
help: Replace with `functools.reduce`
|
||||
= help: Replace with `functools.reduce`
|
||||
|
||||
Found 1 error.
|
||||
|
||||
@@ -2332,11 +2246,10 @@ fn pyproject_toml_stdin_syntax_error() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF200 Failed to parse pyproject.toml: unclosed table, expected `]`
|
||||
--> pyproject.toml:1:9
|
||||
pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: unclosed table, expected `]`
|
||||
|
|
||||
1 | [project
|
||||
| ^
|
||||
| ^ RUF200
|
||||
|
|
||||
|
||||
Found 1 error.
|
||||
@@ -2358,12 +2271,11 @@ fn pyproject_toml_stdin_schema_error() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
|
||||
--> pyproject.toml:2:8
|
||||
pyproject.toml:2:8: RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
|
||||
|
|
||||
1 | [project]
|
||||
2 | name = 1
|
||||
| ^
|
||||
| ^ RUF200
|
||||
|
|
||||
|
||||
Found 1 error.
|
||||
@@ -2451,12 +2363,11 @@ fn pyproject_toml_stdin_schema_error_fix() {
|
||||
[project]
|
||||
name = 1
|
||||
----- stderr -----
|
||||
RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
|
||||
--> pyproject.toml:2:8
|
||||
pyproject.toml:2:8: RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
|
||||
|
|
||||
1 | [project]
|
||||
2 | name = 1
|
||||
| ^
|
||||
| ^ RUF200
|
||||
|
|
||||
|
||||
Found 1 error.
|
||||
|
||||
@@ -4996,37 +4996,6 @@ fn flake8_import_convention_invalid_aliases_config_module_name() -> Result<()> {
Ok(())
}

#[test]
fn flake8_import_convention_nfkc_normalization() -> Result<()> {
let tempdir = TempDir::new()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
r#"
[lint.flake8-import-conventions.aliases]
"test.module" = "_﹏𝘥𝘦𝘣𝘶𝘨﹏﹏"
"#,
)?;

insta::with_settings!({
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--config")
.arg(&ruff_toml)
, @r"
success: false
exit_code: 2
----- stdout -----

----- stderr -----
ruff failed
Cause: Invalid alias for module 'test.module': alias normalizes to '__debug__', which is not allowed.
");});
Ok(())
}

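This removed test exercised NFKC normalization of configured aliases: the alias above is built from compatibility characters that normalize to `__debug__`, which Ruff rejects. A rough sketch of the kind of check involved, using the `unicode-normalization` crate (an assumption for illustration; the actual validation lives in the flake8-import-conventions settings code):

use unicode_normalization::UnicodeNormalization;

// Returns true when an alias NFKC-normalizes to `__debug__`, a name that may
// not be bound. Illustrative only.
fn alias_normalizes_to_debug(alias: &str) -> bool {
    let normalized: String = alias.nfkc().collect();
    normalized == "__debug__"
}

With the alias from the test, `alias_normalizes_to_debug("_﹏𝘥𝘦𝘣𝘶𝘨﹏﹏")` would return `true`, matching the error message in the snapshot.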
#[test]
|
||||
fn flake8_import_convention_unused_aliased_import() {
|
||||
assert_cmd_snapshot!(
|
||||
@@ -5420,7 +5389,7 @@ fn walrus_before_py38() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:1:2: invalid-syntax: Cannot use named assignment expression (`:=`) on Python 3.7 (syntax was added in Python 3.8)
|
||||
test.py:1:2: SyntaxError: Cannot use named assignment expression (`:=`) on Python 3.7 (syntax was added in Python 3.8)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -5466,15 +5435,15 @@ match 2:
|
||||
print("it's one")
|
||||
"#
|
||||
),
|
||||
@r"
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:2:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
"
|
||||
"###
|
||||
);
|
||||
|
||||
// syntax error on 3.9 with preview
|
||||
@@ -5495,7 +5464,7 @@ match 2:
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:2:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -5523,7 +5492,7 @@ fn cache_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:1:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
main.py:1:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
"
|
||||
@@ -5536,7 +5505,7 @@ fn cache_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:1:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
main.py:1:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
"
|
||||
@@ -5588,15 +5557,15 @@ fn cookiecutter_globbing() -> Result<()> {
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--select=F811")
|
||||
.current_dir(tempdir.path()), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{{cookiecutter.repo_name}}/tests/maintest.py:3:8: F811 [*] Redefinition of unused `foo` from line 1: `foo` redefined here
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{{cookiecutter.repo_name}}/tests/maintest.py:3:8: F811 [*] Redefinition of unused `foo` from line 1
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
----- stderr -----
|
||||
");
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -5649,7 +5618,7 @@ fn semantic_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable
|
||||
main.py:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
|
||||
main.py:1:20: F821 Undefined name `foo`
|
||||
|
||||
----- stderr -----
|
||||
@@ -5663,7 +5632,7 @@ fn semantic_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable
|
||||
main.py:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
|
||||
main.py:1:20: F821 Undefined name `foo`
|
||||
|
||||
----- stderr -----
|
||||
@@ -5682,7 +5651,7 @@ fn semantic_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable
|
||||
-:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -5801,32 +5770,3 @@ fn future_annotations_preview_warning() {
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn up045_nested_optional_flatten_all() {
|
||||
let contents = "\
|
||||
from typing import Optional
|
||||
nested_optional: Optional[Optional[Optional[str]]] = None
|
||||
";
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--select", "UP045", "--diff", "--target-version", "py312"])
|
||||
.arg("-")
|
||||
.pass_stdin(contents),
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -1,2 +1,2 @@
|
||||
from typing import Optional
|
||||
-nested_optional: Optional[Optional[Optional[str]]] = None
|
||||
+nested_optional: str | None = None
|
||||
|
||||
|
||||
----- stderr -----
|
||||
Would fix 1 error.
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
@@ -95,6 +95,6 @@ is stricter, which could affect the suggested fix. See [this FAQ section](https:
|
||||
## References
|
||||
- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)
|
||||
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
|
||||
- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)
|
||||
- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -18,6 +18,6 @@ exit_code: 1
|
||||
----- stdout -----
|
||||
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
|
||||
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y`
|
||||
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;code=invalid-syntax;]Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;]SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -18,7 +18,7 @@ exit_code: 1
|
||||
----- stdout -----
|
||||
input.py:1:8: F401 [*] `os` imported but unused
|
||||
input.py:2:5: F821 Undefined name `y`
|
||||
input.py:3:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
Found 3 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
|
||||
@@ -16,28 +16,25 @@ info:
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
F401 [*] `os` imported but unused
|
||||
--> input.py:1:8
|
||||
input.py:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os # F401
|
||||
| ^^
|
||||
| ^^ F401
|
||||
2 | x = y # F821
|
||||
3 | match 42: # invalid-syntax
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
F821 Undefined name `y`
|
||||
--> input.py:2:5
|
||||
input.py:2:5: F821 Undefined name `y`
|
||||
|
|
||||
1 | import os # F401
|
||||
2 | x = y # F821
|
||||
| ^
|
||||
| ^ F821
|
||||
3 | match 42: # invalid-syntax
|
||||
4 | case _: ...
|
||||
|
|
||||
|
||||
invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
--> input.py:3:1
|
||||
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
|
||||
1 | import os # F401
|
||||
2 | x = y # F821
|
||||
|
||||
@@ -18,6 +18,6 @@ exit_code: 1
|
||||
----- stdout -----
|
||||
::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused
|
||||
::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y`
|
||||
::error title=Ruff (invalid-syntax),file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
::error title=Ruff,file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -19,58 +19,40 @@ exit_code: 1
|
||||
[
|
||||
{
|
||||
"check_name": "F401",
|
||||
"description": "F401: `os` imported but unused",
|
||||
"description": "`os` imported but unused",
|
||||
"fingerprint": "4dbad37161e65c72",
|
||||
"location": {
|
||||
"path": "input.py",
|
||||
"positions": {
|
||||
"begin": {
|
||||
"column": 8,
|
||||
"line": 1
|
||||
},
|
||||
"end": {
|
||||
"column": 10,
|
||||
"line": 1
|
||||
}
|
||||
}
|
||||
"lines": {
|
||||
"begin": 1,
|
||||
"end": 1
|
||||
},
|
||||
"path": "input.py"
|
||||
},
|
||||
"severity": "major"
|
||||
},
|
||||
{
|
||||
"check_name": "F821",
|
||||
"description": "F821: Undefined name `y`",
|
||||
"description": "Undefined name `y`",
|
||||
"fingerprint": "7af59862a085230",
|
||||
"location": {
|
||||
"path": "input.py",
|
||||
"positions": {
|
||||
"begin": {
|
||||
"column": 5,
|
||||
"line": 2
|
||||
},
|
||||
"end": {
|
||||
"column": 6,
|
||||
"line": 2
|
||||
}
|
||||
}
|
||||
"lines": {
|
||||
"begin": 2,
|
||||
"end": 2
|
||||
},
|
||||
"path": "input.py"
|
||||
},
|
||||
"severity": "major"
|
||||
},
|
||||
{
|
||||
"check_name": "invalid-syntax",
|
||||
"description": "invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
|
||||
"check_name": "syntax-error",
|
||||
"description": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
|
||||
"fingerprint": "e558cec859bb66e8",
|
||||
"location": {
|
||||
"path": "input.py",
|
||||
"positions": {
|
||||
"begin": {
|
||||
"column": 1,
|
||||
"line": 3
|
||||
},
|
||||
"end": {
|
||||
"column": 6,
|
||||
"line": 3
|
||||
}
|
||||
}
|
||||
"lines": {
|
||||
"begin": 3,
|
||||
"end": 3
|
||||
},
|
||||
"path": "input.py"
|
||||
},
|
||||
"severity": "major"
|
||||
}
|
||||
|
||||
@@ -19,7 +19,7 @@ exit_code: 1
|
||||
input.py:
|
||||
1:8 F401 [*] `os` imported but unused
|
||||
2:5 F821 Undefined name `y`
|
||||
3:1 invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
3:1 SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
Found 3 errors.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
@@ -18,6 +18,6 @@ exit_code: 1
|
||||
----- stdout -----
|
||||
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
|
||||
{"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
|
||||
{"cell":null,"code":"invalid-syntax","end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}
|
||||
{"cell":null,"code":null,"end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -69,7 +69,7 @@ exit_code: 1
|
||||
},
|
||||
{
|
||||
"cell": null,
|
||||
"code": "invalid-syntax",
|
||||
"code": null,
|
||||
"end_location": {
|
||||
"column": 6,
|
||||
"row": 3
|
||||
@@ -80,7 +80,7 @@ exit_code: 1
|
||||
"column": 1,
|
||||
"row": 3
|
||||
},
|
||||
"message": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
|
||||
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
|
||||
"noqa_row": null,
|
||||
"url": null
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@ exit_code: 1
|
||||
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
|
||||
</testcase>
|
||||
<testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1">
|
||||
<failure message="Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
|
||||
<failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
|
||||
@@ -18,6 +18,6 @@ exit_code: 1
|
||||
----- stdout -----
|
||||
input.py:1: [F401] `os` imported but unused
|
||||
input.py:2: [F821] Undefined name `y`
|
||||
input.py:3: [invalid-syntax] Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
input.py:3: [invalid-syntax] SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
|
||||
@@ -90,7 +90,7 @@ exit_code: 1
|
||||
}
|
||||
}
|
||||
},
|
||||
"message": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
}
|
||||
],
|
||||
"severity": "WARNING",
|
||||
|
||||
@@ -83,9 +83,9 @@ exit_code: 1
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
"text": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
},
|
||||
"ruleId": "invalid-syntax"
|
||||
"ruleId": null
|
||||
}
|
||||
],
|
||||
"tool": {
|
||||
@@ -95,7 +95,7 @@ exit_code: 1
|
||||
"rules": [
|
||||
{
|
||||
"fullDescription": {
|
||||
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
|
||||
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)\n"
|
||||
},
|
||||
"help": {
|
||||
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#![expect(clippy::needless_doctest_main)]
|
||||
|
||||
//! A library for formatting of text or programming code snippets.
|
||||
//!
|
||||
//! It's primary purpose is to build an ASCII-graphical representation of the snippet
|
||||
|
||||
@@ -193,14 +193,9 @@ impl DisplaySet<'_> {
stylesheet: &Stylesheet,
buffer: &mut StyledBuffer,
) -> fmt::Result {
let hide_severity = annotation.annotation_type.is_none();
let color = get_annotation_style(&annotation.annotation_type, stylesheet);
let formatted_len = if let Some(id) = &annotation.id {
if hide_severity {
id.len()
} else {
2 + id.len() + annotation_type_len(&annotation.annotation_type)
}
2 + id.len() + annotation_type_len(&annotation.annotation_type)
} else {
annotation_type_len(&annotation.annotation_type)
};
@@ -214,66 +209,18 @@ impl DisplaySet<'_> {
if formatted_len == 0 {
self.format_label(line_offset, &annotation.label, stylesheet, buffer)
} else {
// TODO(brent) All of this complicated checking of `hide_severity` should be reverted
// once we have real severities in Ruff. This code is trying to account for two
// different cases:
//
// - main diagnostic message
// - subdiagnostic message
//
// In the first case, signaled by `hide_severity = true`, we want to print the ID (the
// noqa code for a ruff lint diagnostic, e.g. `F401`, or `invalid-syntax` for a syntax
// error) without brackets. Instead, for subdiagnostics, we actually want to print the
// severity (usually `help`) regardless of the `hide_severity` setting. This is signaled
// by an ID of `None`.
//
// With real severities these should be reported more like in ty:
//
// ```
// error[F401]: `math` imported but unused
// error[invalid-syntax]: Cannot use `match` statement on Python 3.9...
// ```
//
// instead of the current versions intended to mimic the old Ruff output format:
//
// ```
// F401 `math` imported but unused
// invalid-syntax: Cannot use `match` statement on Python 3.9...
// ```
//
// Note that the `invalid-syntax` colon is added manually in `ruff_db`, not here. We
// could eventually add a colon to Ruff lint diagnostics (`F401:`) and then make the
// colon below unconditional again.
//
// This also applies to the hard-coded `stylesheet.error()` styling of the
// hidden-severity `id`. This should just be `*color` again later, but for now we don't
// want an unformatted `id`, which is what `get_annotation_style` returns for
// `DisplayAnnotationType::None`.
let annotation_type = annotation_type_str(&annotation.annotation_type);
if let Some(id) = annotation.id {
if hide_severity {
buffer.append(line_offset, &format!("{id} "), *stylesheet.error());
} else {
buffer.append(line_offset, &format!("{annotation_type}[{id}]"), *color);
}
} else {
buffer.append(line_offset, annotation_type, *color);
}

if annotation.is_fixable {
buffer.append(line_offset, "[", stylesheet.none);
buffer.append(line_offset, "*", stylesheet.help);
buffer.append(line_offset, "]", stylesheet.none);
// In the hide-severity case, we need a space instead of the colon and space below.
if hide_severity {
buffer.append(line_offset, " ", stylesheet.none);
}
}
let id = match &annotation.id {
Some(id) => format!("[{id}]"),
None => String::new(),
};
buffer.append(
line_offset,
&format!("{}{}", annotation_type_str(&annotation.annotation_type), id),
*color,
);

if !is_annotation_empty(annotation) {
if annotation.id.is_none() || !hide_severity {
buffer.append(line_offset, ": ", stylesheet.none);
}
buffer.append(line_offset, ": ", stylesheet.none);
self.format_label(line_offset, &annotation.label, stylesheet, buffer)?;
}
Ok(())
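The TODO above describes two header shapes. As a rough illustration only (not the actual renderer, which writes styled spans into a buffer), the branch logic amounts to something like:

// Illustrative summary of the branch above: Ruff lint diagnostics print the
// bare code, subdiagnostics keep their severity label, and the eventual goal
// is `severity[id]: message` for everything.
fn title_line(id: Option<&str>, hide_severity: bool, severity: &str, message: &str) -> String {
    match (id, hide_severity) {
        (Some(id), true) => format!("{id} {message}"),               // "F401 `math` imported but unused"
        (Some(id), false) => format!("{severity}[{id}]: {message}"), // "error[F401]: ..."
        (None, _) => format!("{severity}: {message}"),               // "help: Remove unused import: `os`"
    }
}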
@@ -302,15 +249,11 @@ impl DisplaySet<'_> {
|
||||
let lineno_color = stylesheet.line_no();
|
||||
buffer.puts(line_offset, lineno_width, header_sigil, *lineno_color);
|
||||
buffer.puts(line_offset, lineno_width + 4, path, stylesheet.none);
|
||||
if let Some(Position { row, col, cell }) = pos {
|
||||
if let Some(cell) = cell {
|
||||
buffer.append(line_offset, ":", stylesheet.none);
|
||||
buffer.append(line_offset, &format!("cell {cell}"), stylesheet.none);
|
||||
}
|
||||
buffer.append(line_offset, ":", stylesheet.none);
|
||||
buffer.append(line_offset, row.to_string().as_str(), stylesheet.none);
|
||||
if let Some((col, row)) = pos {
|
||||
buffer.append(line_offset, ":", stylesheet.none);
|
||||
buffer.append(line_offset, col.to_string().as_str(), stylesheet.none);
|
||||
buffer.append(line_offset, ":", stylesheet.none);
|
||||
buffer.append(line_offset, row.to_string().as_str(), stylesheet.none);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -825,7 +768,6 @@ pub(crate) struct Annotation<'a> {
|
||||
pub(crate) annotation_type: DisplayAnnotationType,
|
||||
pub(crate) id: Option<&'a str>,
|
||||
pub(crate) label: Vec<DisplayTextFragment<'a>>,
|
||||
pub(crate) is_fixable: bool,
|
||||
}
|
||||
|
||||
/// A single line used in `DisplayList`.
|
||||
@@ -891,13 +833,6 @@ impl DisplaySourceAnnotation<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub(crate) struct Position {
|
||||
row: usize,
|
||||
col: usize,
|
||||
cell: Option<usize>,
|
||||
}
|
||||
|
||||
/// Raw line - a line which does not have the `lineno` part and is not considered
|
||||
/// a part of the snippet.
|
||||
#[derive(Debug, PartialEq)]
|
||||
@@ -906,7 +841,7 @@ pub(crate) enum DisplayRawLine<'a> {
|
||||
/// slice in the project structure.
|
||||
Origin {
|
||||
path: &'a str,
|
||||
pos: Option<Position>,
|
||||
pos: Option<(usize, usize)>,
|
||||
header_type: DisplayHeaderType,
|
||||
},
|
||||
|
||||
@@ -985,13 +920,6 @@ pub(crate) enum DisplayAnnotationType {
|
||||
Help,
|
||||
}
|
||||
|
||||
impl DisplayAnnotationType {
|
||||
#[inline]
|
||||
const fn is_none(&self) -> bool {
|
||||
matches!(self, Self::None)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<snippet::Level> for DisplayAnnotationType {
|
||||
fn from(at: snippet::Level) -> Self {
|
||||
match at {
|
||||
@@ -1087,12 +1015,11 @@ fn format_message<'m>(
|
||||
title,
|
||||
footer,
|
||||
snippets,
|
||||
is_fixable,
|
||||
} = message;
|
||||
|
||||
let mut sets = vec![];
|
||||
let body = if !snippets.is_empty() || primary {
|
||||
vec![format_title(level, id, title, is_fixable)]
|
||||
vec![format_title(level, id, title)]
|
||||
} else {
|
||||
format_footer(level, id, title)
|
||||
};
|
||||
@@ -1133,18 +1060,12 @@ fn format_message<'m>(
|
||||
sets
|
||||
}
|
||||
|
||||
fn format_title<'a>(
|
||||
level: crate::Level,
|
||||
id: Option<&'a str>,
|
||||
label: &'a str,
|
||||
is_fixable: bool,
|
||||
) -> DisplayLine<'a> {
|
||||
fn format_title<'a>(level: crate::Level, id: Option<&'a str>, label: &'a str) -> DisplayLine<'a> {
|
||||
DisplayLine::Raw(DisplayRawLine::Annotation {
|
||||
annotation: Annotation {
|
||||
annotation_type: DisplayAnnotationType::from(level),
|
||||
id,
|
||||
label: format_label(Some(label), Some(DisplayTextStyle::Emphasis)),
|
||||
is_fixable,
|
||||
},
|
||||
source_aligned: false,
|
||||
continuation: false,
|
||||
@@ -1163,7 +1084,6 @@ fn format_footer<'a>(
|
||||
annotation_type: DisplayAnnotationType::from(level),
|
||||
id,
|
||||
label: format_label(Some(line), None),
|
||||
is_fixable: false,
|
||||
},
|
||||
source_aligned: true,
|
||||
continuation: i != 0,
|
||||
@@ -1198,28 +1118,6 @@ fn format_snippet<'m>(
|
||||
let main_range = snippet.annotations.first().map(|x| x.range.start);
|
||||
let origin = snippet.origin;
|
||||
let need_empty_header = origin.is_some() || is_first;
|
||||
|
||||
let is_file_level = snippet.annotations.iter().any(|ann| ann.is_file_level);
|
||||
if is_file_level {
|
||||
// TODO(brent) enable this assertion again once we set `is_file_level` for individual rules.
// It's causing too many false positives currently when the default is to make any
// annotation with a default range file-level. See
// https://github.com/astral-sh/ruff/issues/19688.
//
// assert!(
//     snippet.source.is_empty(),
//     "Non-empty file-level snippet that won't be rendered: {:?}",
//     snippet.source
// );
let header = format_header(origin, main_range, &[], is_first, snippet.cell_index);
|
||||
return DisplaySet {
|
||||
display_lines: header.map_or_else(Vec::new, |header| vec![header]),
|
||||
margin: Margin::new(0, 0, 0, 0, term_width, 0),
|
||||
};
|
||||
}
|
||||
|
||||
let cell_index = snippet.cell_index;
|
||||
|
||||
let mut body = format_body(
|
||||
snippet,
|
||||
need_empty_header,
|
||||
@@ -1228,13 +1126,7 @@ fn format_snippet<'m>(
|
||||
anonymized_line_numbers,
|
||||
cut_indicator,
|
||||
);
|
||||
let header = format_header(
|
||||
origin,
|
||||
main_range,
|
||||
&body.display_lines,
|
||||
is_first,
|
||||
cell_index,
|
||||
);
|
||||
let header = format_header(origin, main_range, &body.display_lines, is_first);
|
||||
|
||||
if let Some(header) = header {
|
||||
body.display_lines.insert(0, header);
|
||||
@@ -1254,7 +1146,6 @@ fn format_header<'a>(
|
||||
main_range: Option<usize>,
|
||||
body: &[DisplayLine<'_>],
|
||||
is_first: bool,
|
||||
cell_index: Option<usize>,
|
||||
) -> Option<DisplayLine<'a>> {
|
||||
let display_header = if is_first {
|
||||
DisplayHeaderType::Initial
|
||||
@@ -1278,31 +1169,20 @@ fn format_header<'a>(
|
||||
..
|
||||
} = item
|
||||
{
|
||||
// At the very end of the `main_range`, report the location as the first character
// in the next line instead of falling back to the default location of `1:1`. This
// is another divergence from upstream.
let end_of_range = range.1 + max(*end_line as usize, 1);
|
||||
if main_range >= range.0 && main_range < end_of_range {
|
||||
if main_range >= range.0 && main_range < range.1 + max(*end_line as usize, 1) {
|
||||
let char_column = text[0..(main_range - range.0).min(text.len())]
|
||||
.chars()
|
||||
.count();
|
||||
col = char_column + 1;
|
||||
line_offset = lineno.unwrap_or(1);
|
||||
break;
|
||||
} else if main_range == end_of_range {
|
||||
line_offset = lineno.map_or(1, |line| line + 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Some(DisplayLine::Raw(DisplayRawLine::Origin {
|
||||
path,
|
||||
pos: Some(Position {
|
||||
row: line_offset,
|
||||
col,
|
||||
cell: cell_index,
|
||||
}),
|
||||
pos: Some((line_offset, col)),
|
||||
header_type: display_header,
|
||||
}));
|
||||
}
|
||||
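The hunk above derives the `row:col` shown in the snippet header by counting characters between the start of the matched line and `main_range`, then adding one. A minimal, self-contained sketch of that column computation (the helper name and example values are illustrative, not taken from the diff):

/// Compute a 1-based column for `offset` inside `line_text`, where `line_start`
/// is the byte offset at which that line begins. Columns are counted in chars,
/// mirroring the `char_column + 1` logic above; offsets are assumed to fall on
/// char boundaries.
fn one_based_column(line_text: &str, line_start: usize, offset: usize) -> usize {
    let rel = offset.saturating_sub(line_start).min(line_text.len());
    line_text[..rel].chars().count() + 1
}

fn main() {
    let line = "déjà vu";
    // Byte offset 6 points at the space after "déjà" (4 chars), so column 5.
    assert_eq!(one_based_column(line, 0, 6), 5);
}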
@@ -1592,7 +1472,6 @@ fn format_body<'m>(
|
||||
annotation_type,
|
||||
id: None,
|
||||
label: format_label(annotation.label, None),
|
||||
is_fixable: false,
|
||||
},
|
||||
range,
|
||||
annotation_type: DisplayAnnotationType::from(annotation.level),
|
||||
@@ -1632,7 +1511,6 @@ fn format_body<'m>(
|
||||
annotation_type,
|
||||
id: None,
|
||||
label: vec![],
|
||||
is_fixable: false,
|
||||
},
|
||||
range,
|
||||
annotation_type: DisplayAnnotationType::from(annotation.level),
|
||||
@@ -1702,7 +1580,6 @@ fn format_body<'m>(
|
||||
annotation_type,
|
||||
id: None,
|
||||
label: format_label(annotation.label, None),
|
||||
is_fixable: false,
|
||||
},
|
||||
range,
|
||||
annotation_type: DisplayAnnotationType::from(annotation.level),
|
||||
|
||||
@@ -22,7 +22,6 @@ pub struct Message<'a> {
|
||||
pub(crate) title: &'a str,
|
||||
pub(crate) snippets: Vec<Snippet<'a>>,
|
||||
pub(crate) footer: Vec<Message<'a>>,
|
||||
pub(crate) is_fixable: bool,
|
||||
}
|
||||
|
||||
impl<'a> Message<'a> {
|
||||
@@ -50,15 +49,6 @@ impl<'a> Message<'a> {
|
||||
self.footer.extend(footer);
|
||||
self
|
||||
}
|
||||
|
||||
/// Whether or not the diagnostic for this message is fixable.
///
/// This is rendered as a `[*]` indicator after the `id` in an annotation header, if the
/// annotation also has `Level::None`.
pub fn is_fixable(mut self, yes: bool) -> Self {
|
||||
self.is_fixable = yes;
|
||||
self
|
||||
}
|
||||
}
|
||||
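The removed `is_fixable` plumbing above drives a `[*]` marker after the diagnostic ID in the rendered header. A rough sketch of that rendering rule with a hypothetical helper, not the crate's actual API:

/// Format a diagnostic header like `F401 [*] unused import`, where `[*]`
/// marks a fixable diagnostic. Illustrative sketch only.
fn format_header_line(id: &str, title: &str, is_fixable: bool) -> String {
    let fix_marker = if is_fixable { " [*]" } else { "" };
    format!("{id}{fix_marker} {title}")
}

fn main() {
    assert_eq!(
        format_header_line("F401", "`os` imported but unused", true),
        "F401 [*] `os` imported but unused"
    );
}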
|
||||
/// Structure containing the slice of text to be annotated and
|
||||
@@ -75,10 +65,6 @@ pub struct Snippet<'a> {
|
||||
pub(crate) annotations: Vec<Annotation<'a>>,
|
||||
|
||||
pub(crate) fold: bool,
|
||||
|
||||
/// The optional cell index in a Jupyter notebook, used for reporting source locations along
/// with the ranges on `annotations`.
pub(crate) cell_index: Option<usize>,
}
|
||||
|
||||
impl<'a> Snippet<'a> {
|
||||
@@ -89,7 +75,6 @@ impl<'a> Snippet<'a> {
|
||||
source,
|
||||
annotations: vec![],
|
||||
fold: false,
|
||||
cell_index: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,12 +103,6 @@ impl<'a> Snippet<'a> {
|
||||
self.fold = fold;
|
||||
self
|
||||
}
|
||||
|
||||
/// Attach a Jupyter notebook cell index.
|
||||
pub fn cell_index(mut self, index: Option<usize>) -> Self {
|
||||
self.cell_index = index;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// An annotation for a [`Snippet`].
|
||||
@@ -135,7 +114,6 @@ pub struct Annotation<'a> {
|
||||
pub(crate) range: Range<usize>,
|
||||
pub(crate) label: Option<&'a str>,
|
||||
pub(crate) level: Level,
|
||||
pub(crate) is_file_level: bool,
|
||||
}
|
||||
|
||||
impl<'a> Annotation<'a> {
|
||||
@@ -143,11 +121,6 @@ impl<'a> Annotation<'a> {
|
||||
self.label = Some(label);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn is_file_level(mut self, yes: bool) -> Self {
|
||||
self.is_file_level = yes;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Types of annotations.
|
||||
@@ -172,7 +145,6 @@ impl Level {
|
||||
title,
|
||||
snippets: vec![],
|
||||
footer: vec![],
|
||||
is_fixable: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -182,7 +154,6 @@ impl Level {
|
||||
range: span,
|
||||
label: None,
|
||||
level: self,
|
||||
is_file_level: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,5 +86,5 @@ walltime = ["ruff_db/os", "ty_project", "divan"]
|
||||
[target.'cfg(target_os = "windows")'.dev-dependencies]
|
||||
mimalloc = { workspace = true }
|
||||
|
||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dev-dependencies]
|
||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
|
||||
tikv-jemallocator = { workspace = true }
|
||||
|
||||
@@ -21,8 +21,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
any(
|
||||
target_arch = "x86_64",
|
||||
target_arch = "aarch64",
|
||||
target_arch = "powerpc64",
|
||||
target_arch = "riscv64"
|
||||
target_arch = "powerpc64"
|
||||
)
|
||||
))]
|
||||
#[global_allocator]
|
||||
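This hunk, and the identical ones that follow, drop `riscv64` from the architecture allowlist that opts into a custom global allocator. Reduced to a self-contained sketch (the cfg matches the hunks; `tikv-jemallocator` must be declared as a target-specific dependency):

// Illustrative sketch of a cfg-gated global allocator, as used in the hunks above.
#[cfg(all(
    not(target_os = "windows"),
    not(target_os = "openbsd"),
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64"
    )
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

fn main() {
    // Allocations below go through jemalloc on the targets selected above,
    // and through the system allocator everywhere else.
    let v = vec![1_u8; 1024];
    println!("allocated {} bytes", v.len());
}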
|
||||
@@ -18,8 +18,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
any(
|
||||
target_arch = "x86_64",
|
||||
target_arch = "aarch64",
|
||||
target_arch = "powerpc64",
|
||||
target_arch = "riscv64"
|
||||
target_arch = "powerpc64"
|
||||
)
|
||||
))]
|
||||
#[global_allocator]
|
||||
|
||||
@@ -26,8 +26,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
any(
|
||||
target_arch = "x86_64",
|
||||
target_arch = "aarch64",
|
||||
target_arch = "powerpc64",
|
||||
target_arch = "riscv64"
|
||||
target_arch = "powerpc64"
|
||||
)
|
||||
))]
|
||||
#[global_allocator]
|
||||
@@ -43,8 +42,7 @@ static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
|
||||
any(
|
||||
target_arch = "x86_64",
|
||||
target_arch = "aarch64",
|
||||
target_arch = "powerpc64",
|
||||
target_arch = "riscv64"
|
||||
target_arch = "powerpc64"
|
||||
)
|
||||
))]
|
||||
#[unsafe(export_name = "_rjem_malloc_conf")]
|
||||
@@ -79,11 +77,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
|
||||
b.iter_batched(
|
||||
|| parsed.clone(),
|
||||
|parsed| {
|
||||
// Assert that file contains no parse errors
|
||||
assert!(parsed.has_valid_syntax());
|
||||
|
||||
let path = case.path();
|
||||
lint_only(
|
||||
let result = lint_only(
|
||||
&path,
|
||||
None,
|
||||
settings,
|
||||
@@ -91,7 +86,10 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
|
||||
&SourceKind::Python(case.code().to_string()),
|
||||
PySourceType::from(path.as_path()),
|
||||
ParseSource::Precomputed(parsed),
|
||||
)
|
||||
);
|
||||
|
||||
// Assert that file contains no parse errors
|
||||
assert!(!result.has_syntax_errors());
|
||||
},
|
||||
criterion::BatchSize::SmallInput,
|
||||
);
|
||||
|
||||
@@ -20,8 +20,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
any(
|
||||
target_arch = "x86_64",
|
||||
target_arch = "aarch64",
|
||||
target_arch = "powerpc64",
|
||||
target_arch = "riscv64"
|
||||
target_arch = "powerpc64"
|
||||
)
|
||||
))]
|
||||
#[global_allocator]
|
||||
|
||||
@@ -351,41 +351,6 @@ fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
|
||||
});
|
||||
}
|
||||
|
||||
fn benchmark_tuple_implicit_instance_attributes(criterion: &mut Criterion) {
|
||||
setup_rayon();
|
||||
|
||||
criterion.bench_function("ty_micro[many_tuple_assignments]", |b| {
|
||||
b.iter_batched_ref(
|
||||
|| {
|
||||
// This is a regression benchmark for a case that used to hang:
|
||||
// https://github.com/astral-sh/ty/issues/765
|
||||
setup_micro_case(
|
||||
r#"
|
||||
from typing import Any
|
||||
|
||||
class A:
|
||||
foo: tuple[Any, ...]
|
||||
|
||||
class B(A):
|
||||
def __init__(self, parent: "C", x: tuple[Any]):
|
||||
self.foo = parent.foo + x
|
||||
|
||||
class C(A):
|
||||
def __init__(self, parent: B, x: tuple[Any]):
|
||||
self.foo = parent.foo + x
|
||||
"#,
|
||||
)
|
||||
},
|
||||
|case| {
|
||||
let Case { db, .. } = case;
|
||||
let result = db.check();
|
||||
assert_eq!(result.len(), 0);
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
);
|
||||
});
|
||||
}
|
||||
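The removed benchmark above follows the usual criterion regression-benchmark shape: perform the expensive setup in `iter_batched_ref`'s setup closure so only the checked work is timed. A minimal skeleton of that shape (`expensive_setup` and `run_check` are placeholders, not functions from this diff):

use criterion::{BatchSize, Criterion};

fn expensive_setup() -> Vec<u64> {
    (0..10_000).collect()
}

fn run_check(data: &mut Vec<u64>) -> usize {
    data.iter().filter(|n| *n % 7 == 0).count()
}

fn bench_regression(criterion: &mut Criterion) {
    criterion.bench_function("regression[example]", |b| {
        b.iter_batched_ref(
            expensive_setup,
            |data| {
                // Only this closure is measured; the setup above is not.
                let hits = run_check(data);
                assert!(hits > 0);
            },
            BatchSize::SmallInput,
        );
    });
}

criterion::criterion_group!(benches, bench_regression);
criterion::criterion_main!(benches);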
|
||||
fn benchmark_complex_constrained_attributes_1(criterion: &mut Criterion) {
|
||||
setup_rayon();
|
||||
|
||||
@@ -665,7 +630,6 @@ criterion_group!(
|
||||
micro,
|
||||
benchmark_many_string_assignments,
|
||||
benchmark_many_tuple_assignments,
|
||||
benchmark_tuple_implicit_instance_attributes,
|
||||
benchmark_complex_constrained_attributes_1,
|
||||
benchmark_complex_constrained_attributes_2,
|
||||
benchmark_many_enum_members,
|
||||
|
||||
@@ -218,24 +218,6 @@ static TANJUN: std::sync::LazyLock<Benchmark<'static>> = std::sync::LazyLock::ne
|
||||
)
|
||||
});
|
||||
|
||||
static STATIC_FRAME: std::sync::LazyLock<Benchmark<'static>> = std::sync::LazyLock::new(|| {
|
||||
Benchmark::new(
|
||||
RealWorldProject {
|
||||
name: "static-frame",
|
||||
repository: "https://github.com/static-frame/static-frame",
|
||||
commit: "34962b41baca5e7f98f5a758d530bff02748a421",
|
||||
paths: vec![SystemPath::new("static_frame")],
|
||||
// N.B. `arraykit` is installed as a dependency during mypy_primer runs,
|
||||
// but it takes much longer to be installed in a Codspeed run than it does in a mypy_primer run
|
||||
// (seems to be built from source on the Codspeed CI runners for some reason).
|
||||
dependencies: vec!["numpy"],
|
||||
max_dep_date: "2025-08-09",
|
||||
python_version: PythonVersion::PY311,
|
||||
},
|
||||
500,
|
||||
)
|
||||
});
|
||||
|
||||
#[track_caller]
|
||||
fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
|
||||
bencher
|
||||
@@ -250,7 +232,7 @@ fn small(bencher: Bencher, benchmark: &Benchmark) {
|
||||
run_single_threaded(bencher, benchmark);
|
||||
}
|
||||
|
||||
#[bench(args=[&*COLOUR_SCIENCE, &*PANDAS, &*STATIC_FRAME], sample_size=1, sample_count=3)]
|
||||
#[bench(args=[&*COLOUR_SCIENCE, &*PANDAS], sample_size=1, sample_count=3)]
|
||||
fn medium(bencher: Bencher, benchmark: &Benchmark) {
|
||||
run_single_threaded(bencher, benchmark);
|
||||
}
|
||||
|
||||
@@ -14,7 +14,6 @@ license = { workspace = true }
|
||||
ruff_annotate_snippets = { workspace = true }
|
||||
ruff_cache = { workspace = true, optional = true }
|
||||
ruff_diagnostics = { workspace = true }
|
||||
ruff_memory_usage = { workspace = true }
|
||||
ruff_notebook = { workspace = true }
|
||||
ruff_python_ast = { workspace = true, features = ["get-size"] }
|
||||
ruff_python_parser = { workspace = true }
|
||||
@@ -40,10 +39,10 @@ salsa = { workspace = true }
|
||||
schemars = { workspace = true, optional = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
serde_json = { workspace = true, optional = true }
|
||||
similar = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true, optional = true }
|
||||
unicode-width = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
|
||||
[target.'cfg(target_arch="wasm32")'.dependencies]
|
||||
|
||||
@@ -21,7 +21,7 @@ mod stylesheet;
|
||||
/// characteristics in the inputs given to the tool. Typically, but not always,
|
||||
/// a characteristic is a deficiency. An example of a characteristic that is
|
||||
/// _not_ a deficiency is the `reveal_type` diagnostic for our type checker.
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
|
||||
pub struct Diagnostic {
|
||||
/// The actual diagnostic.
|
||||
///
|
||||
@@ -212,7 +212,7 @@ impl Diagnostic {
|
||||
/// The type returned implements the `std::fmt::Display` trait. In most
|
||||
/// cases, just converting it to a string (or printing it) will do what
|
||||
/// you want.
|
||||
pub fn concise_message(&self) -> ConciseMessage<'_> {
|
||||
pub fn concise_message(&self) -> ConciseMessage {
|
||||
let main = self.inner.message.as_str();
|
||||
let annotation = self
|
||||
.primary_annotation()
|
||||
@@ -254,11 +254,6 @@ impl Diagnostic {
|
||||
.find(|ann| ann.is_primary)
|
||||
}
|
||||
|
||||
/// Returns a mutable borrow of all annotations of this diagnostic.
|
||||
pub fn annotations_mut(&mut self) -> impl Iterator<Item = &mut Annotation> {
|
||||
Arc::make_mut(&mut self.inner).annotations.iter_mut()
|
||||
}
|
||||
|
||||
/// Returns the "primary" span of this diagnostic if one exists.
|
||||
///
|
||||
/// When there are multiple primary spans, then the first one that was
|
||||
@@ -315,21 +310,11 @@ impl Diagnostic {
|
||||
&self.inner.subs
|
||||
}
|
||||
|
||||
/// Returns a mutable borrow of the sub-diagnostics of this diagnostic.
|
||||
pub fn sub_diagnostics_mut(&mut self) -> impl Iterator<Item = &mut SubDiagnostic> {
|
||||
Arc::make_mut(&mut self.inner).subs.iter_mut()
|
||||
}
|
||||
|
||||
/// Returns the fix for this diagnostic if it exists.
|
||||
pub fn fix(&self) -> Option<&Fix> {
|
||||
self.inner.fix.as_ref()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn fix_mut(&mut self) -> Option<&mut Fix> {
|
||||
Arc::make_mut(&mut self.inner).fix.as_mut()
|
||||
}
|
||||
|
||||
/// Set the fix for this diagnostic.
|
||||
pub fn set_fix(&mut self, fix: Fix) {
|
||||
debug_assert!(
|
||||
@@ -381,16 +366,6 @@ impl Diagnostic {
|
||||
self.inner.secondary_code.as_ref()
|
||||
}
|
||||
|
||||
/// Returns the secondary code for the diagnostic if it exists, or the lint name otherwise.
///
/// This is a common pattern for Ruff diagnostics, which want to use the noqa code in general,
/// but fall back on the `invalid-syntax` identifier for syntax errors, which don't have
/// secondary codes.
pub fn secondary_code_or_id(&self) -> &str {
|
||||
self.secondary_code()
|
||||
.map_or_else(|| self.inner.id.as_str(), SecondaryCode::as_str)
|
||||
}
|
||||
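A tiny sketch of the fallback described above, with plain strings standing in for `SecondaryCode` and `DiagnosticId`:

// Prefer the secondary (noqa) code, otherwise fall back to the diagnostic ID.
fn code_or_id<'a>(secondary_code: Option<&'a str>, id: &'a str) -> &'a str {
    secondary_code.unwrap_or(id)
}

fn main() {
    assert_eq!(code_or_id(Some("F401"), "unused-import"), "F401");
    assert_eq!(code_or_id(None, "invalid-syntax"), "invalid-syntax");
}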
|
||||
/// Set the secondary code for this diagnostic.
|
||||
pub fn set_secondary_code(&mut self, code: SecondaryCode) {
|
||||
Arc::make_mut(&mut self.inner).secondary_code = Some(code);
|
||||
@@ -504,7 +479,7 @@ impl Diagnostic {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
|
||||
struct DiagnosticInner {
|
||||
id: DiagnosticId,
|
||||
severity: Severity,
|
||||
@@ -580,7 +555,7 @@ impl Eq for RenderingSortKey<'_> {}
|
||||
/// Currently, the order in which sub-diagnostics are rendered relative to one
|
||||
/// another (for a single parent diagnostic) is the order in which they were
|
||||
/// attached to the diagnostic.
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
|
||||
pub struct SubDiagnostic {
|
||||
/// Like with `Diagnostic`, we box the `SubDiagnostic` to make it
|
||||
/// pointer-sized.
|
||||
@@ -636,11 +611,6 @@ impl SubDiagnostic {
|
||||
&self.inner.annotations
|
||||
}
|
||||
|
||||
/// Returns a mutable borrow of the annotations of this sub-diagnostic.
|
||||
pub fn annotations_mut(&mut self) -> impl Iterator<Item = &mut Annotation> {
|
||||
self.inner.annotations.iter_mut()
|
||||
}
|
||||
|
||||
/// Returns a shared borrow of the "primary" annotation of this diagnostic
|
||||
/// if one exists.
|
||||
///
|
||||
@@ -674,7 +644,7 @@ impl SubDiagnostic {
|
||||
/// The type returned implements the `std::fmt::Display` trait. In most
|
||||
/// cases, just converting it to a string (or printing it) will do what
|
||||
/// you want.
|
||||
pub fn concise_message(&self) -> ConciseMessage<'_> {
|
||||
pub fn concise_message(&self) -> ConciseMessage {
|
||||
let main = self.inner.message.as_str();
|
||||
let annotation = self
|
||||
.primary_annotation()
|
||||
@@ -689,7 +659,7 @@ impl SubDiagnostic {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
|
||||
struct SubDiagnosticInner {
|
||||
severity: SubDiagnosticSeverity,
|
||||
message: DiagnosticMessage,
|
||||
@@ -717,7 +687,7 @@ struct SubDiagnosticInner {
|
||||
///
|
||||
/// Messages attached to annotations should also be as brief and specific as
/// possible. Long messages could negatively impact the quality of rendering.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
|
||||
pub struct Annotation {
|
||||
/// The span of this annotation, corresponding to some subsequence of the
|
||||
/// user's input that we want to highlight.
|
||||
@@ -732,11 +702,6 @@ pub struct Annotation {
|
||||
is_primary: bool,
|
||||
/// The diagnostic tags associated with this annotation.
|
||||
tags: Vec<DiagnosticTag>,
|
||||
/// Whether this annotation is a file-level or full-file annotation.
|
||||
///
|
||||
/// When set, rendering will only include the file's name and (optional) range. Everything else
|
||||
/// is omitted, including any file snippet or message.
|
||||
is_file_level: bool,
|
||||
}
|
||||
|
||||
impl Annotation {
|
||||
@@ -755,7 +720,6 @@ impl Annotation {
|
||||
message: None,
|
||||
is_primary: true,
|
||||
tags: Vec::new(),
|
||||
is_file_level: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -772,7 +736,6 @@ impl Annotation {
|
||||
message: None,
|
||||
is_primary: false,
|
||||
tags: Vec::new(),
|
||||
is_file_level: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -838,28 +801,13 @@ impl Annotation {
|
||||
pub fn push_tag(&mut self, tag: DiagnosticTag) {
|
||||
self.tags.push(tag);
|
||||
}
|
||||
|
||||
/// Set whether or not this annotation is file-level.
///
/// File-level annotations are only rendered with their file name and range, if available. This
/// is intended for backwards compatibility with Ruff diagnostics, which historically used
/// `TextRange::default` to indicate a file-level diagnostic. In the new diagnostic model, a
/// [`Span`] with a range of `None` should be used instead, as mentioned in the `Span`
/// documentation.
///
/// TODO(brent) update this usage in Ruff and remove `is_file_level` entirely. See
/// <https://github.com/astral-sh/ruff/issues/19688>, especially my first comment, for more
/// details.
pub fn set_file_level(&mut self, yes: bool) {
|
||||
self.is_file_level = yes;
|
||||
}
|
||||
}
|
||||
|
||||
/// Tags that can be associated with an annotation.
|
||||
///
|
||||
/// These tags are used to provide additional information about the annotation
/// and are passed through to the language server protocol.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
|
||||
pub enum DiagnosticTag {
|
||||
/// Unused or unnecessary code. Used for unused parameters, unreachable code, etc.
|
||||
Unnecessary,
|
||||
@@ -1068,7 +1016,7 @@ impl std::fmt::Display for DiagnosticId {
|
||||
///
|
||||
/// This enum presents a unified interface to these two types for the sake of creating [`Span`]s and
|
||||
/// emitting diagnostics from both ty and ruff.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
|
||||
pub enum UnifiedFile {
|
||||
Ty(File),
|
||||
Ruff(SourceFile),
|
||||
@@ -1119,7 +1067,7 @@ enum DiagnosticSource {
|
||||
|
||||
impl DiagnosticSource {
|
||||
/// Returns this input as a `SourceCode` for convenient querying.
|
||||
fn as_source_code(&self) -> SourceCode<'_, '_> {
|
||||
fn as_source_code(&self) -> SourceCode {
|
||||
match self {
|
||||
DiagnosticSource::Ty(input) => SourceCode::new(input.text.as_str(), &input.line_index),
|
||||
DiagnosticSource::Ruff(source) => SourceCode::new(source.source_text(), source.index()),
|
||||
@@ -1132,7 +1080,7 @@ impl DiagnosticSource {
|
||||
/// It consists of a `File` and an optional range into that file. When the
|
||||
/// range isn't present, it semantically implies that the diagnostic refers to
|
||||
/// the entire file. For example, when the file should be executable but isn't.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
|
||||
pub struct Span {
|
||||
file: UnifiedFile,
|
||||
range: Option<TextRange>,
|
||||
@@ -1210,7 +1158,7 @@ impl From<crate::files::FileRange> for Span {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
|
||||
pub enum Severity {
|
||||
Info,
|
||||
Warning,
|
||||
@@ -1245,7 +1193,7 @@ impl Severity {
|
||||
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
|
||||
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
|
||||
/// deleted and the two combined again.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, get_size2::GetSize)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
|
||||
pub enum SubDiagnosticSeverity {
|
||||
Help,
|
||||
Info,
|
||||
@@ -1299,10 +1247,6 @@ pub struct DisplayDiagnosticConfig {
|
||||
hide_severity: bool,
|
||||
/// Whether to show the availability of a fix in a diagnostic.
|
||||
show_fix_status: bool,
|
||||
/// Whether to show the diff for an available fix after the main diagnostic.
|
||||
///
|
||||
/// This currently only applies to `DiagnosticFormat::Full`.
|
||||
show_fix_diff: bool,
|
||||
/// The lowest applicability that should be shown when reporting diagnostics.
|
||||
fix_applicability: Applicability,
|
||||
}
|
||||
@@ -1350,14 +1294,6 @@ impl DisplayDiagnosticConfig {
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether to show a diff for an available fix after the main diagnostic.
|
||||
pub fn show_fix_diff(self, yes: bool) -> DisplayDiagnosticConfig {
|
||||
DisplayDiagnosticConfig {
|
||||
show_fix_diff: yes,
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the lowest fix applicability that should be shown.
|
||||
///
|
||||
/// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
|
||||
@@ -1381,7 +1317,6 @@ impl Default for DisplayDiagnosticConfig {
|
||||
preview: false,
|
||||
hide_severity: false,
|
||||
show_fix_status: false,
|
||||
show_fix_diff: false,
|
||||
fix_applicability: Applicability::Safe,
|
||||
}
|
||||
}
|
||||
@@ -1493,7 +1428,7 @@ impl std::fmt::Display for ConciseMessage<'_> {
|
||||
/// In most cases, callers shouldn't need to use this. Instead, there is
|
||||
/// a blanket trait implementation for `IntoDiagnosticMessage` for
|
||||
/// anything that implements `std::fmt::Display`.
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||
#[derive(Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
|
||||
pub struct DiagnosticMessage(Box<str>);
|
||||
|
||||
impl DiagnosticMessage {
|
||||
|
||||
@@ -2,15 +2,15 @@ use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::Path;
|
||||
|
||||
use full::FullRenderer;
|
||||
use ruff_annotate_snippets::{
|
||||
Annotation as AnnotateAnnotation, Level as AnnotateLevel, Message as AnnotateMessage,
|
||||
Snippet as AnnotateSnippet,
|
||||
Renderer as AnnotateRenderer, Snippet as AnnotateSnippet,
|
||||
};
|
||||
use ruff_notebook::{Notebook, NotebookIndex};
|
||||
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::diagnostic::stylesheet::DiagnosticStylesheet;
|
||||
use crate::{
|
||||
Db,
|
||||
files::File,
|
||||
@@ -111,7 +111,37 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
|
||||
ConciseRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?;
|
||||
}
|
||||
DiagnosticFormat::Full => {
|
||||
FullRenderer::new(self.resolver, self.config).render(f, self.diagnostics)?;
|
||||
let stylesheet = if self.config.color {
|
||||
DiagnosticStylesheet::styled()
|
||||
} else {
|
||||
DiagnosticStylesheet::plain()
|
||||
};
|
||||
|
||||
let mut renderer = if self.config.color {
|
||||
AnnotateRenderer::styled()
|
||||
} else {
|
||||
AnnotateRenderer::plain()
|
||||
}
|
||||
.cut_indicator("…");
|
||||
|
||||
renderer = renderer
|
||||
.error(stylesheet.error)
|
||||
.warning(stylesheet.warning)
|
||||
.info(stylesheet.info)
|
||||
.note(stylesheet.note)
|
||||
.help(stylesheet.help)
|
||||
.line_no(stylesheet.line_no)
|
||||
.emphasis(stylesheet.emphasis)
|
||||
.none(stylesheet.none);
|
||||
|
||||
for diag in self.diagnostics {
|
||||
let resolved = Resolved::new(self.resolver, diag);
|
||||
let renderable = resolved.to_renderable(self.config.context);
|
||||
for diag in renderable.diagnostics.iter() {
|
||||
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
|
||||
}
|
||||
writeln!(f)?;
|
||||
}
|
||||
}
|
||||
DiagnosticFormat::Azure => {
|
||||
AzureRenderer::new(self.resolver).render(f, self.diagnostics)?;
|
||||
@@ -161,13 +191,9 @@ struct Resolved<'a> {
|
||||
|
||||
impl<'a> Resolved<'a> {
|
||||
/// Creates a new resolved set of diagnostics.
|
||||
fn new(
|
||||
resolver: &'a dyn FileResolver,
|
||||
diag: &'a Diagnostic,
|
||||
config: &DisplayDiagnosticConfig,
|
||||
) -> Resolved<'a> {
|
||||
fn new(resolver: &'a dyn FileResolver, diag: &'a Diagnostic) -> Resolved<'a> {
|
||||
let mut diagnostics = vec![];
|
||||
diagnostics.push(ResolvedDiagnostic::from_diagnostic(resolver, config, diag));
|
||||
diagnostics.push(ResolvedDiagnostic::from_diagnostic(resolver, diag));
|
||||
for sub in &diag.inner.subs {
|
||||
diagnostics.push(ResolvedDiagnostic::from_sub_diagnostic(resolver, sub));
|
||||
}
|
||||
@@ -197,14 +223,12 @@ struct ResolvedDiagnostic<'a> {
|
||||
id: Option<String>,
|
||||
message: String,
|
||||
annotations: Vec<ResolvedAnnotation<'a>>,
|
||||
is_fixable: bool,
|
||||
}
|
||||
|
||||
impl<'a> ResolvedDiagnostic<'a> {
|
||||
/// Resolve a single diagnostic.
|
||||
fn from_diagnostic(
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &DisplayDiagnosticConfig,
|
||||
diag: &'a Diagnostic,
|
||||
) -> ResolvedDiagnostic<'a> {
|
||||
let annotations: Vec<_> = diag
|
||||
@@ -212,45 +236,18 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
.annotations
|
||||
.iter()
|
||||
.filter_map(|ann| {
|
||||
let path = ann
|
||||
.span
|
||||
.file
|
||||
.relative_path(resolver)
|
||||
.to_str()
|
||||
.unwrap_or_else(|| ann.span.file.path(resolver));
|
||||
let path = ann.span.file.path(resolver);
|
||||
let diagnostic_source = ann.span.file.diagnostic_source(resolver);
|
||||
ResolvedAnnotation::new(path, &diagnostic_source, ann, resolver)
|
||||
ResolvedAnnotation::new(path, &diagnostic_source, ann)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let id = if config.hide_severity {
|
||||
// Either the rule code alone (e.g. `F401`), or the lint id with a colon (e.g.
// `invalid-syntax:`). When Ruff gets real severities, we should put the colon back in
// `DisplaySet::format_annotation` for both cases, but this is a small hack to improve
// the formatting of syntax errors for now. This should also be kept consistent with the
// concise formatting.
Some(diag.secondary_code().map_or_else(
|
||||
|| format!("{id}:", id = diag.inner.id),
|
||||
|code| code.to_string(),
|
||||
))
|
||||
} else {
|
||||
Some(diag.inner.id.to_string())
|
||||
};
|
||||
|
||||
let level = if config.hide_severity {
|
||||
AnnotateLevel::None
|
||||
} else {
|
||||
diag.inner.severity.to_annotate()
|
||||
};
|
||||
|
||||
let id = Some(diag.inner.id.to_string());
|
||||
let message = diag.inner.message.as_str().to_string();
|
||||
ResolvedDiagnostic {
|
||||
level,
|
||||
level: diag.inner.severity.to_annotate(),
|
||||
id,
|
||||
message: diag.inner.message.as_str().to_string(),
|
||||
message,
|
||||
annotations,
|
||||
is_fixable: diag
|
||||
.fix()
|
||||
.is_some_and(|fix| fix.applies(config.fix_applicability)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -264,14 +261,9 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
.annotations
|
||||
.iter()
|
||||
.filter_map(|ann| {
|
||||
let path = ann
|
||||
.span
|
||||
.file
|
||||
.relative_path(resolver)
|
||||
.to_str()
|
||||
.unwrap_or_else(|| ann.span.file.path(resolver));
|
||||
let path = ann.span.file.path(resolver);
|
||||
let diagnostic_source = ann.span.file.diagnostic_source(resolver);
|
||||
ResolvedAnnotation::new(path, &diagnostic_source, ann, resolver)
|
||||
ResolvedAnnotation::new(path, &diagnostic_source, ann)
|
||||
})
|
||||
.collect();
|
||||
ResolvedDiagnostic {
|
||||
@@ -279,7 +271,6 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
id: None,
|
||||
message: diag.inner.message.as_str().to_string(),
|
||||
annotations,
|
||||
is_fixable: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -310,49 +301,20 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
&prev.diagnostic_source.as_source_code(),
|
||||
context,
|
||||
prev.line_end,
|
||||
prev.notebook_index.as_ref(),
|
||||
)
|
||||
.get();
|
||||
let this_context_begins = context_before(
|
||||
&ann.diagnostic_source.as_source_code(),
|
||||
context,
|
||||
ann.line_start,
|
||||
ann.notebook_index.as_ref(),
|
||||
)
|
||||
.get();
|
||||
|
||||
// For notebooks, check whether the end of the
// previous annotation and the start of the current
// annotation are in different cells.
let prev_cell_index = prev.notebook_index.as_ref().map(|notebook_index| {
|
||||
let prev_end = prev
|
||||
.diagnostic_source
|
||||
.as_source_code()
|
||||
.line_column(prev.range.end());
|
||||
notebook_index.cell(prev_end.line).unwrap_or_default().get()
|
||||
});
|
||||
let this_cell_index = ann.notebook_index.as_ref().map(|notebook_index| {
|
||||
let this_start = ann
|
||||
.diagnostic_source
|
||||
.as_source_code()
|
||||
.line_column(ann.range.start());
|
||||
notebook_index
|
||||
.cell(this_start.line)
|
||||
.unwrap_or_default()
|
||||
.get()
|
||||
});
|
||||
let in_different_cells = prev_cell_index != this_cell_index;
|
||||
|
||||
// The boundary case here is when `prev_context_ends`
// is exactly one less than `this_context_begins`. In
// that case, the context windows are adjacent and we
// should fall through below to add this annotation to
// the existing snippet.
//
// For notebooks, also check that the context windows
// are in the same cell. Windows from different cells
// should never be considered adjacent.
if in_different_cells || this_context_begins.saturating_sub(prev_context_ends) > 1 {
|
||||
if this_context_begins.saturating_sub(prev_context_ends) > 1 {
|
||||
snippet_by_path
|
||||
.entry(path)
|
||||
.or_default()
|
||||
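The condition changed in this hunk decides when two annotations can share one snippet: their context windows must touch and, for notebooks, they must sit in the same cell. The same rule as a standalone sketch:

// Start a new snippet when the context windows are separated by more than one
// line, or (for notebooks) when the two annotations live in different cells.
fn starts_new_snippet(
    prev_context_end: usize,
    this_context_begin: usize,
    prev_cell: Option<usize>,
    this_cell: Option<usize>,
) -> bool {
    let in_different_cells = prev_cell != this_cell;
    in_different_cells || this_context_begin.saturating_sub(prev_context_end) > 1
}

fn main() {
    // Adjacent windows in the same cell are merged into one snippet.
    assert!(!starts_new_snippet(10, 11, Some(1), Some(1)));
    // A gap of two or more lines starts a new snippet.
    assert!(starts_new_snippet(10, 13, None, None));
    // Different notebook cells always start a new snippet.
    assert!(starts_new_snippet(10, 11, Some(1), Some(2)));
}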
@@ -376,7 +338,6 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||
id: self.id.as_deref(),
|
||||
message: &self.message,
|
||||
snippets_by_input,
|
||||
is_fixable: self.is_fixable,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -396,8 +357,6 @@ struct ResolvedAnnotation<'a> {
|
||||
line_end: OneIndexed,
|
||||
message: Option<&'a str>,
|
||||
is_primary: bool,
|
||||
is_file_level: bool,
|
||||
notebook_index: Option<NotebookIndex>,
|
||||
}
|
||||
|
||||
impl<'a> ResolvedAnnotation<'a> {
|
||||
@@ -410,7 +369,6 @@ impl<'a> ResolvedAnnotation<'a> {
|
||||
path: &'a str,
|
||||
diagnostic_source: &DiagnosticSource,
|
||||
ann: &'a Annotation,
|
||||
resolver: &'a dyn FileResolver,
|
||||
) -> Option<ResolvedAnnotation<'a>> {
|
||||
let source = diagnostic_source.as_source_code();
|
||||
let (range, line_start, line_end) = match (ann.span.range(), ann.message.is_some()) {
|
||||
@@ -444,8 +402,6 @@ impl<'a> ResolvedAnnotation<'a> {
|
||||
line_end,
|
||||
message: ann.get_message(),
|
||||
is_primary: ann.is_primary,
|
||||
is_file_level: ann.is_file_level,
|
||||
notebook_index: resolver.notebook_index(&ann.span.file),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -480,10 +436,6 @@ struct RenderableDiagnostic<'r> {
|
||||
/// should be from the same file, and none of the snippets inside of a
|
||||
/// collection should overlap with one another or be directly adjacent.
|
||||
snippets_by_input: Vec<RenderableSnippets<'r>>,
|
||||
/// Whether or not the diagnostic is fixable.
|
||||
///
|
||||
/// This is rendered as a `[*]` indicator after the diagnostic ID.
|
||||
is_fixable: bool,
|
||||
}
|
||||
|
||||
impl RenderableDiagnostic<'_> {
|
||||
@@ -496,7 +448,7 @@ impl RenderableDiagnostic<'_> {
|
||||
.iter()
|
||||
.map(|snippet| snippet.to_annotate(path))
|
||||
});
|
||||
let mut message = self.level.title(self.message).is_fixable(self.is_fixable);
|
||||
let mut message = self.level.title(self.message);
|
||||
if let Some(id) = self.id {
|
||||
message = message.id(id);
|
||||
}
|
||||
@@ -578,27 +530,17 @@ struct RenderableSnippet<'r> {
|
||||
/// Whether this snippet contains at least one primary
|
||||
/// annotation.
|
||||
has_primary: bool,
|
||||
/// The cell index in a Jupyter notebook, if this snippet refers to a notebook.
|
||||
///
|
||||
/// This is used for rendering annotations with offsets like `cell 1:2:3` instead of simple row
|
||||
/// and column numbers.
|
||||
cell_index: Option<usize>,
|
||||
}
|
||||
|
||||
impl<'r> RenderableSnippet<'r> {
|
||||
/// Creates a new snippet with one or more annotations that is ready to be
|
||||
/// rendered.
|
||||
/// renderer.
|
||||
///
|
||||
/// The first line of the snippet is the smallest line number on which one
|
||||
/// of the annotations begins, minus the context window size. The last line
|
||||
/// is the largest line number on which one of the annotations ends, plus
|
||||
/// the context window size.
|
||||
///
|
||||
/// For Jupyter notebooks, the context window may also be truncated at cell
|
||||
/// boundaries. If multiple annotations are present, and they point to
|
||||
/// different cells, these will have already been split into separate
|
||||
/// snippets by `ResolvedDiagnostic::to_renderable`.
|
||||
///
|
||||
/// Callers should guarantee that the `input` on every `ResolvedAnnotation`
|
||||
/// given is identical.
|
||||
///
|
||||
@@ -615,19 +557,19 @@ impl<'r> RenderableSnippet<'r> {
|
||||
"creating a renderable snippet requires a non-zero number of annotations",
|
||||
);
|
||||
let diagnostic_source = &anns[0].diagnostic_source;
|
||||
let notebook_index = anns[0].notebook_index.as_ref();
|
||||
let source = diagnostic_source.as_source_code();
|
||||
let has_primary = anns.iter().any(|ann| ann.is_primary);
|
||||
|
||||
let content_start_index = anns.iter().map(|ann| ann.line_start).min().unwrap();
|
||||
let line_start = context_before(&source, context, content_start_index, notebook_index);
|
||||
|
||||
let start = source.line_column(anns[0].range.start());
|
||||
let cell_index = notebook_index
|
||||
.map(|notebook_index| notebook_index.cell(start.line).unwrap_or_default().get());
|
||||
|
||||
let content_end_index = anns.iter().map(|ann| ann.line_end).max().unwrap();
|
||||
let line_end = context_after(&source, context, content_end_index, notebook_index);
|
||||
let line_start = context_before(
|
||||
&source,
|
||||
context,
|
||||
anns.iter().map(|ann| ann.line_start).min().unwrap(),
|
||||
);
|
||||
let line_end = context_after(
|
||||
&source,
|
||||
context,
|
||||
anns.iter().map(|ann| ann.line_end).max().unwrap(),
|
||||
);
|
||||
|
||||
let snippet_start = source.line_start(line_start);
|
||||
let snippet_end = source.line_end(line_end);
|
||||
@@ -635,22 +577,6 @@ impl<'r> RenderableSnippet<'r> {
|
||||
.as_source_code()
|
||||
.slice(TextRange::new(snippet_start, snippet_end));
|
||||
|
||||
// Strip the BOM from the beginning of the snippet, if present. Doing this here saves us the
// trouble of updating the annotation ranges in `replace_unprintable`, and also allows us to
// check that the BOM is at the very beginning of the file, not just the beginning of the
// snippet.
const BOM: char = '\u{feff}';
|
||||
let bom_len = BOM.text_len();
|
||||
let (snippet, snippet_start) =
|
||||
if snippet_start == TextSize::ZERO && snippet.starts_with(BOM) {
|
||||
(
|
||||
&snippet[bom_len.to_usize()..],
|
||||
snippet_start + TextSize::new(bom_len.to_u32()),
|
||||
)
|
||||
} else {
|
||||
(snippet, snippet_start)
|
||||
};
|
||||
|
||||
let annotations = anns
|
||||
.iter()
|
||||
.map(|ann| RenderableAnnotation::new(snippet_start, ann))
|
||||
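The block removed above strips a UTF-8 BOM when the snippet starts at offset zero and shifts the snippet start accordingly. A minimal sketch of that handling, using plain `usize` offsets instead of `TextSize`:

const BOM: char = '\u{feff}';

// Drop a leading BOM and advance the snippet start by its encoded length (3 bytes).
fn strip_leading_bom(snippet: &str, snippet_start: usize) -> (&str, usize) {
    if snippet_start == 0 && snippet.starts_with(BOM) {
        let bom_len = BOM.len_utf8();
        (&snippet[bom_len..], snippet_start + bom_len)
    } else {
        (snippet, snippet_start)
    }
}

fn main() {
    let source = "\u{feff}import os";
    let (text, start) = strip_leading_bom(source, 0);
    assert_eq!(text, "import os");
    assert_eq!(start, 3);
}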
@@ -659,20 +585,14 @@ impl<'r> RenderableSnippet<'r> {
|
||||
let EscapedSourceCode {
|
||||
text: snippet,
|
||||
annotations,
|
||||
} = replace_unprintable(snippet, annotations).fix_up_empty_spans_after_line_terminator();
|
||||
|
||||
let line_start = notebook_index.map_or(line_start, |notebook_index| {
|
||||
notebook_index
|
||||
.cell_row(line_start)
|
||||
.unwrap_or(OneIndexed::MIN)
|
||||
});
|
||||
} = replace_whitespace_and_unprintable(snippet, annotations)
|
||||
.fix_up_empty_spans_after_line_terminator();
|
||||
|
||||
RenderableSnippet {
|
||||
snippet,
|
||||
line_start,
|
||||
annotations,
|
||||
has_primary,
|
||||
cell_index,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -686,7 +606,6 @@ impl<'r> RenderableSnippet<'r> {
|
||||
.iter()
|
||||
.map(RenderableAnnotation::to_annotate),
|
||||
)
|
||||
.cell_index(self.cell_index)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -701,8 +620,6 @@ struct RenderableAnnotation<'r> {
|
||||
message: Option<&'r str>,
|
||||
/// Whether this annotation is considered "primary" or not.
|
||||
is_primary: bool,
|
||||
/// Whether this annotation applies to an entire file, rather than a snippet within it.
|
||||
is_file_level: bool,
|
||||
}
|
||||
|
||||
impl<'r> RenderableAnnotation<'r> {
|
||||
@@ -715,16 +632,11 @@ impl<'r> RenderableAnnotation<'r> {
|
||||
/// lifetime parameter here refers to the lifetime of the resolver that
|
||||
/// created the given `ResolvedAnnotation`.
|
||||
fn new(snippet_start: TextSize, ann: &'_ ResolvedAnnotation<'r>) -> RenderableAnnotation<'r> {
|
||||
// This should only ever saturate if a BOM is present _and_ the annotation range points
// before the BOM (i.e. at offset 0). In Ruff this typically results from the use of
// `TextRange::default()` for a diagnostic range instead of a range relative to file
// contents.
let range = ann.range.checked_sub(snippet_start).unwrap_or(ann.range);
|
||||
let range = ann.range - snippet_start;
|
||||
RenderableAnnotation {
|
||||
range,
|
||||
message: ann.message,
|
||||
is_primary: ann.is_primary,
|
||||
is_file_level: ann.is_file_level,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -750,7 +662,7 @@ impl<'r> RenderableAnnotation<'r> {
|
||||
if let Some(message) = self.message {
|
||||
ann = ann.label(message);
|
||||
}
|
||||
ann.is_file_level(self.is_file_level)
|
||||
ann
|
||||
}
|
||||
}
|
||||
|
||||
@@ -877,15 +789,7 @@ pub struct Input {
|
||||
///
/// The line number returned is guaranteed to be less than
/// or equal to `start`.
///
/// In Jupyter notebooks, lines outside the cell containing
/// `start` will be omitted.
fn context_before(
|
||||
source: &SourceCode<'_, '_>,
|
||||
len: usize,
|
||||
start: OneIndexed,
|
||||
notebook_index: Option<&NotebookIndex>,
|
||||
) -> OneIndexed {
|
||||
fn context_before(source: &SourceCode<'_, '_>, len: usize, start: OneIndexed) -> OneIndexed {
|
||||
let mut line = start.saturating_sub(len);
|
||||
// Trim leading empty lines.
|
||||
while line < start {
|
||||
@@ -894,17 +798,6 @@ fn context_before(
|
||||
}
|
||||
line = line.saturating_add(1);
|
||||
}
|
||||
|
||||
if let Some(index) = notebook_index {
|
||||
let content_start_cell = index.cell(start).unwrap_or(OneIndexed::MIN);
|
||||
while line < start {
|
||||
if index.cell(line).unwrap_or(OneIndexed::MIN) == content_start_cell {
|
||||
break;
|
||||
}
|
||||
line = line.saturating_add(1);
|
||||
}
|
||||
}
|
||||
|
||||
line
|
||||
}
|
||||
|
||||
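For reference, the removed `context_before` behaviour in a standalone form: walk back up to `len` lines of context, skip leading blank lines, and never cross into a different notebook cell. Plain slices and a function pointer stand in for the crate's `SourceCode` and `NotebookIndex` types here.

fn context_before(
    lines: &[&str],
    len: usize,
    start: usize, // zero-based index of the annotated line
    cell_of: Option<fn(usize) -> usize>,
) -> usize {
    let mut line = start.saturating_sub(len);
    // Trim leading empty lines, as the kept version of the function still does.
    while line < start && lines[line].trim().is_empty() {
        line += 1;
    }
    // Clamp the window to the cell containing `start`, if a notebook index exists.
    if let Some(cell_of) = cell_of {
        let start_cell = cell_of(start);
        while line < start && cell_of(line) != start_cell {
            line += 1;
        }
    }
    line
}

fn main() {
    let lines = ["", "", "x = 1", "y = 2"];
    // The two blank lines of leading context are trimmed away.
    assert_eq!(context_before(&lines, 3, 3, None), 2);
}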
@@ -914,15 +807,7 @@ fn context_before(
|
||||
/// The line number returned is guaranteed to be greater
|
||||
/// than or equal to `start` and no greater than the
|
||||
/// number of lines in `source`.
|
||||
///
|
||||
/// In Jupyter notebooks, lines outside the cell containing
|
||||
/// `start` will be omitted.
|
||||
fn context_after(
|
||||
source: &SourceCode<'_, '_>,
|
||||
len: usize,
|
||||
start: OneIndexed,
|
||||
notebook_index: Option<&NotebookIndex>,
|
||||
) -> OneIndexed {
|
||||
fn context_after(source: &SourceCode<'_, '_>, len: usize, start: OneIndexed) -> OneIndexed {
|
||||
let max_lines = OneIndexed::from_zero_indexed(source.line_count());
|
||||
let mut line = start.saturating_add(len).min(max_lines);
|
||||
// Trim trailing empty lines.
|
||||
@@ -932,17 +817,6 @@ fn context_after(
|
||||
}
|
||||
line = line.saturating_sub(1);
|
||||
}
|
||||
|
||||
if let Some(index) = notebook_index {
|
||||
let content_end_cell = index.cell(start).unwrap_or(OneIndexed::MIN);
|
||||
while line > start {
|
||||
if index.cell(line).unwrap_or(OneIndexed::MIN) == content_end_cell {
|
||||
break;
|
||||
}
|
||||
line = line.saturating_sub(1);
|
||||
}
|
||||
}
|
||||
|
||||
line
|
||||
}
|
||||
|
||||
@@ -954,18 +828,13 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
|
||||
path
|
||||
}
|
||||
|
||||
/// Given some source code and annotation ranges, this routine replaces
/// unprintable characters with printable representations of them.
/// Given some source code and annotation ranges, this routine replaces tabs
/// with ASCII whitespace, and unprintable characters with printable
/// representations of them.
///
/// The source code and annotations returned are updated to reflect changes made
/// to the source code (if any).
///
/// We don't need to normalize whitespace, such as converting tabs to spaces,
/// because `annotate-snippets` handles that internally. Similarly, it's safe to
/// modify the annotation ranges by inserting 3-byte Unicode replacements
/// because `annotate-snippets` will account for their actual width when
/// rendering and displaying the column to the user.
fn replace_unprintable<'r>(
|
||||
fn replace_whitespace_and_unprintable<'r>(
|
||||
source: &'r str,
|
||||
mut annotations: Vec<RenderableAnnotation<'r>>,
|
||||
) -> EscapedSourceCode<'r> {
|
||||
@@ -997,22 +866,48 @@ fn replace_unprintable<'r>(
|
||||
}
|
||||
};
|
||||
|
||||
const TAB_SIZE: usize = 4;
|
||||
let mut width = 0;
|
||||
let mut column = 0;
|
||||
let mut last_end = 0;
|
||||
let mut result = String::new();
|
||||
for (index, c) in source.char_indices() {
|
||||
// normalize `\r` line endings but don't double `\r\n`
|
||||
if c == '\r' && !source[index + 1..].starts_with("\n") {
|
||||
result.push_str(&source[last_end..index]);
|
||||
result.push('\n');
|
||||
last_end = index + 1;
|
||||
} else if let Some(printable) = unprintable_replacement(c) {
|
||||
result.push_str(&source[last_end..index]);
|
||||
let old_width = width;
|
||||
match c {
|
||||
'\n' | '\r' => {
|
||||
width = 0;
|
||||
column = 0;
|
||||
}
|
||||
'\t' => {
|
||||
let tab_offset = TAB_SIZE - (column % TAB_SIZE);
|
||||
width += tab_offset;
|
||||
column += tab_offset;
|
||||
|
||||
let len = printable.text_len().to_u32();
|
||||
update_ranges(result.text_len().to_usize(), len);
|
||||
let tab_width =
|
||||
u32::try_from(width - old_width).expect("small width because of tab size");
|
||||
result.push_str(&source[last_end..index]);
|
||||
|
||||
result.push(printable);
|
||||
last_end = index + 1;
|
||||
update_ranges(result.text_len().to_usize(), tab_width);
|
||||
|
||||
for _ in 0..tab_width {
|
||||
result.push(' ');
|
||||
}
|
||||
last_end = index + 1;
|
||||
}
|
||||
_ => {
|
||||
width += unicode_width::UnicodeWidthChar::width(c).unwrap_or(0);
|
||||
column += 1;
|
||||
|
||||
if let Some(printable) = unprintable_replacement(c) {
|
||||
result.push_str(&source[last_end..index]);
|
||||
|
||||
let len = printable.text_len().to_u32();
|
||||
update_ranges(result.text_len().to_usize(), len);
|
||||
|
||||
result.push(printable);
|
||||
last_end = index + 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
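The tab handling added in `replace_whitespace_and_unprintable` expands each tab to the next multiple of `TAB_SIZE` columns so annotation carets still line up. Only the width bookkeeping is shown in this sketch; the real code also rewrites annotation ranges and unprintable characters.

const TAB_SIZE: usize = 4;

// Expand tabs to spaces while tracking the current column, resetting at newlines.
fn expand_tabs(source: &str) -> String {
    let mut result = String::with_capacity(source.len());
    let mut column = 0;
    for c in source.chars() {
        match c {
            '\n' => {
                column = 0;
                result.push('\n');
            }
            '\t' => {
                let tab_offset = TAB_SIZE - (column % TAB_SIZE);
                column += tab_offset;
                for _ in 0..tab_offset {
                    result.push(' ');
                }
            }
            _ => {
                column += 1;
                result.push(c);
            }
        }
    }
    result
}

fn main() {
    assert_eq!(expand_tabs("a\tb"), "a   b"); // tab jumps from column 1 to column 4
    assert_eq!(expand_tabs("\tx"), "    x"); // tab at column 0 expands to four spaces
}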
@@ -2622,13 +2517,6 @@ watermelon
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Show a diff for the fix when rendering.
|
||||
pub(super) fn show_fix_diff(&mut self, yes: bool) {
|
||||
let mut config = std::mem::take(&mut self.config);
|
||||
config = config.show_fix_diff(yes);
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// The lowest fix applicability to show when rendering.
|
||||
pub(super) fn fix_applicability(&mut self, applicability: Applicability) {
|
||||
let mut config = std::mem::take(&mut self.config);
|
||||
@@ -2656,12 +2544,7 @@ watermelon
|
||||
/// of the corresponding line minus one. (The "minus one" is because
|
||||
/// otherwise, the span will end where the next line begins, and this
|
||||
/// confuses `ruff_annotate_snippets` as of 2025-03-13.)
|
||||
pub(super) fn span(
|
||||
&self,
|
||||
path: &str,
|
||||
line_offset_start: &str,
|
||||
line_offset_end: &str,
|
||||
) -> Span {
|
||||
fn span(&self, path: &str, line_offset_start: &str, line_offset_end: &str) -> Span {
|
||||
let span = self.path(path);
|
||||
|
||||
let file = span.expect_ty_file();
|
||||
@@ -2684,7 +2567,7 @@ watermelon
|
||||
}
|
||||
|
||||
/// Like `span`, but only attaches a file path.
|
||||
pub(super) fn path(&self, path: &str) -> Span {
|
||||
fn path(&self, path: &str) -> Span {
|
||||
let file = system_path_to_file(&self.db, path).unwrap();
|
||||
Span::from(file)
|
||||
}
|
||||
@@ -2798,7 +2681,7 @@ watermelon
|
||||
///
|
||||
/// See the docs on `TestEnvironment::span` for the meaning of
|
||||
/// `path`, `line_offset_start` and `line_offset_end`.
|
||||
pub(super) fn secondary(
|
||||
fn secondary(
|
||||
mut self,
|
||||
path: &str,
|
||||
line_offset_start: &str,
|
||||
@@ -2834,7 +2717,7 @@ watermelon
|
||||
}
|
||||
|
||||
/// Adds a "help" sub-diagnostic with the given message.
|
||||
pub(super) fn help(mut self, message: impl IntoDiagnosticMessage) -> DiagnosticBuilder<'e> {
|
||||
fn help(mut self, message: impl IntoDiagnosticMessage) -> DiagnosticBuilder<'e> {
|
||||
self.diag.help(message);
|
||||
self
|
||||
}
|
||||
@@ -2994,10 +2877,10 @@ if call(foo
|
||||
env.format(format);
|
||||
|
||||
let diagnostics = vec![
|
||||
env.invalid_syntax("Expected one or more symbol names after import")
|
||||
env.invalid_syntax("SyntaxError: Expected one or more symbol names after import")
|
||||
.primary("syntax_errors.py", "1:14", "1:15", "")
|
||||
.build(),
|
||||
env.invalid_syntax("Expected ')', found newline")
|
||||
env.invalid_syntax("SyntaxError: Expected ')', found newline")
|
||||
.primary("syntax_errors.py", "3:11", "3:12", "")
|
||||
.build(),
|
||||
];
|
||||
@@ -3005,8 +2888,7 @@ if call(foo
|
||||
(env, diagnostics)
|
||||
}
|
||||
|
||||
/// A Jupyter notebook for testing diagnostics.
|
||||
///
|
||||
/// Create Ruff-style diagnostics for testing the various output formats for a notebook.
|
||||
///
|
||||
/// The concatenated cells look like this:
|
||||
///
|
||||
@@ -3026,7 +2908,17 @@ if call(foo
|
||||
/// The first diagnostic is on the unused `os` import with location cell 1, row 2, column 8
|
||||
/// (`cell 1:2:8`). The second diagnostic is the unused `math` import at `cell 2:2:8`, and the
|
||||
/// third diagnostic is an unfixable unused variable at `cell 3:4:5`.
|
||||
pub(super) static NOTEBOOK: &str = r##"
|
||||
#[allow(
|
||||
dead_code,
|
||||
reason = "This is currently only used for JSON but will be needed soon for other formats"
|
||||
)]
|
||||
pub(crate) fn create_notebook_diagnostics(
|
||||
format: DiagnosticFormat,
|
||||
) -> (TestEnvironment, Vec<Diagnostic>) {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.add(
|
||||
"notebook.ipynb",
|
||||
r##"
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
@@ -3065,14 +2957,8 @@ if call(foo
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
"##;
|
||||
|
||||
/// Create Ruff-style diagnostics for testing the various output formats for a notebook.
|
||||
pub(crate) fn create_notebook_diagnostics(
|
||||
format: DiagnosticFormat,
|
||||
) -> (TestEnvironment, Vec<Diagnostic>) {
|
||||
let mut env = TestEnvironment::new();
|
||||
env.add("notebook.ipynb", NOTEBOOK);
|
||||
"##,
|
||||
);
|
||||
env.format(format);
|
||||
|
||||
let diagnostics = vec![
|
||||
|
||||
@@ -50,8 +50,10 @@ impl AzureRenderer<'_> {
|
||||
}
|
||||
writeln!(
|
||||
f,
|
||||
"code={code};]{body}",
|
||||
code = diag.secondary_code_or_id(),
|
||||
"{code}]{body}",
|
||||
code = diag
|
||||
.secondary_code()
|
||||
.map_or_else(String::new, |code| format!("code={code};")),
|
||||
body = diag.body(),
|
||||
)?;
|
||||
}
|
||||
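A sketch of the tail formatting shown in the Azure hunk above: always emit `code=<code>;` before the closing `]`, falling back to the diagnostic ID when no secondary code exists. The logging-command prefix written earlier in the renderer (presumably the standard `##vso[task.logissue ...` command) is elided here.

fn azure_tail(secondary_code: Option<&str>, id: &str, body: &str) -> String {
    let code = secondary_code.unwrap_or(id);
    format!("code={code};]{body}")
}

fn main() {
    assert_eq!(
        azure_tail(Some("F401"), "unused-import", "`os` imported but unused"),
        "code=F401;]`os` imported but unused"
    );
    assert_eq!(
        azure_tail(None, "invalid-syntax", "Expected ')'"),
        "code=invalid-syntax;]Expected ')'"
    );
}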
|
||||
@@ -69,12 +69,6 @@ impl<'a> ConciseRenderer<'a> {
|
||||
"{code} ",
|
||||
code = fmt_styled(code, stylesheet.secondary_code)
|
||||
)?;
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"{id}: ",
|
||||
id = fmt_styled(diag.inner.id.as_str(), stylesheet.secondary_code)
|
||||
)?;
|
||||
}
|
||||
if self.config.show_fix_status {
|
||||
if let Some(fix) = diag.fix() {
|
||||
@@ -162,8 +156,8 @@ mod tests {
|
||||
env.show_fix_status(true);
|
||||
env.fix_applicability(Applicability::DisplayOnly);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import
|
||||
syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline
|
||||
syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
|
||||
syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
|
||||
");
|
||||
}
|
||||
|
||||
@@ -171,8 +165,8 @@ mod tests {
|
||||
fn syntax_errors() {
|
||||
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
|
||||
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
|
||||
syntax_errors.py:1:15: error[invalid-syntax] Expected one or more symbol names after import
|
||||
syntax_errors.py:3:12: error[invalid-syntax] Expected ')', found newline
|
||||
syntax_errors.py:1:15: error[invalid-syntax] SyntaxError: Expected one or more symbol names after import
|
||||
syntax_errors.py:3:12: error[invalid-syntax] SyntaxError: Expected ')', found newline
|
||||
");
|
||||
}
|
||||
|
||||
|
||||
@@ -1,311 +1,8 @@
|
||||
use std::borrow::Cow;
|
||||
use std::num::NonZeroUsize;
|
||||
|
||||
use anstyle::Style;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use similar::{ChangeTag, TextDiff};
|
||||
|
||||
use ruff_annotate_snippets::Renderer as AnnotateRenderer;
|
||||
use ruff_diagnostics::{Applicability, Fix};
|
||||
use ruff_source_file::OneIndexed;
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::diagnostic::render::{FileResolver, Resolved};
|
||||
use crate::diagnostic::stylesheet::{DiagnosticStylesheet, fmt_styled};
|
||||
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig};
|
||||
|
||||
pub(super) struct FullRenderer<'a> {
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
}
|
||||
|
||||
impl<'a> FullRenderer<'a> {
|
||||
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
|
||||
Self { resolver, config }
|
||||
}
|
||||
|
||||
pub(super) fn render(
|
||||
&self,
|
||||
f: &mut std::fmt::Formatter,
|
||||
diagnostics: &[Diagnostic],
|
||||
) -> std::fmt::Result {
|
||||
let stylesheet = if self.config.color {
|
||||
DiagnosticStylesheet::styled()
|
||||
} else {
|
||||
DiagnosticStylesheet::plain()
|
||||
};
|
||||
|
||||
let mut renderer = if self.config.color {
|
||||
AnnotateRenderer::styled()
|
||||
} else {
|
||||
AnnotateRenderer::plain()
|
||||
}
|
||||
.cut_indicator("…");
|
||||
|
||||
renderer = renderer
|
||||
.error(stylesheet.error)
|
||||
.warning(stylesheet.warning)
|
||||
.info(stylesheet.info)
|
||||
.note(stylesheet.note)
|
||||
.help(stylesheet.help)
|
||||
.line_no(stylesheet.line_no)
|
||||
.emphasis(stylesheet.emphasis)
|
||||
.none(stylesheet.none);
|
||||
|
||||
for diag in diagnostics {
|
||||
let resolved = Resolved::new(self.resolver, diag, self.config);
|
||||
let renderable = resolved.to_renderable(self.config.context);
|
||||
for diag in renderable.diagnostics.iter() {
|
||||
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
|
||||
}
|
||||
writeln!(f)?;
|
||||
|
||||
if self.config.show_fix_diff {
|
||||
if let Some(diff) = Diff::from_diagnostic(diag, &stylesheet, self.resolver) {
|
||||
writeln!(f, "{diff}")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Renders a diff that shows the code fixes.
///
/// The implementation isn't fully fleshed out and is only used by tests. Before using in production, try to:
/// * Improve layout
/// * Replace tabs with spaces for a consistent experience across terminals
/// * Replace zero-width whitespaces
/// * Print a simpler diff if only a single line has changed
/// * Compute the diff from the `Edit` because diff calculation is expensive.
struct Diff<'a> {
|
||||
fix: &'a Fix,
|
||||
diagnostic_source: DiagnosticSource,
|
||||
notebook_index: Option<NotebookIndex>,
|
||||
stylesheet: &'a DiagnosticStylesheet,
|
||||
}
|
||||
|
||||
impl<'a> Diff<'a> {
|
||||
fn from_diagnostic(
|
||||
diagnostic: &'a Diagnostic,
|
||||
stylesheet: &'a DiagnosticStylesheet,
|
||||
resolver: &'a dyn FileResolver,
|
||||
) -> Option<Diff<'a>> {
|
||||
let file = &diagnostic.primary_span_ref()?.file;
|
||||
Some(Diff {
|
||||
fix: diagnostic.fix()?,
|
||||
diagnostic_source: file.diagnostic_source(resolver),
|
||||
notebook_index: resolver.notebook_index(file),
|
||||
stylesheet,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Diff<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let source_code = self.diagnostic_source.as_source_code();
|
||||
let source_text = source_code.text();
|
||||
|
||||
// Partition the source code into end offsets for each cell. If `self.notebook_index` is
// `None`, indicating a regular script file, all the lines will be in one "cell" under the
// `None` key.
let cells = if let Some(notebook_index) = &self.notebook_index {
|
||||
let mut last_cell = OneIndexed::MIN;
|
||||
let mut cells: Vec<(Option<OneIndexed>, TextSize)> = Vec::new();
|
||||
for (row, cell) in notebook_index.iter() {
|
||||
if cell != last_cell {
|
||||
let offset = source_code.line_start(row);
|
||||
cells.push((Some(last_cell), offset));
|
||||
last_cell = cell;
|
||||
}
|
||||
}
|
||||
cells.push((Some(last_cell), source_text.text_len()));
|
||||
cells
|
||||
} else {
|
||||
vec![(None, source_text.text_len())]
|
||||
};
|
||||
|
||||
let message = match self.fix.applicability() {
|
||||
// TODO(zanieb): Adjust this messaging once it's user-facing
|
||||
Applicability::Safe => "Safe fix",
|
||||
Applicability::Unsafe => "Unsafe fix",
|
||||
Applicability::DisplayOnly => "Display-only fix",
|
||||
};
|
||||
|
||||
// TODO(brent) `stylesheet.separator` is cyan rather than blue, as we had before. I think
|
||||
// we're getting rid of this soon anyway, so I didn't think it was worth adding another
|
||||
// style to the stylesheet temporarily. The color doesn't appear at all in the snapshot
|
||||
// tests, which is the only place these are currently used.
|
||||
writeln!(f, "ℹ {}", fmt_styled(message, self.stylesheet.separator))?;
|
||||
|
||||
let mut last_end = TextSize::ZERO;
|
||||
for (cell, offset) in cells {
|
||||
let range = TextRange::new(last_end, offset);
|
||||
last_end = offset;
|
||||
let input = source_code.slice(range);
|
||||
|
||||
let mut output = String::with_capacity(input.len());
|
||||
let mut last_end = range.start();
|
||||
|
||||
let mut applied = 0;
|
||||
for edit in self.fix.edits() {
|
||||
if range.contains_range(edit.range()) {
|
||||
output.push_str(source_code.slice(TextRange::new(last_end, edit.start())));
|
||||
output.push_str(edit.content().unwrap_or_default());
|
||||
last_end = edit.end();
|
||||
applied += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// No edits were applied, so there's no need to diff.
|
||||
if applied == 0 {
|
||||
continue;
|
||||
}
|
||||
|
||||
output.push_str(&source_text[usize::from(last_end)..usize::from(range.end())]);
|
||||
|
||||
let diff = TextDiff::from_lines(input, &output);
|
||||
|
||||
let (largest_old, largest_new) = diff
|
||||
.ops()
|
||||
.last()
|
||||
.map(|op| (op.old_range().start, op.new_range().start))
|
||||
.unwrap_or_default();
|
||||
|
||||
let digit_with = OneIndexed::from_zero_indexed(largest_new.max(largest_old)).digits();
|
||||
|
||||
if let Some(cell) = cell {
|
||||
// Room for 2 digits, 2 x 1 space before each digit, 1 space, and 1 `|`. This
|
||||
// centers the three colons on the pipe.
|
||||
writeln!(f, "{:>1$} cell {cell}", ":::", 2 * digit_with.get() + 4)?;
|
||||
}
|
||||
|
||||
for (idx, group) in diff.grouped_ops(3).iter().enumerate() {
|
||||
if idx > 0 {
|
||||
writeln!(f, "{:-^1$}", "-", 80)?;
|
||||
}
|
||||
for op in group {
|
||||
for change in diff.iter_inline_changes(op) {
|
||||
let sign = match change.tag() {
|
||||
ChangeTag::Delete => "-",
|
||||
ChangeTag::Insert => "+",
|
||||
ChangeTag::Equal => " ",
|
||||
};
|
||||
|
||||
let line_style = LineStyle::from(change.tag(), self.stylesheet);
|
||||
|
||||
let old_index = change.old_index().map(OneIndexed::from_zero_indexed);
|
||||
let new_index = change.new_index().map(OneIndexed::from_zero_indexed);
|
||||
|
||||
write!(
|
||||
f,
|
||||
"{} {} |{}",
|
||||
Line {
|
||||
index: old_index,
|
||||
width: digit_with,
|
||||
},
|
||||
Line {
|
||||
index: new_index,
|
||||
width: digit_with,
|
||||
},
|
||||
fmt_styled(line_style.apply_to(sign), self.stylesheet.emphasis),
|
||||
)?;
|
||||
|
||||
for (emphasized, value) in change.iter_strings_lossy() {
|
||||
let value = show_nonprinting(&value);
|
||||
if emphasized {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
fmt_styled(
|
||||
line_style.apply_to(&value),
|
||||
self.stylesheet.underline
|
||||
)
|
||||
)?;
|
||||
} else {
|
||||
write!(f, "{}", line_style.apply_to(&value))?;
|
||||
}
|
||||
}
|
||||
if change.missing_newline() {
|
||||
writeln!(f)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct LineStyle {
|
||||
style: Style,
|
||||
}
|
||||
|
||||
impl LineStyle {
|
||||
fn apply_to(&self, input: &str) -> impl std::fmt::Display {
|
||||
fmt_styled(input, self.style)
|
||||
}
|
||||
|
||||
fn from(value: ChangeTag, stylesheet: &DiagnosticStylesheet) -> LineStyle {
|
||||
match value {
|
||||
ChangeTag::Equal => LineStyle {
|
||||
style: stylesheet.none,
|
||||
},
|
||||
ChangeTag::Delete => LineStyle {
|
||||
style: stylesheet.deletion,
|
||||
},
|
||||
ChangeTag::Insert => LineStyle {
|
||||
style: stylesheet.insertion,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Line {
|
||||
index: Option<OneIndexed>,
|
||||
width: NonZeroUsize,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Line {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self.index {
|
||||
None => {
|
||||
for _ in 0..self.width.get() {
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Some(idx) => write!(f, "{:<width$}", idx, width = self.width.get()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn show_nonprinting(s: &str) -> Cow<'_, str> {
|
||||
if s.find(['\x07', '\x08', '\x1b', '\x7f']).is_some() {
|
||||
Cow::Owned(
|
||||
s.replace('\x07', "␇")
|
||||
.replace('\x08', "␈")
|
||||
.replace('\x1b', "␛")
|
||||
.replace('\x7f', "␡"),
|
||||
)
|
||||
} else {
|
||||
Cow::Borrowed(s)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_diagnostics::{Applicability, Fix};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::diagnostic::{
|
||||
Annotation, DiagnosticFormat, Severity,
|
||||
render::tests::{
|
||||
NOTEBOOK, TestEnvironment, create_diagnostics, create_notebook_diagnostics,
|
||||
create_syntax_error_diagnostics,
|
||||
},
|
||||
DiagnosticFormat, Severity,
|
||||
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
|
||||
};

#[test]
@@ -345,7 +42,7 @@ mod tests {
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[invalid-syntax]: Expected one or more symbol names after import
error[invalid-syntax]: SyntaxError: Expected one or more symbol names after import
--> syntax_errors.py:1:15
|
1 | from os import
@@ -354,71 +51,7 @@ mod tests {
3 | if call(foo
|

error[invalid-syntax]: Expected ')', found newline
--> syntax_errors.py:3:12
|
1 | from os import
2 |
3 | if call(foo
| ^
4 | def bar():
5 | pass
|
");
}

#[test]
fn hide_severity_output() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
env.hide_severity(true);
env.fix_applicability(Applicability::DisplayOnly);

insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
F401 [*] `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`

F841 [*] Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`

F821 Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|
"#);
}

#[test]
fn hide_severity_syntax_errors() {
let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
env.hide_severity(true);

insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
invalid-syntax: Expected one or more symbol names after import
--> syntax_errors.py:1:15
|
1 | from os import
| ^
2 |
3 | if call(foo
|

invalid-syntax: Expected ')', found newline
error[invalid-syntax]: SyntaxError: Expected ')', found newline
--> syntax_errors.py:3:12
|
1 | from os import
@@ -483,7 +116,7 @@ print()
/// For example, without the fix, we get diagnostics like this:
///
/// ```
/// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead
/// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
/// --> example.py:1:25
/// |
/// 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
@@ -503,13 +136,13 @@ print()
.builder(
"invalid-character-sub",
Severity::Error,
r#"Invalid unescaped character SUB, use "\x1a" instead"#,
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
)
.primary("example.py", "1:24", "1:24", "")
.build();

insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:25
|
1 | nested_fstrings = f'␈{f'{f'␛'}'}'
@@ -528,13 +161,13 @@ print()
.builder(
"invalid-character-sub",
Severity::Error,
r#"Invalid unescaped character SUB, use "\x1a" instead"#,
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
)
.primary("example.py", "1:1", "1:1", "")
.build();

insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:2
|
1 | ␈␛
@@ -544,361 +177,4 @@ print()

Ok(())
}

/// Ensure that the header column matches the column in the user's input, even if we've replaced
/// tabs with spaces for rendering purposes.
#[test]
fn tab_replacement() {
let mut env = TestEnvironment::new();
env.add("example.py", "def foo():\n\treturn 1");
env.format(DiagnosticFormat::Full);

let diagnostic = env.err().primary("example.py", "2:1", "2:9", "").build();

insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:2:2
|
1 | def foo():
2 | return 1
| ^^^^^^^^
|
");
}

/// For file-level diagnostics, we expect to see the header line with the diagnostic information
/// and the `-->` line with the file information but no lines of source code.
#[test]
fn file_level() {
let mut env = TestEnvironment::new();
env.add("example.py", "");
env.format(DiagnosticFormat::Full);

let mut diagnostic = env.err().build();
let span = env.path("example.py").with_range(TextRange::default());
let mut annotation = Annotation::primary(span);
annotation.set_file_level(true);
diagnostic.annotate(annotation);

insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:1:1
");
}

/// Check that ranges in notebooks are remapped relative to the cells.
#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[unused-import][*]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
help: Remove unused import: `os`

error[unused-import][*]: `math` imported but unused
--> notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ^^^^
3 |
4 | print('hello world')
|
help: Remove unused import: `math`

error[unused-variable]: Local variable `x` is assigned to but never used
--> notebook.ipynb:cell 3:4:5
|
2 | def foo():
3 | print()
4 | x = 1
| ^
|
help: Remove assignment to unused variable `x`
");
}

/// Check notebook handling for multiple annotations in a single diagnostic that span cells.
#[test]
fn notebook_output_multiple_annotations() {
let mut env = TestEnvironment::new();
env.add("notebook.ipynb", NOTEBOOK);

let diagnostics = vec![
// adjacent context windows
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("notebook.ipynb", "2:7", "2:9", "")
.secondary("notebook.ipynb", "4:7", "4:11", "second cell")
.help("Remove unused import: `os`")
.build(),
// non-adjacent context windows
env.builder("unused-import", Severity::Error, "`os` imported but unused")
.primary("notebook.ipynb", "2:7", "2:9", "")
.secondary("notebook.ipynb", "10:4", "10:5", "second cell")
.help("Remove unused import: `os`")
.build(),
// adjacent context windows in the same cell
env.err()
.primary("notebook.ipynb", "4:7", "4:11", "second cell")
.secondary("notebook.ipynb", "6:0", "6:5", "print statement")
.help("Remove `print` statement")
.build(),
];

insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[unused-import]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
::: notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ---- second cell
3 |
4 | print('hello world')
|
help: Remove unused import: `os`

error[unused-import]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
::: notebook.ipynb:cell 3:4:5
|
2 | def foo():
3 | print()
4 | x = 1
| - second cell
|
help: Remove unused import: `os`

error[test-diagnostic]: main diagnostic message
--> notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ^^^^ second cell
3 |
4 | print('hello world')
| ----- print statement
|
help: Remove `print` statement
");
}

/// Test that we remap notebook cell line numbers in the diff as well as the main diagnostic.
#[test]
fn notebook_output_with_diff() {
let (mut env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
env.show_fix_diff(true);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[unused-import][*]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
help: Remove unused import: `os`

ℹ Safe fix
::: cell 1
1 1 | # cell 1
2 |-import os

error[unused-import][*]: `math` imported but unused
--> notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ^^^^
3 |
4 | print('hello world')
|
help: Remove unused import: `math`

ℹ Safe fix
::: cell 2
1 1 | # cell 2
2 |-import math
3 2 |
4 3 | print('hello world')

error[unused-variable]: Local variable `x` is assigned to but never used
--> notebook.ipynb:cell 3:4:5
|
2 | def foo():
3 | print()
4 | x = 1
| ^
|
help: Remove assignment to unused variable `x`

ℹ Unsafe fix
::: cell 3
1 1 | # cell 3
2 2 | def foo():
3 3 | print()
4 |- x = 1
5 4 |
");
}

#[test]
fn notebook_output_with_diff_spanning_cells() {
let (mut env, mut diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
env.show_fix_diff(true);

// Move all of the edits from the later diagnostics to the first diagnostic to simulate a
// single diagnostic with edits in different cells.
let mut diagnostic = diagnostics.swap_remove(0);
let fix = diagnostic.fix_mut().unwrap();
let mut edits = fix.edits().to_vec();
for diag in diagnostics {
edits.extend_from_slice(diag.fix().unwrap().edits());
}
*fix = Fix::unsafe_edits(edits.remove(0), edits);

insta::assert_snapshot!(env.render(&diagnostic), @r"
error[unused-import]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
help: Remove unused import: `os`

ℹ Unsafe fix
::: cell 1
1 1 | # cell 1
2 |-import os
::: cell 2
1 1 | # cell 2
2 |-import math
3 2 |
4 3 | print('hello world')
::: cell 3
1 1 | # cell 3
2 2 | def foo():
3 3 | print()
4 |- x = 1
5 4 |
");
}

/// Carriage return (`\r`) is a valid line-ending in Python, so we should normalize this to a
/// line feed (`\n`) for rendering. Otherwise we report a single long line for this case.
#[test]
fn normalize_carriage_return() {
let mut env = TestEnvironment::new();
env.add(
"example.py",
"# Keep parenthesis around preserved CR\rint(-\r 1)\rint(+\r 1)",
);
env.format(DiagnosticFormat::Full);

let mut diagnostic = env.err().build();
let span = env
.path("example.py")
.with_range(TextRange::at(TextSize::new(39), TextSize::new(0)));
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);

insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:2:1
|
1 | # Keep parenthesis around preserved CR
2 | int(-
| ^
3 | 1)
4 | int(+
|
");
}

/// Without stripping the BOM, we report an error in column 2, unlike Ruff.
#[test]
fn strip_bom() {
let mut env = TestEnvironment::new();
env.add("example.py", "\u{feff}import foo");
env.format(DiagnosticFormat::Full);

let mut diagnostic = env.err().build();
let span = env
.path("example.py")
.with_range(TextRange::at(TextSize::new(3), TextSize::new(0)));
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);

insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:1:1
|
1 | import foo
| ^
|
");
}

#[test]
fn bom_with_default_range() {
let mut env = TestEnvironment::new();
env.add("example.py", "\u{feff}import foo");
env.format(DiagnosticFormat::Full);

let mut diagnostic = env.err().build();
let span = env.path("example.py").with_range(TextRange::default());
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);

insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:1:1
|
1 | import foo
| ^
|
");
}

/// We previously rendered this correctly, but the header was falling back to 1:1 for ranges
/// pointing to the final newline in a file. Like Ruff, we now use the offset of the first
/// character in the nonexistent final line in the header.
#[test]
fn end_of_file() {
let mut env = TestEnvironment::new();
let contents = "unexpected eof\n";
env.add("example.py", contents);
env.format(DiagnosticFormat::Full);

let mut diagnostic = env.err().build();
let span = env
.path("example.py")
.with_range(TextRange::at(contents.text_len(), TextSize::new(0)));
let annotation = Annotation::primary(span);
diagnostic.annotate(annotation);

insta::assert_snapshot!(env.render(&diagnostic), @r"
error[test-diagnostic]: main diagnostic message
--> example.py:2:1
|
1 | unexpected eof
| ^
|
");
}
}

@@ -6,7 +6,7 @@ use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed};
use ruff_text_size::Ranged;

use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig};
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig, SecondaryCode};

use super::FileResolver;

@@ -99,7 +99,7 @@ pub(super) fn diagnostic_to_json<'a>(
// In preview, the locations and filename can be optional.
if config.preview {
JsonDiagnostic {
code: diagnostic.secondary_code_or_id(),
code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(),
message: diagnostic.body(),
fix,
@@ -111,7 +111,7 @@ pub(super) fn diagnostic_to_json<'a>(
}
} else {
JsonDiagnostic {
code: diagnostic.secondary_code_or_id(),
code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(),
message: diagnostic.body(),
fix,
@@ -221,7 +221,7 @@ impl Serialize for ExpandedEdits<'_> {
#[derive(Serialize)]
pub(crate) struct JsonDiagnostic<'a> {
cell: Option<OneIndexed>,
code: &'a str,
code: Option<&'a SecondaryCode>,
end_location: Option<JsonLocation>,
filename: Option<&'a str>,
fix: Option<JsonFix<'a>>,
@@ -302,7 +302,7 @@ mod tests {
[
{
"cell": null,
"code": "test-diagnostic",
"code": null,
"end_location": {
"column": 1,
"row": 1
@@ -336,7 +336,7 @@ mod tests {
[
{
"cell": null,
"code": "test-diagnostic",
"code": null,
"end_location": null,
"filename": null,
"fix": null,

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/azure.rs
expression: env.render_diagnostics(&diagnostics)
---
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;code=invalid-syntax;]Expected one or more symbol names after import
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;code=invalid-syntax;]Expected ')', found newline
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;]SyntaxError: Expected one or more symbol names after import
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;]SyntaxError: Expected ')', found newline

@@ -5,7 +5,7 @@ expression: env.render_diagnostics(&diagnostics)
[
{
"cell": null,
"code": "invalid-syntax",
"code": null,
"end_location": {
"column": 1,
"row": 2
@@ -16,13 +16,13 @@ expression: env.render_diagnostics(&diagnostics)
"column": 15,
"row": 1
},
"message": "Expected one or more symbol names after import",
"message": "SyntaxError: Expected one or more symbol names after import",
"noqa_row": null,
"url": null
},
{
"cell": null,
"code": "invalid-syntax",
"code": null,
"end_location": {
"column": 1,
"row": 4
@@ -33,7 +33,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 12,
"row": 3
},
"message": "Expected ')', found newline",
"message": "SyntaxError: Expected ')', found newline",
"noqa_row": null,
"url": null
}

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics)
---
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"Expected one or more symbol names after import","noqa_row":null,"url":null}
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"Expected ')', found newline","noqa_row":null,"url":null}
{"cell":null,"code":null,"end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"SyntaxError: Expected one or more symbol names after import","noqa_row":null,"url":null}
{"cell":null,"code":null,"end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"SyntaxError: Expected ')', found newline","noqa_row":null,"url":null}

@@ -6,10 +6,10 @@ expression: env.render_diagnostics(&diagnostics)
<testsuites name="ruff" tests="2" failures="2" errors="0">
<testsuite name="syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="1" column="15">
<failure message="Expected one or more symbol names after import">line 1, col 15, Expected one or more symbol names after import</failure>
<failure message="SyntaxError: Expected one or more symbol names after import">line 1, col 15, SyntaxError: Expected one or more symbol names after import</failure>
</testcase>
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="3" column="12">
<failure message="Expected ')', found newline">line 3, col 12, Expected ')', found newline</failure>
<failure message="SyntaxError: Expected ')', found newline">line 3, col 12, SyntaxError: Expected ')', found newline</failure>
</testcase>
</testsuite>
</testsuites>

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
---
syntax_errors.py:1: [invalid-syntax] Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] Expected ')', found newline
syntax_errors.py:1: [invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] SyntaxError: Expected ')', found newline

@@ -21,7 +21,7 @@ expression: env.render_diagnostics(&diagnostics)
}
}
},
"message": "Expected one or more symbol names after import"
"message": "SyntaxError: Expected one or more symbol names after import"
},
{
"code": {
@@ -40,7 +40,7 @@ expression: env.render_diagnostics(&diagnostics)
}
}
},
"message": "Expected ')', found newline"
"message": "SyntaxError: Expected ')', found newline"
}
],
"severity": "WARNING",

@@ -40,12 +40,9 @@ pub struct DiagnosticStylesheet {
pub(crate) help: Style,
pub(crate) line_no: Style,
pub(crate) emphasis: Style,
pub(crate) underline: Style,
pub(crate) none: Style,
pub(crate) separator: Style,
pub(crate) secondary_code: Style,
pub(crate) insertion: Style,
pub(crate) deletion: Style,
}

impl Default for DiagnosticStylesheet {
@@ -66,12 +63,9 @@ impl DiagnosticStylesheet {
help: AnsiColor::BrightCyan.on_default().effects(Effects::BOLD),
line_no: bright_blue.effects(Effects::BOLD),
emphasis: Style::new().effects(Effects::BOLD),
underline: Style::new().effects(Effects::UNDERLINE),
none: Style::new(),
separator: AnsiColor::Cyan.on_default(),
secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
insertion: AnsiColor::Green.on_default(),
deletion: AnsiColor::Red.on_default(),
}
}

@@ -84,12 +78,9 @@ impl DiagnosticStylesheet {
help: Style::new(),
line_no: Style::new(),
emphasis: Style::new(),
underline: Style::new(),
none: Style::new(),
separator: Style::new(),
secondary_code: Style::new(),
insertion: Style::new(),
deletion: Style::new(),
}
}
}

@@ -9,7 +9,7 @@ use crate::system::file_time_now;
/// * The last modification time of the file.
/// * The hash of the file's content.
/// * The revision as it comes from an external system, for example the LSP.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default, get_size2::GetSize)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
pub struct FileRevision(u128);

impl FileRevision {

@@ -87,12 +87,11 @@ impl Files {
.system_by_path
.entry(absolute.clone())
.or_insert_with(|| {
tracing::trace!("Adding file '{path}'");

let metadata = db.system().path_metadata(path);

tracing::trace!("Adding file '{absolute}'");

let durability = self
.root(db, &absolute)
.root(db, path)
.map_or(Durability::default(), |root| root.durability(db));

let builder = File::builder(FilePath::System(absolute))
@@ -290,7 +289,7 @@ impl std::panic::RefUnwindSafe for Files {}
/// # Ordering
/// Ordering is based on the file's salsa-assigned id and not on its values.
/// The id may change between runs.
#[salsa::input(heap_size=ruff_memory_usage::heap_size)]
#[salsa::input]
#[derive(PartialOrd, Ord)]
pub struct File {
/// The path of the file (immutable).
@@ -522,7 +521,7 @@ impl VirtualFile {
// The types in here need to be public because they're salsa ingredients but we
// don't want them to be publicly accessible. That's why we put them into a private module.
mod private {
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default, get_size2::GetSize)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
pub enum FileStatus {
/// The file exists.
#[default]

@@ -16,7 +16,7 @@ use crate::system::{SystemPath, SystemPathBuf};
/// The main usage of file roots is to determine a file's durability. But it can also be used
/// to make a salsa query dependent on whether a file in a root has changed without writing any
/// manual invalidation logic.
#[salsa::input(debug, heap_size=ruff_memory_usage::heap_size)]
#[salsa::input(debug)]
pub struct FileRoot {
/// The path of a root is guaranteed to never change.
#[returns(deref)]
@@ -37,7 +37,7 @@ impl FileRoot {
}
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileRootKind {
/// The root of a project.
Project,

@@ -11,7 +11,7 @@ use std::fmt::{Display, Formatter};
/// * a file stored on the [host system](crate::system::System).
/// * a virtual file stored on the [host system](crate::system::System).
/// * a vendored file stored in the [vendored file system](crate::vendored::VendoredFileSystem).
#[derive(Clone, Debug, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum FilePath {
/// Path to a file on the [host system](crate::system::System).
System(SystemPathBuf),

@@ -1,8 +1,3 @@
#![warn(
clippy::disallowed_methods,
reason = "Prefer System trait methods over std methods"
)]

use crate::files::Files;
use crate::system::System;
use crate::vendored::VendoredFileSystem;
@@ -70,10 +65,6 @@ pub trait Db: salsa::Database {
/// to process work in parallel. For example, to index a directory or checking the files of a project.
/// ty can still spawn more threads for other tasks, e.g. to wait for a Ctrl+C signal or
/// watching the files for changes.
#[expect(
clippy::disallowed_methods,
reason = "We don't have access to System here, but this is also only used by the CLI and the server which always run on a real system."
)]
pub fn max_parallelism() -> NonZeroUsize {
std::env::var(EnvVars::TY_MAX_PARALLELISM)
.or_else(|_| std::env::var(EnvVars::RAYON_NUM_THREADS))

@@ -21,7 +21,7 @@ use crate::source::source_text;
/// reflected in the changed AST offsets.
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
/// for determining if a query result is unchanged.
#[salsa::tracked(returns(ref), no_eq, heap_size=ruff_memory_usage::heap_size)]
#[salsa::tracked(returns(ref), no_eq, heap_size=get_size2::GetSize::get_heap_size)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
let _span = tracing::trace_span!("parsed_module", ?file).entered();

@@ -92,14 +92,14 @@ impl ParsedModule {
self.inner.store(None);
}

/// Returns the pointer address of this [`ParsedModule`].
/// Returns a pointer for this [`ParsedModule`].
///
/// The pointer uniquely identifies the module within the current Salsa revision,
/// regardless of whether particular [`ParsedModuleRef`] instances are garbage collected.
pub fn addr(&self) -> usize {
pub fn as_ptr(&self) -> *const () {
// Note that the outer `Arc` in `inner` is stable across garbage collection, while the inner
// `Arc` within the `ArcSwap` may change.
Arc::as_ptr(&self.inner).addr()
Arc::as_ptr(&self.inner).cast()
}
}

@@ -202,13 +202,9 @@ mod indexed {

/// Returns the node at the given index.
pub fn get_by_index<'ast>(&'ast self, index: NodeIndex) -> AnyRootNodeRef<'ast> {
let index = index
.as_u32()
.expect("attempted to access uninitialized `NodeIndex`");

// Note that this method restores the correct lifetime: the nodes are valid for as
// long as the reference to `IndexedModule` is alive.
self.index[index as usize]
self.index[index.as_usize()]
}
}

@@ -224,7 +220,7 @@ mod indexed {
T: HasNodeIndex + std::fmt::Debug,
AnyRootNodeRef<'a>: From<&'a T>,
{
node.node_index().set(NodeIndex::from(self.index));
node.node_index().set(self.index);
self.nodes.push(AnyRootNodeRef::from(node));
self.index += 1;
}

@@ -9,7 +9,7 @@ use crate::Db;
use crate::files::{File, FilePath};

/// Reads the source text of a python text file (must be valid UTF8) or notebook.
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
pub fn source_text(db: &dyn Db, file: File) -> SourceText {
let path = file.path(db);
let _span = tracing::trace_span!("source_text", file = %path).entered();
@@ -69,21 +69,21 @@ impl SourceText {
pub fn as_str(&self) -> &str {
match &self.inner.kind {
SourceTextKind::Text(source) => source,
SourceTextKind::Notebook { notebook } => notebook.source_code(),
SourceTextKind::Notebook(notebook) => notebook.source_code(),
}
}

/// Returns the underlying notebook if this is a notebook file.
pub fn as_notebook(&self) -> Option<&Notebook> {
match &self.inner.kind {
SourceTextKind::Notebook { notebook } => Some(notebook),
SourceTextKind::Notebook(notebook) => Some(notebook),
SourceTextKind::Text(_) => None,
}
}

/// Returns `true` if this is a notebook source file.
pub fn is_notebook(&self) -> bool {
matches!(&self.inner.kind, SourceTextKind::Notebook { .. })
matches!(&self.inner.kind, SourceTextKind::Notebook(_))
}

/// Returns `true` if there was an error when reading the content of the file.
@@ -108,7 +108,7 @@ impl std::fmt::Debug for SourceText {
SourceTextKind::Text(text) => {
dbg.field(text);
}
SourceTextKind::Notebook { notebook } => {
SourceTextKind::Notebook(notebook) => {
dbg.field(notebook);
}
}
@@ -123,15 +123,23 @@ struct SourceTextInner {
read_error: Option<SourceTextError>,
}

#[derive(Eq, PartialEq, get_size2::GetSize)]
#[derive(Eq, PartialEq)]
enum SourceTextKind {
Text(String),
Notebook {
// Jupyter notebooks are not very relevant for memory profiling, and contain
// arbitrary JSON values that do not implement the `GetSize` trait.
#[get_size(ignore)]
notebook: Box<Notebook>,
},
Notebook(Box<Notebook>),
}

impl get_size2::GetSize for SourceTextKind {
fn get_heap_size(&self) -> usize {
match self {
SourceTextKind::Text(text) => text.get_heap_size(),
// TODO: The `get-size` derive does not support ignoring enum variants.
//
// Jupyter notebooks are not very relevant for memory profiling, and contain
// arbitrary JSON values that do not implement the `GetSize` trait.
SourceTextKind::Notebook(_) => 0,
}
}
}

impl From<String> for SourceTextKind {
@@ -142,9 +150,7 @@ impl From<String> for SourceTextKind {

impl From<Notebook> for SourceTextKind {
fn from(notebook: Notebook) -> Self {
SourceTextKind::Notebook {
notebook: Box::new(notebook),
}
SourceTextKind::Notebook(Box::new(notebook))
}
}

@@ -157,7 +163,7 @@ pub enum SourceTextError {
}

/// Computes the [`LineIndex`] for `file`.
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
let _span = tracing::trace_span!("line_index", ?file).entered();

@@ -46,7 +46,7 @@ pub type Result<T> = std::io::Result<T>;
/// * File watching isn't supported.
///
/// Abstracting the system also enables tests to use a more efficient in-memory file system.
pub trait System: Debug + Sync + Send {
pub trait System: Debug {
/// Reads the metadata of the file or directory at `path`.
///
/// This function will traverse symbolic links to query information about the destination file.
@@ -197,8 +197,6 @@ pub trait System: Debug + Sync + Send {
fn as_any(&self) -> &dyn std::any::Any;

fn as_any_mut(&mut self) -> &mut dyn std::any::Any;

fn dyn_clone(&self) -> Box<dyn System>;
}

#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]

@@ -1,5 +1,3 @@
#![allow(clippy::disallowed_methods)]

use super::walk_directory::{
self, DirectoryWalker, WalkDirectoryBuilder, WalkDirectoryConfiguration,
WalkDirectoryVisitorBuilder, WalkState,
@@ -257,10 +255,6 @@ impl System for OsSystem {
fn env_var(&self, name: &str) -> std::result::Result<String, std::env::VarError> {
std::env::var(name)
}

fn dyn_clone(&self) -> Box<dyn System> {
Box::new(self.clone())
}
}

impl OsSystem {

@@ -236,7 +236,7 @@ impl SystemPath {
///
/// [`CurDir`]: camino::Utf8Component::CurDir
#[inline]
pub fn components(&self) -> camino::Utf8Components<'_> {
pub fn components(&self) -> camino::Utf8Components {
self.0.components()
}

@@ -762,7 +762,7 @@ impl SystemVirtualPath {
}

/// An owned, virtual path on [`System`](`super::System`) (akin to [`String`]).
#[derive(Eq, PartialEq, Clone, Hash, PartialOrd, Ord, get_size2::GetSize)]
#[derive(Eq, PartialEq, Clone, Hash, PartialOrd, Ord)]
pub struct SystemVirtualPathBuf(String);

impl SystemVirtualPathBuf {

@@ -146,10 +146,6 @@ impl System for TestSystem {
fn case_sensitivity(&self) -> CaseSensitivity {
self.system().case_sensitivity()
}

fn dyn_clone(&self) -> Box<dyn System> {
Box::new(self.clone())
}
}

impl Default for TestSystem {
@@ -398,13 +394,6 @@ impl System for InMemorySystem {
fn case_sensitivity(&self) -> CaseSensitivity {
CaseSensitivity::CaseSensitive
}

fn dyn_clone(&self) -> Box<dyn System> {
Box::new(Self {
user_config_directory: Mutex::new(self.user_config_directory.lock().unwrap().clone()),
memory_fs: self.memory_fs.clone(),
})
}
}

impl WritableSystem for InMemorySystem {

@@ -195,7 +195,7 @@ impl VendoredFileSystem {
///
/// ## Panics:
/// If the current thread already holds the lock.
fn lock_archive(&self) -> LockedZipArchive<'_> {
fn lock_archive(&self) -> LockedZipArchive {
self.inner.lock().unwrap()
}
}
@@ -360,7 +360,7 @@ impl VendoredZipArchive {
Ok(Self(ZipArchive::new(io::Cursor::new(data))?))
}

fn lookup_path(&mut self, path: &NormalizedVendoredPath) -> Result<ZipFile<'_>> {
fn lookup_path(&mut self, path: &NormalizedVendoredPath) -> Result<ZipFile> {
Ok(self.0.by_name(path.as_str())?)
}

@@ -37,7 +37,7 @@ impl VendoredPath {
self.0.as_std_path()
}

pub fn components(&self) -> Utf8Components<'_> {
pub fn components(&self) -> Utf8Components {
self.0.components()
}

@@ -13,7 +13,6 @@ license = { workspace = true }
[dependencies]
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ty_python_semantic = { workspace = true }
ty_static = { workspace = true }
ruff = { workspace = true }
ruff_formatter = { workspace = true }

@@ -348,7 +348,7 @@ fn format_dev_multi_project(
debug!(parent: None, "Starting {}", project_path.display());

match format_dev_project(
std::slice::from_ref(&project_path),
&[project_path.clone()],
args.stability_check,
args.write,
args.preview,
@@ -628,7 +628,7 @@ struct CheckRepoResult {
}

impl CheckRepoResult {
fn display(&self, format: Format) -> DisplayCheckRepoResult<'_> {
fn display(&self, format: Format) -> DisplayCheckRepoResult {
DisplayCheckRepoResult {
result: self,
format,
@@ -665,7 +665,7 @@ struct Diagnostic {
}

impl Diagnostic {
fn display(&self, format: Format) -> DisplayDiagnostic<'_> {
fn display(&self, format: Format) -> DisplayDiagnostic {
DisplayDiagnostic {
diagnostic: self,
format,

@@ -52,7 +52,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
}

fn generate_markdown() -> String {
let registry = ty_python_semantic::default_lint_registry();
let registry = &*ty_project::DEFAULT_LINT_REGISTRY;

let mut output = String::new();

@@ -14,11 +14,8 @@ license = { workspace = true }
doctest = false

[dependencies]
ruff_text_size = { workspace = true, features = ["get-size"] }
ruff_text_size = { workspace = true }

get-size2 = { workspace = true }
is-macro = { workspace = true }
serde = { workspace = true, optional = true, features = [] }

[features]
serde = ["dep:serde", "ruff_text_size/serde"]

@@ -43,7 +43,7 @@ pub enum IsolationLevel {
}

/// A collection of [`Edit`] elements to be applied to a source file.
#[derive(Debug, PartialEq, Eq, Clone, Hash, get_size2::GetSize)]
#[derive(Debug, PartialEq, Eq, Clone, get_size2::GetSize)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Fix {
/// The [`Edit`] elements to be applied, sorted by [`Edit::start`] in ascending order.

@@ -562,7 +562,7 @@ struct RemoveSoftLinebreaksSnapshot {
pub trait BufferExtensions: Buffer + Sized {
/// Returns a new buffer that calls the passed inspector for every element that gets written to the output
#[must_use]
fn inspect<F>(&mut self, inspector: F) -> Inspect<'_, Self::Context, F>
fn inspect<F>(&mut self, inspector: F) -> Inspect<Self::Context, F>
where
F: FnMut(&FormatElement),
{
@@ -607,7 +607,7 @@ pub trait BufferExtensions: Buffer + Sized {
/// # }
/// ```
#[must_use]
fn start_recording(&mut self) -> Recording<'_, Self> {
fn start_recording(&mut self) -> Recording<Self> {
Recording::new(self)
}

@@ -340,7 +340,7 @@ impl<Context> Format<Context> for SourcePosition {
/// Creates a text from a dynamic string.
///
/// This is done by allocating a new string internally.
pub fn text(text: &str) -> Text<'_> {
pub fn text(text: &str) -> Text {
debug_assert_no_newlines(text);

Text { text }
@@ -459,10 +459,7 @@ fn debug_assert_no_newlines(text: &str) {
/// # }
/// ```
#[inline]
pub fn line_suffix<Content, Context>(
inner: &Content,
reserved_width: u32,
) -> LineSuffix<'_, Context>
pub fn line_suffix<Content, Context>(inner: &Content, reserved_width: u32) -> LineSuffix<Context>
where
Content: Format<Context>,
{
@@ -600,10 +597,7 @@ impl<Context> Format<Context> for LineSuffixBoundary {
/// Use `Memoized.inspect(f)?.has_label(LabelId::of::<SomeLabelId>()` if you need to know if some content breaks that should
/// only be written later.
#[inline]
pub fn labelled<Content, Context>(
label_id: LabelId,
content: &Content,
) -> FormatLabelled<'_, Context>
pub fn labelled<Content, Context>(label_id: LabelId, content: &Content) -> FormatLabelled<Context>
where
Content: Format<Context>,
{
@@ -706,7 +700,7 @@ impl<Context> Format<Context> for Space {
/// # }
/// ```
#[inline]
pub fn indent<Content, Context>(content: &Content) -> Indent<'_, Context>
pub fn indent<Content, Context>(content: &Content) -> Indent<Context>
where
Content: Format<Context>,
{
@@ -777,7 +771,7 @@ impl<Context> std::fmt::Debug for Indent<'_, Context> {
/// # }
/// ```
#[inline]
pub fn dedent<Content, Context>(content: &Content) -> Dedent<'_, Context>
pub fn dedent<Content, Context>(content: &Content) -> Dedent<Context>
where
Content: Format<Context>,
{
@@ -852,7 +846,7 @@ impl<Context> std::fmt::Debug for Dedent<'_, Context> {
///
/// This resembles the behaviour of Prettier's `align(Number.NEGATIVE_INFINITY, content)` IR element.
#[inline]
pub fn dedent_to_root<Content, Context>(content: &Content) -> Dedent<'_, Context>
pub fn dedent_to_root<Content, Context>(content: &Content) -> Dedent<Context>
where
Content: Format<Context>,
{
@@ -966,7 +960,7 @@ where
///
/// - tab indentation: Printer indents the expression with two tabs because the `align` increases the indentation level.
/// - space indentation: Printer indents the expression by 4 spaces (one indentation level) **and** 2 spaces for the align.
pub fn align<Content, Context>(count: u8, content: &Content) -> Align<'_, Context>
pub fn align<Content, Context>(count: u8, content: &Content) -> Align<Context>
where
Content: Format<Context>,
{
@@ -1036,7 +1030,7 @@ impl<Context> std::fmt::Debug for Align<'_, Context> {
/// # }
/// ```
#[inline]
pub fn block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<'_, Context> {
pub fn block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<Context> {
BlockIndent {
content: Argument::new(content),
mode: IndentMode::Block,
@@ -1107,7 +1101,7 @@ pub fn block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<'_,
/// # }
/// ```
#[inline]
pub fn soft_block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<'_, Context> {
pub fn soft_block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<Context> {
BlockIndent {
content: Argument::new(content),
mode: IndentMode::Soft,
@@ -1181,9 +1175,7 @@ pub fn soft_block_indent<Context>(content: &impl Format<Context>) -> BlockIndent
/// # }
/// ```
#[inline]
pub fn soft_line_indent_or_space<Context>(
content: &impl Format<Context>,
) -> BlockIndent<'_, Context> {
pub fn soft_line_indent_or_space<Context>(content: &impl Format<Context>) -> BlockIndent<Context> {
BlockIndent {
content: Argument::new(content),
mode: IndentMode::SoftLineOrSpace,
@@ -1316,9 +1308,7 @@ impl<Context> std::fmt::Debug for BlockIndent<'_, Context> {
/// # Ok(())
/// # }
/// ```
pub fn soft_space_or_block_indent<Context>(
content: &impl Format<Context>,
) -> BlockIndent<'_, Context> {
pub fn soft_space_or_block_indent<Context>(content: &impl Format<Context>) -> BlockIndent<Context> {
BlockIndent {
content: Argument::new(content),
mode: IndentMode::SoftSpace,
@@ -1398,7 +1388,7 @@ pub fn soft_space_or_block_indent<Context>(
/// # }
/// ```
#[inline]
pub fn group<Context>(content: &impl Format<Context>) -> Group<'_, Context> {
pub fn group<Context>(content: &impl Format<Context>) -> Group<Context> {
Group {
content: Argument::new(content),
id: None,
@@ -1561,7 +1551,7 @@ impl<Context> std::fmt::Debug for Group<'_, Context> {
#[inline]
pub fn best_fit_parenthesize<Context>(
content: &impl Format<Context>,
) -> BestFitParenthesize<'_, Context> {
) -> BestFitParenthesize<Context> {
BestFitParenthesize {
content: Argument::new(content),
group_id: None,
@@ -1701,7 +1691,7 @@ impl<Context> std::fmt::Debug for BestFitParenthesize<'_, Context> {
pub fn conditional_group<Content, Context>(
content: &Content,
condition: Condition,
) -> ConditionalGroup<'_, Context>
) -> ConditionalGroup<Context>
where
Content: Format<Context>,
{
@@ -1862,7 +1852,7 @@ impl<Context> Format<Context> for ExpandParent {
/// # }
/// ```
#[inline]
pub fn if_group_breaks<Content, Context>(content: &Content) -> IfGroupBreaks<'_, Context>
pub fn if_group_breaks<Content, Context>(content: &Content) -> IfGroupBreaks<Context>
where
Content: Format<Context>,
{
@@ -1943,7 +1933,7 @@ where
/// # }
/// ```
#[inline]
pub fn if_group_fits_on_line<Content, Context>(flat_content: &Content) -> IfGroupBreaks<'_, Context>
pub fn if_group_fits_on_line<Content, Context>(flat_content: &Content) -> IfGroupBreaks<Context>
where
Content: Format<Context>,
{
@@ -2132,7 +2122,7 @@ impl<Context> std::fmt::Debug for IfGroupBreaks<'_, Context> {
pub fn indent_if_group_breaks<Content, Context>(
content: &Content,
group_id: GroupId,
) -> IndentIfGroupBreaks<'_, Context>
) -> IndentIfGroupBreaks<Context>
where
Content: Format<Context>,
{
@@ -2215,7 +2205,7 @@ impl<Context> std::fmt::Debug for IndentIfGroupBreaks<'_, Context> {
/// # Ok(())
/// # }
/// ```
pub fn fits_expanded<Content, Context>(content: &Content) -> FitsExpanded<'_, Context>
pub fn fits_expanded<Content, Context>(content: &Content) -> FitsExpanded<Context>
where
Content: Format<Context>,
{

@@ -197,7 +197,7 @@ pub const LINE_TERMINATORS: [char; 3] = ['\r', LINE_SEPARATOR, PARAGRAPH_SEPARAT
|
||||
|
||||
/// Replace the line terminators matching the provided list with "\n"
|
||||
/// since its the only line break type supported by the printer
|
||||
pub fn normalize_newlines<const N: usize>(text: &str, terminators: [char; N]) -> Cow<'_, str> {
|
||||
pub fn normalize_newlines<const N: usize>(text: &str, terminators: [char; N]) -> Cow<str> {
|
||||
let mut result = String::new();
|
||||
let mut last_end = 0;
|
||||
|
||||
|
||||
@@ -222,7 +222,7 @@ impl FormatContext for IrFormatContext<'_> {
|
||||
&IrFormatOptions
|
||||
}
|
||||
|
||||
fn source_code(&self) -> SourceCode<'_> {
|
||||
fn source_code(&self) -> SourceCode {
|
||||
self.source_code
|
||||
}
|
||||
}
|
||||
|
||||
@@ -193,7 +193,7 @@ pub trait FormatContext {
|
||||
fn options(&self) -> &Self::Options;
|
||||
|
||||
/// Returns the source code from the document that gets formatted.
|
||||
fn source_code(&self) -> SourceCode<'_>;
|
||||
fn source_code(&self) -> SourceCode;
|
||||
}
|
||||
|
||||
/// Options customizing how the source code should be formatted.
|
||||
@@ -239,7 +239,7 @@ impl FormatContext for SimpleFormatContext {
|
||||
&self.options
|
||||
}
|
||||
|
||||
fn source_code(&self) -> SourceCode<'_> {
|
||||
fn source_code(&self) -> SourceCode {
|
||||
SourceCode::new(&self.source_code)
|
||||
}
|
||||
}
|
||||
@@ -326,7 +326,7 @@ where
|
||||
printer.print_with_indent(&self.document, indent)
|
||||
}
|
||||
|
||||
fn create_printer(&self) -> Printer<'_> {
|
||||
fn create_printer(&self) -> Printer {
|
||||
let source_code = self.context.source_code();
|
||||
let print_options = self.context.options().as_print_options();
|
||||
|
||||
|
||||
@@ -69,7 +69,7 @@ impl<'a> Resolver<'a> {
|
||||
}
|
||||
|
||||
/// Resolves a module name to a module.
|
||||
pub(crate) fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
|
||||
fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
|
||||
let module = resolve_module(self.db, module_name)?;
|
||||
Some(module.file(self.db)?.path(self.db))
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.12.10"
|
||||
version = "0.12.5"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -13,6 +13,7 @@ license = { workspace = true }
|
||||
[lib]
|
||||
|
||||
[dependencies]
|
||||
ruff_annotate_snippets = { workspace = true }
|
||||
ruff_cache = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["junit", "serde"] }
|
||||
ruff_diagnostics = { workspace = true, features = ["serde"] }
|
||||
|
||||
@@ -13,7 +13,6 @@ from airflow.api_connexion.security import requires_access
|
||||
from airflow.contrib.aws_athena_hook import AWSAthenaHook
|
||||
from airflow.datasets import DatasetAliasEvent
|
||||
from airflow.operators.subdag import SubDagOperator
|
||||
from airflow.secrets.cache import SecretCache
|
||||
from airflow.secrets.local_filesystem import LocalFilesystemBackend
|
||||
from airflow.triggers.external_task import TaskStateTrigger
|
||||
from airflow.utils import dates
|
||||
@@ -57,9 +56,6 @@ SubDagOperator()
|
||||
# get_connection
|
||||
LocalFilesystemBackend()
|
||||
|
||||
# airflow.secrets.cache
|
||||
SecretCache()
|
||||
|
||||
|
||||
# airflow.triggers.external_task
|
||||
TaskStateTrigger()
|
||||
|
||||
@@ -34,7 +34,7 @@ task_group()
|
||||
setup()
|
||||
from airflow.decorators import teardown
|
||||
from airflow.io.path import ObjectStoragePath
|
||||
from airflow.io.store import attach
|
||||
from airflow.io.storage import attach
|
||||
from airflow.models import DAG as DAGFromModel
|
||||
from airflow.models import (
|
||||
Connection,
|
||||
|
||||
@@ -89,14 +89,3 @@ print(1)
# ///
#
# Foobar


# Regression tests for https://github.com/astral-sh/ruff/issues/19713

# mypy: ignore-errors
# pyright: ignore-errors
# pyrefly: ignore-errors
# ty: ignore[unresolved-import]
# pyrefly: ignore[unused-import]

print(1)

@@ -154,11 +154,6 @@ try:
except Exception as e:
    raise ValueError from e

try:
    ...
except Exception as e:
    raise e from ValueError("hello")


try:
    pass

@@ -167,92 +162,3 @@ except Exception:
    exception("An error occurred")
else:
    exception("An error occurred")

# Test tuple exceptions
try:
    pass
except (Exception,):
    pass

try:
    pass
except (Exception, ValueError):
    pass

try:
    pass
except (ValueError, Exception):
    pass

try:
    pass
except (ValueError, Exception) as e:
    print(e)

try:
    pass
except (BaseException, TypeError):
    pass

try:
    pass
except (TypeError, BaseException):
    pass

try:
    pass
except (Exception, BaseException):
    pass

try:
    pass
except (BaseException, Exception):
    pass

# Test nested tuples
try:
    pass
except ((Exception, ValueError), TypeError):
    pass

try:
    pass
except (ValueError, (BaseException, TypeError)):
    pass

# Test valid tuple exceptions (should not trigger)
try:
    pass
except (ValueError, TypeError):
    pass

try:
    pass
except (OSError, FileNotFoundError):
    pass

try:
    pass
except (OSError, FileNotFoundError) as e:
    print(e)

try:
    pass
except (Exception, ValueError):
    critical("...", exc_info=True)

try:
    pass
except (Exception, ValueError):
    raise

try:
    pass
except (Exception, ValueError) as e:
    raise e

# `from None` cause
try:
    pass
except BaseException as e:
    raise e from None

@@ -1,43 +0,0 @@
class C: a = None
{C.a: None for C.a in "abc"}
print(C.a)

x = [None]
{x[0]: None for x[0] in "abc"}
print(x)

class C(list):
    def __getitem__(self, index, /):
        item = super().__getitem__(index)
        if isinstance(index, slice): item = tuple(item)
        return item
x = C()
{x[:0]: None for x[:0] in "abc"}
print(x)


class C:
    a = None

def func():
    {(C.a,): None for (C.a,) in "abc"} # OK


def func():
    obj = type('obj', (), {'attr': 1})()
    {(obj.attr,): None for (obj.attr,) in "abc"} # OK


def func():
    lst = [1, 2, 3]
    {(lst[0],): None for (lst[0],) in "abc"} # OK


def func():
    lst = [1, 2, 3, 4, 5]
    {(lst[1:3],): None for (lst[1:3],) in "abc"} # OK


# C420: side-effecting assignment targets
# These should NOT trigger C420 because they have side-effecting assignment targets
# See https://github.com/astral-sh/ruff/issues/19511

@@ -88,25 +88,3 @@ def f_multi_line_string2():
            example="example"
        )
    )


def raise_typing_cast_exception():
    import typing
    raise typing.cast("Exception", None)


def f_typing_cast_excluded():
    from typing import cast
    raise cast(RuntimeError, "This should not trigger EM101")


def f_typing_cast_excluded_import():
    import typing
    raise typing.cast(RuntimeError, "This should not trigger EM101")


def f_typing_cast_excluded_aliased():
    from typing import cast as my_cast
    raise my_cast(RuntimeError, "This should not trigger EM101")

@@ -39,11 +39,6 @@ class NonEmptyWithInit:
        pass


class NonEmptyChildWithInlineComment:
    value: int
    ... # preserve me


class EmptyClass:
    ...

@@ -38,10 +38,6 @@ class NonEmptyWithInit:
    def __init__():
        pass

class NonEmptyChildWithInlineComment:
    value: int
    ... # preserve me

# Not violations

class EmptyClass: ...

@@ -129,44 +129,4 @@ print(" x ".rsplit(maxsplit=0))
print(" x ".rsplit(maxsplit=0))
print(" x ".rsplit(sep=None, maxsplit=0))
print(" x ".rsplit(maxsplit=0))
print(" x ".rsplit(sep=None, maxsplit=0))

# https://github.com/astral-sh/ruff/issues/19581 - embedded quotes in raw strings
r"""simple@example.com
very.common@example.com
FirstName.LastName@EasierReading.org
x@example.com
long.email-address-with-hyphens@and.subdomains.example.com
user.name+tag+sorting@example.com
name/surname@example.com
xample@s.example
" "@example.org
"john..doe"@example.org
mailhost!username@example.org
"very.(),:;<>[]\".VERY.\"very@\\ \"very\".unusual"@strange.example.com
user%example.com@example.org
user-@example.org
I❤️CHOCOLATE@example.com
this\ still\"not\\allowed@example.com
stellyamburrr985@example.com
Abc.123@example.com
user+mailbox/department=shipping@example.com
!#$%&'*+-/=?^_`.{|}~@example.com
"Abc@def"@example.com
"Fred\ Bloggs"@example.com
"Joe.\\Blow"@example.com""".split("\n")


r"""first
'no need' to escape
"swap" quote style
"use' ugly triple quotes""".split("\n")

# https://github.com/astral-sh/ruff/issues/19845
print("S\x1cP\x1dL\x1eI\x1fT".split())
print("\x1c\x1d\x1e\x1f>".split(maxsplit=0))
print("<\x1c\x1d\x1e\x1f".rsplit(maxsplit=0))

# leading/trailing whitespace should not count towards maxsplit
" a b c d ".split(maxsplit=2) # ["a", "b", "c d "]
" a b c d ".rsplit(maxsplit=2) # [" a b", "c", "d"]
print(" x ".rsplit(sep=None, maxsplit=0))

@@ -1,4 +1,4 @@
from pathlib import Path, PurePath, PosixPath, PurePosixPath, WindowsPath, PureWindowsPath
from pathlib import Path, PurePath
from pathlib import Path as pth

@@ -68,11 +68,3 @@ Path(".", "folder")
PurePath(".", "folder")

Path()

from importlib.metadata import PackagePath

_ = PosixPath(".")
_ = PurePosixPath(".")
_ = WindowsPath(".")
_ = PureWindowsPath(".")
_ = PackagePath(".")

@@ -13,11 +13,3 @@ Path("tmp/python").symlink_to("usr/bin/python", target_is_directory=True) # Ok
fd = os.open(".", os.O_RDONLY)
os.symlink("source.txt", "link.txt", dir_fd=fd) # Ok: dir_fd is not supported by pathlib
os.close(fd)

os.symlink(src="usr/bin/python", dst="tmp/python", unknown=True)
os.symlink("usr/bin/python", dst="tmp/python", target_is_directory=False)

os.symlink(src="usr/bin/python", dst="tmp/python", dir_fd=None)

os.symlink("usr/bin/python", dst="tmp/python", target_is_directory= True )
os.symlink("usr/bin/python", dst="tmp/python", target_is_directory="nonboolean")

@@ -106,22 +106,4 @@ os.replace("src", "dst", src_dir_fd=1)
os.replace("src", "dst", dst_dir_fd=2)

os.getcwd()
os.getcwdb()

os.mkdir(path="directory")

os.mkdir(
    # comment 1
    "directory",
    mode=0o777
)

os.mkdir("directory", mode=0o777, dir_fd=1)

os.makedirs("name", 0o777, exist_ok=False)

os.makedirs("name", 0o777, False)

os.makedirs(name="name", mode=0o777, exist_ok=False)

os.makedirs("name", unknown_kwarg=True)
os.getcwdb()

@@ -1,3 +0,0 @@
"""Hello, world!"""\

x = 1; y = 2
Some files were not shown because too many files have changed in this diff.