Compare commits

10 commits: alex/newty ... zb/dev-dri

| SHA1 |
|---|
| 40b4aa28f9 |
| ea4bf00c23 |
| 7f4aa4b3fb |
| 34c98361ae |
| 38bb96a6c2 |
| a014d55455 |
| 306f6f17a9 |
| b233888f00 |
| 540cbd9085 |
| 0112f7f0e4 |
.github/CODEOWNERS — 6 changed lines
@@ -19,10 +19,6 @@
 # ty
-/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
 /crates/ruff_db/ @carljm @MichaReiser @sharkdp @dcreager
 /crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager
 /crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager
 /crates/ty/ @carljm @MichaReiser @sharkdp @dcreager
 /crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager
-/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
-/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
-/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
.github/workflows/build-binaries.yml — 20 changed lines
@@ -49,7 +49,7 @@ jobs:
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build sdist"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     command: sdist
     args: --out dist
@@ -79,7 +79,7 @@ jobs:
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build wheels - x86_64"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     target: x86_64
     args: --release --locked --out dist
@@ -121,7 +121,7 @@ jobs:
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build wheels - aarch64"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     target: aarch64
     args: --release --locked --out dist
@@ -177,7 +177,7 @@ jobs:
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build wheels"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     target: ${{ matrix.platform.target }}
     args: --release --locked --out dist
@@ -230,7 +230,7 @@ jobs:
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build wheels"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     target: ${{ matrix.target }}
     manylinux: auto
@@ -292,8 +292,6 @@ jobs:
     maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
 - target: arm-unknown-linux-musleabihf
   arch: arm
-- target: riscv64gc-unknown-linux-gnu
-  arch: riscv64

 steps:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -306,7 +304,7 @@ jobs:
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build wheels"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     target: ${{ matrix.platform.target }}
     manylinux: auto
@@ -321,7 +319,7 @@ jobs:
   githubToken: ${{ github.token }}
   install: |
     apt-get update
-    apt-get install -y --no-install-recommends python3 python3-pip libatomic1
+    apt-get install -y --no-install-recommends python3 python3-pip
     pip3 install -U pip
   run: |
     pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
@@ -372,7 +370,7 @@ jobs:
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build wheels"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     target: ${{ matrix.target }}
     manylinux: musllinux_1_2
@@ -437,7 +435,7 @@ jobs:
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build wheels"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     target: ${{ matrix.platform.target }}
     manylinux: musllinux_1_2
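Every change in this file has the same shape: a third-party action moves between two pins while staying locked to a full commit SHA. A minimal sketch of the pinning convention, reusing a step and pin that appear verbatim in the hunks above (nothing here is new):

```yaml
- name: "Build sdist"
  # Pin to an immutable 40-character commit SHA; the trailing comment records
  # the release tag that SHA corresponded to when it was pinned.
  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
  with:
    command: sdist
    args: --out dist
```

Pinning to a SHA rather than a tag means a compromised or retagged release cannot silently change what runs in CI; the comment exists only for human readers and tooling like Dependabot.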
.github/workflows/build-docker.yml — 16 changed lines
@@ -40,7 +40,7 @@ jobs:

 - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

-- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
   with:
     registry: ghcr.io
     username: ${{ github.repository_owner }}
@@ -63,7 +63,7 @@ jobs:

 - name: Extract metadata (tags, labels) for Docker
   id: meta
-  uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+  uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
   with:
     images: ${{ env.RUFF_BASE_IMG }}
     # Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
@@ -123,7 +123,7 @@ jobs:

 - name: Extract metadata (tags, labels) for Docker
   id: meta
-  uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+  uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
   with:
     images: ${{ env.RUFF_BASE_IMG }}
     # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
@@ -131,7 +131,7 @@ jobs:
     type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
     type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

-- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
   with:
     registry: ghcr.io
     username: ${{ github.repository_owner }}
@@ -169,7 +169,7 @@ jobs:
 steps:
 - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

-- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
   with:
     registry: ghcr.io
     username: ${{ github.repository_owner }}
@@ -219,7 +219,7 @@ jobs:

 - name: Extract metadata (tags, labels) for Docker
   id: meta
-  uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+  uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
   # ghcr.io prefers index level annotations
   env:
     DOCKER_METADATA_ANNOTATIONS_LEVELS: index
@@ -266,7 +266,7 @@ jobs:

 - name: Extract metadata (tags, labels) for Docker
   id: meta
-  uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+  uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
   env:
     DOCKER_METADATA_ANNOTATIONS_LEVELS: index
   with:
@@ -276,7 +276,7 @@ jobs:
     type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
     type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

-- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
   with:
     registry: ghcr.io
     username: ${{ github.repository_owner }}
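The `type=pep440` patterns above control which image tags docker/metadata-action emits, and — as the context comment notes — their order decides which tag becomes the `org.opencontainers.image.version` label. A hedged sketch of the expansion, assuming an announcement tag of `0.13.1` and an image name of `ghcr.io/astral-sh/ruff` (both values are illustrative, not taken from this diff):

```yaml
# With value=0.13.1, the first pattern would yield ghcr.io/astral-sh/ruff:0.13.1
# (and, being listed first, the OCI version label), while the second would
# yield the floating minor tag ghcr.io/astral-sh/ruff:0.13.
tags: |
  type=pep440,pattern={{ version }},value=0.13.1
  type=pep440,pattern={{ major }}.{{ minor }},value=0.13.1
```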
.github/workflows/ci.yaml — 135 changed lines
@@ -38,8 +38,7 @@ jobs:
   fuzz: ${{ steps.check_fuzzer.outputs.changed }}
   # Flag that is set to "true" when code related to ty changes.
   ty: ${{ steps.check_ty.outputs.changed }}
-  # Flag that is set to "true" when code related to the py-fuzzer folder changes.
-  py-fuzzer: ${{ steps.check_py_fuzzer.outputs.changed }}
+
   # Flag that is set to "true" when code related to the playground changes.
   playground: ${{ steps.check_playground.outputs.changed }}
 steps:
@@ -69,6 +68,7 @@ jobs:
   ':crates/ruff_text_size/**' \
   ':crates/ruff_python_ast/**' \
   ':crates/ruff_python_parser/**' \
+  ':python/py-fuzzer/**' \
   ':.github/workflows/ci.yaml' \
 ; then
   echo "changed=false" >> "$GITHUB_OUTPUT"
@@ -138,29 +138,17 @@ jobs:
     echo "changed=true" >> "$GITHUB_OUTPUT"
   fi

-- name: Check if the py-fuzzer code changed
-  id: check_py_fuzzer
-  env:
-    MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
-  run: |
-    if git diff --quiet "${MERGE_BASE}...HEAD" -- 'python/py_fuzzer/**' \
-    ; then
-      echo "changed=false" >> "$GITHUB_OUTPUT"
-    else
-      echo "changed=true" >> "$GITHUB_OUTPUT"
-    fi
-
 - name: Check if there was any code related change
   id: check_code
   env:
     MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
   run: |
     # NOTE: Do not exclude all Markdown files here, but rather use
     # specific exclude patterns like 'docs/**'), because tests for
     # 'ty' are written in Markdown.
-    if git diff --quiet "${MERGE_BASE}...HEAD" -- \
+    if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
       ':!**/*.md' \
       ':crates/ty_python_semantic/resources/mdtest/**/*.md' \
       ':!docs/**' \
       ':!assets/**' \
       ':.github/workflows/ci.yaml' \
     ; then
       echo "changed=false" >> "$GITHUB_OUTPUT"
     else
@@ -226,7 +214,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Install Rust toolchain"
   run: |
     rustup component add clippy
@@ -246,17 +234,17 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Install Rust toolchain"
   run: rustup show
 - name: "Install mold"
-  uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
+  uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
 - name: "Install cargo nextest"
-  uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
   with:
     tool: cargo-nextest
 - name: "Install cargo insta"
-  uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
   with:
     tool: cargo-insta
 - name: ty mdtests (GitHub annotations)
@@ -304,17 +292,17 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Install Rust toolchain"
   run: rustup show
 - name: "Install mold"
-  uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
+  uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
 - name: "Install cargo nextest"
-  uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
   with:
     tool: cargo-nextest
 - name: "Install cargo insta"
-  uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
   with:
     tool: cargo-insta
 - name: "Run tests"
@@ -333,14 +321,30 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- name: Setup Dev Drive
+  run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1
+
+# actions/checkout does not let us clone into anywhere outside `github.workspace`, so we have to copy the clone
+- name: Copy Git Repo to Dev Drive
+  env:
+    RUFF_WORKSPACE: ${{ env.RUFF_WORKSPACE }}
+  run: |
+    Copy-Item -Path "${{ github.workspace }}" -Destination "${env:RUFF_WORKSPACE}" -Recurse
+
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+  with:
+    workspaces: ${{ env.RUFF_WORKSPACE }}
+
 - name: "Install Rust toolchain"
+  working-directory: ${{ env.RUFF_WORKSPACE }}
   run: rustup show
+
 - name: "Install cargo nextest"
-  uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
   with:
     tool: cargo-nextest
 - name: "Run tests"
+  working-directory: ${{ env.RUFF_WORKSPACE }}
   shell: bash
   env:
     NEXTEST_PROFILE: "ci"
@@ -360,7 +364,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Install Rust toolchain"
   run: rustup target add wasm32-unknown-unknown
 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
@@ -389,11 +393,11 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Install Rust toolchain"
   run: rustup show
 - name: "Install mold"
-  uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
+  uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
 - name: "Build"
   run: cargo build --release --locked
@@ -412,18 +416,27 @@ jobs:
   with:
     file: "Cargo.toml"
     field: "workspace.package.rust-version"
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Install Rust toolchain"
   env:
     MSRV: ${{ steps.msrv.outputs.value }}
   run: rustup default "${MSRV}"
 - name: "Install mold"
-  uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
-- name: "Build tests"
+  uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
+- name: "Install cargo nextest"
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
+  with:
+    tool: cargo-nextest
+- name: "Install cargo insta"
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
+  with:
+    tool: cargo-insta
+- name: "Run tests"
   shell: bash
   env:
+    NEXTEST_PROFILE: "ci"
     MSRV: ${{ steps.msrv.outputs.value }}
-  run: cargo "+${MSRV}" test --no-run --all-features
+  run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest

 cargo-fuzz-build:
   name: "cargo fuzz build"
@@ -435,13 +448,13 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
   with:
     workspaces: "fuzz -> target"
 - name: "Install Rust toolchain"
   run: rustup show
 - name: "Install cargo-binstall"
-  uses: cargo-bins/cargo-binstall@0dca8cf8dfb40cb77a29cece06933ce674674523 # v1.15.1
+  uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
   with:
     tool: cargo-fuzz@0.11.2
 - name: "Install cargo-fuzz"
@@ -455,7 +468,7 @@ jobs:
 needs:
   - cargo-test-linux
   - determine_changes
-if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.parser == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
+if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
 timeout-minutes: 20
 env:
   FORCE_COLOR: 1
@@ -463,7 +476,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
   name: Download Ruff binary to test
   id: download-cached-binary
@@ -497,7 +510,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Install Rust toolchain"
   run: rustup component add rustfmt
 # Run all code generation scripts, and verify that the current output is
@@ -645,7 +658,7 @@ jobs:
   - cargo-test-linux
   - determine_changes
 # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
-if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
+if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.ty == 'true' }}
 timeout-minutes: 20
 steps:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -664,7 +677,7 @@ jobs:
   branch: ${{ github.event.pull_request.base.ref }}
   workflow: "ci.yaml"
   check_artifacts: true
-- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
 - name: Fuzz
   env:
     FORCE_COLOR: 1
@@ -694,7 +707,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: cargo-bins/cargo-binstall@0dca8cf8dfb40cb77a29cece06933ce674674523 # v1.15.1
+- uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
 - run: cargo binstall --no-confirm cargo-shear
 - run: cargo shear

@@ -711,11 +724,11 @@ jobs:
   with:
     python-version: ${{ env.PYTHON_VERSION }}
     architecture: x64
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Prep README.md"
   run: python scripts/transform_readme.py --target pypi
 - name: "Build wheels"
-  uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+  uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
   with:
     args: --out dist
 - name: "Test wheel"
@@ -734,13 +747,13 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
   with:
     node-version: 22
 - name: "Cache pre-commit"
-  uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+  uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
   with:
     path: ~/.cache/pre-commit
     key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
@@ -768,7 +781,7 @@ jobs:
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
   with:
     python-version: "3.13"
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Add SSH key"
   if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
   uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
@@ -777,7 +790,7 @@ jobs:
 - name: "Install Rust toolchain"
   run: rustup show
 - name: Install uv
-  uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+  uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
 - name: "Install Insiders dependencies"
   if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
   run: uv pip install -r docs/requirements-insiders.txt --system
@@ -807,7 +820,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: "Install Rust toolchain"
   run: rustup show
 - name: "Run checks"
@@ -877,7 +890,7 @@ jobs:
   persist-credentials: false
 - name: "Install Rust toolchain"
   run: rustup target add wasm32-unknown-unknown
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
   with:
     node-version: 22
@@ -908,14 +921,14 @@ jobs:
   with:
     persist-credentials: false

-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

 - name: "Install Rust toolchain"
   run: rustup show

 - name: "Install codspeed"
-  uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
   with:
     tool: cargo-codspeed
@@ -923,7 +936,7 @@ jobs:
   run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark

 - name: "Run benchmarks"
-  uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
+  uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
   with:
     run: cargo codspeed run
     token: ${{ secrets.CODSPEED_TOKEN }}
@@ -941,14 +954,14 @@ jobs:
   with:
     persist-credentials: false

-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

 - name: "Install Rust toolchain"
   run: rustup show

 - name: "Install codspeed"
-  uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
+  uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
   with:
     tool: cargo-codspeed
@@ -956,7 +969,7 @@ jobs:
   run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark

 - name: "Run benchmarks"
-  uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
+  uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
   with:
     run: cargo codspeed run
     token: ${{ secrets.CODSPEED_TOKEN }}
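The `determine_changes` job above gates the expensive CI jobs on path-scoped `git diff` checks against the merge base: `git diff --quiet` exits 0 when nothing under the given pathspecs changed, and `:!pattern` excludes paths from the comparison. A minimal sketch of the idiom, condensed from the hunks above rather than copied verbatim:

```yaml
- name: Check if code changed
  id: check_code
  env:
    MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
  run: |
    # Exit 0 from --quiet means "no changes"; downstream jobs then gate on
    # needs.determine_changes.outputs.<flag> == 'true'.
    if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' ':!**/*.md' ':!docs/**'; then
      echo "changed=false" >> "$GITHUB_OUTPUT"
    else
      echo "changed=true" >> "$GITHUB_OUTPUT"
    fi
```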
.github/workflows/daily_fuzz.yaml — 6 changed lines
@@ -34,12 +34,12 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   with:
     persist-credentials: false
-- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
 - name: "Install Rust toolchain"
   run: rustup show
 - name: "Install mold"
-  uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+  uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: Build ruff
   # A debug build means the script runs slower once it gets started,
   # but this is outweighed by the fact that a release build takes *much* longer to compile in CI
.github/workflows/mypy_primer.yaml — 86 changed lines
@@ -11,9 +11,7 @@ on:
   - "crates/ruff_python_parser"
   - ".github/workflows/mypy_primer.yaml"
-  - ".github/workflows/mypy_primer_comment.yaml"
-  - "scripts/mypy_primer.sh"
   - "Cargo.lock"
   - "!**.md"

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
@@ -39,9 +37,9 @@ jobs:
   persist-credentials: false

 - name: Install the latest version of uv
-  uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+  uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
   with:
     workspaces: "ruff"
@@ -51,12 +49,46 @@ jobs:
 - name: Run mypy_primer
   shell: bash
-  env:
-    PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
-    DIFF_FILE: mypy_primer.diff
-    TY_MEMORY_REPORT: mypy_primer
   run: |
     cd ruff
-    scripts/mypy_primer.sh
-    echo ${{ github.event.number }} > ../pr-number
+
+    echo "Enabling mypy primer specific configuration overloads (see .github/mypy-primer-ty.toml)"
+    mkdir -p ~/.config/ty
+    cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
+
+    PRIMER_SELECTOR="$(paste -s -d'|' crates/ty_python_semantic/resources/primer/good.txt)"
+
+    echo "new commit"
+    git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
+
+    MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
+    git checkout -b base_commit "$MERGE_BASE"
+    echo "base commit"
+    git rev-list --format=%s --max-count=1 base_commit
+
+    cd ..
+
+    echo "Project selector: $PRIMER_SELECTOR"
+    # Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
+    uvx \
+      --from="git+https://github.com/hauntsaninja/mypy_primer@e5f55447969d33ae3c7ccdb183e2a37101867270" \
+      mypy_primer \
+      --repo ruff \
+      --type-checker ty \
+      --old base_commit \
+      --new "$GITHUB_SHA" \
+      --project-selector "/($PRIMER_SELECTOR)\$" \
+      --output concise \
+      --debug > mypy_primer.diff || [ $? -eq 1 ]
+
+    # Output diff with ANSI color codes
+    cat mypy_primer.diff
+
+    # Remove ANSI color codes before uploading
+    sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff
+
+    echo ${{ github.event.number }} > pr-number

 - name: Upload diff
   uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -69,41 +101,3 @@ jobs:
   with:
     name: pr-number
     path: pr-number
-
-memory_usage:
-  name: Run memory statistics
-  runs-on: depot-ubuntu-22.04-32
-  timeout-minutes: 20
-  steps:
-  - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-    with:
-      path: ruff
-      fetch-depth: 0
-      persist-credentials: false
-
-  - name: Install the latest version of uv
-    uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
-
-  - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-    with:
-      workspaces: "ruff"
-
-  - name: Install Rust toolchain
-    run: rustup show
-
-  - name: Run mypy_primer
-    shell: bash
-    env:
-      TY_MAX_PARALLELISM: 1 # for deterministic memory numbers
-      TY_MEMORY_REPORT: mypy_primer
-      PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/memory.txt
-      DIFF_FILE: mypy_primer_memory.diff
-    run: |
-      cd ruff
-      scripts/mypy_primer.sh
-
-  - name: Upload diff
-    uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-    with:
-      name: mypy_primer_memory_diff
-      path: mypy_primer_memory.diff
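Two idioms in the inlined run script above are worth calling out: the `|| [ $? -eq 1 ]` suffix treats mypy_primer's exit code 1 ("differences found") as success while still failing on real crashes, and the `sed` call strips ANSI color escape sequences before the diff is uploaded as an artifact. A hedged, self-contained sketch — the tool name is a placeholder, not part of the workflow:

```yaml
- name: Run comparison tool
  shell: bash
  run: |
    # Exit code 1 means "differences found" and is expected; anything >= 2
    # (a real crash) still fails the step because [ $? -eq 1 ] is then false.
    compare-tool --output concise > out.diff || [ $? -eq 1 ]

    # Strip ANSI color escape sequences (ESC [ ... m) before uploading.
    sed -ie 's/\x1b\[[0-9;]*m//g' out.diff
```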
.github/workflows/mypy_primer_comment.yaml — 31 changed lines
@@ -45,28 +45,15 @@ jobs:
     if_no_artifact_found: ignore
     allow_forks: true

-- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-  name: "Download mypy_primer memory results"
-  id: download-mypy_primer_memory_diff
-  if: steps.pr-number.outputs.pr-number
-  with:
-    name: mypy_primer_memory_diff
-    workflow: mypy_primer.yaml
-    pr: ${{ steps.pr-number.outputs.pr-number }}
-    path: pr/mypy_primer_memory_diff
-    workflow_conclusion: completed
-    if_no_artifact_found: ignore
-    allow_forks: true
-
 - name: Generate comment content
   id: generate-comment
-  if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
+  if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
   run: |
     # Guard against malicious mypy_primer results that symlink to a secret
     # file on this runner
-    if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
+    if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
     then
-      echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
+      echo "Error: mypy_primer.diff cannot be a symlink"
       exit 1
     fi
@@ -87,18 +74,6 @@ jobs:
       echo 'No ecosystem changes detected ✅' >> comment.txt
     fi

-    if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
-      echo '<details>' >> comment.txt
-      echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
-      echo '' >> comment.txt
-      echo '```diff' >> comment.txt
-      cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
-      echo '```' >> comment.txt
-      echo '</details>' >> comment.txt
-    else
-      echo 'No memory usage changes detected ✅' >> comment.txt
-    fi
-
     echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
     cat comment.txt >> "$GITHUB_OUTPUT"
     echo 'EOF' >> "$GITHUB_OUTPUT"
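The comment body is passed between steps with the heredoc-style multi-line output syntax, which is the standard way to put a multi-line string into `$GITHUB_OUTPUT`. A minimal sketch, with illustrative step ids:

```yaml
- name: Emit a multi-line output
  id: generate-comment
  run: |
    # The EOF delimiter lets the output span multiple lines; a later step can
    # then read it as steps.generate-comment.outputs.comment.
    echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
    cat comment.txt >> "$GITHUB_OUTPUT"
    echo 'EOF' >> "$GITHUB_OUTPUT"
- name: Use the output
  run: echo "${{ steps.generate-comment.outputs.comment }}"
```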
.github/workflows/publish-docs.yml — 2 changed lines
@@ -68,7 +68,7 @@ jobs:
 - name: "Install Rust toolchain"
   run: rustup show

-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

 - name: "Install Insiders dependencies"
   if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
.github/workflows/publish-pypi.yml — 2 changed lines
@@ -22,7 +22,7 @@ jobs:
   id-token: write
 steps:
 - name: "Install uv"
-  uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+  uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
   with:
     pattern: wheels-*
.github/workflows/release.yml — 18 changed lines
@@ -61,7 +61,7 @@ jobs:
 env:
   GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 steps:
-- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
+- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
   with:
     persist-credentials: false
     submodules: recursive
@@ -124,19 +124,19 @@ jobs:
   GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
 steps:
-- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
+- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
   with:
     persist-credentials: false
     submodules: recursive
 - name: Install cached dist
-  uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+  uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
   with:
     name: cargo-dist-cache
     path: ~/.cargo/bin/
 - run: chmod +x ~/.cargo/bin/dist
 # Get all the local artifacts for the global tasks to use (for e.g. checksums)
 - name: Fetch local artifacts
-  uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+  uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
   with:
     pattern: artifacts-*
     path: target/distrib/
@@ -175,19 +175,19 @@ jobs:
 outputs:
   val: ${{ steps.host.outputs.manifest }}
 steps:
-- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
+- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
   with:
     persist-credentials: false
     submodules: recursive
 - name: Install cached dist
-  uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+  uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
   with:
     name: cargo-dist-cache
     path: ~/.cargo/bin/
 - run: chmod +x ~/.cargo/bin/dist
 # Fetch artifacts from scratch-storage
 - name: Fetch artifacts
-  uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+  uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
   with:
     pattern: artifacts-*
     path: target/distrib/
@@ -251,13 +251,13 @@ jobs:
 env:
   GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 steps:
-- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
+- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
   with:
     persist-credentials: false
     submodules: recursive
 # Create a GitHub Release while uploading all files to it
 - name: "Download GitHub Artifacts"
-  uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0
+  uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
   with:
     pattern: artifacts-*
     path: artifacts
.github/workflows/setup-dev-drive.ps1 — 93 changed lines (new file)
@@ -0,0 +1,93 @@
# Configures a drive for testing in CI.
#
# When using standard GitHub Actions runners, a `D:` drive is present and has
# similar or better performance characteristics than a ReFS dev drive. Sometimes
# using a larger runner is still more performant (e.g., when running the test
# suite) and we need to create a dev drive. This script automatically configures
# the appropriate drive.
#
# When using GitHub Actions' "larger runners", the `D:` drive is not present and
# we create a DevDrive mount on `C:`. This is purported to be more performant
# than an ReFS drive, though we did not see a change when we switched over.
#
# When using Depot runners, the underlying infrastructure is EC2, which does not
# support Hyper-V. The `New-VHD` commandlet only works with Hyper-V, but we can
# create a ReFS drive using `diskpart` and `format` directly. We cannot use a
# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already,
# so we must check if it's a Depot runner first, and we use `V:` as the target
# instead.

if ($env:DEPOT_RUNNER -eq "1") {
    Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..."

    # Create VHD and configure drive using diskpart
    $vhdPath = "C:\ruff_dev_drive.vhdx"
    @"
create vdisk file="$vhdPath" maximum=20480 type=expandable
attach vdisk
create partition primary
active
assign letter=V
"@ | diskpart

    # Format the drive as ReFS
    format V: /fs:ReFS /q /y
    $Drive = "V:"

    Write-Output "Custom dev drive created at $Drive"
} elseif (Test-Path "D:\") {
    # Note `Get-PSDrive` is not sufficient because the drive letter is assigned.
    Write-Output "Using existing drive at D:"
    $Drive = "D:"
} else {
    # The size (20 GB) is chosen empirically to be large enough for our
    # workflows; larger drives can take longer to set up.
    $Volume = New-VHD -Path C:/ruff_dev_drive.vhdx -SizeBytes 20GB |
        Mount-VHD -Passthru |
        Initialize-Disk -Passthru |
        New-Partition -AssignDriveLetter -UseMaximumSize |
        Format-Volume -DevDrive -Confirm:$false -Force

    $Drive = "$($Volume.DriveLetter):"

    # Set the drive as trusted
    # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-designate-a-dev-drive-as-trusted
    fsutil devdrv trust $Drive

    # Disable antivirus filtering on dev drives
    # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-configure-additional-filters-on-dev-drive
    fsutil devdrv enable /disallowAv

    # Remount so the changes take effect
    Dismount-VHD -Path C:/ruff_dev_drive.vhdx
    Mount-VHD -Path C:/ruff_dev_drive.vhdx

    # Show some debug information
    Write-Output $Volume
    fsutil devdrv query $Drive

    Write-Output "Using Dev Drive at $Volume"
}

$Tmp = "$($Drive)\ruff-tmp"

# Create the directory ahead of time in an attempt to avoid race-conditions
New-Item $Tmp -ItemType Directory

# Move Cargo to the dev drive
New-Item -Path "$($Drive)/.cargo/bin" -ItemType Directory -Force
if (Test-Path "C:/Users/runneradmin/.cargo") {
    Copy-Item -Path "C:/Users/runneradmin/.cargo/*" -Destination "$($Drive)/.cargo/" -Recurse -Force
}

Write-Output `
    "DEV_DRIVE=$($Drive)" `
    "TMP=$($Tmp)" `
    "TEMP=$($Tmp)" `
    "UV_INTERNAL__TEST_DIR=$($Tmp)" `
    "RUSTUP_HOME=$($Drive)/.rustup" `
    "CARGO_HOME=$($Drive)/.cargo" `
    "RUFF_WORKSPACE=$($Drive)/ruff" `
    "PATH=$($Drive)/.cargo/bin;$env:PATH" `
    >> $env:GITHUB_ENV
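The script communicates with the rest of the job purely through `$env:GITHUB_ENV`: every `KEY=value` line appended there becomes an environment variable for all subsequent steps. A minimal sketch of a consuming job, mirroring the ci.yaml hunk earlier in this comparison (the final test command is illustrative):

```yaml
- name: Setup Dev Drive
  run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1
# RUFF_WORKSPACE, CARGO_HOME, TMP, etc. are now visible to every later step.
- name: Copy Git Repo to Dev Drive
  run: Copy-Item -Path "${{ github.workspace }}" -Destination "${env:RUFF_WORKSPACE}" -Recurse
- name: Run tests
  working-directory: ${{ env.RUFF_WORKSPACE }}
  run: cargo nextest run  # illustrative command
```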
.github/workflows/sync_typeshed.yaml — 166 changed lines
@@ -1,25 +1,5 @@
 name: Sync typeshed

-# How this works:
-#
-# 1. A Linux worker:
-#    a. Checks out Ruff and typeshed
-#    b. Deletes the vendored typeshed stdlib stubs from Ruff
-#    c. Copies the latest versions of the stubs from typeshed
-#    d. Uses docstring-adder to sync all docstrings available on Linux
-#    e. Creates a new branch on the upstream astral-sh/ruff repository
-#    f. Commits the changes it's made and pushes them to the new upstream branch
-# 2. Once the Linux worker is done, a Windows worker:
-#    a. Checks out the branch created by the Linux worker
-#    b. Syncs all docstrings available on Windows that are not available on Linux
-#    c. Commits the changes and pushes them to the same upstream branch
-# 3. Once the Windows worker is done, a MacOS worker:
-#    a. Checks out the branch created by the Linux worker
-#    b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
-#    c. Commits the changes and pushes them to the same upstream branch
-#    d. Creates a PR against the `main` branch using the branch all three workers have pushed to
-# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
-
 on:
   workflow_dispatch:
   schedule:
@@ -30,17 +10,7 @@ env:
   FORCE_COLOR: 1
   GH_TOKEN: ${{ github.token }}

-  # The name of the upstream branch that the first worker creates,
-  # and which all three workers push to.
-  UPSTREAM_BRANCH: typeshedbot/sync-typeshed
-
-  # The path to the directory that contains the vendored typeshed stubs,
-  # relative to the root of the Ruff repository.
-  VENDORED_TYPESHED: crates/ty_vendored/vendor/typeshed
-
 jobs:
-  # Sync typeshed stubs, and sync all docstrings available on Linux.
-  # Push the changes to a new branch on the upstream repository.
   sync:
     name: Sync typeshed
     runs-on: ubuntu-latest
@@ -49,6 +19,7 @@ jobs:
 if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
 permissions:
   contents: write
+  pull-requests: write
 steps:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
   name: Checkout Ruff
@@ -65,130 +36,37 @@ jobs:
   run: |
     git config --global user.name typeshedbot
     git config --global user.email '<>'
-- uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
-- name: Sync typeshed stubs
+- name: Sync typeshed
   id: sync
   run: |
-    rm -rf "ruff/${VENDORED_TYPESHED}"
-    mkdir "ruff/${VENDORED_TYPESHED}"
-    cp typeshed/README.md "ruff/${VENDORED_TYPESHED}"
-    cp typeshed/LICENSE "ruff/${VENDORED_TYPESHED}"
-
-    # The pyproject.toml file is needed by a later job for the black configuration.
-    # It's deleted before creating the PR.
-    cp typeshed/pyproject.toml "ruff/${VENDORED_TYPESHED}"
-
-    cp -r typeshed/stdlib "ruff/${VENDORED_TYPESHED}/stdlib"
-    rm -rf "ruff/${VENDORED_TYPESHED}/stdlib/@tests"
-    git -C typeshed rev-parse HEAD > "ruff/${VENDORED_TYPESHED}/source_commit.txt"
-    cd ruff
-    git checkout -b "${UPSTREAM_BRANCH}"
-    git add .
-    git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
-- name: Sync Linux docstrings
-  if: ${{ success() }}
-  run: |
-    cd ruff
-    ./scripts/codemod_docstrings.sh
-    git commit -am "Sync Linux docstrings" --allow-empty
-- name: Push the changes
-  id: commit
-  if: ${{ success() }}
-  run: git -C ruff push --force --set-upstream origin "${UPSTREAM_BRANCH}"
-
-# Checkout the branch created by the sync job,
-# and sync all docstrings available on Windows that are not available on Linux.
-# Commit the changes and push them to the same branch.
-docstrings-windows:
-  runs-on: windows-latest
-  timeout-minutes: 20
-  needs: [sync]
-
-  # Don't run the cron job on forks.
-  # The job will also be skipped if the sync job failed, because it's specified in `needs` above,
-  # and we haven't used `always()` in the `if` condition here
-  # (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
-  if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
-
-  permissions:
-    contents: write
-  steps:
-  - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-    name: Checkout Ruff
-    with:
-      persist-credentials: true
-      ref: ${{ env.UPSTREAM_BRANCH}}
-  - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
-  - name: Setup git
-    run: |
-      git config --global user.name typeshedbot
-      git config --global user.email '<>'
-  - name: Sync Windows docstrings
-    id: docstrings
-    shell: bash
-    run: ./scripts/codemod_docstrings.sh
+    rm -rf ruff/crates/ty_vendored/vendor/typeshed
+    mkdir ruff/crates/ty_vendored/vendor/typeshed
+    cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
+    cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
+    cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
+    rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
+    git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
 - name: Commit the changes
-  if: ${{ steps.docstrings.outcome == 'success' }}
+  id: commit
+  if: ${{ steps.sync.outcome == 'success' }}
   run: |
-    git commit -am "Sync Windows docstrings" --allow-empty
-    git push
-
-# Checkout the branch created by the sync job,
-# and sync all docstrings available on macOS that are not available on Linux or Windows.
-# Push the changes to the same branch and create a PR against the `main` branch using that branch.
-docstrings-macos-and-pr:
-  runs-on: macos-latest
-  timeout-minutes: 20
-  needs: [sync, docstrings-windows]
-
-  # Don't run the cron job on forks.
-  # The job will also be skipped if the sync or docstrings-windows jobs failed,
-  # because they're specified in `needs` above and we haven't used an `always()` condition in the `if` here
-  # (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
-  if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
-
-  permissions:
-    contents: write
-    pull-requests: write
-  steps:
-  - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-    name: Checkout Ruff
-    with:
-      persist-credentials: true
-      ref: ${{ env.UPSTREAM_BRANCH}}
-  - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
-  - name: Setup git
-    run: |
-      git config --global user.name typeshedbot
-      git config --global user.email '<>'
-  - name: Sync macOS docstrings
-    run: ./scripts/codemod_docstrings.sh
-  - name: Commit and push the changes
-    if: ${{ success() }}
-    run: |
-      git commit -am "Sync macOS docstrings" --allow-empty
-
-      # Here we just reformat the codemodded stubs so that they are
-      # consistent with the other typeshed stubs around them.
-      # Typeshed formats code using black in their CI, so we just invoke
-      # black on the stubs the same way that typeshed does.
-      uvx black "${VENDORED_TYPESHED}/stdlib" --config "${VENDORED_TYPESHED}/pyproject.toml" || true
-      git commit -am "Format codemodded docstrings" --allow-empty
-
-      rm "${VENDORED_TYPESHED}/pyproject.toml"
-      git commit -am "Remove pyproject.toml file"
-
-      git push
+    cd ruff
+    git checkout -b typeshedbot/sync-typeshed
+    git add .
+    git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
 - name: Create a PR
-  if: ${{ success() }}
+  if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
   run: |
-    gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
+    cd ruff
+    git push --force origin typeshedbot/sync-typeshed
+    gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
     gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"

 create-issue-on-failure:
   name: Create an issue if the typeshed sync failed
   runs-on: ubuntu-latest
-  needs: [sync, docstrings-windows, docstrings-macos-and-pr]
-  if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
+  needs: [sync]
+  if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
   permissions:
     issues: write
   steps:
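Both sides of this file share the same failure-reporting pattern: downstream jobs declare their dependencies in `needs:`, and the issue-creating job opts back in with `always()` so it still runs when a dependency failed and can inspect `needs.<job>.result`. A minimal sketch of the pattern — the issue-creation step is hypothetical, not copied from the workflow:

```yaml
create-issue-on-failure:
  runs-on: ubuntu-latest
  needs: [sync]
  # Without always(), a job whose `needs` failed is skipped entirely.
  if: ${{ always() && needs.sync.result == 'failure' }}
  permissions:
    issues: write
  steps:
    - run: gh issue create --title "typeshed sync failed" --body "See workflow run."  # hypothetical step
      env:
        GH_TOKEN: ${{ github.token }}
```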
.github/workflows/ty-ecosystem-analyzer.yaml — 80 changed lines
@@ -17,7 +17,6 @@ env:
   RUSTUP_MAX_RETRIES: 10
   RUST_BACKTRACE: 1
   REF_NAME: ${{ github.ref_name }}
-  CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}

 jobs:
   ty-ecosystem-analyzer:
@@ -33,9 +32,9 @@ jobs:
   persist-credentials: false

 - name: Install the latest version of uv
-  uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+  uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

-- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
   with:
     workspaces: "ruff"
@@ -64,75 +63,32 @@ jobs:

     cd ..

-    uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
+    uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@9c34dc514ee9aef6735db1dfebb80f63acbc3440"

     ecosystem-analyzer \
       --repository ruff \
-      diff \
-      --projects-old ruff/projects_old.txt \
-      --projects-new ruff/projects_new.txt \
-      --old old_commit \
-      --new new_commit \
-      --output-old diagnostics-old.json \
-      --output-new diagnostics-new.json
+      analyze \
+      --projects ruff/projects_old.txt \
+      --commit old_commit \
+      --output diagnostics_old.json

-    mkdir dist
+    ecosystem-analyzer \
+      --repository ruff \
+      analyze \
+      --projects ruff/projects_new.txt \
+      --commit new_commit \
+      --output diagnostics_new.json

     ecosystem-analyzer \
       generate-diff \
-      diagnostics-old.json \
-      diagnostics-new.json \
+      diagnostics_old.json \
+      diagnostics_new.json \
       --old-name "main (merge base)" \
       --new-name "$REF_NAME" \
-      --output-html dist/diff.html
+      --output-html diff.html

-    ecosystem-analyzer \
-      generate-diff-statistics \
-      diagnostics-old.json \
-      diagnostics-new.json \
-      --old-name "main (merge base)" \
-      --new-name "$REF_NAME" \
-      --output diff-statistics.md
-
-    echo '## `ecosystem-analyzer` results' > comment.md
-    echo >> comment.md
-    cat diff-statistics.md >> comment.md
-
-    cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"
-
-    echo ${{ github.event.number }} > pr-number
-
-- name: "Deploy to Cloudflare Pages"
-  if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-  id: deploy
-  uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
-  with:
-    apiToken: ${{ secrets.CF_API_TOKEN }}
-    accountId: ${{ secrets.CF_ACCOUNT_ID }}
-    command: pages deploy dist --project-name=ty-ecosystem --branch ${{ github.head_ref }} --commit-hash ${GITHUB_SHA}
-
-- name: "Append deployment URL"
-  if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-  env:
-    DEPLOYMENT_URL: ${{ steps.deploy.outputs.pages-deployment-alias-url }}
-  run: |
-    echo >> comment.md
-    echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)**" >> comment.md
-
-- name: Upload comment
-  uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-  with:
-    name: comment.md
-    path: comment.md
-
-- name: Upload pr-number
-  uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-  with:
-    name: pr-number
-    path: pr-number
-
-- name: Upload diagnostics diff
+- name: Upload HTML diff report
   uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
   with:
     name: diff.html
-    path: dist/diff.html
+    path: diff.html
@@ -1,85 +0,0 @@
|
||||
name: PR comment (ty ecosystem-analyzer)
|
||||
|
||||
on: # zizmor: ignore[dangerous-triggers]
|
||||
workflow_run:
|
||||
workflows: [ty ecosystem-analyzer]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
workflow_run_id:
|
||||
description: The ty ecosystem-analyzer workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: Download PR number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
||||
name: "Download comment.md"
|
||||
id: download-comment
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
with:
|
||||
name: comment.md
|
||||
workflow: ty-ecosystem-analyzer.yaml
|
||||
pr: ${{ steps.pr-number.outputs.pr-number }}
|
||||
path: pr/comment
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
|
||||
run: |
|
||||
# Guard against malicious ty ecosystem-analyzer results that symlink to a secret
|
||||
# file on this runner
|
||||
if [[ -L pr/comment/comment.md ]]
|
||||
then
|
||||
echo "Error: comment.md cannot be a symlink"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Note: this identifier is used to find the comment to update on subsequent runs
|
||||
echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
|
||||
echo >> comment.md
|
||||
cat pr/comment/comment.md >> comment.md
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.md >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
body-path: comment.md
|
||||
edit-mode: replace
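The symlink guard in the `Generate comment content` step above is the security-sensitive part of this comment workflow: the `comment.md` artifact comes from an untrusted fork, so it must not be allowed to point at a file on the runner. As a hedged illustration only (the workflow performs the check in bash), the same guard could look like this in Rust:

```rust
use std::fs;
use std::path::Path;
use std::process::exit;

// Illustrative port of the bash `[[ -L ... ]]` guard above; the path and
// message mirror the workflow script, everything else is an assumption.
fn refuse_symlink(path: &Path) {
    // `symlink_metadata` inspects the link itself instead of following it,
    // so a symlinked artifact is detected rather than silently dereferenced.
    if let Ok(meta) = fs::symlink_metadata(path) {
        if meta.file_type().is_symlink() {
            eprintln!("Error: {} cannot be a symlink", path.display());
            exit(1);
        }
    }
}

fn main() {
    refuse_symlink(Path::new("pr/comment/comment.md"));
}
```

The heredoc that follows in the script (`comment<<EOF ... EOF`) is the standard way to pass a multi-line value through `$GITHUB_OUTPUT`.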
76 .github/workflows/ty-ecosystem-report.yaml vendored
@@ -1,76 +0,0 @@
name: ty ecosystem-report

permissions: {}

on:
workflow_dispatch:
schedule:
# Run every Wednesday at 5:00 UTC:
- cron: 0 5 * * 3

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}

jobs:
ty-ecosystem-report:
name: Create ecosystem report
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"

- name: Install Rust toolchain
run: rustup show

- name: Create report
shell: bash
run: |
cd ruff

echo "Enabling configuration overloads (see .github/mypy-primer-ty.toml)"
mkdir -p ~/.config/ty
cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml

cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"

ecosystem-analyzer \
--verbose \
--repository ruff \
analyze \
--projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
--output ecosystem-diagnostics.json

mkdir dist

ecosystem-analyzer \
generate-report \
--max-diagnostics-per-project=1200 \
ecosystem-diagnostics.json \
--output dist/index.html

- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
id: deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}
116 .github/workflows/typing_conformance.yaml vendored
@@ -1,116 +0,0 @@
name: Run typing conformance

permissions: {}

on:
pull_request:
paths:
- "crates/ty*/**"
- "crates/ruff_db"
- "crates/ruff_python_ast"
- "crates/ruff_python_parser"
- ".github/workflows/typing_conformance.yaml"
- ".github/workflows/typing_conformance_comment.yaml"
- "Cargo.lock"
- "!**.md"

concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
RUST_BACKTRACE: 1
CONFORMANCE_SUITE_COMMIT: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc

jobs:
typing_conformance:
name: Compute diagnostic diff
runs-on: depot-ubuntu-22.04-32
timeout-minutes: 10
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false

- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
repository: python/typing
ref: ${{ env.CONFORMANCE_SUITE_COMMIT }}
path: typing
persist-credentials: false

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: "ruff"

- name: Install Rust toolchain
run: rustup show

- name: Compute diagnostic diff
shell: bash
env:
# TODO: Remove this once we've fixed the remaining panics in the conformance suite.
TY_MAX_PARALLELISM: 1
run: |
RUFF_DIR="$GITHUB_WORKSPACE/ruff"

# Build the executables for the old and new commits
(
cd ruff

echo "new commit"
git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
cargo build --bin ty
mv target/debug/ty ty-new

MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
git checkout -b old_commit "$MERGE_BASE"
echo "old commit (merge base)"
git rev-list --format=%s --max-count=1 old_commit
cargo build --bin ty
mv target/debug/ty ty-old
)

(
cd typing/conformance/tests

echo "Running ty on old commit (merge base)"
"$RUFF_DIR/ty-old" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/old-output.txt" 2>&1 || true

echo "Running ty on new commit"
"$RUFF_DIR/ty-new" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/new-output.txt" 2>&1 || true
)

if ! diff -u old-output.txt new-output.txt > typing_conformance_diagnostics.diff; then
echo "Differences found between base and PR"
else
echo "No differences found"
touch typing_conformance_diagnostics.diff
fi

echo ${{ github.event.number }} > pr-number
echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit

- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: typing_conformance_diagnostics_diff
path: typing_conformance_diagnostics.diff

- name: Upload pr-number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr-number
path: pr-number

- name: Upload conformance suite commit
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: conformance-suite-commit
path: conformance-suite-commit
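The `if ! diff -u ...` construct in the `Compute diagnostic diff` step above relies on `diff`'s exit-status contract: 0 means the outputs match, 1 means they differ, and 2 or higher signals an error. A minimal sketch of the same check driven from Rust (illustrative only; the file names are the ones used above, and the workflow itself does this in bash):

```rust
use std::process::Command;

fn main() {
    // Spawn `diff -u` exactly as the workflow does and inspect its exit
    // code instead of parsing output: 0 = identical, 1 = differences.
    let status = Command::new("diff")
        .args(["-u", "old-output.txt", "new-output.txt"])
        .status()
        .expect("failed to spawn diff");
    match status.code() {
        Some(0) => println!("No differences found"),
        Some(1) => println!("Differences found between base and PR"),
        _ => eprintln!("diff itself failed"),
    }
}
```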
112 .github/workflows/typing_conformance_comment.yaml vendored
@@ -1,112 +0,0 @@
name: PR comment (typing_conformance)

on: # zizmor: ignore[dangerous-triggers]
workflow_run:
workflows: [Run typing conformance]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The typing_conformance workflow that triggers the workflow run
required: true

jobs:
comment:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download PR number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true

- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download typing conformance suite commit
with:
name: conformance-suite-commit
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true

- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download typing_conformance results"
id: download-typing_conformance_diff
if: steps.pr-number.outputs.pr-number
with:
name: typing_conformance_diagnostics_diff
workflow: typing_conformance.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/typing_conformance_diagnostics_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true

- name: Generate comment content
id: generate-comment
if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
run: |
# Guard against malicious typing_conformance results that symlink to a secret
# file on this runner
if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
then
echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
exit 1
fi

# Note: this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt

if [[ -f conformance-suite-commit ]]
then
echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(<conformance-suite-commit)/conformance)" >> comment.txt
else
echo "conformance-suite-commit file not found"
echo "## Diagnostic diff on typing conformance tests" >> comment.txt
fi

if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
fi

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"

- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"

- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace
2 .github/zizmor.yml vendored
@@ -10,8 +10,6 @@ rules:
ignore:
- build-docker.yml
- publish-playground.yml
- ty-ecosystem-analyzer.yaml
- ty-ecosystem-report.yaml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...

@@ -6,7 +6,7 @@ exclude: |
crates/ty_vendored/vendor/.*|
crates/ty_project/resources/.*|
crates/ty_python_semantic/resources/corpus/.*|
crates/ty/docs/(configuration|rules|cli|environment).md|
crates/ty/docs/(configuration|rules|cli).md|
crates/ruff_benchmark/resources/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
@@ -67,7 +67,7 @@ repos:
- black==25.1.0

- repo: https://github.com/crate-ci/typos
rev: v1.34.0
rev: v1.33.1
hooks:
- id: typos

@@ -81,10 +81,10 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.7
rev: v0.12.1
hooks:
- id: ruff-format
- id: ruff-check
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi]
require_serial: true
@@ -99,12 +99,12 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.11.0
rev: v1.10.0
hooks:
- id: zizmor

- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.2
rev: 0.33.1
hooks:
- id: check-github-workflows

@@ -128,10 +128,5 @@ repos:
# but the integration only works if shellcheck is installed
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"

- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.10.0.1
hooks:
- id: shellcheck

ci:
skip: [cargo-fmt, dev-generate-all]

270 CHANGELOG.md
@@ -1,275 +1,5 @@
# Changelog

## 0.12.10

### Preview features

- \[`flake8-simplify`\] Implement fix for `maxsplit` without separator (`SIM905`) ([#19851](https://github.com/astral-sh/ruff/pull/19851))
- \[`flake8-use-pathlib`\] Add fixes for `PTH102` and `PTH103` ([#19514](https://github.com/astral-sh/ruff/pull/19514))

### Bug fixes

- \[`isort`\] Handle multiple continuation lines after module docstring (`I002`) ([#19818](https://github.com/astral-sh/ruff/pull/19818))
- \[`pyupgrade`\] Avoid reporting `__future__` features as unnecessary when they are used (`UP010`) ([#19769](https://github.com/astral-sh/ruff/pull/19769))
- \[`pyupgrade`\] Handle nested `Optional`s (`UP045`) ([#19770](https://github.com/astral-sh/ruff/pull/19770))

### Rule changes

- \[`pycodestyle`\] Make `E731` fix unsafe instead of display-only for class assignments ([#19700](https://github.com/astral-sh/ruff/pull/19700))
- \[`pyflakes`\] Add secondary annotation showing previous definition (`F811`) ([#19900](https://github.com/astral-sh/ruff/pull/19900))

### Documentation

- Fix description of global config file discovery strategy ([#19188](https://github.com/astral-sh/ruff/pull/19188))
- Update outdated links to <https://typing.python.org/en/latest/source/stubs.html> ([#19992](https://github.com/astral-sh/ruff/pull/19992))
- \[`flake8-annotations`\] Remove unused import in example (`ANN401`) ([#20000](https://github.com/astral-sh/ruff/pull/20000))

## 0.12.9

### Preview features

- \[`airflow`\] Add check for `airflow.secrets.cache.SecretCache` (`AIR301`) ([#17707](https://github.com/astral-sh/ruff/pull/17707))
- \[`ruff`\] Offer a safe fix for multi-digit zeros (`RUF064`) ([#19847](https://github.com/astral-sh/ruff/pull/19847))

### Bug fixes

- \[`flake8-blind-except`\] Fix `BLE001` false-positive on `raise ... from None` ([#19755](https://github.com/astral-sh/ruff/pull/19755))
- \[`flake8-comprehensions`\] Fix false positive for `C420` with attribute, subscript, or slice assignment targets ([#19513](https://github.com/astral-sh/ruff/pull/19513))
- \[`flake8-simplify`\] Fix handling of U+001C..U+001F whitespace (`SIM905`) ([#19849](https://github.com/astral-sh/ruff/pull/19849))

### Rule changes

- \[`pylint`\] Use lowercase hex characters to match the formatter (`PLE2513`) ([#19808](https://github.com/astral-sh/ruff/pull/19808))

### Documentation

- Fix `lint.future-annotations` link ([#19876](https://github.com/astral-sh/ruff/pull/19876))

### Other changes

- Build `riscv64` binaries for release ([#19819](https://github.com/astral-sh/ruff/pull/19819))

- Add rule code to error description in GitLab output ([#19896](https://github.com/astral-sh/ruff/pull/19896))

- Improve rendering of the `full` output format ([#19415](https://github.com/astral-sh/ruff/pull/19415))

Below is an example diff for [`F401`](https://docs.astral.sh/ruff/rules/unused-import/):

```diff
-unused.py:8:19: F401 [*] `pathlib` imported but unused
+F401 [*] `pathlib` imported but unused
+ --> unused.py:8:19
|
7 | # Unused, _not_ marked as required (due to the alias).
8 | import pathlib as non_alias
- | ^^^^^^^^^ F401
+ | ^^^^^^^^^
9 |
10 | # Unused, marked as required.
|
- = help: Remove unused import: `pathlib`
+help: Remove unused import: `pathlib`
```

For now, the primary difference is the movement of the filename, line number, and column information to a second line in the header. This new representation will allow us to make further additions to Ruff's diagnostics, such as adding sub-diagnostics and multiple annotations to the same snippet.

## 0.12.8

### Preview features

- \[`flake8-use-pathlib`\] Expand `PTH201` to check all `PurePath` subclasses ([#19440](https://github.com/astral-sh/ruff/pull/19440))

### Bug fixes

- \[`flake8-blind-except`\] Change `BLE001` to correctly parse exception tuples ([#19747](https://github.com/astral-sh/ruff/pull/19747))
- \[`flake8-errmsg`\] Exclude `typing.cast` from `EM101` ([#19656](https://github.com/astral-sh/ruff/pull/19656))
- \[`flake8-simplify`\] Fix raw string handling in `SIM905` for embedded quotes ([#19591](https://github.com/astral-sh/ruff/pull/19591))
- \[`flake8-import-conventions`\] Avoid false positives for NFKC-normalized `__debug__` import aliases in `ICN001` ([#19411](https://github.com/astral-sh/ruff/pull/19411))
- \[`isort`\] Fix syntax error after docstring ending with backslash (`I002`) ([#19505](https://github.com/astral-sh/ruff/pull/19505))
- \[`pylint`\] Mark `PLC0207` fixes as unsafe when `*args` unpacking is present ([#19679](https://github.com/astral-sh/ruff/pull/19679))
- \[`pyupgrade`\] Prevent infinite loop with `I002` (`UP010`, `UP035`) ([#19413](https://github.com/astral-sh/ruff/pull/19413))
- \[`ruff`\] Parenthesize generator expressions in f-strings (`RUF010`) ([#19434](https://github.com/astral-sh/ruff/pull/19434))

### Rule changes

- \[`eradicate`\] Don't flag `pyrefly` pragmas as unused code (`ERA001`) ([#19731](https://github.com/astral-sh/ruff/pull/19731))

### Documentation

- Replace "associative" with "commutative" in docs for `RUF036` ([#19706](https://github.com/astral-sh/ruff/pull/19706))
- Fix copy and line separator colors in dark mode ([#19630](https://github.com/astral-sh/ruff/pull/19630))
- Fix link to `typing` documentation ([#19648](https://github.com/astral-sh/ruff/pull/19648))
- \[`refurb`\] Make more examples error out-of-the-box ([#19695](https://github.com/astral-sh/ruff/pull/19695),[#19673](https://github.com/astral-sh/ruff/pull/19673),[#19672](https://github.com/astral-sh/ruff/pull/19672))

### Other changes

- Include column numbers in GitLab output format ([#19708](https://github.com/astral-sh/ruff/pull/19708))
- Always expand tabs to four spaces in diagnostics ([#19618](https://github.com/astral-sh/ruff/pull/19618))
- Update pre-commit's `ruff` id ([#19654](https://github.com/astral-sh/ruff/pull/19654))

## 0.12.7

This is a follow-up release to 0.12.6. Because of an issue in the package metadata, 0.12.6 failed to publish fully to PyPI and has been yanked. Similarly, there is no GitHub release or Git tag for 0.12.6. The contents of the 0.12.7 release are identical to 0.12.6, except for the updated metadata.

## 0.12.6

### Preview features

- \[`flake8-commas`\] Add support for trailing comma checks in type parameter lists (`COM812`, `COM819`) ([#19390](https://github.com/astral-sh/ruff/pull/19390))
- \[`pylint`\] Implement auto-fix for `missing-maxsplit-arg` (`PLC0207`) ([#19387](https://github.com/astral-sh/ruff/pull/19387))
- \[`ruff`\] Offer fixes for `RUF039` in more cases ([#19065](https://github.com/astral-sh/ruff/pull/19065))

### Bug fixes

- Support `.pyi` files in ruff analyze graph ([#19611](https://github.com/astral-sh/ruff/pull/19611))
- \[`flake8-pyi`\] Preserve inline comment in ellipsis removal (`PYI013`) ([#19399](https://github.com/astral-sh/ruff/pull/19399))
- \[`perflint`\] Ignore rule if target is `global` or `nonlocal` (`PERF401`) ([#19539](https://github.com/astral-sh/ruff/pull/19539))
- \[`pyupgrade`\] Fix `UP030` to avoid modifying double curly braces in format strings ([#19378](https://github.com/astral-sh/ruff/pull/19378))
- \[`refurb`\] Ignore decorated functions for `FURB118` ([#19339](https://github.com/astral-sh/ruff/pull/19339))
- \[`refurb`\] Mark `int` and `bool` cases for `Decimal.from_float` as safe fixes (`FURB164`) ([#19468](https://github.com/astral-sh/ruff/pull/19468))
- \[`ruff`\] Fix `RUF033` for named default expressions ([#19115](https://github.com/astral-sh/ruff/pull/19115))

### Rule changes

- \[`flake8-blind-except`\] Change `BLE001` to permit `logging.critical(..., exc_info=True)` ([#19520](https://github.com/astral-sh/ruff/pull/19520))

### Performance

- Add support for specifying minimum dots in detected string imports ([#19538](https://github.com/astral-sh/ruff/pull/19538))

## 0.12.5

### Preview features

- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))

### Bug fixes

- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))

### Rule changes

- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))

## 0.12.4

### Preview features

- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))

### Bug fixes

- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))

### Rule changes

- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))

### Documentation

- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288),[#19272](https://github.com/astral-sh/ruff/pull/19272),[#19291](https://github.com/astral-sh/ruff/pull/19291),[#19296](https://github.com/astral-sh/ruff/pull/19296),[#19292](https://github.com/astral-sh/ruff/pull/19292),[#19295](https://github.com/astral-sh/ruff/pull/19295),[#19297](https://github.com/astral-sh/ruff/pull/19297),[#19309](https://github.com/astral-sh/ruff/pull/19309))

## 0.12.3

### Preview features

- \[`flake8-bugbear`\] Support non-context-manager calls in `B017` ([#19063](https://github.com/astral-sh/ruff/pull/19063))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH100`, `PTH106`, `PTH107`, `PTH108`, `PTH110`, `PTH111`, `PTH112`, `PTH113`, `PTH114`, `PTH115`, `PTH117`, `PTH119`, `PTH120` ([#19213](https://github.com/astral-sh/ruff/pull/19213))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH203`, `PTH204`, `PTH205` ([#18922](https://github.com/astral-sh/ruff/pull/18922))

### Bug fixes

- \[`flake8-return`\] Fix false-positive for variables used inside nested functions in `RET504` ([#18433](https://github.com/astral-sh/ruff/pull/18433))
- Treat form feed as valid whitespace before a line continuation ([#19220](https://github.com/astral-sh/ruff/pull/19220))
- \[`flake8-type-checking`\] Fix syntax error introduced by fix (`TC008`) ([#19150](https://github.com/astral-sh/ruff/pull/19150))
- \[`pyupgrade`\] Keyword arguments in `super` should suppress the `UP008` fix ([#19131](https://github.com/astral-sh/ruff/pull/19131))

### Documentation

- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI007`, `PYI008`) ([#19103](https://github.com/astral-sh/ruff/pull/19103))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM116`) ([#19111](https://github.com/astral-sh/ruff/pull/19111))
- \[`flake8-type-checking`\] Make example error out-of-the-box (`TC001`) ([#19151](https://github.com/astral-sh/ruff/pull/19151))
- \[`flake8-use-pathlib`\] Make example error out-of-the-box (`PTH210`) ([#19189](https://github.com/astral-sh/ruff/pull/19189))
- \[`pycodestyle`\] Make example error out-of-the-box (`E272`) ([#19191](https://github.com/astral-sh/ruff/pull/19191))
- \[`pycodestyle`\] Make example not raise unnecessary `SyntaxError` (`E114`) ([#19190](https://github.com/astral-sh/ruff/pull/19190))
- \[`pydoclint`\] Make example error out-of-the-box (`DOC501`) ([#19218](https://github.com/astral-sh/ruff/pull/19218))
- \[`pylint`, `pyupgrade`\] Fix syntax errors in examples (`PLW1501`, `UP028`) ([#19127](https://github.com/astral-sh/ruff/pull/19127))
- \[`pylint`\] Update `missing-maxsplit-arg` docs and error to suggest proper usage (`PLC0207`) ([#18949](https://github.com/astral-sh/ruff/pull/18949))
- \[`flake8-bandit`\] Make example error out-of-the-box (`S412`) ([#19241](https://github.com/astral-sh/ruff/pull/19241))

## 0.12.2

### Preview features

- \[`flake8-pyi`\] Expand `Optional[A]` to `A | None` (`PYI016`) ([#18572](https://github.com/astral-sh/ruff/pull/18572))
- \[`pyupgrade`\] Mark `UP008` fix safe if no comments are in range ([#18683](https://github.com/astral-sh/ruff/pull/18683))

### Bug fixes

- \[`flake8-comprehensions`\] Fix `C420` to prepend whitespace when needed ([#18616](https://github.com/astral-sh/ruff/pull/18616))
- \[`perflint`\] Fix `PERF403` panic on attribute or subscription loop variable ([#19042](https://github.com/astral-sh/ruff/pull/19042))
- \[`pydocstyle`\] Fix `D413` infinite loop for parenthesized docstring ([#18930](https://github.com/astral-sh/ruff/pull/18930))
- \[`pylint`\] Fix `PLW0108` autofix introducing a syntax error when the lambda's body contains an assignment expression ([#18678](https://github.com/astral-sh/ruff/pull/18678))
- \[`refurb`\] Fix false positive on empty tuples (`FURB168`) ([#19058](https://github.com/astral-sh/ruff/pull/19058))
- \[`ruff`\] Allow more `field` calls from `attrs` (`RUF009`) ([#19021](https://github.com/astral-sh/ruff/pull/19021))
- \[`ruff`\] Fix syntax error introduced for an empty string followed by a u-prefixed string (`UP025`) ([#18899](https://github.com/astral-sh/ruff/pull/18899))

### Rule changes

- \[`flake8-executable`\] Allow `uvx` in shebang line (`EXE003`) ([#18967](https://github.com/astral-sh/ruff/pull/18967))
- \[`pandas`\] Avoid flagging `PD002` if `pandas` is not imported ([#18963](https://github.com/astral-sh/ruff/pull/18963))
- \[`pyupgrade`\] Avoid PEP-604 unions with `typing.NamedTuple` (`UP007`, `UP045`) ([#18682](https://github.com/astral-sh/ruff/pull/18682))

### Documentation

- Document link between `import-outside-top-level (PLC0415)` and `lint.flake8-tidy-imports.banned-module-level-imports` ([#18733](https://github.com/astral-sh/ruff/pull/18733))
- Fix description of the `format.skip-magic-trailing-comma` example ([#19095](https://github.com/astral-sh/ruff/pull/19095))
- \[`airflow`\] Make `AIR302` example error out-of-the-box ([#18988](https://github.com/astral-sh/ruff/pull/18988))
- \[`airflow`\] Make `AIR312` example error out-of-the-box ([#18989](https://github.com/astral-sh/ruff/pull/18989))
- \[`flake8-annotations`\] Make `ANN401` example error out-of-the-box ([#18974](https://github.com/astral-sh/ruff/pull/18974))
- \[`flake8-async`\] Make `ASYNC100` example error out-of-the-box ([#18993](https://github.com/astral-sh/ruff/pull/18993))
- \[`flake8-async`\] Make `ASYNC105` example error out-of-the-box ([#19002](https://github.com/astral-sh/ruff/pull/19002))
- \[`flake8-async`\] Make `ASYNC110` example error out-of-the-box ([#18975](https://github.com/astral-sh/ruff/pull/18975))
- \[`flake8-async`\] Make `ASYNC210` example error out-of-the-box ([#18977](https://github.com/astral-sh/ruff/pull/18977))
- \[`flake8-async`\] Make `ASYNC220`, `ASYNC221`, and `ASYNC222` examples error out-of-the-box ([#18978](https://github.com/astral-sh/ruff/pull/18978))
- \[`flake8-async`\] Make `ASYNC251` example error out-of-the-box ([#18990](https://github.com/astral-sh/ruff/pull/18990))
- \[`flake8-bandit`\] Make `S201` example error out-of-the-box ([#19017](https://github.com/astral-sh/ruff/pull/19017))
- \[`flake8-bandit`\] Make `S604` and `S609` examples error out-of-the-box ([#19049](https://github.com/astral-sh/ruff/pull/19049))
- \[`flake8-bugbear`\] Make `B028` example error out-of-the-box ([#19054](https://github.com/astral-sh/ruff/pull/19054))
- \[`flake8-bugbear`\] Make `B911` example error out-of-the-box ([#19051](https://github.com/astral-sh/ruff/pull/19051))
- \[`flake8-datetimez`\] Make `DTZ011` example error out-of-the-box ([#19055](https://github.com/astral-sh/ruff/pull/19055))
- \[`flake8-datetimez`\] Make `DTZ901` example error out-of-the-box ([#19056](https://github.com/astral-sh/ruff/pull/19056))
- \[`flake8-pyi`\] Make `PYI032` example error out-of-the-box ([#19061](https://github.com/astral-sh/ruff/pull/19061))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI014`, `PYI015`) ([#19097](https://github.com/astral-sh/ruff/pull/19097))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI042`) ([#19101](https://github.com/astral-sh/ruff/pull/19101))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI059`) ([#19080](https://github.com/astral-sh/ruff/pull/19080))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI062`) ([#19079](https://github.com/astral-sh/ruff/pull/19079))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT023`) ([#19104](https://github.com/astral-sh/ruff/pull/19104))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT030`) ([#19105](https://github.com/astral-sh/ruff/pull/19105))
- \[`flake8-quotes`\] Make example error out-of-the-box (`Q003`) ([#19106](https://github.com/astral-sh/ruff/pull/19106))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM110`) ([#19113](https://github.com/astral-sh/ruff/pull/19113))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM113`) ([#19109](https://github.com/astral-sh/ruff/pull/19109))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM401`) ([#19110](https://github.com/astral-sh/ruff/pull/19110))
- \[`pyflakes`\] Fix backslash in docs (`F621`) ([#19098](https://github.com/astral-sh/ruff/pull/19098))
- \[`pylint`\] Fix `PLC0415` example ([#18970](https://github.com/astral-sh/ruff/pull/18970))

## 0.12.1

### Preview features

@@ -266,13 +266,6 @@ Finally, regenerate the documentation and generated code with `cargo dev generat

## MkDocs

> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.

To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

980 Cargo.lock generated
File diff suppressed because it is too large
44 Cargo.toml
@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.87"
rust-version = "1.86"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -23,7 +23,6 @@ ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
ruff_memory_usage = { path = "crates/ruff_memory_usage" }
ruff_notebook = { path = "crates/ruff_notebook" }
ruff_options_metadata = { path = "crates/ruff_options_metadata" }
ruff_python_ast = { path = "crates/ruff_python_ast" }
@@ -41,12 +40,10 @@ ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }

ty = { path = "crates/ty" }
ty_combine = { path = "crates/ty_combine" }
ty_ide = { path = "crates/ty_ide" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }
ty_server = { path = "crates/ty_server" }
ty_static = { path = "crates/ty_static" }
ty_test = { path = "crates/ty_test" }
ty_vendored = { path = "crates/ty_vendored" }

@@ -59,9 +56,6 @@ assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
"alloc",
] }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
@@ -75,7 +69,7 @@ console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
criterion = { version = "0.6.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
@@ -85,11 +79,11 @@ etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.6.2", features = [
get-size2 = { version = "0.5.0", features = [
"derive",
"smallvec",
"hashbrown",
"compact-str",
"compact-str"
] }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
@@ -104,7 +98,7 @@ ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.18.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta-cmd = { version = "0.6.0" }
@@ -143,12 +137,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a3ffa22cb26756473d56f867aedec3fd907c4dd9", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",
"inventory",
] }
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "fc00eba89e5dcaa5edba51c41aa5f309b5cb126b" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -160,7 +149,7 @@ serde_with = { version = "3.6.0", default-features = false, features = [
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2", features = ["union", "const_generics", "const_new"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
"diff",
"term-svg",
@@ -175,16 +164,16 @@ tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.9.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.11" }
tracing-indicatif = { version = "0.3.6" }
tracing-log = { version = "0.2.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
"env-filter",
"fmt",
"ansi",
"smallvec",
"smallvec"
] }
tryfn = { version = "0.2.1" }
typed-arena = { version = "2.0.2" }
@@ -194,7 +183,11 @@ unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
@@ -215,8 +208,6 @@ unexpected_cfgs = { level = "warn", check-cfg = [

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
# Enabled at the crate level
disallowed_methods = "allow"
# Allowed pedantic lints
char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
@@ -231,8 +222,8 @@ must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
@@ -255,7 +246,6 @@ unused_peekable = "warn"
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"

[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.10/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.10/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.1/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.10
rev: v0.12.1
hooks:
# Run the linter.
- id: ruff-check
@@ -430,7 +430,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- Capital One ([datacompy](https://github.com/capitalone/datacompy))
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
@@ -507,7 +506,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Streamlit](https://github.com/streamlit/streamlit)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- [Weblate](https://weblate.org/)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
- [ZenML](https://github.com/zenml-io/zenml)
- [Zulip](https://github.com/zulip/zulip)

17 clippy.toml
@@ -24,20 +24,3 @@ ignore-interior-mutability = [
# The expression is read-only.
"ruff_python_ast::hashable::HashableExpr",
]

disallowed-methods = [
{ path = "std::env::var", reason = "Use System::env_var instead in ty crates" },
{ path = "std::env::current_dir", reason = "Use System::current_directory instead in ty crates" },
{ path = "std::fs::read_to_string", reason = "Use System::read_to_string instead in ty crates" },
{ path = "std::fs::metadata", reason = "Use System::path_metadata instead in ty crates" },
{ path = "std::fs::canonicalize", reason = "Use System::canonicalize_path instead in ty crates" },
{ path = "dunce::canonicalize", reason = "Use System::canonicalize_path instead in ty crates" },
{ path = "std::fs::read_dir", reason = "Use System::read_directory instead in ty crates" },
{ path = "std::fs::write", reason = "Use WritableSystem::write_file instead in ty crates" },
{ path = "std::fs::create_dir_all", reason = "Use WritableSystem::create_directory_all instead in ty crates" },
{ path = "std::fs::File::create_new", reason = "Use WritableSystem::create_new_file instead in ty crates" },
# Path methods that have System trait equivalents
{ path = "std::path::Path::exists", reason = "Use System::path_exists instead in ty crates" },
{ path = "std::path::Path::is_dir", reason = "Use System::is_directory instead in ty crates" },
{ path = "std::path::Path::is_file", reason = "Use System::is_file instead in ty crates" },
]
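These `disallowed-methods` entries push ty code toward a `System` abstraction instead of direct `std` filesystem calls, so a test or WASM build can substitute a non-OS implementation. A minimal sketch of that shape; the trait and method names come from the lint reasons above, while the trait definition itself is an assumption, not the project's actual API:

```rust
use std::io;
use std::path::Path;

// Hypothetical trait mirroring the method names cited in the lint reasons.
trait System {
    fn path_exists(&self, path: &Path) -> bool;
    fn read_to_string(&self, path: &Path) -> io::Result<String>;
}

// The one place allowed to call the raw `std` functions: the OS-backed impl.
struct OsSystem;

impl System for OsSystem {
    fn path_exists(&self, path: &Path) -> bool {
        path.exists()
    }
    fn read_to_string(&self, path: &Path) -> io::Result<String> {
        std::fs::read_to_string(path)
    }
}

fn main() {
    let system = OsSystem;
    println!("Cargo.toml present: {}", system.path_exists(Path::new("Cargo.toml")));
}
```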
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.10"
version = "0.12.1"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -85,7 +85,7 @@ dist = true
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }

[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = { workspace = true }

[lints]

@@ -169,9 +169,6 @@ pub struct AnalyzeGraphCommand {
/// Attempt to detect imports from string literals.
#[clap(long)]
detect_string_imports: bool,
/// The minimum number of dots in a string import to consider it a valid import.
#[clap(long)]
min_dots: Option<usize>,
/// Enable preview mode. Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
@@ -811,7 +808,6 @@ impl AnalyzeGraphCommand {
} else {
None
},
string_imports_min_dots: self.min_dots,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
target_version: self.target_version.map(ast::PythonVersion::from),
..ExplicitConfigOverrides::default()
@@ -1309,7 +1305,6 @@ struct ExplicitConfigOverrides {
show_fixes: Option<bool>,
extension: Option<Vec<ExtensionPair>>,
detect_string_imports: Option<bool>,
string_imports_min_dots: Option<usize>,
}

impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1397,9 +1392,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
if let Some(detect_string_imports) = &self.detect_string_imports {
config.analyze.detect_string_imports = Some(*detect_string_imports);
}
if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
}

config
}

@@ -13,16 +13,24 @@ use itertools::Itertools;
use log::{debug, error};
use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge};
use ruff_linter::codes::Rule;
use rustc_hash::FxHashMap;
use tempfile::NamedTempFile;

use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::Fix;
use ruff_linter::message::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::{VERSION, warn_user};
use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_workspace::Settings;
use ruff_workspace::resolver::Resolver;

use crate::diagnostics::Diagnostics;

/// [`Path`] that is relative to the package root in [`PackageCache`].
pub(crate) type RelativePath = Path;
/// [`PathBuf`] that is relative to the package root in [`PackageCache`].
@@ -289,8 +297,13 @@ impl Cache {
});
}

pub(crate) fn set_linted(&self, path: RelativePathBuf, key: &FileCacheKey, yes: bool) {
self.update(path, key, ChangeData::Linted(yes));
pub(crate) fn update_lint(
&self,
path: RelativePathBuf,
key: &FileCacheKey,
data: LintCacheData,
) {
self.update(path, key, ChangeData::Lint(data));
}

pub(crate) fn set_formatted(&self, path: RelativePathBuf, key: &FileCacheKey) {
@@ -325,15 +338,42 @@ pub(crate) struct FileCache {
}

impl FileCache {
/// Return whether or not the file in the cache was linted and found to have no diagnostics.
pub(crate) fn linted(&self) -> bool {
self.data.linted
/// Convert the file cache into `Diagnostics`, using `path` as file name.
pub(crate) fn to_diagnostics(&self, path: &Path) -> Option<Diagnostics> {
self.data.lint.as_ref().map(|lint| {
let diagnostics = if lint.messages.is_empty() {
Vec::new()
} else {
let file = SourceFileBuilder::new(path.to_string_lossy(), &*lint.source).finish();
lint.messages
.iter()
.map(|msg| {
OldDiagnostic::lint(
&msg.body,
msg.suggestion.as_ref(),
msg.range,
msg.fix.clone(),
msg.parent,
file.clone(),
msg.noqa_offset,
msg.rule,
)
})
.collect()
};
let notebook_indexes = if let Some(notebook_index) = lint.notebook_index.as_ref() {
FxHashMap::from_iter([(path.to_string_lossy().to_string(), notebook_index.clone())])
} else {
FxHashMap::default()
};
Diagnostics::new(diagnostics, notebook_indexes)
})
}
}

#[derive(Debug, Default, bincode::Decode, bincode::Encode)]
struct FileCacheData {
linted: bool,
lint: Option<LintCacheData>,
formatted: bool,
}

@@ -369,6 +409,88 @@ pub(crate) fn init(path: &Path) -> Result<()> {
Ok(())
}

#[derive(bincode::Decode, Debug, bincode::Encode, PartialEq)]
pub(crate) struct LintCacheData {
/// Imports made.
// pub(super) imports: ImportMap,
/// Diagnostic messages.
pub(super) messages: Vec<CacheMessage>,
/// Source code of the file.
///
/// # Notes
///
/// This will be empty if `messages` is empty.
pub(super) source: String,
/// Notebook index if this file is a Jupyter Notebook.
#[bincode(with_serde)]
pub(super) notebook_index: Option<NotebookIndex>,
}

impl LintCacheData {
pub(crate) fn from_diagnostics(
diagnostics: &[OldDiagnostic],
notebook_index: Option<NotebookIndex>,
) -> Self {
let source = if let Some(msg) = diagnostics.first() {
msg.source_file().source_text().to_owned()
} else {
String::new() // No messages, no need to keep the source!
};

let messages = diagnostics
.iter()
// Parse the kebab-case rule name into a `Rule`. This will fail for syntax errors, so
// this also serves to filter them out, but we shouldn't be caching files with syntax
// errors anyway.
.filter_map(|msg| Some((msg.name().parse().ok()?, msg)))
.map(|(rule, msg)| {
// Make sure that all messages use the same source file.
assert_eq!(
msg.source_file(),
diagnostics.first().unwrap().source_file(),
"message uses a different source file"
);
CacheMessage {
rule,
body: msg.body().to_string(),
suggestion: msg.suggestion().map(ToString::to_string),
range: msg.range(),
parent: msg.parent,
fix: msg.fix().cloned(),
noqa_offset: msg.noqa_offset(),
}
})
.collect();

Self {
messages,
source,
notebook_index,
}
}
}
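The `filter_map(|msg| Some((msg.name().parse().ok()?, msg)))` call above turns a failed `FromStr` parse into a filter: any diagnostic whose name does not parse as a `Rule` — in practice, a syntax error — is dropped rather than cached. A standalone sketch of the pattern with a hypothetical rule type (not ruff's actual `Rule`):

```rust
use std::str::FromStr;

// Hypothetical stand-in for the real `Rule` enum.
#[derive(Debug, PartialEq)]
enum ToyRule {
    UnusedImport,
}

impl FromStr for ToyRule {
    type Err = ();
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "unused-import" => Ok(ToyRule::UnusedImport),
            _ => Err(()), // unknown names (e.g. syntax errors) fail to parse
        }
    }
}

fn main() {
    let names = ["unused-import", "invalid-syntax"];
    // The failed parse doubles as the filter, exactly like the cache code:
    // names that don't correspond to a rule silently fall out.
    let rules: Vec<ToyRule> = names.iter().filter_map(|n| n.parse().ok()).collect();
    assert_eq!(rules, vec![ToyRule::UnusedImport]);
}
```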
/// On-disk representation of a diagnostic message.
#[derive(bincode::Decode, Debug, bincode::Encode, PartialEq)]
pub(super) struct CacheMessage {
/// The rule for the cached diagnostic.
#[bincode(with_serde)]
rule: Rule,
/// The message body to display to the user, to explain the diagnostic.
body: String,
/// The message to display to the user, to explain the suggested fix.
suggestion: Option<String>,
/// Range into the message's [`FileCache::source`].
#[bincode(with_serde)]
range: TextRange,
#[bincode(with_serde)]
parent: Option<TextSize>,
#[bincode(with_serde)]
fix: Option<Fix>,
#[bincode(with_serde)]
noqa_offset: Option<TextSize>,
}
|
||||
|
||||
pub(crate) trait PackageCaches {
|
||||
fn get(&self, package_root: &Path) -> Option<&Cache>;
|
||||
|
||||
@@ -456,15 +578,15 @@ struct Change {
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ChangeData {
|
||||
Linted(bool),
|
||||
Lint(LintCacheData),
|
||||
Formatted,
|
||||
}
|
||||
|
||||
impl ChangeData {
|
||||
fn apply(self, data: &mut FileCacheData) {
|
||||
match self {
|
||||
ChangeData::Linted(yes) => {
|
||||
data.linted = yes;
|
||||
ChangeData::Lint(new_lint) => {
|
||||
data.lint = Some(new_lint);
|
||||
}
|
||||
ChangeData::Formatted => {
|
||||
data.formatted = true;
|
||||
@@ -486,17 +608,18 @@ mod tests {
|
||||
use anyhow::Result;
|
||||
use filetime::{FileTime, set_file_mtime};
|
||||
use itertools::Itertools;
|
||||
use ruff_linter::settings::LinterSettings;
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_cache::CACHE_DIR_NAME;
|
||||
use ruff_linter::message::OldDiagnostic;
|
||||
use ruff_linter::package::PackageRoot;
|
||||
use ruff_linter::settings::LinterSettings;
|
||||
use ruff_linter::settings::flags;
|
||||
use ruff_linter::settings::types::UnsafeFixes;
|
||||
use ruff_python_ast::{PySourceType, PythonVersion};
|
||||
use ruff_workspace::Settings;
|
||||
|
||||
use crate::cache::{self, ChangeData, FileCache, FileCacheData, FileCacheKey};
|
||||
use crate::cache::{self, FileCache, FileCacheData, FileCacheKey};
|
||||
use crate::cache::{Cache, RelativePathBuf};
|
||||
use crate::commands::format::{FormatCommandError, FormatMode, FormatResult, format_path};
|
||||
use crate::diagnostics::{Diagnostics, lint_path};
|
||||
@@ -523,7 +646,7 @@ mod tests {
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
|
||||
let mut paths = Vec::new();
|
||||
let mut paths_with_diagnostics = Vec::new();
|
||||
let mut parse_errors = Vec::new();
|
||||
let mut expected_diagnostics = Diagnostics::default();
|
||||
for entry in fs::read_dir(&package_root).unwrap() {
|
||||
let entry = entry.unwrap();
|
||||
@@ -547,7 +670,7 @@ mod tests {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut diagnostics = lint_path(
|
||||
let diagnostics = lint_path(
|
||||
&path,
|
||||
Some(PackageRoot::root(&package_root)),
|
||||
&settings.linter,
|
||||
@@ -557,15 +680,8 @@ mod tests {
|
||||
UnsafeFixes::Enabled,
|
||||
)
|
||||
.unwrap();
|
||||
if diagnostics.inner.is_empty() {
|
||||
// We won't load a notebook index from the cache for files without diagnostics,
|
||||
// so remove them from `expected_diagnostics` too. This allows us to keep the
|
||||
// full equality assertion below.
|
||||
diagnostics
|
||||
.notebook_indexes
|
||||
.remove(&path.to_string_lossy().to_string());
|
||||
} else {
|
||||
paths_with_diagnostics.push(path.clone());
|
||||
if diagnostics.inner.iter().any(OldDiagnostic::is_syntax_error) {
|
||||
parse_errors.push(path.clone());
|
||||
}
|
||||
paths.push(path);
|
||||
expected_diagnostics += diagnostics;
|
||||
@@ -578,11 +694,11 @@ mod tests {
|
||||
let cache = Cache::open(package_root.clone(), &settings);
|
||||
assert_ne!(cache.package.files.len(), 0);
|
||||
|
||||
paths_with_diagnostics.sort();
|
||||
parse_errors.sort();
|
||||
|
||||
for path in &paths {
|
||||
if paths_with_diagnostics.binary_search(path).is_ok() {
|
||||
continue; // We don't cache files with diagnostics.
|
||||
if parse_errors.binary_search(path).is_ok() {
|
||||
continue; // We don't cache parsing errors.
|
||||
}
|
||||
|
||||
let relative_path = cache.relative_path(path).unwrap();
|
||||
@@ -616,7 +732,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn cache_adds_file_on_lint() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_adds_file_on_lint");
|
||||
let cache = test_cache.open();
|
||||
@@ -640,7 +756,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn cache_adds_files_on_lint() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_adds_files_on_lint");
|
||||
let cache = test_cache.open();
|
||||
@@ -665,40 +781,6 @@ mod tests {
|
||||
cache.persist().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_does_not_add_file_on_lint_with_diagnostic() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_does_not_add_file_on_lint_with_diagnostic");
|
||||
let cache = test_cache.open();
|
||||
test_cache.write_source_file("source.py", source);
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
|
||||
cache.persist().unwrap();
|
||||
let cache = test_cache.open();
|
||||
|
||||
let results = test_cache
|
||||
.lint_file_with_cache("source.py", &cache)
|
||||
.expect("Failed to lint test file");
|
||||
assert_eq!(results.inner.len(), 1, "Expected one F822 diagnostic");
|
||||
assert_eq!(
|
||||
cache.changes.lock().unwrap().len(),
|
||||
1,
|
||||
"Files with diagnostics still trigger change events"
|
||||
);
|
||||
assert!(
|
||||
cache
|
||||
.changes
|
||||
.lock()
|
||||
.unwrap()
|
||||
.last()
|
||||
.is_some_and(|change| matches!(change.new_data, ChangeData::Linted(false))),
|
||||
"Files with diagnostics are marked as unlinted"
|
||||
);
|
||||
|
||||
cache.persist().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_adds_files_on_format() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
@@ -729,7 +811,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn cache_invalidated_on_file_modified_time() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_invalidated_on_file_modified_time");
|
||||
let cache = test_cache.open();
|
||||
@@ -786,7 +868,7 @@ mod tests {
|
||||
file.set_permissions(perms)
|
||||
}
|
||||
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("cache_invalidated_on_permission_change");
|
||||
let cache = test_cache.open();
|
||||
@@ -839,7 +921,7 @@ mod tests {
|
||||
);
|
||||
|
||||
// Now actually lint a file.
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
test_cache.write_source_file("new.py", source);
|
||||
let new_path_key = RelativePathBuf::from("new.py");
|
||||
assert_eq!(cache.changes.lock().unwrap().len(), 0);
|
||||
@@ -862,7 +944,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn format_updates_cache_entry() {
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\"])\n";
|
||||
let source: &[u8] = b"a = 1\n\n__all__ = list([\"a\", \"b\"])\n";
|
||||
|
||||
let test_cache = TestCache::new("format_updates_cache_entry");
|
||||
let cache = test_cache.open();
|
||||
@@ -896,7 +978,7 @@ mod tests {
|
||||
panic!("Cache entry for `source.py` is missing.");
|
||||
};
|
||||
|
||||
assert!(file_cache.data.linted);
|
||||
assert!(file_cache.data.lint.is_some());
|
||||
assert!(file_cache.data.formatted);
|
||||
}
|
||||
|
||||
@@ -946,7 +1028,7 @@ mod tests {
|
||||
panic!("Cache entry for `source.py` is missing.");
|
||||
};
|
||||
|
||||
assert!(!file_cache.data.linted);
|
||||
assert_eq!(file_cache.data.lint, None);
|
||||
assert!(file_cache.data.formatted);
|
||||
}
|
||||
|
||||
|
||||
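The cache format above leans on `bincode`'s derive support, with `#[bincode(with_serde)]` bridging fields whose types only implement serde. As a quick orientation, here is a minimal, hypothetical round-trip sketch of the property the cache relies on (whatever `from_diagnostics` stores, `to_diagnostics` must be able to rebuild); the `Simple*` types are illustrative stand-ins, not ruff's real `LintCacheData`/`CacheMessage`, and it assumes the bincode 2.x API.

// A hypothetical stand-in for `CacheMessage`, reduced to what the round trip needs.
#[derive(Debug, PartialEq, bincode::Decode, bincode::Encode)]
struct SimpleMessage {
    body: String,
    // Offsets into the cached source text, like `CacheMessage::range`.
    start: u32,
    end: u32,
}

// A hypothetical stand-in for `LintCacheData`.
#[derive(Debug, PartialEq, bincode::Decode, bincode::Encode)]
struct SimpleLintData {
    messages: Vec<SimpleMessage>,
    // Kept empty when `messages` is empty, mirroring `LintCacheData::source`.
    source: String,
}

fn main() {
    let data = SimpleLintData {
        messages: vec![SimpleMessage {
            body: "`os` imported but unused".into(),
            start: 7,
            end: 9,
        }],
        source: "import os\n".into(),
    };
    let config = bincode::config::standard();
    // Encode to bytes as the on-disk cache would, then decode them back.
    let bytes = bincode::encode_to_vec(&data, config).unwrap();
    let (decoded, _len): (SimpleLintData, usize) =
        bincode::decode_from_slice(&bytes, config).unwrap();
    // Lossless round trip: the cache can reproduce the diagnostics it stored.
    assert_eq!(decoded, data);
}
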
@@ -102,7 +102,7 @@ pub(crate) fn analyze_graph(

        // Resolve the per-file settings.
        let settings = resolver.resolve(path);
        let string_imports = settings.analyze.string_imports;
        let string_imports = settings.analyze.detect_string_imports;
        let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();

        // Skip excluded files.

@@ -11,13 +11,13 @@ use log::{debug, error, warn};
use rayon::prelude::*;
use rustc_hash::FxHashMap;

use ruff_db::diagnostic::Diagnostic;
use ruff_db::panic::catch_unwind;
use ruff_linter::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::{IOError, Violation, fs, warn_user_once};
use ruff_linter::{IOError, fs, warn_user_once};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::resolver::{

@@ -129,7 +129,11 @@ pub(crate) fn check(
                        SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();

                    Diagnostics::new(
                        vec![IOError { message }.into_diagnostic(TextRange::default(), &dummy)],
                        vec![OldDiagnostic::new(
                            IOError { message },
                            TextRange::default(),
                            &dummy,
                        )],
                        FxHashMap::default(),
                    )
                } else {

@@ -162,9 +166,7 @@ pub(crate) fn check(
        |a, b| (a.0 + b.0, a.1 + b.1),
    );

    all_diagnostics
        .inner
        .sort_by(Diagnostic::ruff_start_ordering);
    all_diagnostics.inner.sort();

    // Store the caches.
    caches.persist()?;

@@ -279,7 +281,6 @@ mod test {

        TextEmitter::default()
            .with_show_fix_status(true)
            .with_color(false)
            .emit(
                &mut output,
                &diagnostics.inner,

@@ -1,7 +1,6 @@
use std::path::Path;

use anyhow::Result;
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::packaging;
use ruff_linter::settings::flags;

@@ -53,8 +52,6 @@ pub(crate) fn check_stdin(
        noqa,
        fix_mode,
    )?;
    diagnostics
        .inner
        .sort_unstable_by(Diagnostic::ruff_start_ordering);
    diagnostics.inner.sort_unstable();
    Ok(diagnostics)
}

@@ -10,41 +10,35 @@ use std::path::Path;
use anyhow::{Context, Result};
use colored::Colorize;
use log::{debug, warn};
use ruff_db::diagnostic::Diagnostic;
use rustc_hash::FxHashMap;

use ruff_linter::OldDiagnostic;
use ruff_linter::codes::Rule;
use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
use ruff_linter::message::create_syntax_error_diagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{IOError, Violation, fs};
use ruff_notebook::{NotebookError, NotebookIndex};
use ruff_linter::{IOError, fs};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::Settings;
use rustc_hash::FxHashMap;

use crate::cache::{Cache, FileCache, FileCacheKey};
use crate::cache::{Cache, FileCacheKey, LintCacheData};

/// A collection of [`Diagnostic`]s and additional information needed to render them.
///
/// Note that `notebook_indexes` may be empty if there are no diagnostics because the
/// `NotebookIndex` isn't cached in this case. This isn't a problem for any current uses as of
/// 2025-08-12, which are all related to diagnostic rendering, but could be surprising if used
/// differently in the future.
#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
    pub(crate) inner: Vec<Diagnostic>,
    pub(crate) inner: Vec<OldDiagnostic>,
    pub(crate) fixed: FixMap,
    pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
}

impl Diagnostics {
    pub(crate) fn new(
        diagnostics: Vec<Diagnostic>,
        diagnostics: Vec<OldDiagnostic>,
        notebook_indexes: FxHashMap<String, NotebookIndex>,
    ) -> Self {
        Self {

@@ -68,12 +62,13 @@ impl Diagnostics {
            let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
            let source_file = SourceFileBuilder::new(name, "").finish();
            Self::new(
                vec![
                vec![OldDiagnostic::new(
                    IOError {
                        message: err.to_string(),
                    }
                    .into_diagnostic(TextRange::default(), &source_file),
                ],
                    },
                    TextRange::default(),
                    &source_file,
                )],
                FxHashMap::default(),
            )
        } else {

@@ -103,10 +98,10 @@ impl Diagnostics {
            let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
            let dummy = SourceFileBuilder::new(name, "").finish();
            Self::new(
                vec![create_syntax_error_diagnostic(
                    dummy,
                vec![OldDiagnostic::syntax_error(
                    err,
                    TextRange::default(),
                    dummy,
                )],
                FxHashMap::default(),
            )

@@ -199,9 +194,19 @@ pub(crate) fn lint_path(
        let cache_key = FileCacheKey::from_path(path).context("Failed to create cache key")?;
        let cached_diagnostics = cache
            .get(relative_path, &cache_key)
            .is_some_and(FileCache::linted);
        if cached_diagnostics {
            return Ok(Diagnostics::default());
            .and_then(|entry| entry.to_diagnostics(path));
        if let Some(diagnostics) = cached_diagnostics {
            // `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
            // and writing the diff to stdout, respectively). If a file has diagnostics, we
            // need to avoid reading from and writing to the cache in these modes.
            if match fix_mode {
                flags::FixMode::Generate => true,
                flags::FixMode::Apply | flags::FixMode::Diff => {
                    diagnostics.inner.is_empty() && diagnostics.fixed.is_empty()
                }
            } {
                return Ok(diagnostics);
            }
        }

    // Stash the file metadata for later so when we update the cache it reflects the prerun

@@ -318,21 +323,31 @@ pub(crate) fn lint_path(
        (result, transformed, fixed)
    };

    let has_error = result.has_syntax_errors();
    let diagnostics = result.diagnostics;

    if let Some((cache, relative_path, key)) = caching {
        // `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
        // and writing the diff to stdout, respectively). If a file has diagnostics
        // with fixes, we need to avoid reading from and writing to the cache in these
        // modes.
        let use_fixes = match fix_mode {
            flags::FixMode::Generate => true,
            flags::FixMode::Apply | flags::FixMode::Diff => fixed.is_empty(),
        };

        // We don't cache files with diagnostics.
        let linted = diagnostics.is_empty() && use_fixes;
        cache.set_linted(relative_path.to_owned(), &key, linted);
        // We don't cache parsing errors.
        if !has_error {
            // `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
            // and writing the diff to stdout, respectively). If a file has diagnostics, we
            // need to avoid reading from and writing to the cache in these modes.
            if match fix_mode {
                flags::FixMode::Generate => true,
                flags::FixMode::Apply | flags::FixMode::Diff => {
                    diagnostics.is_empty() && fixed.is_empty()
                }
            } {
                cache.update_lint(
                    relative_path.to_owned(),
                    &key,
                    LintCacheData::from_diagnostics(
                        &diagnostics,
                        transformed.as_ipy_notebook().map(Notebook::index).cloned(),
                    ),
                );
            }
        }
    }

    let notebook_indexes = if let SourceKind::IpyNotebook(notebook) = transformed {

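The same gating rule appears twice in `lint_path`: once before serving cached diagnostics and once before storing new ones. Here is a minimal sketch of that rule in isolation, using a local `FixMode` stand-in rather than ruff's actual `flags::FixMode`, purely for illustration.

#[derive(Clone, Copy)]
enum FixMode {
    Generate, // No side-effects: cached results are always safe to use.
    Apply,    // Writes fixes to disk.
    Diff,     // Writes a diff to stdout.
}

/// The cache may be consulted or updated only when doing so cannot skip a
/// required side-effect: in `Apply`/`Diff` mode, a file with diagnostics or
/// applicable fixes must be re-processed rather than served from the cache.
fn cache_usable(fix_mode: FixMode, has_diagnostics: bool, has_fixes: bool) -> bool {
    match fix_mode {
        FixMode::Generate => true,
        FixMode::Apply | FixMode::Diff => !has_diagnostics && !has_fixes,
    }
}

fn main() {
    // `Generate` never mutates anything, so the cache is always usable.
    assert!(cache_usable(FixMode::Generate, true, true));
    // `Apply` must re-run files with diagnostics to actually write fixes.
    assert!(!cache_usable(FixMode::Apply, true, false));
    // A clean file is safe to cache in any mode.
    assert!(cache_usable(FixMode::Diff, false, false));
}
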
@@ -131,7 +131,6 @@ pub fn run(
    }: Args,
) -> Result<ExitStatus> {
    {
        ruff_db::set_program_version(crate::version::version().to_string()).unwrap();
        let default_panic_hook = std::panic::take_hook();
        std::panic::set_hook(Box::new(move |info| {
            #[expect(clippy::print_stderr)]

@@ -440,7 +439,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
    if cli.statistics {
        printer.write_statistics(&diagnostics, &mut summary_writer)?;
    } else {
        printer.write_once(&diagnostics, &mut summary_writer, preview)?;
        printer.write_once(&diagnostics, &mut summary_writer)?;
    }

    if !cli.exit_zero {

@@ -19,8 +19,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
        target_arch = "powerpc64"
    )
))]
#[global_allocator]

@@ -9,14 +9,12 @@ use itertools::{Itertools, iterate};
use ruff_linter::linter::FixTable;
use serde::Serialize;

use ruff_db::diagnostic::{
    Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
    Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, SarifEmitter,
    TextEmitter,
    AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
    JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter,
    SarifEmitter, SecondaryCode, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};

@@ -203,7 +201,6 @@ impl Printer {
        &self,
        diagnostics: &Diagnostics,
        writer: &mut dyn Write,
        preview: bool,
    ) -> Result<()> {
        if matches!(self.log_level, LogLevel::Silent) {
            return Ok(());

@@ -231,32 +228,16 @@ impl Printer {

        match self.format {
            OutputFormat::Json => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Json)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                JsonEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Rdjson => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Rdjson)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                RdjsonEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::JsonLines => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::JsonLines)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                JsonLinesEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Junit => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Junit)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                JunitEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Concise | OutputFormat::Full => {
                TextEmitter::default()

@@ -264,7 +245,6 @@ impl Printer {
                    .with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
                    .with_show_source(self.format == OutputFormat::Full)
                    .with_unsafe_fixes(self.unsafe_fixes)
                    .with_preview(preview)
                    .emit(writer, &diagnostics.inner, &context)?;

                if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {

@@ -299,18 +279,10 @@ impl Printer {
                GitlabEmitter::default().emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Pylint => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Pylint)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                PylintEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Azure => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Azure)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                AzureEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Sarif => {
                SarifEmitter.emit(writer, &diagnostics.inner, &context)?;

@@ -334,7 +306,8 @@ impl Printer {
            .sorted_by_key(|(code, message)| (*code, message.fixable()))
            .fold(
                vec![],
                |mut acc: Vec<((Option<&SecondaryCode>, &Diagnostic), usize)>, (code, message)| {
                |mut acc: Vec<((Option<&SecondaryCode>, &OldDiagnostic), usize)>,
                 (code, message)| {
                    if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
                        if *prev_code == code {
                            *count += 1;

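For context on the pattern the printer reverts to: each output format gets its own `Emitter` implementation, and a single `match` over the output format selects which one writes the diagnostics. A minimal sketch under simplified types follows; the trait and message shape here are stand-ins for illustration, not ruff's actual `Emitter` signature.

use std::io::Write;

// A simplified stand-in for a rendered diagnostic.
struct Message {
    code: &'static str,
    body: &'static str,
}

// One trait, one implementation per output format.
trait Emitter {
    fn emit(&self, writer: &mut dyn Write, messages: &[Message]) -> std::io::Result<()>;
}

struct ConciseEmitter;

impl Emitter for ConciseEmitter {
    fn emit(&self, writer: &mut dyn Write, messages: &[Message]) -> std::io::Result<()> {
        for message in messages {
            writeln!(writer, "{}: {}", message.code, message.body)?;
        }
        Ok(())
    }
}

fn main() -> std::io::Result<()> {
    let messages = [Message { code: "F401", body: "`os` imported but unused" }];
    let mut out = Vec::new();
    // The real printer would pick the emitter based on `OutputFormat`.
    ConciseEmitter.emit(&mut out, &messages)?;
    assert_eq!(String::from_utf8(out).unwrap(), "F401: `os` imported but unused\n");
    Ok(())
}
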
@@ -57,40 +57,33 @@ fn dependencies() -> Result<()> {
        .write_str(indoc::indoc! {r#"
            def f(): pass
        "#})?;
    root.child("ruff")
        .child("e.pyi")
        .write_str(indoc::indoc! {r#"
            def f() -> None: ...
        "#})?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().current_dir(&root), @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [
            "ruff/c.py"
          ],
          "ruff/c.py": [
            "ruff/d.py"
          ],
          "ruff/d.py": [
            "ruff/e.py",
            "ruff/e.pyi"
          ],
          "ruff/e.py": [],
          "ruff/e.pyi": []
        }
        assert_cmd_snapshot!(command().current_dir(&root), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [
            "ruff/c.py"
          ],
          "ruff/c.py": [
            "ruff/d.py"
          ],
          "ruff/d.py": [
            "ruff/e.py"
          ],
          "ruff/e.py": []
        }

        ----- stderr -----
        "#);
        ----- stderr -----
        "###);
    });

    Ok(())

@@ -204,43 +197,23 @@ fn string_detection() -> Result<()> {
    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [],
          "ruff/c.py": []
        }
        assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [
            "ruff/c.py"
          ],
          "ruff/c.py": []
        }

        ----- stderr -----
        "#);
    });

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().arg("--detect-string-imports").arg("--min-dots").arg("1").current_dir(&root), @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [
            "ruff/c.py"
          ],
          "ruff/c.py": []
        }

        ----- stderr -----
        "#);
        ----- stderr -----
        "###);
    });

    Ok(())

@@ -120,7 +120,7 @@ fn nonexistent_config_file() {
#[test]
fn config_override_rejected_if_invalid_toml() {
    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
        .args(["format", "--config", "foo = bar", "."]), @r"
        .args(["format", "--config", "foo = bar", "."]), @r#"
    success: false
    exit_code: 2
    ----- stdout -----

@@ -137,11 +137,12 @@ fn config_override_rejected_if_invalid_toml() {
    TOML parse error at line 1, column 7
      |
    1 | foo = bar
      |       ^^^
    string values must be quoted, expected literal string
      |       ^
    invalid string
    expected `"`, `'`

    For more information, try '--help'.
    ");
    "#);
}

#[test]

@@ -115,13 +115,12 @@ fn stdin_error() {
    success: false
    exit_code: 1
    ----- stdout -----
    F401 [*] `os` imported but unused
     --> -:1:8
    -:1:8: F401 [*] `os` imported but unused
      |
    1 | import os
      |        ^^
      |        ^^ F401
      |
    help: Remove unused import: `os`
      = help: Remove unused import: `os`

    Found 1 error.
    [*] 1 fixable with the `--fix` option.

@@ -140,13 +139,12 @@ fn stdin_filename() {
    success: false
    exit_code: 1
    ----- stdout -----
    F401 [*] `os` imported but unused
     --> F401.py:1:8
    F401.py:1:8: F401 [*] `os` imported but unused
      |
    1 | import os
      |        ^^
      |        ^^ F401
      |
    help: Remove unused import: `os`
      = help: Remove unused import: `os`

    Found 1 error.
    [*] 1 fixable with the `--fix` option.

@@ -176,21 +174,19 @@ import bar # unused import
    success: false
    exit_code: 1
    ----- stdout -----
    F401 [*] `bar` imported but unused
     --> bar.py:2:8
    bar.py:2:8: F401 [*] `bar` imported but unused
      |
    2 | import bar # unused import
      |        ^^^
      |        ^^^ F401
      |
    help: Remove unused import: `bar`
      = help: Remove unused import: `bar`

    F401 [*] `foo` imported but unused
     --> foo.py:2:8
    foo.py:2:8: F401 [*] `foo` imported but unused
      |
    2 | import foo # unused import
      |        ^^^
      |        ^^^ F401
      |
    help: Remove unused import: `foo`
      = help: Remove unused import: `foo`

    Found 2 errors.
    [*] 2 fixable with the `--fix` option.

@@ -212,13 +208,12 @@ fn check_warn_stdin_filename_with_files() {
    success: false
    exit_code: 1
    ----- stdout -----
    F401 [*] `os` imported but unused
     --> F401.py:1:8
    F401.py:1:8: F401 [*] `os` imported but unused
      |
    1 | import os
      |        ^^
      |        ^^ F401
      |
    help: Remove unused import: `os`
      = help: Remove unused import: `os`

    Found 1 error.
    [*] 1 fixable with the `--fix` option.

@@ -239,13 +234,12 @@ fn stdin_source_type_py() {
    success: false
    exit_code: 1
    ----- stdout -----
    F401 [*] `os` imported but unused
     --> TCH.py:1:8
    TCH.py:1:8: F401 [*] `os` imported but unused
      |
    1 | import os
      |        ^^
      |        ^^ F401
      |
    help: Remove unused import: `os`
      = help: Remove unused import: `os`

    Found 1 error.
    [*] 1 fixable with the `--fix` option.

@@ -477,11 +471,10 @@ fn stdin_fix_jupyter() {
      "nbformat_minor": 5
    }
    ----- stderr -----
    F821 Undefined name `x`
     --> Jupyter.ipynb:cell 3:1:7
    Jupyter.ipynb:cell 3:1:7: F821 Undefined name `x`
      |
    1 | print(x)
      |       ^
      |       ^ F821
      |

    Found 3 errors (2 fixed, 1 remaining).

@@ -576,21 +569,19 @@ fn stdin_override_parser_ipynb() {
    success: false
    exit_code: 1
    ----- stdout -----
    F401 [*] `os` imported but unused
     --> Jupyter.py:cell 1:1:8
    Jupyter.py:cell 1:1:8: F401 [*] `os` imported but unused
      |
    1 | import os
      |        ^^
      |        ^^ F401
      |
    help: Remove unused import: `os`
      = help: Remove unused import: `os`

    F401 [*] `sys` imported but unused
     --> Jupyter.py:cell 3:1:8
    Jupyter.py:cell 3:1:8: F401 [*] `sys` imported but unused
      |
    1 | import sys
      |        ^^^
      |        ^^^ F401
      |
    help: Remove unused import: `sys`
      = help: Remove unused import: `sys`

    Found 2 errors.
    [*] 2 fixable with the `--fix` option.

@@ -614,13 +605,12 @@ fn stdin_override_parser_py() {
    success: false
    exit_code: 1
    ----- stdout -----
    F401 [*] `os` imported but unused
     --> F401.ipynb:1:8
    F401.ipynb:1:8: F401 [*] `os` imported but unused
      |
    1 | import os
      |        ^^
      |        ^^ F401
      |
    help: Remove unused import: `os`
      = help: Remove unused import: `os`

    Found 1 error.
    [*] 1 fixable with the `--fix` option.

@@ -643,13 +633,12 @@ fn stdin_fix_when_not_fixable_should_still_print_contents() {
        print(sys.version)

    ----- stderr -----
    F634 If test is a tuple, which is always `True`
     --> -:3:4
    -:3:4: F634 If test is a tuple, which is always `True`
      |
    1 | import sys
    2 |
    3 | if (1, 2):
      |    ^^^^^^
      |    ^^^^^^ F634
    4 |     print(sys.version)
      |

@@ -809,8 +798,7 @@ fn stdin_parse_error() {
    success: false
    exit_code: 1
    ----- stdout -----
    invalid-syntax: Expected one or more symbol names after import
     --> -:1:16
    -:1:16: SyntaxError: Expected one or more symbol names after import
      |
    1 | from foo import
      |                ^

@@ -830,16 +818,14 @@ fn stdin_multiple_parse_error() {
    success: false
    exit_code: 1
    ----- stdout -----
    invalid-syntax: Expected one or more symbol names after import
     --> -:1:16
    -:1:16: SyntaxError: Expected one or more symbol names after import
      |
    1 | from foo import
      |                ^
    2 | bar =
      |

    invalid-syntax: Expected an expression
     --> -:2:6
    -:2:6: SyntaxError: Expected an expression
      |
    1 | from foo import
    2 | bar =

@@ -861,8 +847,7 @@ fn parse_error_not_included() {
    success: false
    exit_code: 1
    ----- stdout -----
    invalid-syntax: Expected an expression
     --> -:1:6
    -:1:6: SyntaxError: Expected an expression
      |
    1 | foo =
      |      ^

@@ -882,11 +867,10 @@ fn full_output_preview() {
    success: false
    exit_code: 1
    ----- stdout -----
    E741 Ambiguous variable name: `l`
     --> -:1:1
    -:1:1: E741 Ambiguous variable name: `l`
      |
    1 | l = 1
      | ^
      | ^ E741
      |

    Found 1 error.

@@ -911,11 +895,10 @@ preview = true
    success: false
    exit_code: 1
    ----- stdout -----
    E741 Ambiguous variable name: `l`
     --> -:1:1
    -:1:1: E741 Ambiguous variable name: `l`
      |
    1 | l = 1
      | ^
      | ^ E741
      |

    Found 1 error.

@@ -933,11 +916,10 @@ fn full_output_format() {
    success: false
    exit_code: 1
    ----- stdout -----
    E741 Ambiguous variable name: `l`
     --> -:1:1
    -:1:1: E741 Ambiguous variable name: `l`
      |
    1 | l = 1
      | ^
      | ^ E741
      |

    Found 1 error.

@@ -1085,7 +1067,7 @@ fn show_statistics_syntax_errors() {
    success: false
    exit_code: 1
    ----- stdout -----
    1	invalid-syntax
    1	syntax-error
    Found 1 error.

    ----- stderr -----

@@ -1098,7 +1080,7 @@ fn show_statistics_syntax_errors() {
    success: false
    exit_code: 1
    ----- stdout -----
    1	invalid-syntax
    1	syntax-error
    Found 1 error.

    ----- stderr -----

@@ -1111,7 +1093,7 @@ fn show_statistics_syntax_errors() {
    success: false
    exit_code: 1
    ----- stdout -----
    1	invalid-syntax
    1	syntax-error
    Found 1 error.

    ----- stderr -----

@@ -1424,9 +1406,7 @@ fn redirect_direct() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF950 Hey this is a test rule that was redirected from another.
     --> -:1:1

    -:1:1: RUF950 Hey this is a test rule that was redirected from another.
    Found 1 error.

    ----- stderr -----

@@ -1458,9 +1438,7 @@ fn redirect_prefix() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF950 Hey this is a test rule that was redirected from another.
     --> -:1:1

    -:1:1: RUF950 Hey this is a test rule that was redirected from another.
    Found 1 error.

    ----- stderr -----

@@ -1477,9 +1455,7 @@ fn deprecated_direct() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF920 Hey this is a deprecated test rule.
     --> -:1:1

    -:1:1: RUF920 Hey this is a deprecated test rule.
    Found 1 error.

    ----- stderr -----

@@ -1496,12 +1472,8 @@ fn deprecated_multiple_direct() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF920 Hey this is a deprecated test rule.
     --> -:1:1

    RUF921 Hey this is another deprecated test rule.
     --> -:1:1

    -:1:1: RUF920 Hey this is a deprecated test rule.
    -:1:1: RUF921 Hey this is another deprecated test rule.
    Found 2 errors.

    ----- stderr -----

@@ -1519,12 +1491,8 @@ fn deprecated_indirect() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF920 Hey this is a deprecated test rule.
     --> -:1:1

    RUF921 Hey this is another deprecated test rule.
     --> -:1:1

    -:1:1: RUF920 Hey this is a deprecated test rule.
    -:1:1: RUF921 Hey this is another deprecated test rule.
    Found 2 errors.

    ----- stderr -----

@@ -1670,23 +1638,22 @@ fn check_input_from_argfile() -> Result<()> {
        (file_a_path.display().to_string().as_str(), "/path/to/a.py"),
    ]}, {
        assert_cmd_snapshot!(cmd
            .pass_stdin(""), @r"
            .pass_stdin(""), @r###"
        success: false
        exit_code: 1
        ----- stdout -----
        F401 [*] `os` imported but unused
         --> /path/to/a.py:1:8
        /path/to/a.py:1:8: F401 [*] `os` imported but unused
          |
        1 | import os
          |        ^^
          |        ^^ F401
          |
        help: Remove unused import: `os`
          = help: Remove unused import: `os`

        Found 1 error.
        [*] 1 fixable with the `--fix` option.

        ----- stderr -----
        ");
        "###);
    });

    Ok(())

@@ -1702,12 +1669,8 @@ fn check_hints_hidden_unsafe_fixes() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF901 [*] Hey this is a stable test rule with a safe fix.
     --> -:1:1

    RUF902 Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
    -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
    Found 2 errors.
    [*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).

@@ -1724,9 +1687,7 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF902 Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
    Found 1 error.
    No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).

@@ -1744,12 +1705,8 @@ fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF901 [*] Hey this is a stable test rule with a safe fix.
     --> -:1:1

    RUF902 Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
    -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
    Found 2 errors.
    [*] 1 fixable with the --fix option.

@@ -1768,9 +1725,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF902 Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
    Found 1 error.

    ----- stderr -----

@@ -1787,12 +1742,8 @@ fn check_shows_unsafe_fixes_with_opt_in() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF901 [*] Hey this is a stable test rule with a safe fix.
     --> -:1:1

    RUF902 [*] Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
    -:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
    Found 2 errors.
    [*] 2 fixable with the --fix option.

@@ -1813,9 +1764,7 @@ fn fix_applies_safe_fixes_by_default() {
    # fix from stable-test-rule-safe-fix

    ----- stderr -----
    RUF902 Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
    Found 2 errors (1 fixed, 1 remaining).
    No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
    ");

@@ -1852,9 +1801,7 @@ fn fix_does_not_apply_display_only_fixes() {
    ----- stdout -----
    def add_to_list(item, some_list=[]): ...
    ----- stderr -----
    RUF903 Hey this is a stable test rule with a display only fix.
     --> -:1:1

    -:1:1: RUF903 Hey this is a stable test rule with a display only fix.
    Found 1 error.
    ");
}

@@ -1872,9 +1819,7 @@ fn fix_does_not_apply_display_only_fixes_with_unsafe_fixes_enabled() {
    ----- stdout -----
    def add_to_list(item, some_list=[]): ...
    ----- stderr -----
    RUF903 Hey this is a stable test rule with a display only fix.
     --> -:1:1

    -:1:1: RUF903 Hey this is a stable test rule with a display only fix.
    Found 1 error.
    ");
}

@@ -1891,9 +1836,7 @@ fn fix_only_unsafe_fixes_available() {
    ----- stdout -----

    ----- stderr -----
    RUF902 Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
    Found 1 error.
    No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).
    ");

@@ -2029,12 +1972,8 @@ extend-unsafe-fixes = ["RUF901"]
    success: false
    exit_code: 1
    ----- stdout -----
    RUF901 Hey this is a stable test rule with a safe fix.
     --> -:1:1

    RUF902 Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF901 Hey this is a stable test rule with a safe fix.
    -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
    Found 2 errors.
    No fixes available (2 hidden fixes can be enabled with the `--unsafe-fixes` option).

@@ -2065,12 +2004,8 @@ extend-safe-fixes = ["RUF902"]
    success: false
    exit_code: 1
    ----- stdout -----
    RUF901 [*] Hey this is a stable test rule with a safe fix.
     --> -:1:1

    RUF902 [*] Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
    -:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
    Found 2 errors.
    [*] 2 fixable with the `--fix` option.

@@ -2103,12 +2038,8 @@ extend-safe-fixes = ["RUF902"]
    success: false
    exit_code: 1
    ----- stdout -----
    RUF901 [*] Hey this is a stable test rule with a safe fix.
     --> -:1:1

    RUF902 Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    -:1:1: RUF901 [*] Hey this is a stable test rule with a safe fix.
    -:1:1: RUF902 Hey this is a stable test rule with an unsafe fix.
    Found 2 errors.
    [*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).

@@ -2143,27 +2074,13 @@ extend-safe-fixes = ["RUF9"]
    success: false
    exit_code: 1
    ----- stdout -----
    RUF900 Hey this is a stable test rule.
     --> -:1:1

    RUF901 Hey this is a stable test rule with a safe fix.
     --> -:1:1

    RUF902 [*] Hey this is a stable test rule with an unsafe fix.
     --> -:1:1

    RUF903 Hey this is a stable test rule with a display only fix.
     --> -:1:1

    RUF920 Hey this is a deprecated test rule.
     --> -:1:1

    RUF921 Hey this is another deprecated test rule.
     --> -:1:1

    RUF950 Hey this is a test rule that was redirected from another.
     --> -:1:1

    -:1:1: RUF900 Hey this is a stable test rule.
    -:1:1: RUF901 Hey this is a stable test rule with a safe fix.
    -:1:1: RUF902 [*] Hey this is a stable test rule with an unsafe fix.
    -:1:1: RUF903 Hey this is a stable test rule with a display only fix.
    -:1:1: RUF920 Hey this is a deprecated test rule.
    -:1:1: RUF921 Hey this is another deprecated test rule.
    -:1:1: RUF950 Hey this is a test rule that was redirected from another.
    Found 7 errors.
    [*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).

@@ -2224,11 +2141,10 @@ def log(x, base) -> float:
    success: false
    exit_code: 1
    ----- stdout -----
    D417 Missing argument description in the docstring for `log`: `base`
     --> -:2:5
    -:2:5: D417 Missing argument description in the docstring for `log`: `base`
      |
    2 | def log(x, base) -> float:
      |     ^^^
      |     ^^^ D417
    3 |     """Calculate natural log of a value
      |

@@ -2261,15 +2177,14 @@ select = ["RUF017"]
    success: false
    exit_code: 1
    ----- stdout -----
    RUF017 Avoid quadratic list summation
     --> -:3:1
    -:3:1: RUF017 Avoid quadratic list summation
      |
    1 | x = [1, 2, 3]
    2 | y = [4, 5, 6]
    3 | sum([x, y], [])
      | ^^^^^^^^^^^^^^^
      | ^^^^^^^^^^^^^^^ RUF017
      |
    help: Replace with `functools.reduce`
      = help: Replace with `functools.reduce`

    Found 1 error.
    No fixes available (1 hidden fix can be enabled with the `--unsafe-fixes` option).

@@ -2302,15 +2217,14 @@ unfixable = ["RUF"]
    success: false
    exit_code: 1
    ----- stdout -----
    RUF017 Avoid quadratic list summation
     --> -:3:1
    -:3:1: RUF017 Avoid quadratic list summation
      |
    1 | x = [1, 2, 3]
    2 | y = [4, 5, 6]
    3 | sum([x, y], [])
      | ^^^^^^^^^^^^^^^
      | ^^^^^^^^^^^^^^^ RUF017
      |
    help: Replace with `functools.reduce`
      = help: Replace with `functools.reduce`

    Found 1 error.

@@ -2332,11 +2246,11 @@ fn pyproject_toml_stdin_syntax_error() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF200 Failed to parse pyproject.toml: unclosed table, expected `]`
     --> pyproject.toml:1:9
    pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: invalid table header
    expected `.`, `]`
      |
    1 | [project
      |         ^
      |         ^ RUF200
      |

    Found 1 error.

@@ -2358,12 +2272,11 @@ fn pyproject_toml_stdin_schema_error() {
    success: false
    exit_code: 1
    ----- stdout -----
    RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
     --> pyproject.toml:2:8
    pyproject.toml:2:8: RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
      |
    1 | [project]
    2 | name = 1
      |        ^
      |        ^ RUF200
      |

    Found 1 error.

@@ -2451,12 +2364,11 @@ fn pyproject_toml_stdin_schema_error_fix() {
    [project]
    name = 1
    ----- stderr -----
    RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
     --> pyproject.toml:2:8
    pyproject.toml:2:8: RUF200 Failed to parse pyproject.toml: invalid type: integer `1`, expected a string
      |
    1 | [project]
    2 | name = 1
      |        ^
      |        ^ RUF200
      |

    Found 1 error.

@@ -534,7 +534,7 @@ fn nonexistent_config_file() {
|
||||
fn config_override_rejected_if_invalid_toml() {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.args(["--config", "foo = bar", "."]), @r"
|
||||
.args(["--config", "foo = bar", "."]), @r#"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
@@ -551,11 +551,12 @@ fn config_override_rejected_if_invalid_toml() {
|
||||
TOML parse error at line 1, column 7
|
||||
|
|
||||
1 | foo = bar
|
||||
| ^^^
|
||||
string values must be quoted, expected literal string
|
||||
| ^
|
||||
invalid string
|
||||
expected `"`, `'`
|
||||
|
||||
For more information, try '--help'.
|
||||
");
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -732,8 +733,9 @@ select = [E501]
|
||||
Cause: TOML parse error at line 3, column 11
|
||||
|
|
||||
3 | select = [E501]
|
||||
| ^^^^
|
||||
string values must be quoted, expected literal string
|
||||
| ^
|
||||
invalid array
|
||||
expected `]`
|
||||
");
|
||||
});
|
||||
|
||||
@@ -874,7 +876,7 @@ fn each_toml_option_requires_a_new_flag_1() {
|
||||
|
|
||||
1 | extend-select=['F841'], line-length=90
|
||||
| ^
|
||||
unexpected key or value, expected newline, `#`
|
||||
expected newline, `#`
|
||||
|
||||
For more information, try '--help'.
|
||||
");
|
||||
@@ -905,7 +907,7 @@ fn each_toml_option_requires_a_new_flag_2() {
|
||||
|
|
||||
1 | extend-select=['F841'] line-length=90
|
||||
| ^
|
||||
unexpected key or value, expected newline, `#`
|
||||
expected newline, `#`
|
||||
|
||||
For more information, try '--help'.
|
||||
");
|
||||
@@ -993,7 +995,6 @@ fn value_given_to_table_key_is_not_inline_table_2() {
|
||||
- `lint.exclude`
|
||||
- `lint.preview`
|
||||
- `lint.typing-extensions`
|
||||
- `lint.future-annotations`
|
||||
|
||||
For more information, try '--help'.
|
||||
");
|
||||
@@ -2422,7 +2423,7 @@ requires-python = ">= 3.11"
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.detect_string_imports = false
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -2734,7 +2735,7 @@ requires-python = ">= 3.11"
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.detect_string_imports = false
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -3098,7 +3099,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.detect_string_imports = false
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -3478,7 +3479,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.11
|
||||
analyze.string_imports = disabled
|
||||
analyze.detect_string_imports = false
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -3806,7 +3807,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.detect_string_imports = false
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -4134,7 +4135,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.9
|
||||
analyze.string_imports = disabled
|
||||
analyze.detect_string_imports = false
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -4419,7 +4420,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.9
|
||||
analyze.string_imports = disabled
|
||||
analyze.detect_string_imports = false
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -4757,7 +4758,7 @@ from typing import Union;foo: Union[int, str] = 1
|
||||
analyze.exclude = []
|
||||
analyze.preview = disabled
|
||||
analyze.target_version = 3.10
|
||||
analyze.string_imports = disabled
|
||||
analyze.detect_string_imports = false
|
||||
analyze.extension = ExtensionMapping({})
|
||||
analyze.include_dependencies = {}
|
||||
|
||||
@@ -4996,37 +4997,6 @@ fn flake8_import_convention_invalid_aliases_config_module_name() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn flake8_import_convention_nfkc_normalization() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[lint.flake8-import-conventions.aliases]
|
||||
"test.module" = "_﹏𝘥𝘦𝘣𝘶𝘨﹏﹏"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--config")
|
||||
.arg(&ruff_toml)
|
||||
, @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: Invalid alias for module 'test.module': alias normalizes to '__debug__', which is not allowed.
|
||||
");});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn flake8_import_convention_unused_aliased_import() {
|
||||
assert_cmd_snapshot!(
|
||||
@@ -5420,7 +5390,7 @@ fn walrus_before_py38() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:1:2: invalid-syntax: Cannot use named assignment expression (`:=`) on Python 3.7 (syntax was added in Python 3.8)
|
||||
test.py:1:2: SyntaxError: Cannot use named assignment expression (`:=`) on Python 3.7 (syntax was added in Python 3.8)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -5466,15 +5436,15 @@ match 2:
|
||||
print("it's one")
|
||||
"#
|
||||
),
|
||||
@r"
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:2:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
"
|
||||
"###
|
||||
);
|
||||
|
||||
// syntax error on 3.9 with preview
|
||||
@@ -5495,7 +5465,7 @@ match 2:
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
test.py:2:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -5523,7 +5493,7 @@ fn cache_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:1:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
main.py:1:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
"
|
||||
@@ -5536,7 +5506,7 @@ fn cache_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:1:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
main.py:1:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
"
|
||||
@@ -5588,15 +5558,15 @@ fn cookiecutter_globbing() -> Result<()> {
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--select=F811")
|
||||
.current_dir(tempdir.path()), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{{cookiecutter.repo_name}}/tests/maintest.py:3:8: F811 [*] Redefinition of unused `foo` from line 1: `foo` redefined here
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{{cookiecutter.repo_name}}/tests/maintest.py:3:8: F811 [*] Redefinition of unused `foo` from line 1
|
||||
Found 1 error.
|
||||
[*] 1 fixable with the `--fix` option.
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
----- stderr -----
|
||||
");
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -5649,7 +5619,7 @@ fn semantic_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable
|
||||
main.py:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
|
||||
main.py:1:20: F821 Undefined name `foo`
|
||||
|
||||
----- stderr -----
|
||||
@@ -5663,7 +5633,7 @@ fn semantic_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
main.py:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable
|
||||
main.py:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
|
||||
main.py:1:20: F821 Undefined name `foo`
|
||||
|
||||
----- stderr -----
|
||||
@@ -5682,7 +5652,7 @@ fn semantic_syntax_errors() -> Result<()> {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:3: invalid-syntax: assignment expression cannot rebind comprehension variable
|
||||
-:1:3: SyntaxError: assignment expression cannot rebind comprehension variable
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
@@ -5722,111 +5692,3 @@ class Foo:
    "
    );
}

#[test_case::test_case("concise")]
#[test_case::test_case("full")]
#[test_case::test_case("json")]
#[test_case::test_case("json-lines")]
#[test_case::test_case("junit")]
#[test_case::test_case("grouped")]
#[test_case::test_case("github")]
#[test_case::test_case("gitlab")]
#[test_case::test_case("pylint")]
#[test_case::test_case("rdjson")]
#[test_case::test_case("azure")]
#[test_case::test_case("sarif")]
fn output_format(output_format: &str) -> Result<()> {
    const CONTENT: &str = "\
import os # F401
x = y # F821
match 42: # invalid-syntax
    case _: ...
";

    let tempdir = TempDir::new()?;
    let input = tempdir.path().join("input.py");
    fs::write(&input, CONTENT)?;

    let snapshot = format!("output_format_{output_format}");

    let project_dir = dunce::canonicalize(tempdir.path())?;

    insta::with_settings!({
        filters => vec![
            (tempdir_filter(&project_dir).as_str(), "[TMP]/"),
            (tempdir_filter(&tempdir).as_str(), "[TMP]/"),
            (r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
            (ruff_linter::VERSION, "[VERSION]"),
        ]
    }, {
        assert_cmd_snapshot!(
            snapshot,
            Command::new(get_cargo_bin(BIN_NAME))
                .args([
                    "check",
                    "--no-cache",
                    "--output-format",
                    output_format,
                    "--select",
                    "F401,F821",
                    "--target-version",
                    "py39",
                    "input.py",
                ])
                .current_dir(&tempdir),
        );
    });

    Ok(())
}
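For readers unfamiliar with the `insta` filter mechanism used in the test above: filters are `(regex, replacement)` pairs applied to the rendered snapshot before it is compared against the stored file, which is how machine-specific paths and versions become stable placeholders such as `[TMP]/` and `[VERSION]`. A minimal, self-contained sketch (values are illustrative, not from this repository):

```rust
// Minimal sketch of insta's filter mechanism: each (pattern, replacement)
// pair is applied to the snapshot text before comparison.
insta::with_settings!({
    filters => vec![
        (r"/tmp/\S+", "[TMP]/"),         // normalize temp paths
        (r"\d+\.\d+\.\d+", "[VERSION]"), // normalize version numbers
    ]
}, {
    insta::assert_snapshot!(format!("ruff {} at /tmp/abc123", "0.12.0"));
});
```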

#[test]
fn future_annotations_preview_warning() {
    assert_cmd_snapshot!(
        Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .args(["--config", "lint.future-annotations = true"])
            .args(["--select", "F"])
            .arg("--no-preview")
            .arg("-")
            .pass_stdin("1"),
        @r"
    success: true
    exit_code: 0
    ----- stdout -----
    All checks passed!

    ----- stderr -----
    warning: The `lint.future-annotations` setting will have no effect because `preview` is disabled
    ",
    );
}

#[test]
fn up045_nested_optional_flatten_all() {
    let contents = "\
from typing import Optional
nested_optional: Optional[Optional[Optional[str]]] = None
";

    assert_cmd_snapshot!(
        Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .args(["--select", "UP045", "--diff", "--target-version", "py312"])
            .arg("-")
            .pass_stdin(contents),
        @r"
    success: false
    exit_code: 1
    ----- stdout -----
    @@ -1,2 +1,2 @@
     from typing import Optional
    -nested_optional: Optional[Optional[Optional[str]]] = None
    +nested_optional: str | None = None


    ----- stderr -----
    Would fix 1 error.
    ",
    );
}
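The UP045 fix exercised above collapses arbitrarily nested `Optional` wrappers in one pass. A self-contained sketch of that flattening idea (string-based and hypothetical; the real rule rewrites the type-expression AST, not text):

```rust
/// Hypothetical, string-based sketch of the UP045 flattening shown above.
fn flatten_optional(mut annotation: &str) -> Option<String> {
    let mut peeled = false;
    // Peel `Optional[...]` wrappers until none remain.
    while let Some(inner) = annotation
        .strip_prefix("Optional[")
        .and_then(|rest| rest.strip_suffix(']'))
    {
        annotation = inner;
        peeled = true;
    }
    peeled.then(|| format!("{annotation} | None"))
}

assert_eq!(
    flatten_optional("Optional[Optional[Optional[str]]]").as_deref(),
    Some("str | None"),
);
```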

@@ -95,6 +95,6 @@ is stricter, which could affect the suggested fix. See [this FAQ section](https:
## References
- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)
- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)

----- stderr -----

@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - azure
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y`
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;code=invalid-syntax;]Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----
@@ -1,25 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - concise
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
input.py:2:5: F821 Undefined name `y`
input.py:3:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,52 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - full
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
F401 [*] `os` imported but unused
 --> input.py:1:8
  |
1 | import os # F401
  |        ^^
2 | x = y # F821
3 | match 42: # invalid-syntax
  |
help: Remove unused import: `os`

F821 Undefined name `y`
 --> input.py:2:5
  |
1 | import os # F401
2 | x = y # F821
  |     ^
3 | match 42: # invalid-syntax
4 |     case _: ...
  |

invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
 --> input.py:3:1
  |
1 | import os # F401
2 | x = y # F821
3 | match 42: # invalid-syntax
  | ^^^^^
4 |     case _: ...
  |

Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - github
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused
::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y`
::error title=Ruff (invalid-syntax),file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----
@@ -1,78 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - gitlab
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
[
  {
    "check_name": "F401",
    "description": "F401: `os` imported but unused",
    "fingerprint": "4dbad37161e65c72",
    "location": {
      "path": "input.py",
      "positions": {
        "begin": {
          "column": 8,
          "line": 1
        },
        "end": {
          "column": 10,
          "line": 1
        }
      }
    },
    "severity": "major"
  },
  {
    "check_name": "F821",
    "description": "F821: Undefined name `y`",
    "fingerprint": "7af59862a085230",
    "location": {
      "path": "input.py",
      "positions": {
        "begin": {
          "column": 5,
          "line": 2
        },
        "end": {
          "column": 6,
          "line": 2
        }
      }
    },
    "severity": "major"
  },
  {
    "check_name": "invalid-syntax",
    "description": "invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
    "fingerprint": "e558cec859bb66e8",
    "location": {
      "path": "input.py",
      "positions": {
        "begin": {
          "column": 1,
          "line": 3
        },
        "end": {
          "column": 6,
          "line": 3
        }
      }
    },
    "severity": "major"
  }
]
----- stderr -----
@@ -1,27 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - grouped
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:
  1:8 F401 [*] `os` imported but unused
  2:5 F821 Undefined name `y`
  3:1 invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - json-lines
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
{"cell":null,"code":"invalid-syntax","end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}

----- stderr -----
@@ -1,88 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - json
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
[
  {
    "cell": null,
    "code": "F401",
    "end_location": {
      "column": 10,
      "row": 1
    },
    "filename": "[TMP]/input.py",
    "fix": {
      "applicability": "safe",
      "edits": [
        {
          "content": "",
          "end_location": {
            "column": 1,
            "row": 2
          },
          "location": {
            "column": 1,
            "row": 1
          }
        }
      ],
      "message": "Remove unused import: `os`"
    },
    "location": {
      "column": 8,
      "row": 1
    },
    "message": "`os` imported but unused",
    "noqa_row": 1,
    "url": "https://docs.astral.sh/ruff/rules/unused-import"
  },
  {
    "cell": null,
    "code": "F821",
    "end_location": {
      "column": 6,
      "row": 2
    },
    "filename": "[TMP]/input.py",
    "fix": null,
    "location": {
      "column": 5,
      "row": 2
    },
    "message": "Undefined name `y`",
    "noqa_row": 2,
    "url": "https://docs.astral.sh/ruff/rules/undefined-name"
  },
  {
    "cell": null,
    "code": "invalid-syntax",
    "end_location": {
      "column": 6,
      "row": 3
    },
    "filename": "[TMP]/input.py",
    "fix": null,
    "location": {
      "column": 1,
      "row": 3
    },
    "message": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
    "noqa_row": null,
    "url": null
  }
]
----- stderr -----
@@ -1,34 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - junit
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="3" failures="3" errors="0">
    <testsuite name="[TMP]/input.py" tests="3" disabled="0" errors="0" failures="3" package="org.ruff">
        <testcase name="org.ruff.F401" classname="[TMP]/input" line="1" column="8">
            <failure message="`os` imported but unused">line 1, col 8, `os` imported but unused</failure>
        </testcase>
        <testcase name="org.ruff.F821" classname="[TMP]/input" line="2" column="5">
            <failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
        </testcase>
        <testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1">
            <failure message="Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
        </testcase>
    </testsuite>
</testsuites>

----- stderr -----
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - pylint
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1: [F401] `os` imported but unused
input.py:2: [F821] Undefined name `y`
input.py:3: [invalid-syntax] Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----
@@ -1,102 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - rdjson
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
{
  "diagnostics": [
    {
      "code": {
        "url": "https://docs.astral.sh/ruff/rules/unused-import",
        "value": "F401"
      },
      "location": {
        "path": "[TMP]/input.py",
        "range": {
          "end": {
            "column": 10,
            "line": 1
          },
          "start": {
            "column": 8,
            "line": 1
          }
        }
      },
      "message": "`os` imported but unused",
      "suggestions": [
        {
          "range": {
            "end": {
              "column": 1,
              "line": 2
            },
            "start": {
              "column": 1,
              "line": 1
            }
          },
          "text": ""
        }
      ]
    },
    {
      "code": {
        "url": "https://docs.astral.sh/ruff/rules/undefined-name",
        "value": "F821"
      },
      "location": {
        "path": "[TMP]/input.py",
        "range": {
          "end": {
            "column": 6,
            "line": 2
          },
          "start": {
            "column": 5,
            "line": 2
          }
        }
      },
      "message": "Undefined name `y`"
    },
    {
      "code": {
        "value": "invalid-syntax"
      },
      "location": {
        "path": "[TMP]/input.py",
        "range": {
          "end": {
            "column": 6,
            "line": 3
          },
          "start": {
            "column": 1,
            "line": 3
          }
        }
      },
      "message": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
    }
  ],
  "severity": "WARNING",
  "source": {
    "name": "ruff",
    "url": "https://docs.astral.sh/ruff"
  }
}
----- stderr -----
@@ -1,142 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
  program: ruff
  args:
    - check
    - "--no-cache"
    - "--output-format"
    - sarif
    - "--select"
    - "F401,F821"
    - "--target-version"
    - py39
    - input.py
---
success: false
exit_code: 1
----- stdout -----
{
  "$schema": "https://json.schemastore.org/sarif-2.1.0.json",
  "runs": [
    {
      "results": [
        {
          "level": "error",
          "locations": [
            {
              "physicalLocation": {
                "artifactLocation": {
                  "uri": "[TMP]/input.py"
                },
                "region": {
                  "endColumn": 10,
                  "endLine": 1,
                  "startColumn": 8,
                  "startLine": 1
                }
              }
            }
          ],
          "message": {
            "text": "`os` imported but unused"
          },
          "ruleId": "F401"
        },
        {
          "level": "error",
          "locations": [
            {
              "physicalLocation": {
                "artifactLocation": {
                  "uri": "[TMP]/input.py"
                },
                "region": {
                  "endColumn": 6,
                  "endLine": 2,
                  "startColumn": 5,
                  "startLine": 2
                }
              }
            }
          ],
          "message": {
            "text": "Undefined name `y`"
          },
          "ruleId": "F821"
        },
        {
          "level": "error",
          "locations": [
            {
              "physicalLocation": {
                "artifactLocation": {
                  "uri": "[TMP]/input.py"
                },
                "region": {
                  "endColumn": 6,
                  "endLine": 3,
                  "startColumn": 1,
                  "startLine": 3
                }
              }
            }
          ],
          "message": {
            "text": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
          },
          "ruleId": "invalid-syntax"
        }
      ],
      "tool": {
        "driver": {
          "informationUri": "https://github.com/astral-sh/ruff",
          "name": "ruff",
          "rules": [
            {
              "fullDescription": {
                "text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np  # unused import\n\n\ndef area(radius):\n    return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n    return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n    print(\"numpy is installed\")\nelse:\n    print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
              },
              "help": {
                "text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
              },
              "helpUri": "https://docs.astral.sh/ruff/rules/unused-import",
              "id": "F401",
              "properties": {
                "id": "F401",
                "kind": "Pyflakes",
                "name": "unused-import",
                "problem.severity": "error"
              },
              "shortDescription": {
                "text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
              }
            },
            {
              "fullDescription": {
                "text": "## What it does\nChecks for uses of undefined names.\n\n## Why is this bad?\nAn undefined name is likely to raise `NameError` at runtime.\n\n## Example\n```python\ndef double():\n    return n * 2  # raises `NameError` if `n` is undefined when `double` is called\n```\n\nUse instead:\n```python\ndef double(n):\n    return n * 2\n```\n\n## Options\n- [`target-version`]: Can be used to configure which symbols Ruff will understand\n  as being available in the `builtins` namespace.\n\n## References\n- [Python documentation: Naming and binding](https://docs.python.org/3/reference/executionmodel.html#naming-and-binding)\n"
              },
              "help": {
                "text": "Undefined name `{name}`. {tip}"
              },
              "helpUri": "https://docs.astral.sh/ruff/rules/undefined-name",
              "id": "F821",
              "properties": {
                "id": "F821",
                "kind": "Pyflakes",
                "name": "undefined-name",
                "problem.severity": "error"
              },
              "shortDescription": {
                "text": "Undefined name `{name}`. {tip}"
              }
            }
          ],
          "version": "[VERSION]"
        }
      }
    }
  ],
  "version": "2.1.0"
}
----- stderr -----
@@ -392,7 +392,7 @@ formatter.docstring_code_line_width = dynamic
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.7
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -60,7 +60,7 @@ fn config_option_ignored_but_validated() {
    assert_cmd_snapshot!(
        Command::new(get_cargo_bin(BIN_NAME))
            .arg("version")
            .args(["--config", "foo = bar"]), @r"
            .args(["--config", "foo = bar"]), @r#"
    success: false
    exit_code: 2
    ----- stdout -----
@@ -77,11 +77,12 @@ fn config_option_ignored_but_validated() {
    TOML parse error at line 1, column 7
      |
    1 | foo = bar
      |       ^^^
    string values must be quoted, expected literal string
      |       ^
    invalid string
    expected `"`, `'`

    For more information, try '--help'.
    "
    "#
    );
    });
}

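The new snapshot above reflects a change in how the `toml` crate renders this parse failure. The message can be reproduced directly; a sketch (the exact wording depends on the `toml` version pinned in the lockfile):

```rust
// Sketch: the TOML error in the snapshot above comes from parsing an
// unquoted string value; wording varies across `toml` crate versions.
let err = toml::from_str::<toml::Value>("foo = bar").unwrap_err();
println!("{err}"); // e.g. "invalid string / expected `\"`, `'`"
```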
@@ -1,5 +1,3 @@
#![expect(clippy::needless_doctest_main)]

//! A library for formatting of text or programming code snippets.
//!
//! Its primary purpose is to build an ASCII-graphical representation of the snippet

@@ -193,14 +193,9 @@ impl DisplaySet<'_> {
        stylesheet: &Stylesheet,
        buffer: &mut StyledBuffer,
    ) -> fmt::Result {
        let hide_severity = annotation.annotation_type.is_none();
        let color = get_annotation_style(&annotation.annotation_type, stylesheet);
        let formatted_len = if let Some(id) = &annotation.id {
            if hide_severity {
                id.len()
            } else {
                2 + id.len() + annotation_type_len(&annotation.annotation_type)
            }
            2 + id.len() + annotation_type_len(&annotation.annotation_type)
        } else {
            annotation_type_len(&annotation.annotation_type)
        };
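The `formatted_len` arithmetic above sizes the annotation header; the `2` accounts for the brackets around the id. A worked example:

```rust
// Worked example of the width computation above:
// "error" (5) + "[" + "]" (2) + "F401" (4) = 11 columns.
let id = "F401";
let severity = "error";
assert_eq!(2 + id.len() + severity.len(), "error[F401]".len());
```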
@@ -214,66 +209,18 @@ impl DisplaySet<'_> {
        if formatted_len == 0 {
            self.format_label(line_offset, &annotation.label, stylesheet, buffer)
        } else {
            // TODO(brent) All of this complicated checking of `hide_severity` should be reverted
            // once we have real severities in Ruff. This code is trying to account for two
            // different cases:
            //
            // - main diagnostic message
            // - subdiagnostic message
            //
            // In the first case, signaled by `hide_severity = true`, we want to print the ID (the
            // noqa code for a ruff lint diagnostic, e.g. `F401`, or `invalid-syntax` for a syntax
            // error) without brackets. Instead, for subdiagnostics, we actually want to print the
            // severity (usually `help`) regardless of the `hide_severity` setting. This is signaled
            // by an ID of `None`.
            //
            // With real severities these should be reported more like in ty:
            //
            // ```
            // error[F401]: `math` imported but unused
            // error[invalid-syntax]: Cannot use `match` statement on Python 3.9...
            // ```
            //
            // instead of the current versions intended to mimic the old Ruff output format:
            //
            // ```
            // F401 `math` imported but unused
            // invalid-syntax: Cannot use `match` statement on Python 3.9...
            // ```
            //
            // Note that the `invalid-syntax` colon is added manually in `ruff_db`, not here. We
            // could eventually add a colon to Ruff lint diagnostics (`F401:`) and then make the
            // colon below unconditional again.
            //
            // This also applies to the hard-coded `stylesheet.error()` styling of the
            // hidden-severity `id`. This should just be `*color` again later, but for now we don't
            // want an unformatted `id`, which is what `get_annotation_style` returns for
            // `DisplayAnnotationType::None`.
            let annotation_type = annotation_type_str(&annotation.annotation_type);
            if let Some(id) = annotation.id {
                if hide_severity {
                    buffer.append(line_offset, &format!("{id} "), *stylesheet.error());
                } else {
                    buffer.append(line_offset, &format!("{annotation_type}[{id}]"), *color);
                }
            } else {
                buffer.append(line_offset, annotation_type, *color);
            }

            if annotation.is_fixable {
                buffer.append(line_offset, "[", stylesheet.none);
                buffer.append(line_offset, "*", stylesheet.help);
                buffer.append(line_offset, "]", stylesheet.none);
                // In the hide-severity case, we need a space instead of the colon and space below.
                if hide_severity {
                    buffer.append(line_offset, " ", stylesheet.none);
                }
            }
            let id = match &annotation.id {
                Some(id) => format!("[{id}]"),
                None => String::new(),
            };
            buffer.append(
                line_offset,
                &format!("{}{}", annotation_type_str(&annotation.annotation_type), id),
                *color,
            );

            if !is_annotation_empty(annotation) {
                if annotation.id.is_none() || !hide_severity {
                    buffer.append(line_offset, ": ", stylesheet.none);
                }
                buffer.append(line_offset, ": ", stylesheet.none);
                self.format_label(line_offset, &annotation.label, stylesheet, buffer)?;
            }
            Ok(())
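The TODO comment in the removed code contrasts two header shapes. As a standalone sketch (hypothetical helper, not crate code):

```rust
// Hypothetical helper contrasting the two header styles described in
// the removed TODO comment above.
fn header(id: &str, message: &str, with_severity: bool) -> String {
    if with_severity {
        format!("error[{id}]: {message}") // ty-style rendering
    } else {
        format!("{id} {message}") // legacy Ruff rendering
    }
}

assert_eq!(
    header("F401", "`math` imported but unused", true),
    "error[F401]: `math` imported but unused",
);
```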
@@ -302,15 +249,11 @@ impl DisplaySet<'_> {
        let lineno_color = stylesheet.line_no();
        buffer.puts(line_offset, lineno_width, header_sigil, *lineno_color);
        buffer.puts(line_offset, lineno_width + 4, path, stylesheet.none);
        if let Some(Position { row, col, cell }) = pos {
            if let Some(cell) = cell {
                buffer.append(line_offset, ":", stylesheet.none);
                buffer.append(line_offset, &format!("cell {cell}"), stylesheet.none);
            }
            buffer.append(line_offset, ":", stylesheet.none);
            buffer.append(line_offset, row.to_string().as_str(), stylesheet.none);
        if let Some((col, row)) = pos {
            buffer.append(line_offset, ":", stylesheet.none);
            buffer.append(line_offset, col.to_string().as_str(), stylesheet.none);
            buffer.append(line_offset, ":", stylesheet.none);
            buffer.append(line_offset, row.to_string().as_str(), stylesheet.none);
        }
        Ok(())
    }
@@ -825,7 +768,6 @@ pub(crate) struct Annotation<'a> {
    pub(crate) annotation_type: DisplayAnnotationType,
    pub(crate) id: Option<&'a str>,
    pub(crate) label: Vec<DisplayTextFragment<'a>>,
    pub(crate) is_fixable: bool,
}

/// A single line used in `DisplayList`.
@@ -891,13 +833,6 @@ impl DisplaySourceAnnotation<'_> {
    }
}

#[derive(Debug, PartialEq)]
pub(crate) struct Position {
    row: usize,
    col: usize,
    cell: Option<usize>,
}

/// Raw line - a line which does not have the `lineno` part and is not considered
/// a part of the snippet.
#[derive(Debug, PartialEq)]
@@ -906,7 +841,7 @@ pub(crate) enum DisplayRawLine<'a> {
    /// slice in the project structure.
    Origin {
        path: &'a str,
        pos: Option<Position>,
        pos: Option<(usize, usize)>,
        header_type: DisplayHeaderType,
    },

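For orientation, the two representations swapped in this hunk, side by side (a sketch; field names are as in the removed code):

```rust
// The removed `Position` carried an optional notebook cell alongside the
// line/column pair; the tuple that replaces it keeps only the pair.
struct Position {
    row: usize,
    col: usize,
    cell: Option<usize>, // Jupyter cell index, when applicable
}
type Pos = Option<(usize, usize)>; // the new form: a bare line/column pair
```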
@@ -985,13 +920,6 @@ pub(crate) enum DisplayAnnotationType {
    Help,
}

impl DisplayAnnotationType {
    #[inline]
    const fn is_none(&self) -> bool {
        matches!(self, Self::None)
    }
}

impl From<snippet::Level> for DisplayAnnotationType {
    fn from(at: snippet::Level) -> Self {
        match at {
@@ -1087,12 +1015,11 @@ fn format_message<'m>(
        title,
        footer,
        snippets,
        is_fixable,
    } = message;

    let mut sets = vec![];
    let body = if !snippets.is_empty() || primary {
        vec![format_title(level, id, title, is_fixable)]
        vec![format_title(level, id, title)]
    } else {
        format_footer(level, id, title)
    };
@@ -1133,18 +1060,12 @@ fn format_message<'m>(
    sets
}

fn format_title<'a>(
    level: crate::Level,
    id: Option<&'a str>,
    label: &'a str,
    is_fixable: bool,
) -> DisplayLine<'a> {
fn format_title<'a>(level: crate::Level, id: Option<&'a str>, label: &'a str) -> DisplayLine<'a> {
    DisplayLine::Raw(DisplayRawLine::Annotation {
        annotation: Annotation {
            annotation_type: DisplayAnnotationType::from(level),
            id,
            label: format_label(Some(label), Some(DisplayTextStyle::Emphasis)),
            is_fixable,
        },
        source_aligned: false,
        continuation: false,
@@ -1163,7 +1084,6 @@ fn format_footer<'a>(
            annotation_type: DisplayAnnotationType::from(level),
            id,
            label: format_label(Some(line), None),
            is_fixable: false,
        },
        source_aligned: true,
        continuation: i != 0,
@@ -1198,28 +1118,6 @@ fn format_snippet<'m>(
    let main_range = snippet.annotations.first().map(|x| x.range.start);
    let origin = snippet.origin;
    let need_empty_header = origin.is_some() || is_first;

    let is_file_level = snippet.annotations.iter().any(|ann| ann.is_file_level);
    if is_file_level {
        // TODO(brent) enable this assertion again once we set `is_file_level` for individual rules.
        // It's causing too many false positives currently when the default is to make any
        // annotation with a default range file-level. See
        // https://github.com/astral-sh/ruff/issues/19688.
        //
        // assert!(
        //     snippet.source.is_empty(),
        //     "Non-empty file-level snippet that won't be rendered: {:?}",
        //     snippet.source
        // );
        let header = format_header(origin, main_range, &[], is_first, snippet.cell_index);
        return DisplaySet {
            display_lines: header.map_or_else(Vec::new, |header| vec![header]),
            margin: Margin::new(0, 0, 0, 0, term_width, 0),
        };
    }

    let cell_index = snippet.cell_index;

    let mut body = format_body(
        snippet,
        need_empty_header,
@@ -1228,13 +1126,7 @@ fn format_snippet<'m>(
        anonymized_line_numbers,
        cut_indicator,
    );
    let header = format_header(
        origin,
        main_range,
        &body.display_lines,
        is_first,
        cell_index,
    );
    let header = format_header(origin, main_range, &body.display_lines, is_first);

    if let Some(header) = header {
        body.display_lines.insert(0, header);
@@ -1254,7 +1146,6 @@ fn format_header<'a>(
    main_range: Option<usize>,
    body: &[DisplayLine<'_>],
    is_first: bool,
    cell_index: Option<usize>,
) -> Option<DisplayLine<'a>> {
    let display_header = if is_first {
        DisplayHeaderType::Initial
@@ -1278,31 +1169,20 @@ fn format_header<'a>(
                ..
            } = item
            {
                // At the very end of the `main_range`, report the location as the first character
                // in the next line instead of falling back to the default location of `1:1`. This
                // is another divergence from upstream.
                let end_of_range = range.1 + max(*end_line as usize, 1);
                if main_range >= range.0 && main_range < end_of_range {
                if main_range >= range.0 && main_range < range.1 + max(*end_line as usize, 1) {
                    let char_column = text[0..(main_range - range.0).min(text.len())]
                        .chars()
                        .count();
                    col = char_column + 1;
                    line_offset = lineno.unwrap_or(1);
                    break;
                } else if main_range == end_of_range {
                    line_offset = lineno.map_or(1, |line| line + 1);
                    break;
                }
            }
        }

        return Some(DisplayLine::Raw(DisplayRawLine::Origin {
            path,
            pos: Some(Position {
                row: line_offset,
                col,
                cell: cell_index,
            }),
            pos: Some((line_offset, col)),
            header_type: display_header,
        }));
    }
@@ -1592,7 +1472,6 @@ fn format_body<'m>(
                    annotation_type,
                    id: None,
                    label: format_label(annotation.label, None),
                    is_fixable: false,
                },
                range,
                annotation_type: DisplayAnnotationType::from(annotation.level),
@@ -1632,7 +1511,6 @@ fn format_body<'m>(
                    annotation_type,
                    id: None,
                    label: vec![],
                    is_fixable: false,
                },
                range,
                annotation_type: DisplayAnnotationType::from(annotation.level),
@@ -1702,7 +1580,6 @@ fn format_body<'m>(
                    annotation_type,
                    id: None,
                    label: format_label(annotation.label, None),
                    is_fixable: false,
                },
                range,
                annotation_type: DisplayAnnotationType::from(annotation.level),

@@ -22,7 +22,6 @@ pub struct Message<'a> {
    pub(crate) title: &'a str,
    pub(crate) snippets: Vec<Snippet<'a>>,
    pub(crate) footer: Vec<Message<'a>>,
    pub(crate) is_fixable: bool,
}

impl<'a> Message<'a> {
@@ -50,15 +49,6 @@ impl<'a> Message<'a> {
        self.footer.extend(footer);
        self
    }

    /// Whether or not the diagnostic for this message is fixable.
    ///
    /// This is rendered as a `[*]` indicator after the `id` in an annotation header, if the
    /// annotation also has `Level::None`.
    pub fn is_fixable(mut self, yes: bool) -> Self {
        self.is_fixable = yes;
        self
    }
}

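The removed `is_fixable` method follows the consuming-`self` builder pattern used throughout this crate. A self-contained sketch with stand-in types (not the crate's real definitions):

```rust
// Stand-in types illustrating the consuming-builder pattern above.
struct Message {
    title: String,
    is_fixable: bool,
}

impl Message {
    fn is_fixable(mut self, yes: bool) -> Self {
        self.is_fixable = yes;
        self
    }
}

let msg = Message { title: "`os` imported but unused".into(), is_fixable: false }
    .is_fixable(true); // rendered as a `[*]` marker after the id
assert!(msg.is_fixable);
```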
/// Structure containing the slice of text to be annotated and
@@ -75,10 +65,6 @@ pub struct Snippet<'a> {
    pub(crate) annotations: Vec<Annotation<'a>>,

    pub(crate) fold: bool,

    /// The optional cell index in a Jupyter notebook, used for reporting source locations along
    /// with the ranges on `annotations`.
    pub(crate) cell_index: Option<usize>,
}

impl<'a> Snippet<'a> {
@@ -89,7 +75,6 @@ impl<'a> Snippet<'a> {
            source,
            annotations: vec![],
            fold: false,
            cell_index: None,
        }
    }

@@ -118,12 +103,6 @@ impl<'a> Snippet<'a> {
        self.fold = fold;
        self
    }

    /// Attach a Jupyter notebook cell index.
    pub fn cell_index(mut self, index: Option<usize>) -> Self {
        self.cell_index = index;
        self
    }
}

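The removed `cell_index` builder threaded a Jupyter cell number through to the rendered header. Hypothetical usage, assuming a `snippet` value built elsewhere with this crate's builder API:

```rust
// Hypothetical usage of the removed builder (`snippet` is assumed to be
// a `Snippet` constructed elsewhere): fold distant lines and report
// locations as `path:cell N:row:col`.
let snippet = snippet.fold(true).cell_index(Some(2));
```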
/// An annotation for a [`Snippet`].
@@ -135,7 +114,6 @@ pub struct Annotation<'a> {
    pub(crate) range: Range<usize>,
    pub(crate) label: Option<&'a str>,
    pub(crate) level: Level,
    pub(crate) is_file_level: bool,
}

impl<'a> Annotation<'a> {
@@ -143,11 +121,6 @@ impl<'a> Annotation<'a> {
        self.label = Some(label);
        self
    }

    pub fn is_file_level(mut self, yes: bool) -> Self {
        self.is_file_level = yes;
        self
    }
}

/// Types of annotations.
@@ -172,7 +145,6 @@ impl Level {
            title,
            snippets: vec![],
            footer: vec![],
            is_fixable: false,
        }
    }

@@ -182,7 +154,6 @@ impl Level {
            range: span,
            label: None,
            level: self,
            is_file_level: false,
        }
    }
}

@@ -86,5 +86,5 @@ walltime = ["ruff_db/os", "ty_project", "divan"]
[target.'cfg(target_os = "windows")'.dev-dependencies]
mimalloc = { workspace = true }

[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dev-dependencies]
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
tikv-jemallocator = { workspace = true }

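This Cargo `target.'cfg(...)'` table and the source-level gates in the hunks that follow have to stay in sync: both drop `riscv64` from the set of platforms that build jemalloc. The resulting shape, sketched in one place:

```rust
// Combined sketch of the allocator gating these hunks edit: jemalloc is
// used only on the Unix targets left in the allow-list (riscv64 removed).
#[cfg(all(
    not(target_os = "windows"),
    not(target_os = "openbsd"),
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
    ),
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
```
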
@@ -21,8 +21,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
        target_arch = "powerpc64"
    )
))]
#[global_allocator]

@@ -18,8 +18,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
        target_arch = "powerpc64"
    )
))]
#[global_allocator]

@@ -26,8 +26,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
        target_arch = "powerpc64"
    )
))]
#[global_allocator]
@@ -43,8 +42,7 @@ static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
        target_arch = "powerpc64"
    )
))]
#[unsafe(export_name = "_rjem_malloc_conf")]
@@ -79,11 +77,8 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
        b.iter_batched(
            || parsed.clone(),
            |parsed| {
                // Assert that file contains no parse errors
                assert!(parsed.has_valid_syntax());

                let path = case.path();
                lint_only(
                let result = lint_only(
                    &path,
                    None,
                    settings,
@@ -91,7 +86,10 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
                    &SourceKind::Python(case.code().to_string()),
                    PySourceType::from(path.as_path()),
                    ParseSource::Precomputed(parsed),
                )
                );

                // Assert that file contains no parse errors
                assert!(!result.has_syntax_errors());
            },
            criterion::BatchSize::SmallInput,
        );

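The `iter_batched` change above moves the syntax-error assertion from the pre-parsed input to the lint result. The general pattern, as a sketch (helper names are illustrative, not from this repo):

```rust
// Sketch of criterion's iter_batched pattern used above: the setup
// closure (cloning the parsed module) runs outside the timed section,
// so only the lint itself is measured.
b.iter_batched(
    || parsed.clone(), // untimed per-iteration setup
    |parsed| {
        let result = run_lint(parsed); // hypothetical helper
        assert!(!result.has_syntax_errors());
    },
    criterion::BatchSize::SmallInput,
);
```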

@@ -20,8 +20,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
        target_arch = "powerpc64"
    )
))]
#[global_allocator]

@@ -2,7 +2,6 @@
use ruff_benchmark::criterion;
use ruff_benchmark::real_world_projects::{InstalledProject, RealWorldProject};

use std::fmt::Write;
use std::ops::Range;

use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
@@ -18,7 +17,7 @@ use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ChangedKind};
use ty_project::{CheckMode, Db, ProjectDatabase, ProjectMetadata};
use ty_project::{Db, ProjectDatabase, ProjectMetadata};

struct Case {
    db: ProjectDatabase,
@@ -102,7 +101,6 @@ fn setup_tomllib_case() -> Case {

    let re = re.unwrap();

    db.set_check_mode(CheckMode::OpenFiles);
    db.project().set_open_files(&mut db, tomllib_files);

    let re_path = re.path(&db).as_system_path().unwrap().to_owned();
@@ -238,7 +236,6 @@ fn setup_micro_case(code: &str) -> Case {
    let mut db = ProjectDatabase::new(metadata, system).unwrap();
    let file = system_path_to_file(&db, SystemPathBuf::from(file_path)).unwrap();

    db.set_check_mode(CheckMode::OpenFiles);
    db.project()
        .set_open_files(&mut db, FxHashSet::from_iter([file]));

@@ -351,41 +348,6 @@ fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
    });
}

fn benchmark_tuple_implicit_instance_attributes(criterion: &mut Criterion) {
    setup_rayon();

    criterion.bench_function("ty_micro[many_tuple_assignments]", |b| {
        b.iter_batched_ref(
            || {
                // This is a regression benchmark for a case that used to hang:
                // https://github.com/astral-sh/ty/issues/765
                setup_micro_case(
                    r#"
from typing import Any

class A:
    foo: tuple[Any, ...]

class B(A):
    def __init__(self, parent: "C", x: tuple[Any]):
        self.foo = parent.foo + x

class C(A):
    def __init__(self, parent: B, x: tuple[Any]):
        self.foo = parent.foo + x
"#,
                )
            },
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}

fn benchmark_complex_constrained_attributes_1(criterion: &mut Criterion) {
    setup_rayon();

@@ -479,37 +441,6 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
    });
}

fn benchmark_many_enum_members(criterion: &mut Criterion) {
    const NUM_ENUM_MEMBERS: usize = 512;

    setup_rayon();

    let mut code = String::new();
    writeln!(&mut code, "from enum import Enum").ok();

    writeln!(&mut code, "class E(Enum):").ok();
    for i in 0..NUM_ENUM_MEMBERS {
        writeln!(&mut code, "    m{i} = {i}").ok();
    }
    writeln!(&mut code).ok();

    for i in 0..NUM_ENUM_MEMBERS {
        writeln!(&mut code, "print(E.m{i})").ok();
    }

    criterion.bench_function("ty_micro[many_enum_members]", |b| {
        b.iter_batched_ref(
            || setup_micro_case(&code),
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}

struct ProjectBenchmark<'a> {
    project: InstalledProject<'a>,
    fs: MemoryFileSystem,
@@ -562,21 +493,17 @@ impl<'a> ProjectBenchmark<'a> {

#[track_caller]
fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
    fn check_project(db: &mut ProjectDatabase, project_name: &str, max_diagnostics: usize) {
    fn check_project(db: &mut ProjectDatabase, max_diagnostics: usize) {
        let result = db.check();
        let diagnostics = result.len();

        if diagnostics > max_diagnostics {
            let details = result
                .into_iter()
                .map(|diagnostic| diagnostic.concise_message().to_string())
                .collect::<Vec<_>>()
                .join("\n ");
            assert!(
                diagnostics <= max_diagnostics,
                "{project_name}: Expected <={max_diagnostics} diagnostics but got {diagnostics}:\n {details}",
            );
        }
        assert!(
            diagnostics > 1 && diagnostics <= max_diagnostics,
            "Expected between {} and {} diagnostics but got {}",
            1,
            max_diagnostics,
            diagnostics
        );
    }

    setup_rayon();
@@ -586,7 +513,7 @@ fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
    group.bench_function(benchmark.project.config.name, |b| {
        b.iter_batched_ref(
            || benchmark.setup_iteration(),
            |db| check_project(db, benchmark.project.config.name, benchmark.max_diagnostics),
            |db| check_project(db, benchmark.max_diagnostics),
            BatchSize::SmallInput,
        );
    });
@@ -643,32 +570,13 @@ fn anyio(criterion: &mut Criterion) {
    bench_project(&benchmark, criterion);
}

fn datetype(criterion: &mut Criterion) {
    let benchmark = ProjectBenchmark::new(
        RealWorldProject {
            name: "DateType",
            repository: "https://github.com/glyph/DateType",
            commit: "57c9c93cf2468069f72945fc04bf27b64100dad8",
            paths: vec![SystemPath::new("src")],
            dependencies: vec![],
            max_dep_date: "2025-07-04",
            python_version: PythonVersion::PY313,
        },
        2,
    );

    bench_project(&benchmark, criterion);
}

criterion_group!(check_file, benchmark_cold, benchmark_incremental);
criterion_group!(
    micro,
    benchmark_many_string_assignments,
    benchmark_many_tuple_assignments,
    benchmark_tuple_implicit_instance_attributes,
    benchmark_complex_constrained_attributes_1,
    benchmark_complex_constrained_attributes_2,
    benchmark_many_enum_members,
);
criterion_group!(project, anyio, attrs, hydra, datetype);
criterion_group!(project, anyio, attrs, hydra);
criterion_main!(check_file, micro, project);

@@ -218,24 +218,6 @@ static TANJUN: std::sync::LazyLock<Benchmark<'static>> = std::sync::LazyLock::ne
    )
});

static STATIC_FRAME: std::sync::LazyLock<Benchmark<'static>> = std::sync::LazyLock::new(|| {
    Benchmark::new(
        RealWorldProject {
            name: "static-frame",
            repository: "https://github.com/static-frame/static-frame",
            commit: "34962b41baca5e7f98f5a758d530bff02748a421",
            paths: vec![SystemPath::new("static_frame")],
            // N.B. `arraykit` is installed as a dependency during mypy_primer runs,
            // but it takes much longer to be installed in a Codspeed run than it does in a mypy_primer run
            // (seems to be built from source on the Codspeed CI runners for some reason).
            dependencies: vec!["numpy"],
            max_dep_date: "2025-08-09",
            python_version: PythonVersion::PY311,
        },
        500,
    )
});

#[track_caller]
fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
    bencher
@@ -250,7 +232,7 @@ fn small(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}

#[bench(args=[&*COLOUR_SCIENCE, &*PANDAS, &*STATIC_FRAME], sample_size=1, sample_count=3)]
#[bench(args=[&*COLOUR_SCIENCE, &*PANDAS], sample_size=1, sample_count=3)]
fn medium(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}
@@ -260,7 +242,7 @@ fn large(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}

#[bench(args=[&*PYDANTIC], sample_size=3, sample_count=8)]
#[bench(args=[&*PYDANTIC], sample_size=3, sample_count=3)]
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
    let thread_pool = ThreadPoolBuilder::new().build().unwrap();

@@ -13,19 +13,17 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true, optional = true }
ruff_diagnostics = { workspace = true }
ruff_memory_usage = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true, features = ["get-size"] }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true, features = ["get-size"] }
ruff_text_size = { workspace = true }
ty_static = { workspace = true }

anstyle = { workspace = true }
arc-swap = { workspace = true }
camino = { workspace = true }
countme = { workspace = true }
dashmap = { workspace = true }
dunce = { workspace = true }
filetime = { workspace = true }
@@ -34,13 +32,10 @@ glob = { workspace = true }
ignore = { workspace = true, optional = true }
matchit = { workspace = true }
path-slash = { workspace = true }
quick-junit = { workspace = true, optional = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
similar = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
@@ -58,13 +53,7 @@ tempfile = { workspace = true }

[features]
cache = ["ruff_cache"]
junit = ["dep:quick-junit"]
os = ["ignore", "dep:etcetera"]
serde = [
    "camino/serde1",
    "dep:serde",
    "dep:serde_json",
    "ruff_diagnostics/serde",
]
serde = ["dep:serde", "camino/serde1"]
# Exposes testing utilities.
testing = ["tracing-subscriber"]

@@ -1,14 +1,12 @@
use std::{fmt::Formatter, path::Path, sync::Arc};
use std::{fmt::Formatter, sync::Arc};

use ruff_diagnostics::{Applicability, Fix};
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
use render::{FileResolver, Input};
use ruff_source_file::{SourceCode, SourceFile};

use ruff_annotate_snippets::Level as AnnotateLevel;
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_text_size::{Ranged, TextRange};

pub use self::render::{
    DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary,
};
pub use self::render::DisplayDiagnostic;
use crate::{Db, files::File};

mod render;
@@ -21,7 +19,7 @@ mod stylesheet;
/// characteristics in the inputs given to the tool. Typically, but not always,
/// a characteristic is a deficiency. An example of a characteristic that is
/// _not_ a deficiency is the `reveal_type` diagnostic for our type checker.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
pub struct Diagnostic {
    /// The actual diagnostic.
    ///
@@ -64,37 +62,10 @@ impl Diagnostic {
            message: message.into_diagnostic_message(),
            annotations: vec![],
            subs: vec![],
            fix: None,
            parent: None,
            noqa_offset: None,
            secondary_code: None,
        });
        Diagnostic { inner }
    }

    /// Creates a `Diagnostic` for a syntax error.
    ///
    /// Unlike the more general [`Diagnostic::new`], this requires a [`Span`] and a [`TextRange`]
    /// attached to it.
    ///
    /// This should _probably_ be a method on the syntax errors, but
    /// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
    /// the other way around. And since we want to do this conversion in a couple
    /// places, it makes sense to centralize it _somewhere_. So it's here for now.
    ///
    /// Note that `message` is stored in the primary annotation, _not_ in the primary diagnostic
    /// message.
    pub fn invalid_syntax(
        span: impl Into<Span>,
        message: impl IntoDiagnosticMessage,
        range: impl Ranged,
    ) -> Diagnostic {
        let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
        let span = span.into().with_range(range.range());
        diag.annotate(Annotation::primary(span).message(message));
        diag
    }

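A hypothetical call site for the removed constructor (values are made up for illustration; `span` is anything convertible to `Span`):

```rust
// Hypothetical usage of `Diagnostic::invalid_syntax` as defined above;
// the span and range values are illustrative only.
let diag = Diagnostic::invalid_syntax(
    span,                     // impl Into<Span>
    "Expected an expression", // stored on the primary annotation
    TextRange::new(TextSize::from(10), TextSize::from(14)),
);
assert!(diag.is_invalid_syntax());
```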
    /// Add an annotation to this diagnostic.
    ///
    /// Annotations for a diagnostic are optional, but if any are added,
@@ -124,14 +95,7 @@ impl Diagnostic {
    /// directly. If callers want or need to avoid cloning the diagnostic
    /// message, then they can also pass a `DiagnosticMessage` directly.
    pub fn info<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
        self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Info, message));
    }

    /// Adds a "help" sub-diagnostic with the given message.
    ///
    /// See the closely related [`Diagnostic::info`] method for more details.
    pub fn help<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
        self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Help, message));
        self.sub(SubDiagnostic::new(Severity::Info, message));
    }

    /// Adds a "sub" diagnostic to this diagnostic.
@@ -212,7 +176,7 @@ impl Diagnostic {
    /// The type returned implements the `std::fmt::Display` trait. In most
    /// cases, just converting it to a string (or printing it) will do what
    /// you want.
    pub fn concise_message(&self) -> ConciseMessage<'_> {
    pub fn concise_message(&self) -> ConciseMessage {
        let main = self.inner.message.as_str();
        let annotation = self
            .primary_annotation()
@@ -254,11 +218,6 @@ impl Diagnostic {
            .find(|ann| ann.is_primary)
    }

    /// Returns a mutable borrow of all annotations of this diagnostic.
    pub fn annotations_mut(&mut self) -> impl Iterator<Item = &mut Annotation> {
        Arc::make_mut(&mut self.inner).annotations.iter_mut()
    }

/// Returns the "primary" span of this diagnostic if one exists.
|
||||
///
|
||||
/// When there are multiple primary spans, then the first one that was
|
||||
@@ -267,11 +226,6 @@ impl Diagnostic {
|
||||
self.primary_annotation().map(|ann| ann.span.clone())
|
||||
}
|
||||
|
||||
/// Returns a reference to the primary span of this diagnostic.
|
||||
pub fn primary_span_ref(&self) -> Option<&Span> {
|
||||
self.primary_annotation().map(|ann| &ann.span)
|
||||
}
|
||||
|
||||
/// Returns the tags from the primary annotation of this diagnostic if it exists.
|
||||
pub fn primary_tags(&self) -> Option<&[DiagnosticTag]> {
|
||||
self.primary_annotation().map(|ann| ann.tags.as_slice())
|
||||
@@ -314,207 +268,15 @@ impl Diagnostic {
|
||||
pub fn sub_diagnostics(&self) -> &[SubDiagnostic] {
|
||||
&self.inner.subs
|
||||
}
|
||||
|
||||
/// Returns a mutable borrow of the sub-diagnostics of this diagnostic.
|
||||
pub fn sub_diagnostics_mut(&mut self) -> impl Iterator<Item = &mut SubDiagnostic> {
|
||||
Arc::make_mut(&mut self.inner).subs.iter_mut()
|
||||
}
|
||||
|
||||
/// Returns the fix for this diagnostic if it exists.
|
||||
pub fn fix(&self) -> Option<&Fix> {
|
||||
self.inner.fix.as_ref()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn fix_mut(&mut self) -> Option<&mut Fix> {
|
||||
Arc::make_mut(&mut self.inner).fix.as_mut()
|
||||
}
|
||||
|
||||
/// Set the fix for this diagnostic.
|
||||
pub fn set_fix(&mut self, fix: Fix) {
|
||||
debug_assert!(
|
||||
self.primary_span().is_some(),
|
||||
"Expected a source file for a diagnostic with a fix"
|
||||
);
|
||||
Arc::make_mut(&mut self.inner).fix = Some(fix);
|
||||
}
|
||||
|
||||
/// Remove the fix for this diagnostic.
|
||||
pub fn remove_fix(&mut self) {
|
||||
Arc::make_mut(&mut self.inner).fix = None;
|
||||
}
|
||||
|
||||
/// Returns `true` if the diagnostic contains a [`Fix`].
|
||||
pub fn fixable(&self) -> bool {
|
||||
self.fix().is_some()
|
||||
}
|
||||
|
||||
/// Returns the offset of the parent statement for this diagnostic if it exists.
|
||||
///
|
||||
/// This is primarily used for checking noqa/secondary code suppressions.
|
||||
pub fn parent(&self) -> Option<TextSize> {
|
||||
self.inner.parent
|
||||
}
|
||||
|
||||
/// Set the offset of the diagnostic's parent statement.
|
||||
pub fn set_parent(&mut self, parent: TextSize) {
|
||||
Arc::make_mut(&mut self.inner).parent = Some(parent);
|
||||
}
|
||||
|
||||
/// Returns the remapped offset for a suppression comment if it exists.
|
||||
///
|
||||
/// Like [`Diagnostic::parent`], this is used for noqa code suppression comments in Ruff.
|
||||
pub fn noqa_offset(&self) -> Option<TextSize> {
|
||||
self.inner.noqa_offset
|
||||
}
|
||||
|
||||
/// Set the remapped offset for a suppression comment.
|
||||
pub fn set_noqa_offset(&mut self, noqa_offset: TextSize) {
|
||||
Arc::make_mut(&mut self.inner).noqa_offset = Some(noqa_offset);
|
||||
}
|
||||
|
||||
/// Returns the secondary code for the diagnostic if it exists.
|
||||
///
|
||||
/// The "primary" code for the diagnostic is its lint name. Diagnostics in ty don't have
|
||||
/// secondary codes (yet), but in Ruff the noqa code is used.
|
||||
pub fn secondary_code(&self) -> Option<&SecondaryCode> {
|
||||
self.inner.secondary_code.as_ref()
|
||||
}
|
||||
|
||||
/// Returns the secondary code for the diagnostic if it exists, or the lint name otherwise.
|
||||
///
|
||||
/// This is a common pattern for Ruff diagnostics, which want to use the noqa code in general,
|
||||
/// but fall back on the `invalid-syntax` identifier for syntax errors, which don't have
|
||||
/// secondary codes.
|
||||
pub fn secondary_code_or_id(&self) -> &str {
|
||||
self.secondary_code()
|
||||
.map_or_else(|| self.inner.id.as_str(), SecondaryCode::as_str)
|
||||
}
|
||||
|
||||
/// Set the secondary code for this diagnostic.
|
||||
pub fn set_secondary_code(&mut self, code: SecondaryCode) {
|
||||
Arc::make_mut(&mut self.inner).secondary_code = Some(code);
|
||||
}
|
||||
|
||||
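A small sketch of the fallback above (`diag` is an assumed `Diagnostic` in scope):

```rust
// For a Ruff lint this yields the noqa code, e.g. "F401";
// for a syntax error it falls back to the lint-name id, "invalid-syntax".
let code: &str = diag.secondary_code_or_id();
println!("{code}");
```
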
    /// Returns the name used to represent the diagnostic.
    pub fn name(&self) -> &'static str {
        self.id().as_str()
    }

    /// Returns `true` if `self` is a syntax error message.
    pub fn is_invalid_syntax(&self) -> bool {
        self.id().is_invalid_syntax()
    }

    /// Returns the message body to display to the user.
    pub fn body(&self) -> &str {
        self.primary_message()
    }

    /// Returns the message of the first sub-diagnostic with a `Help` severity.
    ///
    /// Note that this is used as the fix title/suggestion for some of Ruff's output formats, but in
    /// general this is not the guaranteed meaning of such a message.
    pub fn first_help_text(&self) -> Option<&str> {
        self.sub_diagnostics()
            .iter()
            .find(|sub| matches!(sub.inner.severity, SubDiagnosticSeverity::Help))
            .map(|sub| sub.inner.message.as_str())
    }

    /// Returns the URL for the rule documentation, if it exists.
    pub fn to_ruff_url(&self) -> Option<String> {
        if self.is_invalid_syntax() {
            None
        } else {
            Some(format!(
                "{}/rules/{}",
                env!("CARGO_PKG_HOMEPAGE"),
                self.name()
            ))
        }
    }

    /// Returns the filename for the message.
    ///
    /// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
    pub fn expect_ruff_filename(&self) -> String {
        self.expect_primary_span()
            .expect_ruff_file()
            .name()
            .to_string()
    }

    /// Computes the start source location for the message.
    ///
    /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
    /// span has no range.
    pub fn expect_ruff_start_location(&self) -> LineColumn {
        self.expect_primary_span()
            .expect_ruff_file()
            .to_source_code()
            .line_column(self.expect_range().start())
    }

    /// Computes the end source location for the message.
    ///
    /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
    /// span has no range.
    pub fn expect_ruff_end_location(&self) -> LineColumn {
        self.expect_primary_span()
            .expect_ruff_file()
            .to_source_code()
            .line_column(self.expect_range().end())
    }

    /// Returns the [`SourceFile`] which the message belongs to.
    pub fn ruff_source_file(&self) -> Option<&SourceFile> {
        self.primary_span_ref()?.as_ruff_file()
    }

    /// Returns the [`SourceFile`] which the message belongs to.
    ///
    /// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
    pub fn expect_ruff_source_file(&self) -> &SourceFile {
        self.ruff_source_file()
            .expect("Expected a ruff source file")
    }

    /// Returns the [`TextRange`] for the diagnostic.
    pub fn range(&self) -> Option<TextRange> {
        self.primary_span()?.range()
    }

    /// Returns the [`TextRange`] for the diagnostic.
    ///
    /// Panics if the diagnostic has no primary span or if the span has no range.
    pub fn expect_range(&self) -> TextRange {
        self.range().expect("Expected a range for the primary span")
    }

    /// Returns the ordering of diagnostics based on the start of their ranges, if they have any.
    ///
    /// Panics if either diagnostic has no primary span, if the span has no range, or if its file is
    /// not a `SourceFile`.
    pub fn ruff_start_ordering(&self, other: &Self) -> std::cmp::Ordering {
        (self.expect_ruff_source_file(), self.expect_range().start()).cmp(&(
            other.expect_ruff_source_file(),
            other.expect_range().start(),
        ))
    }
}

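A sketch of sorting a batch with the comparator above; `collect_diagnostics` is a hypothetical source of diagnostics that all carry ranged Ruff spans:

```rust
// Sketch only: sorts by (source file, start offset), as defined above.
let mut diagnostics: Vec<Diagnostic> = collect_diagnostics(); // hypothetical
diagnostics.sort_by(Diagnostic::ruff_start_ordering);
```
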
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
struct DiagnosticInner {
    id: DiagnosticId,
    severity: Severity,
    message: DiagnosticMessage,
    annotations: Vec<Annotation>,
    subs: Vec<SubDiagnostic>,
    fix: Option<Fix>,
    parent: Option<TextSize>,
    noqa_offset: Option<TextSize>,
    secondary_code: Option<SecondaryCode>,
}

struct RenderingSortKey<'a> {
@@ -580,7 +342,7 @@ impl Eq for RenderingSortKey<'_> {}
/// Currently, the order in which sub-diagnostics are rendered relative to one
/// another (for a single parent diagnostic) is the order in which they were
/// attached to the diagnostic.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
pub struct SubDiagnostic {
    /// Like with `Diagnostic`, we box the `SubDiagnostic` to make it
    /// pointer-sized.
@@ -605,10 +367,7 @@ impl SubDiagnostic {
    /// Callers can pass anything that implements `std::fmt::Display`
    /// directly. If callers want or need to avoid cloning the diagnostic
    /// message, then they can also pass a `DiagnosticMessage` directly.
    pub fn new<'a>(
        severity: SubDiagnosticSeverity,
        message: impl IntoDiagnosticMessage + 'a,
    ) -> SubDiagnostic {
    pub fn new<'a>(severity: Severity, message: impl IntoDiagnosticMessage + 'a) -> SubDiagnostic {
        let inner = Box::new(SubDiagnosticInner {
            severity,
            message: message.into_diagnostic_message(),
@@ -636,11 +395,6 @@ impl SubDiagnostic {
        &self.inner.annotations
    }

    /// Returns a mutable borrow of the annotations of this sub-diagnostic.
    pub fn annotations_mut(&mut self) -> impl Iterator<Item = &mut Annotation> {
        self.inner.annotations.iter_mut()
    }

    /// Returns a shared borrow of the "primary" annotation of this diagnostic
    /// if one exists.
    ///
@@ -674,7 +428,7 @@ impl SubDiagnostic {
    /// The type returned implements the `std::fmt::Display` trait. In most
    /// cases, just converting it to a string (or printing it) will do what
    /// you want.
    pub fn concise_message(&self) -> ConciseMessage<'_> {
    pub fn concise_message(&self) -> ConciseMessage {
        let main = self.inner.message.as_str();
        let annotation = self
            .primary_annotation()
@@ -689,9 +443,9 @@ impl SubDiagnostic {
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
struct SubDiagnosticInner {
    severity: SubDiagnosticSeverity,
    severity: Severity,
    message: DiagnosticMessage,
    annotations: Vec<Annotation>,
}
@@ -717,7 +471,7 @@ struct SubDiagnosticInner {
///
/// Messages attached to annotations should also be as brief and specific as
/// possible. Long messages could negatively impact the quality of rendering.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
pub struct Annotation {
    /// The span of this annotation, corresponding to some subsequence of the
    /// user's input that we want to highlight.
@@ -732,11 +486,6 @@ pub struct Annotation {
    is_primary: bool,
    /// The diagnostic tags associated with this annotation.
    tags: Vec<DiagnosticTag>,
    /// Whether this annotation is a file-level or full-file annotation.
    ///
    /// When set, rendering will only include the file's name and (optional) range. Everything else
    /// is omitted, including any file snippet or message.
    is_file_level: bool,
}

impl Annotation {
@@ -755,7 +504,6 @@ impl Annotation {
            message: None,
            is_primary: true,
            tags: Vec::new(),
            is_file_level: false,
        }
    }

@@ -772,7 +520,6 @@ impl Annotation {
            message: None,
            is_primary: false,
            tags: Vec::new(),
            is_file_level: false,
        }
    }

@@ -838,28 +585,13 @@ impl Annotation {
    pub fn push_tag(&mut self, tag: DiagnosticTag) {
        self.tags.push(tag);
    }

    /// Set whether or not this annotation is file-level.
    ///
    /// File-level annotations are only rendered with their file name and range, if available. This
    /// is intended for backwards compatibility with Ruff diagnostics, which historically used
    /// `TextRange::default` to indicate a file-level diagnostic. In the new diagnostic model, a
    /// [`Span`] with a range of `None` should be used instead, as mentioned in the `Span`
    /// documentation.
    ///
    /// TODO(brent) update this usage in Ruff and remove `is_file_level` entirely. See
    /// <https://github.com/astral-sh/ruff/issues/19688>, especially my first comment, for more
    /// details.
    pub fn set_file_level(&mut self, yes: bool) {
        self.is_file_level = yes;
    }
}

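A sketch of assembling an `Annotation` from the pieces shown here (`span` is an assumed `Span`):

```rust
// Sketch only: builder-style primary annotation with a tag attached.
let mut ann = Annotation::primary(span).message("`os` imported but unused");
ann.push_tag(DiagnosticTag::Unnecessary);
ann.set_file_level(false); // the default; shown for completeness
```
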
/// Tags that can be associated with an annotation.
///
/// These tags are used to provide additional information about the annotation
/// and are passed through to the language server protocol.
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
pub enum DiagnosticTag {
    /// Unused or unnecessary code. Used for unused parameters, unreachable code, etc.
    Unnecessary,
@@ -1068,7 +800,7 @@ impl std::fmt::Display for DiagnosticId {
///
/// This enum presents a unified interface to these two types for the sake of creating [`Span`]s and
/// emitting diagnostics from both ty and ruff.
#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
pub enum UnifiedFile {
    Ty(File),
    Ruff(SourceFile),
@@ -1082,18 +814,6 @@ impl UnifiedFile {
        }
    }

    /// Return the file's path relative to the current working directory.
    pub fn relative_path<'a>(&'a self, resolver: &'a dyn FileResolver) -> &'a Path {
        let cwd = resolver.current_directory();
        let path = Path::new(self.path(resolver));

        if let Ok(path) = path.strip_prefix(cwd) {
            return path;
        }

        path
    }

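The `strip_prefix` fallback above can be illustrated with std alone; the paths here are invented:

```rust
use std::path::Path;

fn main() {
    let cwd = Path::new("/home/user/project");
    let inside = Path::new("/home/user/project/src/lib.rs");
    let outside = Path::new("/tmp/other.rs");
    // Under the cwd: shortened to a relative path.
    assert_eq!(inside.strip_prefix(cwd).unwrap(), Path::new("src/lib.rs"));
    // Elsewhere: `strip_prefix` fails and the absolute path is kept.
    assert!(outside.strip_prefix(cwd).is_err());
}
```
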
    fn diagnostic_source(&self, resolver: &dyn FileResolver) -> DiagnosticSource {
        match self {
            UnifiedFile::Ty(file) => DiagnosticSource::Ty(resolver.input(*file)),
@@ -1119,7 +839,7 @@ enum DiagnosticSource {

impl DiagnosticSource {
    /// Returns this input as a `SourceCode` for convenient querying.
    fn as_source_code(&self) -> SourceCode<'_, '_> {
    fn as_source_code(&self) -> SourceCode {
        match self {
            DiagnosticSource::Ty(input) => SourceCode::new(input.text.as_str(), &input.line_index),
            DiagnosticSource::Ruff(source) => SourceCode::new(source.source_text(), source.index()),
@@ -1132,7 +852,7 @@ impl DiagnosticSource {
/// It consists of a `File` and an optional range into that file. When the
/// range isn't present, it semantically implies that the diagnostic refers to
/// the entire file. For example, when the file should be executable but isn't.
#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
pub struct Span {
    file: UnifiedFile,
    range: Option<TextRange>,
@@ -1177,15 +897,9 @@ impl Span {
    ///
    /// Panics if the file is a [`UnifiedFile::Ty`] instead of a [`UnifiedFile::Ruff`].
    pub fn expect_ruff_file(&self) -> &SourceFile {
        self.as_ruff_file()
            .expect("Expected a ruff `SourceFile`, found a ty `File`")
    }

    /// Returns the [`SourceFile`] attached to this [`Span`].
    pub fn as_ruff_file(&self) -> Option<&SourceFile> {
        match &self.file {
            UnifiedFile::Ty(_) => None,
            UnifiedFile::Ruff(file) => Some(file),
            UnifiedFile::Ty(_) => panic!("Expected a ruff `SourceFile`, found a ty `File`"),
            UnifiedFile::Ruff(file) => file,
        }
    }
}
@@ -1210,7 +924,7 @@ impl From<crate::files::FileRange> for Span {
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, get_size2::GetSize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
pub enum Severity {
    Info,
    Warning,
@@ -1240,32 +954,6 @@ impl Severity {
    }
}

/// Like [`Severity`] but exclusively for sub-diagnostics.
///
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
/// deleted and the two combined again.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, get_size2::GetSize)]
pub enum SubDiagnosticSeverity {
    Help,
    Info,
    Warning,
    Error,
    Fatal,
}

impl SubDiagnosticSeverity {
    fn to_annotate(self) -> AnnotateLevel {
        match self {
            SubDiagnosticSeverity::Help => AnnotateLevel::Help,
            SubDiagnosticSeverity::Info => AnnotateLevel::Info,
            SubDiagnosticSeverity::Warning => AnnotateLevel::Warning,
            SubDiagnosticSeverity::Error => AnnotateLevel::Error,
            SubDiagnosticSeverity::Fatal => AnnotateLevel::Error,
        }
    }
}

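One property worth noting for the severity type above: the derived `Ord` follows declaration order, so `Help` sorts below everything else, and both `Error` and `Fatal` map to the same annotate level. A tiny sketch:

```rust
// Declaration order drives the derived `Ord`.
assert!(SubDiagnosticSeverity::Help < SubDiagnosticSeverity::Info);
assert!(SubDiagnosticSeverity::Error < SubDiagnosticSeverity::Fatal);
```
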
/// Configuration for rendering diagnostics.
#[derive(Clone, Debug)]
pub struct DisplayDiagnosticConfig {
@@ -1286,25 +974,6 @@ pub struct DisplayDiagnosticConfig {
    /// here for now as the most "sensible" place for it to live until
    /// we had more concrete use cases. ---AG
    context: usize,
    /// Whether to use preview formatting for Ruff diagnostics.
    #[allow(
        dead_code,
        reason = "This is currently only used for JSON but will be needed soon for other formats"
    )]
    preview: bool,
    /// Whether to hide the real `Severity` of diagnostics.
    ///
    /// This is intended for temporary use by Ruff, which only has a single `error` severity at the
    /// moment. We should be able to remove this option when Ruff gets more severities.
    hide_severity: bool,
    /// Whether to show the availability of a fix in a diagnostic.
    show_fix_status: bool,
    /// Whether to show the diff for an available fix after the main diagnostic.
    ///
    /// This currently only applies to `DiagnosticFormat::Full`.
    show_fix_diff: bool,
    /// The lowest applicability that should be shown when reporting diagnostics.
    fix_applicability: Applicability,
}

impl DisplayDiagnosticConfig {
@@ -1325,51 +994,6 @@ impl DisplayDiagnosticConfig {
            ..self
        }
    }

    /// Whether to enable preview behavior or not.
    pub fn preview(self, yes: bool) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            preview: yes,
            ..self
        }
    }

    /// Whether to hide a diagnostic's severity or not.
    pub fn hide_severity(self, yes: bool) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            hide_severity: yes,
            ..self
        }
    }

    /// Whether to show a fix's availability or not.
    pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            show_fix_status: yes,
            ..self
        }
    }

    /// Whether to show a diff for an available fix after the main diagnostic.
    pub fn show_fix_diff(self, yes: bool) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            show_fix_diff: yes,
            ..self
        }
    }

    /// Set the lowest fix applicability that should be shown.
    ///
    /// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
    /// availability for unsafe or display-only fixes.
    ///
    /// Note that this option is currently ignored when `hide_severity` is false.
    pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            fix_applicability: applicability,
            ..self
        }
    }
}

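A sketch of chaining these by-value builders, mirroring how the snapshot tests configure rendering; the call site is illustrative:

```rust
use ruff_diagnostics::Applicability;
use ruff_db::diagnostic::DisplayDiagnosticConfig; // assumed import path

let config = DisplayDiagnosticConfig::default()
    .hide_severity(true)
    .show_fix_status(true)
    .fix_applicability(Applicability::DisplayOnly);
```
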
impl Default for DisplayDiagnosticConfig {
@@ -1378,11 +1002,6 @@ impl Default for DisplayDiagnosticConfig {
            format: DiagnosticFormat::default(),
            color: false,
            context: 2,
            preview: false,
            hide_severity: false,
            show_fix_status: false,
            show_fix_diff: false,
            fix_applicability: Applicability::Safe,
        }
    }
}
@@ -1410,31 +1029,6 @@ pub enum DiagnosticFormat {
    ///
    /// This may use color when printing to a `tty`.
    Concise,
    /// Print diagnostics in the [Azure Pipelines] format.
    ///
    /// [Azure Pipelines]: https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning
    Azure,
    /// Print diagnostics in JSON format.
    ///
    /// Unlike `json-lines`, this prints all of the diagnostics as a JSON array.
    #[cfg(feature = "serde")]
    Json,
    /// Print diagnostics in JSON format, one per line.
    ///
    /// This will print each diagnostic as a separate JSON object on its own line. See the `json`
    /// format for an array of all diagnostics. See <https://jsonlines.org/> for more details.
    #[cfg(feature = "serde")]
    JsonLines,
    /// Print diagnostics in the JSON format expected by [reviewdog].
    ///
    /// [reviewdog]: https://github.com/reviewdog/reviewdog
    #[cfg(feature = "serde")]
    Rdjson,
    /// Print diagnostics in the format emitted by Pylint.
    Pylint,
    /// Print diagnostics in the format expected by JUnit.
    #[cfg(feature = "junit")]
    Junit,
}

/// A representation of the kinds of messages inside a diagnostic.
@@ -1493,7 +1087,7 @@ impl std::fmt::Display for ConciseMessage<'_> {
/// In most cases, callers shouldn't need to use this. Instead, there is
/// a blanket trait implementation for `IntoDiagnosticMessage` for
/// anything that implements `std::fmt::Display`.
#[derive(Clone, Debug, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
pub struct DiagnosticMessage(Box<str>);

impl DiagnosticMessage {
@@ -1553,52 +1147,41 @@ impl<T: std::fmt::Display> IntoDiagnosticMessage for T {
    }
}

/// A secondary identifier for a lint diagnostic.
/// Creates a `Diagnostic` from a parse error.
///
/// For Ruff rules this means the noqa code.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, get_size2::GetSize)]
#[cfg_attr(feature = "serde", derive(serde::Serialize), serde(transparent))]
pub struct SecondaryCode(String);

impl SecondaryCode {
    pub fn new(code: String) -> Self {
        Self(code)
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }
/// This should _probably_ be a method on `ruff_python_parser::ParseError`, but
/// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
/// the other way around. And since we want to do this conversion in a couple
/// places, it makes sense to centralize it _somewhere_. So it's here for now.
pub fn create_parse_diagnostic(file: File, err: &ruff_python_parser::ParseError) -> Diagnostic {
    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
    let span = Span::from(file).with_range(err.location);
    diag.annotate(Annotation::primary(span).message(&err.error));
    diag
}

impl std::fmt::Display for SecondaryCode {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
/// Creates a `Diagnostic` from an unsupported syntax error.
///
/// See [`create_parse_diagnostic`] for more details.
pub fn create_unsupported_syntax_diagnostic(
    file: File,
    err: &ruff_python_parser::UnsupportedSyntaxError,
) -> Diagnostic {
    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
    let span = Span::from(file).with_range(err.range);
    diag.annotate(Annotation::primary(span).message(err.to_string()));
    diag
}

impl std::ops::Deref for SecondaryCode {
    type Target = str;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl PartialEq<&str> for SecondaryCode {
    fn eq(&self, other: &&str) -> bool {
        self.0 == *other
    }
}

impl PartialEq<SecondaryCode> for &str {
    fn eq(&self, other: &SecondaryCode) -> bool {
        other.eq(self)
    }
}

// for `hashbrown::EntryRef`
impl From<&SecondaryCode> for SecondaryCode {
    fn from(value: &SecondaryCode) -> Self {
        value.clone()
    }
/// Creates a `Diagnostic` from a semantic syntax error.
///
/// See [`create_parse_diagnostic`] for more details.
pub fn create_semantic_syntax_diagnostic(
    file: File,
    err: &ruff_python_parser::semantic_errors::SemanticSyntaxError,
) -> Diagnostic {
    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
    let span = Span::from(file).with_range(err.range);
    diag.annotate(Annotation::primary(span).message(err.to_string()));
    diag
}

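For context, a hedged sketch of how these free functions might be driven; `file` is an assumed ty `File`, `source` is its text, and `parse_module` is the ruff_python_parser entry point as of this writing:

```rust
use ruff_db::diagnostic::Diagnostic; // assumed import path

// Sketch only: collect a parse failure into an `invalid-syntax` diagnostic.
let diagnostics: Vec<Diagnostic> = match ruff_python_parser::parse_module(source) {
    Ok(_) => Vec::new(),
    Err(err) => vec![create_parse_diagnostic(file, &err)],
};
```
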
File diff suppressed because it is too large
@@ -1,81 +0,0 @@
use ruff_source_file::LineColumn;

use crate::diagnostic::{Diagnostic, Severity};

use super::FileResolver;

pub(super) struct AzureRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> AzureRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }
}

impl AzureRenderer<'_> {
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diag in diagnostics {
            let severity = match diag.severity() {
                Severity::Info | Severity::Warning => "warning",
                Severity::Error | Severity::Fatal => "error",
            };
            write!(f, "##vso[task.logissue type={severity};")?;
            if let Some(span) = diag.primary_span() {
                let filename = span.file().path(self.resolver);
                write!(f, "sourcepath={filename};")?;
                if let Some(range) = span.range() {
                    let location = if self.resolver.notebook_index(span.file()).is_some() {
                        // We can't give a reasonable location for the structured formats,
                        // so we show one that's clearly a fallback
                        LineColumn::default()
                    } else {
                        span.file()
                            .diagnostic_source(self.resolver)
                            .as_source_code()
                            .line_column(range.start())
                    };
                    write!(
                        f,
                        "linenumber={line};columnnumber={col};",
                        line = location.line,
                        col = location.column,
                    )?;
                }
            }
            writeln!(
                f,
                "code={code};]{body}",
                code = diag.secondary_code_or_id(),
                body = diag.body(),
            )?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Azure);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Azure);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }
}
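Piecing together the `write!` calls above, one rendered Azure logging command looks roughly like this (the file, position, and code values are illustrative):

```text
##vso[task.logissue type=error;sourcepath=fib.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
```
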
@@ -1,201 +0,0 @@
use crate::diagnostic::{
    Diagnostic, DisplayDiagnosticConfig, Severity,
    stylesheet::{DiagnosticStylesheet, fmt_styled},
};

use super::FileResolver;

pub(super) struct ConciseRenderer<'a> {
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
}

impl<'a> ConciseRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
        Self { resolver, config }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        let stylesheet = if self.config.color {
            DiagnosticStylesheet::styled()
        } else {
            DiagnosticStylesheet::plain()
        };

        let sep = fmt_styled(":", stylesheet.separator);
        for diag in diagnostics {
            if let Some(span) = diag.primary_span() {
                write!(
                    f,
                    "{path}",
                    path = fmt_styled(
                        span.file().relative_path(self.resolver).to_string_lossy(),
                        stylesheet.emphasis
                    )
                )?;
                if let Some(range) = span.range() {
                    let diagnostic_source = span.file().diagnostic_source(self.resolver);
                    let start = diagnostic_source
                        .as_source_code()
                        .line_column(range.start());

                    if let Some(notebook_index) = self.resolver.notebook_index(span.file()) {
                        write!(
                            f,
                            "{sep}cell {cell}{sep}{line}{sep}{col}",
                            cell = notebook_index.cell(start.line).unwrap_or_default(),
                            line = notebook_index.cell_row(start.line).unwrap_or_default(),
                            col = start.column,
                        )?;
                    } else {
                        write!(
                            f,
                            "{sep}{line}{sep}{col}",
                            line = start.line,
                            col = start.column,
                        )?;
                    }
                }
                write!(f, "{sep} ")?;
            }
            if self.config.hide_severity {
                if let Some(code) = diag.secondary_code() {
                    write!(
                        f,
                        "{code} ",
                        code = fmt_styled(code, stylesheet.secondary_code)
                    )?;
                } else {
                    write!(
                        f,
                        "{id}: ",
                        id = fmt_styled(diag.inner.id.as_str(), stylesheet.secondary_code)
                    )?;
                }
                if self.config.show_fix_status {
                    if let Some(fix) = diag.fix() {
                        // Do not display an indicator for inapplicable fixes
                        if fix.applies(self.config.fix_applicability) {
                            write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
                        }
                    }
                }
            } else {
                let (severity, severity_style) = match diag.severity() {
                    Severity::Info => ("info", stylesheet.info),
                    Severity::Warning => ("warning", stylesheet.warning),
                    Severity::Error => ("error", stylesheet.error),
                    Severity::Fatal => ("fatal", stylesheet.error),
                };
                write!(
                    f,
                    "{severity}[{id}] ",
                    severity = fmt_styled(severity, severity_style),
                    id = fmt_styled(diag.id(), stylesheet.emphasis)
                )?;
            }

            writeln!(f, "{message}", message = diag.concise_message())?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use ruff_diagnostics::Applicability;

    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{
            TestEnvironment, create_diagnostics, create_notebook_diagnostics,
            create_syntax_error_diagnostics,
        },
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        fib.py:1:8: error[unused-import] `os` imported but unused
        fib.py:6:5: error[unused-variable] Local variable `x` is assigned to but never used
        undef.py:1:4: error[undefined-name] Undefined name `a`
        ");
    }

    #[test]
    fn show_fixes() {
        let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
        env.hide_severity(true);
        env.show_fix_status(true);
        env.fix_applicability(Applicability::DisplayOnly);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        fib.py:1:8: F401 [*] `os` imported but unused
        fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
        undef.py:1:4: F821 Undefined name `a`
        ");
    }

    #[test]
    fn show_fixes_preview() {
        let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
        env.hide_severity(true);
        env.show_fix_status(true);
        env.fix_applicability(Applicability::DisplayOnly);
        env.preview(true);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        fib.py:1:8: F401 [*] `os` imported but unused
        fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
        undef.py:1:4: F821 Undefined name `a`
        ");
    }

    #[test]
    fn show_fixes_syntax_errors() {
        let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
        env.hide_severity(true);
        env.show_fix_status(true);
        env.fix_applicability(Applicability::DisplayOnly);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import
        syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline
        ");
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        syntax_errors.py:1:15: error[invalid-syntax] Expected one or more symbol names after import
        syntax_errors.py:3:12: error[invalid-syntax] Expected ')', found newline
        ");
    }

    #[test]
    fn notebook_output() {
        let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Concise);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        notebook.ipynb:cell 1:2:8: error[unused-import] `os` imported but unused
        notebook.ipynb:cell 2:2:8: error[unused-import] `math` imported but unused
        notebook.ipynb:cell 3:4:5: error[unused-variable] Local variable `x` is assigned to but never used
        ");
    }

    #[test]
    fn missing_file() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Concise);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @"error[test-diagnostic] main diagnostic message",
        );
    }
}
@@ -1,904 +0,0 @@
use std::borrow::Cow;
use std::num::NonZeroUsize;

use anstyle::Style;
use ruff_notebook::NotebookIndex;
use similar::{ChangeTag, TextDiff};

use ruff_annotate_snippets::Renderer as AnnotateRenderer;
use ruff_diagnostics::{Applicability, Fix};
use ruff_source_file::OneIndexed;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

use crate::diagnostic::render::{FileResolver, Resolved};
use crate::diagnostic::stylesheet::{DiagnosticStylesheet, fmt_styled};
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig};

pub(super) struct FullRenderer<'a> {
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
}

impl<'a> FullRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
        Self { resolver, config }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        let stylesheet = if self.config.color {
            DiagnosticStylesheet::styled()
        } else {
            DiagnosticStylesheet::plain()
        };

        let mut renderer = if self.config.color {
            AnnotateRenderer::styled()
        } else {
            AnnotateRenderer::plain()
        }
        .cut_indicator("…");

        renderer = renderer
            .error(stylesheet.error)
            .warning(stylesheet.warning)
            .info(stylesheet.info)
            .note(stylesheet.note)
            .help(stylesheet.help)
            .line_no(stylesheet.line_no)
            .emphasis(stylesheet.emphasis)
            .none(stylesheet.none);

        for diag in diagnostics {
            let resolved = Resolved::new(self.resolver, diag, self.config);
            let renderable = resolved.to_renderable(self.config.context);
            for diag in renderable.diagnostics.iter() {
                writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
            }
            writeln!(f)?;

            if self.config.show_fix_diff {
                if let Some(diff) = Diff::from_diagnostic(diag, &stylesheet, self.resolver) {
                    writeln!(f, "{diff}")?;
                }
            }
        }

        Ok(())
    }
}

/// Renders a diff that shows the code fixes.
///
/// The implementation isn't fully fleshed out and is only used by tests. Before using it in production:
/// * Improve layout
/// * Replace tabs with spaces for a consistent experience across terminals
/// * Replace zero-width whitespaces
/// * Print a simpler diff if only a single line has changed
/// * Compute the diff from the `Edit` because diff calculation is expensive.
struct Diff<'a> {
    fix: &'a Fix,
    diagnostic_source: DiagnosticSource,
    notebook_index: Option<NotebookIndex>,
    stylesheet: &'a DiagnosticStylesheet,
}

impl<'a> Diff<'a> {
    fn from_diagnostic(
        diagnostic: &'a Diagnostic,
        stylesheet: &'a DiagnosticStylesheet,
        resolver: &'a dyn FileResolver,
    ) -> Option<Diff<'a>> {
        let file = &diagnostic.primary_span_ref()?.file;
        Some(Diff {
            fix: diagnostic.fix()?,
            diagnostic_source: file.diagnostic_source(resolver),
            notebook_index: resolver.notebook_index(file),
            stylesheet,
        })
    }
}

impl std::fmt::Display for Diff<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let source_code = self.diagnostic_source.as_source_code();
        let source_text = source_code.text();

        // Partition the source code into end offsets for each cell. If `self.notebook_index` is
        // `None`, indicating a regular script file, all the lines will be in one "cell" under the
        // `None` key.
        let cells = if let Some(notebook_index) = &self.notebook_index {
            let mut last_cell = OneIndexed::MIN;
            let mut cells: Vec<(Option<OneIndexed>, TextSize)> = Vec::new();
            for (row, cell) in notebook_index.iter() {
                if cell != last_cell {
                    let offset = source_code.line_start(row);
                    cells.push((Some(last_cell), offset));
                    last_cell = cell;
                }
            }
            cells.push((Some(last_cell), source_text.text_len()));
            cells
        } else {
            vec![(None, source_text.text_len())]
        };

        let message = match self.fix.applicability() {
            // TODO(zanieb): Adjust this messaging once it's user-facing
            Applicability::Safe => "Safe fix",
            Applicability::Unsafe => "Unsafe fix",
            Applicability::DisplayOnly => "Display-only fix",
        };

        // TODO(brent) `stylesheet.separator` is cyan rather than blue, as we had before. I think
        // we're getting rid of this soon anyway, so I didn't think it was worth adding another
        // style to the stylesheet temporarily. The color doesn't appear at all in the snapshot
        // tests, which is the only place these are currently used.
        writeln!(f, "ℹ {}", fmt_styled(message, self.stylesheet.separator))?;

        let mut last_end = TextSize::ZERO;
        for (cell, offset) in cells {
            let range = TextRange::new(last_end, offset);
            last_end = offset;
            let input = source_code.slice(range);

            let mut output = String::with_capacity(input.len());
            let mut last_end = range.start();

            let mut applied = 0;
            for edit in self.fix.edits() {
                if range.contains_range(edit.range()) {
                    output.push_str(source_code.slice(TextRange::new(last_end, edit.start())));
                    output.push_str(edit.content().unwrap_or_default());
                    last_end = edit.end();
                    applied += 1;
                }
            }

            // No edits were applied, so there's no need to diff.
            if applied == 0 {
                continue;
            }

            output.push_str(&source_text[usize::from(last_end)..usize::from(range.end())]);

            let diff = TextDiff::from_lines(input, &output);

            let (largest_old, largest_new) = diff
                .ops()
                .last()
                .map(|op| (op.old_range().start, op.new_range().start))
                .unwrap_or_default();

            let digit_with = OneIndexed::from_zero_indexed(largest_new.max(largest_old)).digits();

            if let Some(cell) = cell {
                // Room for 2 digits, 2 x 1 space before each digit, 1 space, and 1 `|`. This
                // centers the three colons on the pipe.
                writeln!(f, "{:>1$} cell {cell}", ":::", 2 * digit_with.get() + 4)?;
            }

            for (idx, group) in diff.grouped_ops(3).iter().enumerate() {
                if idx > 0 {
                    writeln!(f, "{:-^1$}", "-", 80)?;
                }
                for op in group {
                    for change in diff.iter_inline_changes(op) {
                        let sign = match change.tag() {
                            ChangeTag::Delete => "-",
                            ChangeTag::Insert => "+",
                            ChangeTag::Equal => " ",
                        };

                        let line_style = LineStyle::from(change.tag(), self.stylesheet);

                        let old_index = change.old_index().map(OneIndexed::from_zero_indexed);
                        let new_index = change.new_index().map(OneIndexed::from_zero_indexed);

                        write!(
                            f,
                            "{} {} |{}",
                            Line {
                                index: old_index,
                                width: digit_with,
                            },
                            Line {
                                index: new_index,
                                width: digit_with,
                            },
                            fmt_styled(line_style.apply_to(sign), self.stylesheet.emphasis),
                        )?;

                        for (emphasized, value) in change.iter_strings_lossy() {
                            let value = show_nonprinting(&value);
                            if emphasized {
                                write!(
                                    f,
                                    "{}",
                                    fmt_styled(
                                        line_style.apply_to(&value),
                                        self.stylesheet.underline
                                    )
                                )?;
                            } else {
                                write!(f, "{}", line_style.apply_to(&value))?;
                            }
                        }
                        if change.missing_newline() {
                            writeln!(f)?;
                        }
                    }
                }
            }
        }

        Ok(())
    }
}

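The cell partitioning and edit application above feed a line diff from the `similar` crate. A minimal, self-contained sketch of that core step, with no ruff types and illustrative strings only:

```rust
use similar::{ChangeTag, TextDiff};

fn main() {
    let before = "import os\nprint('hi')\n";
    let after = "print('hi')\n";
    // Line-level diff, exactly like `TextDiff::from_lines` in the renderer above.
    let diff = TextDiff::from_lines(before, after);
    for change in diff.iter_all_changes() {
        let sign = match change.tag() {
            ChangeTag::Delete => "-",
            ChangeTag::Insert => "+",
            ChangeTag::Equal => " ",
        };
        print!("{sign}{change}");
    }
}
```
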
struct LineStyle {
    style: Style,
}

impl LineStyle {
    fn apply_to(&self, input: &str) -> impl std::fmt::Display {
        fmt_styled(input, self.style)
    }

    fn from(value: ChangeTag, stylesheet: &DiagnosticStylesheet) -> LineStyle {
        match value {
            ChangeTag::Equal => LineStyle {
                style: stylesheet.none,
            },
            ChangeTag::Delete => LineStyle {
                style: stylesheet.deletion,
            },
            ChangeTag::Insert => LineStyle {
                style: stylesheet.insertion,
            },
        }
    }
}

struct Line {
    index: Option<OneIndexed>,
    width: NonZeroUsize,
}

impl std::fmt::Display for Line {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self.index {
            None => {
                for _ in 0..self.width.get() {
                    f.write_str(" ")?;
                }
                Ok(())
            }
            Some(idx) => write!(f, "{:<width$}", idx, width = self.width.get()),
        }
    }
}

fn show_nonprinting(s: &str) -> Cow<'_, str> {
    if s.find(['\x07', '\x08', '\x1b', '\x7f']).is_some() {
        Cow::Owned(
            s.replace('\x07', "␇")
                .replace('\x08', "␈")
                .replace('\x1b', "␛")
                .replace('\x7f', "␡"),
        )
    } else {
        Cow::Borrowed(s)
    }
}

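A quick sketch of the helper above in action; inputs without any of the four control characters borrow, others allocate:

```rust
// Sketch only: assumes `show_nonprinting` is in scope.
assert_eq!(show_nonprinting("plain"), "plain");
assert_eq!(show_nonprinting("a\x1bb"), "a␛b");
```
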
#[cfg(test)]
mod tests {
    use ruff_diagnostics::{Applicability, Fix};
    use ruff_text_size::{TextLen, TextRange, TextSize};

    use crate::diagnostic::{
        Annotation, DiagnosticFormat, Severity,
        render::tests::{
            NOTEBOOK, TestEnvironment, create_diagnostics, create_notebook_diagnostics,
            create_syntax_error_diagnostics,
        },
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
        error[unused-import]: `os` imported but unused
         --> fib.py:1:8
          |
        1 | import os
          |        ^^
          |
        help: Remove unused import: `os`

        error[unused-variable]: Local variable `x` is assigned to but never used
         --> fib.py:6:5
          |
        4 | def fibonacci(n):
        5 |     """Compute the nth number in the Fibonacci sequence."""
        6 |     x = 1
          |     ^
        7 |     if n == 0:
        8 |         return 0
          |
        help: Remove assignment to unused variable `x`

        error[undefined-name]: Undefined name `a`
         --> undef.py:1:4
          |
        1 | if a == 1: pass
          |    ^
          |
        "#);
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        error[invalid-syntax]: Expected one or more symbol names after import
         --> syntax_errors.py:1:15
          |
        1 | from os import
          |               ^
        2 |
        3 | if call(foo
          |

        error[invalid-syntax]: Expected ')', found newline
         --> syntax_errors.py:3:12
          |
        1 | from os import
        2 |
        3 | if call(foo
          |            ^
        4 | def bar():
        5 |     pass
          |
        ");
    }

    #[test]
    fn hide_severity_output() {
        let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
        env.hide_severity(true);
        env.fix_applicability(Applicability::DisplayOnly);

        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
        F401 [*] `os` imported but unused
         --> fib.py:1:8
          |
        1 | import os
          |        ^^
          |
        help: Remove unused import: `os`

        F841 [*] Local variable `x` is assigned to but never used
         --> fib.py:6:5
          |
        4 | def fibonacci(n):
        5 |     """Compute the nth number in the Fibonacci sequence."""
        6 |     x = 1
          |     ^
        7 |     if n == 0:
        8 |         return 0
          |
        help: Remove assignment to unused variable `x`

        F821 Undefined name `a`
         --> undef.py:1:4
          |
        1 | if a == 1: pass
          |    ^
          |
        "#);
    }

    #[test]
    fn hide_severity_syntax_errors() {
        let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
        env.hide_severity(true);

        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        invalid-syntax: Expected one or more symbol names after import
         --> syntax_errors.py:1:15
          |
        1 | from os import
          |               ^
        2 |
        3 | if call(foo
          |

        invalid-syntax: Expected ')', found newline
         --> syntax_errors.py:3:12
          |
        1 | from os import
        2 |
        3 | if call(foo
          |            ^
        4 | def bar():
        5 |     pass
          |
        ");
    }

    /// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit c9b99e4.
    ///
    /// For example, without the fix, we get diagnostics like this:
    ///
    /// ```
    /// error[no-indented-block]: Expected an indented block
    ///  --> example.py:3:1
    ///   |
    /// 2 | if False:
    ///   |          ^
    /// 3 |     print()
    ///   |
    /// ```
    ///
    /// where the caret points to the end of the previous line instead of the start of the next.
    #[test]
    fn empty_span_after_line_terminator() {
        let mut env = TestEnvironment::new();
        env.add(
            "example.py",
            r#"
if False:
    print()
"#,
        );
        env.format(DiagnosticFormat::Full);

        let diagnostic = env
            .builder(
                "no-indented-block",
                Severity::Error,
                "Expected an indented block",
            )
            .primary("example.py", "3:0", "3:0", "")
            .build();

        insta::assert_snapshot!(env.render(&diagnostic), @r"
        error[no-indented-block]: Expected an indented block
         --> example.py:3:1
          |
        2 | if False:
        3 |     print()
          | ^
          |
        ");
    }

    /// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit 2922490.
    ///
    /// For example, without the fix, we get diagnostics like this:
    ///
    /// ```
    /// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead
    ///  --> example.py:1:25
    ///   |
    /// 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
    ///   |                         ^
    ///   |
    /// ```
    ///
    /// where the caret points to the `f` in the f-string instead of the start of the invalid
    /// character (`^Z`).
    #[test]
    fn unprintable_characters() {
        let mut env = TestEnvironment::new();
        env.add("example.py", "nested_fstrings = f'{f'{f''}'}'");
        env.format(DiagnosticFormat::Full);

        let diagnostic = env
            .builder(
                "invalid-character-sub",
                Severity::Error,
                r#"Invalid unescaped character SUB, use "\x1a" instead"#,
            )
            .primary("example.py", "1:24", "1:24", "")
            .build();

        insta::assert_snapshot!(env.render(&diagnostic), @r#"
        error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead
         --> example.py:1:25
          |
        1 | nested_fstrings = f'␈{f'{f'␛'}'}'
          |                         ^
          |
        "#);
    }

    #[test]
    fn multiple_unprintable_characters() -> std::io::Result<()> {
        let mut env = TestEnvironment::new();
        env.add("example.py", "");
        env.format(DiagnosticFormat::Full);

        let diagnostic = env
            .builder(
                "invalid-character-sub",
                Severity::Error,
                r#"Invalid unescaped character SUB, use "\x1a" instead"#,
            )
            .primary("example.py", "1:1", "1:1", "")
            .build();

        insta::assert_snapshot!(env.render(&diagnostic), @r#"
        error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1a" instead
         --> example.py:1:2
          |
        1 | ␈␛
          |  ^
          |
        "#);

        Ok(())
    }

    /// Ensure that the header column matches the column in the user's input, even if we've replaced
    /// tabs with spaces for rendering purposes.
    #[test]
    fn tab_replacement() {
        let mut env = TestEnvironment::new();
        env.add("example.py", "def foo():\n\treturn 1");
        env.format(DiagnosticFormat::Full);

        let diagnostic = env.err().primary("example.py", "2:1", "2:9", "").build();

        insta::assert_snapshot!(env.render(&diagnostic), @r"
        error[test-diagnostic]: main diagnostic message
         --> example.py:2:2
          |
        1 | def foo():
        2 |     return 1
          |     ^^^^^^^^
          |
        ");
    }

    /// For file-level diagnostics, we expect to see the header line with the diagnostic information
    /// and the `-->` line with the file information but no lines of source code.
    #[test]
    fn file_level() {
        let mut env = TestEnvironment::new();
        env.add("example.py", "");
        env.format(DiagnosticFormat::Full);

        let mut diagnostic = env.err().build();
        let span = env.path("example.py").with_range(TextRange::default());
        let mut annotation = Annotation::primary(span);
        annotation.set_file_level(true);
        diagnostic.annotate(annotation);

        insta::assert_snapshot!(env.render(&diagnostic), @r"
        error[test-diagnostic]: main diagnostic message
         --> example.py:1:1
        ");
    }

    /// Check that ranges in notebooks are remapped relative to the cells.
    #[test]
    fn notebook_output() {
        let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        error[unused-import][*]: `os` imported but unused
         --> notebook.ipynb:cell 1:2:8
          |
        1 | # cell 1
        2 | import os
          |        ^^
          |
        help: Remove unused import: `os`

        error[unused-import][*]: `math` imported but unused
         --> notebook.ipynb:cell 2:2:8
          |
        1 | # cell 2
        2 | import math
          |        ^^^^
        3 |
        4 | print('hello world')
          |
        help: Remove unused import: `math`

        error[unused-variable]: Local variable `x` is assigned to but never used
         --> notebook.ipynb:cell 3:4:5
          |
        2 | def foo():
        3 |     print()
        4 |     x = 1
          |     ^
          |
        help: Remove assignment to unused variable `x`
        ");
    }

    /// Check notebook handling for multiple annotations in a single diagnostic that span cells.
    #[test]
    fn notebook_output_multiple_annotations() {
        let mut env = TestEnvironment::new();
        env.add("notebook.ipynb", NOTEBOOK);

        let diagnostics = vec![
            // adjacent context windows
            env.builder("unused-import", Severity::Error, "`os` imported but unused")
                .primary("notebook.ipynb", "2:7", "2:9", "")
                .secondary("notebook.ipynb", "4:7", "4:11", "second cell")
                .help("Remove unused import: `os`")
                .build(),
            // non-adjacent context windows
            env.builder("unused-import", Severity::Error, "`os` imported but unused")
                .primary("notebook.ipynb", "2:7", "2:9", "")
                .secondary("notebook.ipynb", "10:4", "10:5", "second cell")
                .help("Remove unused import: `os`")
                .build(),
            // adjacent context windows in the same cell
            env.err()
                .primary("notebook.ipynb", "4:7", "4:11", "second cell")
                .secondary("notebook.ipynb", "6:0", "6:5", "print statement")
                .help("Remove `print` statement")
                .build(),
        ];

        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        error[unused-import]: `os` imported but unused
         --> notebook.ipynb:cell 1:2:8
          |
        1 | # cell 1
        2 | import os
          |        ^^
          |
         ::: notebook.ipynb:cell 2:2:8
          |
        1 | # cell 2
        2 | import math
          |        ---- second cell
        3 |
        4 | print('hello world')
          |
        help: Remove unused import: `os`

        error[unused-import]: `os` imported but unused
         --> notebook.ipynb:cell 1:2:8
          |
        1 | # cell 1
        2 | import os
          |        ^^
          |
         ::: notebook.ipynb:cell 3:4:5
          |
        2 | def foo():
        3 |     print()
        4 |     x = 1
          |     - second cell
          |
        help: Remove unused import: `os`

        error[test-diagnostic]: main diagnostic message
         --> notebook.ipynb:cell 2:2:8
          |
        1 | # cell 2
        2 | import math
          |        ^^^^ second cell
        3 |
        4 | print('hello world')
          | ----- print statement
          |
        help: Remove `print` statement
        ");
    }

    /// Test that we remap notebook cell line numbers in the diff as well as the main diagnostic.
    #[test]
    fn notebook_output_with_diff() {
        let (mut env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
        env.show_fix_diff(true);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        error[unused-import][*]: `os` imported but unused
         --> notebook.ipynb:cell 1:2:8
          |
        1 | # cell 1
        2 | import os
          |        ^^
          |
        help: Remove unused import: `os`

        ℹ Safe fix
           ::: cell 1
        1 1 | # cell 1
        2   |-import os

        error[unused-import][*]: `math` imported but unused
         --> notebook.ipynb:cell 2:2:8
          |
        1 | # cell 2
        2 | import math
          |        ^^^^
        3 |
        4 | print('hello world')
          |
        help: Remove unused import: `math`

        ℹ Safe fix
           ::: cell 2
        1 1 | # cell 2
        2   |-import math
        3 2 |
        4 3 | print('hello world')

        error[unused-variable]: Local variable `x` is assigned to but never used
         --> notebook.ipynb:cell 3:4:5
          |
        2 | def foo():
        3 |     print()
        4 |     x = 1
          |     ^
          |
        help: Remove assignment to unused variable `x`

        ℹ Unsafe fix
           ::: cell 3
        1 1 | # cell 3
        2 2 | def foo():
        3 3 |     print()
        4   |-    x = 1
        5 4 |
        ");
    }

    #[test]
    fn notebook_output_with_diff_spanning_cells() {
        let (mut env, mut diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
        env.show_fix_diff(true);

        // Move all of the edits from the later diagnostics to the first diagnostic to simulate a
        // single diagnostic with edits in different cells.
        let mut diagnostic = diagnostics.swap_remove(0);
        let fix = diagnostic.fix_mut().unwrap();
        let mut edits = fix.edits().to_vec();
        for diag in diagnostics {
            edits.extend_from_slice(diag.fix().unwrap().edits());
        }
        *fix = Fix::unsafe_edits(edits.remove(0), edits);

        insta::assert_snapshot!(env.render(&diagnostic), @r"
        error[unused-import]: `os` imported but unused
         --> notebook.ipynb:cell 1:2:8
          |
        1 | # cell 1
        2 | import os
          |        ^^
          |
        help: Remove unused import: `os`

        ℹ Unsafe fix
           ::: cell 1
        1 1 | # cell 1
        2   |-import os
           ::: cell 2
        1 1 | # cell 2
        2   |-import math
        3 2 |
        4 3 | print('hello world')
           ::: cell 3
        1 1 | # cell 3
        2 2 | def foo():
        3 3 |     print()
        4   |-    x = 1
        5 4 |
        ");
    }

    /// Carriage return (`\r`) is a valid line-ending in Python, so we should normalize this to a
    /// line feed (`\n`) for rendering. Otherwise we report a single long line for this case.
    #[test]
    fn normalize_carriage_return() {
        let mut env = TestEnvironment::new();
        env.add(
            "example.py",
            "# Keep parenthesis around preserved CR\rint(-\r 1)\rint(+\r 1)",
        );
        env.format(DiagnosticFormat::Full);

        let mut diagnostic = env.err().build();
        let span = env
            .path("example.py")
            .with_range(TextRange::at(TextSize::new(39), TextSize::new(0)));
        let annotation = Annotation::primary(span);
        diagnostic.annotate(annotation);

        insta::assert_snapshot!(env.render(&diagnostic), @r"
        error[test-diagnostic]: main diagnostic message
         --> example.py:2:1
          |
        1 | # Keep parenthesis around preserved CR
        2 | int(-
          | ^
        3 |  1)
        4 | int(+
          |
        ");
    }

    /// Without stripping the BOM, we report an error in column 2, unlike Ruff.
    #[test]
    fn strip_bom() {
        let mut env = TestEnvironment::new();
        env.add("example.py", "\u{feff}import foo");
        env.format(DiagnosticFormat::Full);

        let mut diagnostic = env.err().build();
        let span = env
            .path("example.py")
            .with_range(TextRange::at(TextSize::new(3), TextSize::new(0)));
        let annotation = Annotation::primary(span);
        diagnostic.annotate(annotation);

        insta::assert_snapshot!(env.render(&diagnostic), @r"
        error[test-diagnostic]: main diagnostic message
         --> example.py:1:1
          |
        1 | import foo
          | ^
          |
        ");
    }

    #[test]
    fn bom_with_default_range() {
        let mut env = TestEnvironment::new();
        env.add("example.py", "\u{feff}import foo");
        env.format(DiagnosticFormat::Full);

        let mut diagnostic = env.err().build();
        let span = env.path("example.py").with_range(TextRange::default());
let annotation = Annotation::primary(span);
|
||||
diagnostic.annotate(annotation);
|
||||
|
||||
insta::assert_snapshot!(env.render(&diagnostic), @r"
|
||||
error[test-diagnostic]: main diagnostic message
|
||||
--> example.py:1:1
|
||||
|
|
||||
1 | import foo
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
/// We previously rendered this correctly, but the header was falling back to 1:1 for ranges
|
||||
/// pointing to the final newline in a file. Like Ruff, we now use the offset of the first
|
||||
/// character in the nonexistent final line in the header.
|
||||
#[test]
|
||||
fn end_of_file() {
|
||||
let mut env = TestEnvironment::new();
|
||||
let contents = "unexpected eof\n";
|
||||
env.add("example.py", contents);
|
||||
env.format(DiagnosticFormat::Full);
|
||||
|
||||
let mut diagnostic = env.err().build();
|
||||
let span = env
|
||||
.path("example.py")
|
||||
.with_range(TextRange::at(contents.text_len(), TextSize::new(0)));
|
||||
let annotation = Annotation::primary(span);
|
||||
diagnostic.annotate(annotation);
|
||||
|
||||
insta::assert_snapshot!(env.render(&diagnostic), @r"
|
||||
error[test-diagnostic]: main diagnostic message
|
||||
--> example.py:2:1
|
||||
|
|
||||
1 | unexpected eof
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
}
|
||||
@@ -1,352 +0,0 @@
|
||||
use serde::{Serialize, Serializer, ser::SerializeSeq};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use ruff_diagnostics::{Applicability, Edit};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig};
|
||||
|
||||
use super::FileResolver;
|
||||
|
||||
pub(super) struct JsonRenderer<'a> {
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
}
|
||||
|
||||
impl<'a> JsonRenderer<'a> {
|
||||
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
|
||||
Self { resolver, config }
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonRenderer<'_> {
|
||||
pub(super) fn render(
|
||||
&self,
|
||||
f: &mut std::fmt::Formatter,
|
||||
diagnostics: &[Diagnostic],
|
||||
) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{:#}",
|
||||
diagnostics_to_json_value(diagnostics, self.resolver, self.config)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn diagnostics_to_json_value<'a>(
|
||||
diagnostics: impl IntoIterator<Item = &'a Diagnostic>,
|
||||
resolver: &dyn FileResolver,
|
||||
config: &DisplayDiagnosticConfig,
|
||||
) -> Value {
|
||||
let values: Vec<_> = diagnostics
|
||||
.into_iter()
|
||||
.map(|diag| diagnostic_to_json(diag, resolver, config))
|
||||
.collect();
|
||||
json!(values)
|
||||
}
|
||||
|
||||
pub(super) fn diagnostic_to_json<'a>(
|
||||
diagnostic: &'a Diagnostic,
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
) -> JsonDiagnostic<'a> {
|
||||
let span = diagnostic.primary_span_ref();
|
||||
let filename = span.map(|span| span.file().path(resolver));
|
||||
let range = span.and_then(|span| span.range());
|
||||
let diagnostic_source = span.map(|span| span.file().diagnostic_source(resolver));
|
||||
let source_code = diagnostic_source
|
||||
.as_ref()
|
||||
.map(|diagnostic_source| diagnostic_source.as_source_code());
|
||||
let notebook_index = span.and_then(|span| resolver.notebook_index(span.file()));
|
||||
|
||||
let mut start_location = None;
|
||||
let mut end_location = None;
|
||||
let mut noqa_location = None;
|
||||
let mut notebook_cell_index = None;
|
||||
if let Some(source_code) = source_code {
|
||||
noqa_location = diagnostic
|
||||
.noqa_offset()
|
||||
.map(|offset| source_code.line_column(offset));
|
||||
if let Some(range) = range {
|
||||
let mut start = source_code.line_column(range.start());
|
||||
let mut end = source_code.line_column(range.end());
|
||||
if let Some(notebook_index) = ¬ebook_index {
|
||||
notebook_cell_index =
|
||||
Some(notebook_index.cell(start.line).unwrap_or(OneIndexed::MIN));
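                // Illustrative note (mirrors the snapshots above, not new behavior):
                // concatenated line 4 of the test notebook falls in cell 2, so after
                // translation it is reported as `cell 2:2:8`.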
                start = notebook_index.translate_line_column(&start);
                end = notebook_index.translate_line_column(&end);
                noqa_location =
                    noqa_location.map(|location| notebook_index.translate_line_column(&location));
            }
            start_location = Some(start);
            end_location = Some(end);
        }
    }

    let fix = diagnostic.fix().map(|fix| JsonFix {
        applicability: fix.applicability(),
        message: diagnostic.first_help_text(),
        edits: ExpandedEdits {
            edits: fix.edits(),
            notebook_index,
            config,
            diagnostic_source,
        },
    });

    // In preview, the locations and filename can be optional.
    if config.preview {
        JsonDiagnostic {
            code: diagnostic.secondary_code_or_id(),
            url: diagnostic.to_ruff_url(),
            message: diagnostic.body(),
            fix,
            cell: notebook_cell_index,
            location: start_location.map(JsonLocation::from),
            end_location: end_location.map(JsonLocation::from),
            filename,
            noqa_row: noqa_location.map(|location| location.line),
        }
    } else {
        JsonDiagnostic {
            code: diagnostic.secondary_code_or_id(),
            url: diagnostic.to_ruff_url(),
            message: diagnostic.body(),
            fix,
            cell: notebook_cell_index,
            location: Some(start_location.unwrap_or_default().into()),
            end_location: Some(end_location.unwrap_or_default().into()),
            filename: Some(filename.unwrap_or_default()),
            noqa_row: noqa_location.map(|location| location.line),
        }
    }
}

struct ExpandedEdits<'a> {
    edits: &'a [Edit],
    notebook_index: Option<NotebookIndex>,
    config: &'a DisplayDiagnosticConfig,
    diagnostic_source: Option<DiagnosticSource>,
}

impl Serialize for ExpandedEdits<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(self.edits.len()))?;

        for edit in self.edits {
            let (location, end_location) = if let Some(diagnostic_source) = &self.diagnostic_source
            {
                let source_code = diagnostic_source.as_source_code();
                let mut location = source_code.line_column(edit.start());
                let mut end_location = source_code.line_column(edit.end());

                if let Some(notebook_index) = &self.notebook_index {
                    // There exists a newline between each cell's source code in the
                    // concatenated source code in Ruff. This newline doesn't actually
                    // exist in the JSON source field.
                    //
                    // Now, certain edits may try to remove this newline, which means
                    // the edit will spill over to the first character of the next cell.
                    // If it does, we need to translate the end location to the last
                    // character of the previous cell.
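                    //
                    // Illustrative case: deleting a cell's final line together with
                    // its trailing newline produces an edit that would end at row 1,
                    // column 1 of the next cell (hence the `debug_assert`s on column 1
                    // below); it gets clamped to the last column of the previous cell
                    // instead.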
                    match (
                        notebook_index.cell(location.line),
                        notebook_index.cell(end_location.line),
                    ) {
                        (Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
                            debug_assert_eq!(end_location.column.get(), 1);

                            let prev_row = end_location.line.saturating_sub(1);
                            end_location = LineColumn {
                                line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
                                column: source_code
                                    .line_column(source_code.line_end_exclusive(prev_row))
                                    .column,
                            };
                        }
                        (Some(_), None) => {
                            debug_assert_eq!(end_location.column.get(), 1);

                            let prev_row = end_location.line.saturating_sub(1);
                            end_location = LineColumn {
                                line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
                                column: source_code
                                    .line_column(source_code.line_end_exclusive(prev_row))
                                    .column,
                            };
                        }
                        _ => {
                            end_location = notebook_index.translate_line_column(&end_location);
                        }
                    }
                    location = notebook_index.translate_line_column(&location);
                }

                (Some(location), Some(end_location))
            } else {
                (None, None)
            };

            // In preview, the locations can be optional.
            let value = if self.config.preview {
                JsonEdit {
                    content: edit.content().unwrap_or_default(),
                    location: location.map(JsonLocation::from),
                    end_location: end_location.map(JsonLocation::from),
                }
            } else {
                JsonEdit {
                    content: edit.content().unwrap_or_default(),
                    location: Some(location.unwrap_or_default().into()),
                    end_location: Some(end_location.unwrap_or_default().into()),
                }
            };

            s.serialize_element(&value)?;
        }

        s.end()
    }
}

/// A serializable version of `Diagnostic`.
///
/// The `Old` variant only exists to preserve backwards compatibility. Both this and `JsonEdit`
/// should become structs with the `New` definitions in a future Ruff release.
#[derive(Serialize)]
pub(crate) struct JsonDiagnostic<'a> {
    cell: Option<OneIndexed>,
    code: &'a str,
    end_location: Option<JsonLocation>,
    filename: Option<&'a str>,
    fix: Option<JsonFix<'a>>,
    location: Option<JsonLocation>,
    message: &'a str,
    noqa_row: Option<OneIndexed>,
    url: Option<String>,
}

#[derive(Serialize)]
struct JsonFix<'a> {
    applicability: Applicability,
    edits: ExpandedEdits<'a>,
    message: Option<&'a str>,
}

#[derive(Serialize)]
struct JsonLocation {
    column: OneIndexed,
    row: OneIndexed,
}

impl From<LineColumn> for JsonLocation {
    fn from(location: LineColumn) -> Self {
        JsonLocation {
            row: location.line,
            column: location.column,
        }
    }
}

#[derive(Serialize)]
struct JsonEdit<'a> {
    content: &'a str,
    end_location: Option<JsonLocation>,
    location: Option<JsonLocation>,
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{
            TestEnvironment, create_diagnostics, create_notebook_diagnostics,
            create_syntax_error_diagnostics,
        },
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Json);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Json);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn notebook_output() {
        let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Json);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn missing_file_stable() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Json);
        env.preview(false);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @r#"
        [
          {
            "cell": null,
            "code": "test-diagnostic",
            "end_location": {
              "column": 1,
              "row": 1
            },
            "filename": "",
            "fix": null,
            "location": {
              "column": 1,
              "row": 1
            },
            "message": "main diagnostic message",
            "noqa_row": null,
            "url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
          }
        ]
        "#,
        );
    }

    #[test]
    fn missing_file_preview() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Json);
        env.preview(true);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @r#"
        [
          {
            "cell": null,
            "code": "test-diagnostic",
            "end_location": null,
            "filename": null,
            "fix": null,
            "location": null,
            "message": "main diagnostic message",
            "noqa_row": null,
            "url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
          }
        ]
        "#,
        );
    }
}

@@ -1,59 +0,0 @@
use crate::diagnostic::{Diagnostic, DisplayDiagnosticConfig, render::json::diagnostic_to_json};

use super::FileResolver;

pub(super) struct JsonLinesRenderer<'a> {
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
}

impl<'a> JsonLinesRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
        Self { resolver, config }
    }
}

impl JsonLinesRenderer<'_> {
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diag in diagnostics {
            writeln!(
                f,
                "{}",
                serde_json::json!(diagnostic_to_json(diag, self.resolver, self.config))
            )?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{
            create_diagnostics, create_notebook_diagnostics, create_syntax_error_diagnostics,
        },
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn notebook_output() {
        let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }
}

@@ -1,195 +0,0 @@
use std::{collections::BTreeMap, ops::Deref, path::Path};

use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};

use ruff_source_file::LineColumn;

use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};

/// A renderer for diagnostics in the [JUnit] format.
///
/// See [`junit.xsd`] for the specification in the JUnit repository and an annotated [version]
/// linked from the [`quick_junit`] docs.
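///
/// A note on shape (matching the renderer below and the deleted JUnit snapshot
/// further down): each input file becomes one `<testsuite>` in package
/// `org.ruff`, and each diagnostic a failing `<testcase>` named
/// `org.ruff.<code>`.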
///
/// [JUnit]: https://junit.org/
/// [`junit.xsd`]: https://github.com/junit-team/junit-framework/blob/2870b7d8fd5bf7c1efe489d3991d3ed3900e82bb/platform-tests/src/test/resources/jenkins-junit.xsd
/// [version]: https://llg.cubic.org/docs/junit/
/// [`quick_junit`]: https://docs.rs/quick-junit/latest/quick_junit/
pub struct JunitRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> JunitRenderer<'a> {
    pub fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        let mut report = Report::new("ruff");

        if diagnostics.is_empty() {
            let mut test_suite = TestSuite::new("ruff");
            test_suite
                .extra
                .insert(XmlString::new("package"), XmlString::new("org.ruff"));
            let mut case = TestCase::new("No errors found", TestCaseStatus::success());
            case.set_classname("ruff");
            test_suite.add_test_case(case);
            report.add_test_suite(test_suite);
        } else {
            for (filename, diagnostics) in group_diagnostics_by_filename(diagnostics, self.resolver)
            {
                let mut test_suite = TestSuite::new(filename);
                test_suite
                    .extra
                    .insert(XmlString::new("package"), XmlString::new("org.ruff"));

                let classname = Path::new(filename).with_extension("");

                for diagnostic in diagnostics {
                    let DiagnosticWithLocation {
                        diagnostic,
                        start_location: location,
                    } = diagnostic;
                    let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
                    status.set_message(diagnostic.body());

                    if let Some(location) = location {
                        status.set_description(format!(
                            "line {row}, col {col}, {body}",
                            row = location.line,
                            col = location.column,
                            body = diagnostic.body()
                        ));
                    } else {
                        status.set_description(diagnostic.body());
                    }

                    let code = diagnostic
                        .secondary_code()
                        .map_or_else(|| diagnostic.name(), SecondaryCode::as_str);
                    let mut case = TestCase::new(format!("org.ruff.{code}"), status);
                    case.set_classname(classname.to_str().unwrap());

                    if let Some(location) = location {
                        case.extra.insert(
                            XmlString::new("line"),
                            XmlString::new(location.line.to_string()),
                        );
                        case.extra.insert(
                            XmlString::new("column"),
                            XmlString::new(location.column.to_string()),
                        );
                    }

                    test_suite.add_test_case(case);
                }
                report.add_test_suite(test_suite);
            }
        }

        let adapter = FmtAdapter { fmt: f };
        report.serialize(adapter).map_err(|_| std::fmt::Error)
    }
}

// TODO(brent) this and `group_diagnostics_by_filename` are also used by the `grouped` output
// format. I think they'd make more sense in that file, but I started here first. I'll move them to
// that module when adding the `grouped` output format.
struct DiagnosticWithLocation<'a> {
    diagnostic: &'a Diagnostic,
    start_location: Option<LineColumn>,
}

impl Deref for DiagnosticWithLocation<'_> {
    type Target = Diagnostic;

    fn deref(&self) -> &Self::Target {
        self.diagnostic
    }
}

fn group_diagnostics_by_filename<'a>(
    diagnostics: &'a [Diagnostic],
    resolver: &'a dyn FileResolver,
) -> BTreeMap<&'a str, Vec<DiagnosticWithLocation<'a>>> {
    let mut grouped_diagnostics = BTreeMap::default();
    for diagnostic in diagnostics {
        let (filename, start_location) = diagnostic
            .primary_span_ref()
            .map(|span| {
                let file = span.file();
                let start_location =
                    span.range()
                        .filter(|_| !resolver.is_notebook(file))
                        .map(|range| {
                            file.diagnostic_source(resolver)
                                .as_source_code()
                                .line_column(range.start())
                        });

                (span.file().path(resolver), start_location)
            })
            .unwrap_or_default();

        grouped_diagnostics
            .entry(filename)
            .or_insert_with(Vec::new)
            .push(DiagnosticWithLocation {
                diagnostic,
                start_location,
            });
    }
    grouped_diagnostics
}

struct FmtAdapter<'a> {
    fmt: &'a mut dyn std::fmt::Write,
}

impl std::io::Write for FmtAdapter<'_> {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.fmt
            .write_str(std::str::from_utf8(buf).map_err(|_| {
                std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    "Invalid UTF-8 in JUnit report",
                )
            })?)
            .map_err(std::io::Error::other)?;

        Ok(buf.len())
    }

    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }

    fn write_fmt(&mut self, args: std::fmt::Arguments<'_>) -> std::io::Result<()> {
        self.fmt.write_fmt(args).map_err(std::io::Error::other)
    }
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Junit);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Junit);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }
}

@@ -1,97 +0,0 @@
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};

/// Generate violations in Pylint format.
///
/// The format is given by this string:
///
/// ```python
/// "%(path)s:%(row)d: [%(code)s] %(text)s"
/// ```
///
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
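///
/// For example (taken from the deleted pylint snapshot below), a syntax error
/// renders as:
///
/// ```text
/// syntax_errors.py:1: [invalid-syntax] Expected one or more symbol names after import
/// ```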
pub(super) struct PylintRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> PylintRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }
}

impl PylintRenderer<'_> {
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diagnostic in diagnostics {
            let (filename, row) = diagnostic
                .primary_span_ref()
                .map(|span| {
                    let file = span.file();

                    let row = span
                        .range()
                        .filter(|_| !self.resolver.is_notebook(file))
                        .map(|range| {
                            file.diagnostic_source(self.resolver)
                                .as_source_code()
                                .line_column(range.start())
                                .line
                        });

                    (file.relative_path(self.resolver).to_string_lossy(), row)
                })
                .unwrap_or_default();

            let code = diagnostic
                .secondary_code()
                .map_or_else(|| diagnostic.name(), SecondaryCode::as_str);

            let row = row.unwrap_or_default();

            writeln!(
                f,
                "{path}:{row}: [{code}] {body}",
                path = filename,
                body = diagnostic.body()
            )?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Pylint);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Pylint);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn missing_file() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Pylint);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @":1: [test-diagnostic] main diagnostic message",
        );
    }
}

@@ -1,235 +0,0 @@
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};

use ruff_diagnostics::{Edit, Fix};
use ruff_source_file::{LineColumn, SourceCode};
use ruff_text_size::Ranged;

use crate::diagnostic::Diagnostic;

use super::FileResolver;

pub struct RdjsonRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> RdjsonRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        write!(
            f,
            "{:#}",
            serde_json::json!(RdjsonDiagnostics::new(diagnostics, self.resolver))
        )
    }
}

struct ExpandedDiagnostics<'a> {
    resolver: &'a dyn FileResolver,
    diagnostics: &'a [Diagnostic],
}

impl Serialize for ExpandedDiagnostics<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;

        for diagnostic in self.diagnostics {
            let value = diagnostic_to_rdjson(diagnostic, self.resolver);
            s.serialize_element(&value)?;
        }

        s.end()
    }
}

fn diagnostic_to_rdjson<'a>(
    diagnostic: &'a Diagnostic,
    resolver: &'a dyn FileResolver,
) -> RdjsonDiagnostic<'a> {
    let span = diagnostic.primary_span_ref();
    let source_file = span.map(|span| {
        let file = span.file();
        (file.path(resolver), file.diagnostic_source(resolver))
    });

    let location = source_file.as_ref().map(|(path, source)| {
        let range = diagnostic.range().map(|range| {
            let source_code = source.as_source_code();
            let start = source_code.line_column(range.start());
            let end = source_code.line_column(range.end());
            RdjsonRange::new(start, end)
        });

        RdjsonLocation { path, range }
    });

    let edits = diagnostic.fix().map(Fix::edits).unwrap_or_default();

    RdjsonDiagnostic {
        message: diagnostic.body(),
        location,
        code: RdjsonCode {
            value: diagnostic
                .secondary_code()
                .map_or_else(|| diagnostic.name(), |code| code.as_str()),
            url: diagnostic.to_ruff_url(),
        },
        suggestions: rdjson_suggestions(
            edits,
            source_file
                .as_ref()
                .map(|(_, source)| source.as_source_code()),
        ),
    }
}

fn rdjson_suggestions<'a>(
    edits: &'a [Edit],
    source_code: Option<SourceCode>,
) -> Vec<RdjsonSuggestion<'a>> {
    if edits.is_empty() {
        return Vec::new();
    }

    let Some(source_code) = source_code else {
        debug_assert!(false, "Expected a source file for a diagnostic with a fix");
        return Vec::new();
    };

    edits
        .iter()
        .map(|edit| {
            let start = source_code.line_column(edit.start());
            let end = source_code.line_column(edit.end());
            let range = RdjsonRange::new(start, end);

            RdjsonSuggestion {
                range,
                text: edit.content().unwrap_or_default(),
            }
        })
        .collect()
}

#[derive(Serialize)]
struct RdjsonDiagnostics<'a> {
    diagnostics: ExpandedDiagnostics<'a>,
    severity: &'static str,
    source: RdjsonSource,
}

impl<'a> RdjsonDiagnostics<'a> {
    fn new(diagnostics: &'a [Diagnostic], resolver: &'a dyn FileResolver) -> Self {
        Self {
            source: RdjsonSource {
                name: "ruff",
                url: env!("CARGO_PKG_HOMEPAGE"),
            },
            severity: "WARNING",
            diagnostics: ExpandedDiagnostics {
                diagnostics,
                resolver,
            },
        }
    }
}

#[derive(Serialize)]
struct RdjsonSource {
    name: &'static str,
    url: &'static str,
}

#[derive(Serialize)]
struct RdjsonDiagnostic<'a> {
    code: RdjsonCode<'a>,
    #[serde(skip_serializing_if = "Option::is_none")]
    location: Option<RdjsonLocation<'a>>,
    message: &'a str,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    suggestions: Vec<RdjsonSuggestion<'a>>,
}

#[derive(Serialize)]
struct RdjsonLocation<'a> {
    path: &'a str,
    #[serde(skip_serializing_if = "Option::is_none")]
    range: Option<RdjsonRange>,
}

#[derive(Default, Serialize)]
struct RdjsonRange {
    end: LineColumn,
    start: LineColumn,
}

impl RdjsonRange {
    fn new(start: LineColumn, end: LineColumn) -> Self {
        Self { start, end }
    }
}

#[derive(Serialize)]
struct RdjsonCode<'a> {
    #[serde(skip_serializing_if = "Option::is_none")]
    url: Option<String>,
    value: &'a str,
}

#[derive(Serialize)]
struct RdjsonSuggestion<'a> {
    range: RdjsonRange,
    text: &'a str,
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Rdjson);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Rdjson);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn missing_file_stable() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Rdjson);
        env.preview(false);

        let diag = env.err().build();

        insta::assert_snapshot!(env.render(&diag));
    }

    #[test]
    fn missing_file_preview() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Rdjson);
        env.preview(true);

        let diag = env.err().build();

        insta::assert_snapshot!(env.render(&diag));
    }
}

@@ -1,6 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/azure.rs
expression: env.render_diagnostics(&diagnostics)
---
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;code=invalid-syntax;]Expected one or more symbol names after import
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;code=invalid-syntax;]Expected ')', found newline

@@ -1,6 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics)
---
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"Expected one or more symbol names after import","noqa_row":null,"url":null}
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"Expected ')', found newline","noqa_row":null,"url":null}

@@ -1,15 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/junit.rs
expression: env.render_diagnostics(&diagnostics)
---
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="2" failures="2" errors="0">
    <testsuite name="syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
        <testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="1" column="15">
            <failure message="Expected one or more symbol names after import">line 1, col 15, Expected one or more symbol names after import</failure>
        </testcase>
        <testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="3" column="12">
            <failure message="Expected ')', found newline">line 3, col 12, Expected ')', found newline</failure>
        </testcase>
    </testsuite>
</testsuites>

@@ -1,6 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
---
syntax_errors.py:1: [invalid-syntax] Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] Expected ')', found newline

@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
  "diagnostics": [
    {
      "code": {
        "url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
        "value": "test-diagnostic"
      },
      "message": "main diagnostic message"
    }
  ],
  "severity": "WARNING",
  "source": {
    "name": "ruff",
    "url": "https://docs.astral.sh/ruff"
  }
}

@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
  "diagnostics": [
    {
      "code": {
        "url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
        "value": "test-diagnostic"
      },
      "message": "main diagnostic message"
    }
  ],
  "severity": "WARNING",
  "source": {
    "name": "ruff",
    "url": "https://docs.astral.sh/ruff"
  }
}
@@ -40,12 +40,7 @@ pub struct DiagnosticStylesheet {
    pub(crate) help: Style,
    pub(crate) line_no: Style,
    pub(crate) emphasis: Style,
    pub(crate) underline: Style,
    pub(crate) none: Style,
    pub(crate) separator: Style,
    pub(crate) secondary_code: Style,
    pub(crate) insertion: Style,
    pub(crate) deletion: Style,
}

impl Default for DiagnosticStylesheet {
@@ -66,12 +61,7 @@ impl DiagnosticStylesheet {
            help: AnsiColor::BrightCyan.on_default().effects(Effects::BOLD),
            line_no: bright_blue.effects(Effects::BOLD),
            emphasis: Style::new().effects(Effects::BOLD),
            underline: Style::new().effects(Effects::UNDERLINE),
            none: Style::new(),
            separator: AnsiColor::Cyan.on_default(),
            secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
            insertion: AnsiColor::Green.on_default(),
            deletion: AnsiColor::Red.on_default(),
        }
    }

@@ -84,12 +74,7 @@ impl DiagnosticStylesheet {
            help: Style::new(),
            line_no: Style::new(),
            emphasis: Style::new(),
            underline: Style::new(),
            none: Style::new(),
            separator: Style::new(),
            secondary_code: Style::new(),
            insertion: Style::new(),
            deletion: Style::new(),
        }
    }
}

@@ -9,7 +9,7 @@ use crate::system::file_time_now;
/// * The last modification time of the file.
/// * The hash of the file's content.
/// * The revision as it comes from an external system, for example the LSP.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default, get_size2::GetSize)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
pub struct FileRevision(u128);

impl FileRevision {

@@ -1,6 +1,7 @@
use std::fmt;
use std::sync::Arc;

use countme::Count;
use dashmap::mapref::entry::Entry;
pub use file_root::{FileRoot, FileRootKind};
pub use path::FilePath;
@@ -87,12 +88,11 @@ impl Files {
        .system_by_path
        .entry(absolute.clone())
        .or_insert_with(|| {
            tracing::trace!("Adding file '{path}'");

            let metadata = db.system().path_metadata(path);

            tracing::trace!("Adding file '{absolute}'");

            let durability = self
                .root(db, &absolute)
                .root(db, path)
                .map_or(Durability::default(), |root| root.durability(db));

            let builder = File::builder(FilePath::System(absolute))
@@ -232,7 +232,7 @@ impl Files {
        let roots = inner.roots.read().unwrap();

        for root in roots.all() {
            if path.starts_with(root.path(db)) {
            if root.path(db).starts_with(&path) {
                root.set_revision(db).to(FileRevision::now());
            }
        }
@@ -290,7 +290,7 @@ impl std::panic::RefUnwindSafe for Files {}
/// # Ordering
/// Ordering is based on the file's salsa-assigned id and not on its values.
/// The id may change between runs.
#[salsa::input(heap_size=ruff_memory_usage::heap_size)]
#[salsa::input]
#[derive(PartialOrd, Ord)]
pub struct File {
    /// The path of the file (immutable).
@@ -312,6 +312,11 @@ pub struct File {
    /// the file has been deleted is to change the status to `Deleted`.
    #[default]
    status: FileStatus,

    /// Counter that counts the number of created file instances and active file instances.
    /// Only enabled in debug builds.
    #[default]
    count: Count<File>,
}

// The Salsa heap is tracked separately.
@@ -370,25 +375,12 @@ impl File {
    }

    /// Refreshes the file metadata by querying the file system if needed.
    ///
    /// This also "touches" the file root associated with the given path.
    /// This means that any Salsa queries that depend on the corresponding
    /// root's revision will become invalidated.
    pub fn sync_path(db: &mut dyn Db, path: &SystemPath) {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        Files::touch_root(db, &absolute);
        Self::sync_system_path(db, &absolute, None);
    }

    /// Refreshes *only* the file metadata by querying the file system if needed.
    ///
    /// This specifically does not touch any file root associated with the
    /// given file path.
    pub fn sync_path_only(db: &mut dyn Db, path: &SystemPath) {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        Self::sync_system_path(db, &absolute, None);
    }

    /// Increments the revision for the virtual file at `path`.
    pub fn sync_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath) {
        if let Some(virtual_file) = db.files().try_virtual_file(path) {
@@ -494,7 +486,7 @@ impl fmt::Debug for File {
///
/// This is a wrapper around a [`File`] that provides additional methods to interact with a virtual
/// file.
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone)]
pub struct VirtualFile(File);

impl VirtualFile {
@@ -522,7 +514,7 @@ impl VirtualFile {
// The types in here need to be public because they're salsa ingredients but we
// don't want them to be publicly accessible. That's why we put them into a private module.
mod private {
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Default, get_size2::GetSize)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
    pub enum FileStatus {
        /// The file exists.
        #[default]

@@ -16,14 +16,14 @@ use crate::system::{SystemPath, SystemPathBuf};
/// The main usage of file roots is to determine a file's durability. But it can also be used
/// to make a salsa query dependent on whether a file in a root has changed without writing any
/// manual invalidation logic.
#[salsa::input(debug, heap_size=ruff_memory_usage::heap_size)]
#[salsa::input(debug)]
pub struct FileRoot {
    /// The path of a root is guaranteed to never change.
    #[returns(deref)]
    pub path: SystemPathBuf,

    /// The kind of the root at the time of its creation.
    pub kind_at_time_of_creation: FileRootKind,
    kind_at_time_of_creation: FileRootKind,

    /// A revision that changes when the contents of the source root change.
    ///

@@ -37,7 +37,7 @@ impl FileRoot {
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileRootKind {
    /// The root of a project.
    Project,

@@ -11,7 +11,7 @@ use std::fmt::{Display, Formatter};
/// * a file stored on the [host system](crate::system::System).
/// * a virtual file stored on the [host system](crate::system::System).
/// * a vendored file stored in the [vendored file system](crate::vendored::VendoredFileSystem).
#[derive(Clone, Debug, Eq, PartialEq, Hash, get_size2::GetSize)]
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum FilePath {
    /// Path to a file on the [host system](crate::system::System).
    System(SystemPathBuf),

@@ -1,8 +1,3 @@
#![warn(
    clippy::disallowed_methods,
    reason = "Prefer System trait methods over std methods"
)]

use crate::files::Files;
use crate::system::System;
use crate::vendored::VendoredFileSystem;
@@ -10,7 +5,6 @@ use ruff_python_ast::PythonVersion;
use rustc_hash::FxHasher;
use std::hash::BuildHasherDefault;
use std::num::NonZeroUsize;
use ty_static::EnvVars;

pub mod diagnostic;
pub mod display;
@@ -33,21 +27,6 @@ pub use web_time::{Instant, SystemTime, SystemTimeError};
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxDashSet<K> = dashmap::DashSet<K, BuildHasherDefault<FxHasher>>;

static VERSION: std::sync::OnceLock<String> = std::sync::OnceLock::new();

/// Returns the version of the executing program if set.
pub fn program_version() -> Option<&'static str> {
    VERSION.get().map(|version| version.as_str())
}

/// Sets the version of the executing program.
///
/// ## Errors
/// If the version has already been initialized (can only be set once).
pub fn set_program_version(version: String) -> Result<(), String> {
    VERSION.set(version)
}

/// Most basic database that gives access to files, the host system, source code, and parsed AST.
#[salsa::db]
pub trait Db: salsa::Database {
@@ -70,13 +49,9 @@ pub trait Db: salsa::Database {
/// to process work in parallel. For example, to index a directory or check the files of a project.
/// ty can still spawn more threads for other tasks, e.g. to wait for a Ctrl+C signal or
/// watching the files for changes.
#[expect(
    clippy::disallowed_methods,
    reason = "We don't have access to System here, but this is also only used by the CLI and the server which always run on a real system."
)]
pub fn max_parallelism() -> NonZeroUsize {
    std::env::var(EnvVars::TY_MAX_PARALLELISM)
        .or_else(|_| std::env::var(EnvVars::RAYON_NUM_THREADS))
    std::env::var("TY_MAX_PARALLELISM")
        .or_else(|_| std::env::var("RAYON_NUM_THREADS"))
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or_else(|| {
@@ -21,7 +21,7 @@ use crate::source::source_text;
|
||||
/// reflected in the changed AST offsets.
|
||||
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
|
||||
/// for determining if a query result is unchanged.
|
||||
#[salsa::tracked(returns(ref), no_eq, heap_size=ruff_memory_usage::heap_size)]
|
||||
#[salsa::tracked(returns(ref), no_eq, heap_size=get_size2::GetSize::get_heap_size)]
|
||||
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
|
||||
let _span = tracing::trace_span!("parsed_module", ?file).entered();
|
||||
|
||||
@@ -92,14 +92,14 @@ impl ParsedModule {
|
||||
self.inner.store(None);
|
||||
}
|
||||
|
||||
/// Returns the pointer address of this [`ParsedModule`].
|
||||
/// Returns a pointer for this [`ParsedModule`].
|
||||
///
|
||||
/// The pointer uniquely identifies the module within the current Salsa revision,
|
||||
/// regardless of whether particular [`ParsedModuleRef`] instances are garbage collected.
|
||||
pub fn addr(&self) -> usize {
|
||||
pub fn as_ptr(&self) -> *const () {
|
||||
// Note that the outer `Arc` in `inner` is stable across garbage collection, while the inner
|
||||
// `Arc` within the `ArcSwap` may change.
|
||||
Arc::as_ptr(&self.inner).addr()
|
||||
Arc::as_ptr(&self.inner).cast()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -202,13 +202,9 @@ mod indexed {
|
||||
|
||||
/// Returns the node at the given index.
|
||||
pub fn get_by_index<'ast>(&'ast self, index: NodeIndex) -> AnyRootNodeRef<'ast> {
|
||||
let index = index
|
||||
.as_u32()
|
||||
.expect("attempted to access uninitialized `NodeIndex`");
|
||||
|
||||
// Note that this method restores the correct lifetime: the nodes are valid for as
|
||||
// long as the reference to `IndexedModule` is alive.
|
||||
self.index[index as usize]
|
||||
self.index[index.as_usize()]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -224,7 +220,7 @@ mod indexed {
|
||||
T: HasNodeIndex + std::fmt::Debug,
|
||||
AnyRootNodeRef<'a>: From<&'a T>,
|
||||
{
|
||||
node.node_index().set(NodeIndex::from(self.index));
|
||||
node.node_index().set(self.index);
|
||||
self.nodes.push(AnyRootNodeRef::from(node));
|
||||
self.index += 1;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use std::ops::Deref;
|
||||
use std::sync::Arc;
|
||||
|
||||
use countme::Count;
|
||||
|
||||
use ruff_notebook::Notebook;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_source_file::LineIndex;
|
||||
@@ -9,7 +11,7 @@ use crate::Db;
|
||||
use crate::files::{File, FilePath};
|
||||
|
||||
/// Reads the source text of a python text file (must be valid UTF8) or notebook.
|
||||
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
|
||||
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
|
||||
pub fn source_text(db: &dyn Db, file: File) -> SourceText {
|
||||
let path = file.path(db);
|
||||
let _span = tracing::trace_span!("source_text", file = %path).entered();
|
||||
@@ -36,7 +38,11 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
|
||||
};
|
||||
|
||||
SourceText {
|
||||
inner: Arc::new(SourceTextInner { kind, read_error }),
|
||||
inner: Arc::new(SourceTextInner {
|
||||
kind,
|
||||
read_error,
|
||||
count: Count::new(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -69,21 +75,21 @@ impl SourceText {
|
||||
pub fn as_str(&self) -> &str {
|
||||
match &self.inner.kind {
|
||||
SourceTextKind::Text(source) => source,
|
||||
SourceTextKind::Notebook { notebook } => notebook.source_code(),
|
||||
SourceTextKind::Notebook(notebook) => notebook.source_code(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the underlying notebook if this is a notebook file.
|
||||
pub fn as_notebook(&self) -> Option<&Notebook> {
|
||||
match &self.inner.kind {
|
||||
SourceTextKind::Notebook { notebook } => Some(notebook),
|
||||
SourceTextKind::Notebook(notebook) => Some(notebook),
|
||||
SourceTextKind::Text(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if this is a notebook source file.
|
||||
pub fn is_notebook(&self) -> bool {
|
||||
matches!(&self.inner.kind, SourceTextKind::Notebook { .. })
|
||||
matches!(&self.inner.kind, SourceTextKind::Notebook(_))
|
||||
}
|
||||
|
||||
/// Returns `true` if there was an error when reading the content of the file.
|
||||
@@ -108,7 +114,7 @@ impl std::fmt::Debug for SourceText {
|
||||
SourceTextKind::Text(text) => {
|
||||
dbg.field(text);
|
||||
}
|
||||
SourceTextKind::Notebook { notebook } => {
|
||||
SourceTextKind::Notebook(notebook) => {
|
||||
dbg.field(notebook);
|
||||
}
|
||||
}
|
||||
@@ -119,19 +125,29 @@ impl std::fmt::Debug for SourceText {
|
||||
|
||||
#[derive(Eq, PartialEq, get_size2::GetSize)]
|
||||
struct SourceTextInner {
|
||||
#[get_size(ignore)]
|
||||
count: Count<SourceText>,
|
||||
kind: SourceTextKind,
|
||||
read_error: Option<SourceTextError>,
|
||||
}
|
||||
|
||||
#[derive(Eq, PartialEq, get_size2::GetSize)]
|
||||
#[derive(Eq, PartialEq)]
|
||||
enum SourceTextKind {
|
||||
Text(String),
|
||||
Notebook {
|
||||
// Jupyter notebooks are not very relevant for memory profiling, and contain
|
||||
// arbitrary JSON values that do not implement the `GetSize` trait.
|
||||
#[get_size(ignore)]
|
||||
notebook: Box<Notebook>,
|
||||
},
|
||||
Notebook(Box<Notebook>),
|
||||
}
|
||||
|
||||
impl get_size2::GetSize for SourceTextKind {
|
||||
fn get_heap_size(&self) -> usize {
|
||||
match self {
|
||||
SourceTextKind::Text(text) => text.get_heap_size(),
|
||||
// TODO: The `get-size` derive does not support ignoring enum variants.
|
||||
//
|
||||
// Jupyter notebooks are not very relevant for memory profiling, and contain
|
||||
// arbitrary JSON values that do not implement the `GetSize` trait.
|
||||
SourceTextKind::Notebook(_) => 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for SourceTextKind {
|
||||
@@ -142,9 +158,7 @@ impl From<String> for SourceTextKind {
|
||||
|
||||
impl From<Notebook> for SourceTextKind {
|
||||
fn from(notebook: Notebook) -> Self {
|
||||
SourceTextKind::Notebook {
|
||||
notebook: Box::new(notebook),
|
||||
}
|
||||
SourceTextKind::Notebook(Box::new(notebook))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -157,7 +171,7 @@ pub enum SourceTextError {
|
||||
}
|
||||
|
||||
/// Computes the [`LineIndex`] for `file`.
|
||||
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
|
||||
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
|
||||
pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
|
||||
let _span = tracing::trace_span!("line_index", ?file).entered();
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ pub type Result<T> = std::io::Result<T>;
|
||||
/// * File watching isn't supported.
|
||||
///
|
||||
/// Abstracting the system also enables tests to use a more efficient in-memory file system.
|
||||
pub trait System: Debug + Sync + Send {
|
||||
pub trait System: Debug {
|
||||
/// Reads the metadata of the file or directory at `path`.
|
||||
///
|
||||
/// This function will traverse symbolic links to query information about the destination file.
|
||||
@@ -197,8 +197,6 @@ pub trait System: Debug + Sync + Send {
|
||||
fn as_any(&self) -> &dyn std::any::Any;
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any;
|
||||
|
||||
fn dyn_clone(&self) -> Box<dyn System>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#![allow(clippy::disallowed_methods)]
|
||||
|
||||
use super::walk_directory::{
|
||||
self, DirectoryWalker, WalkDirectoryBuilder, WalkDirectoryConfiguration,
|
||||
WalkDirectoryVisitorBuilder, WalkState,
|
||||
@@ -257,10 +255,6 @@ impl System for OsSystem {
|
||||
fn env_var(&self, name: &str) -> std::result::Result<String, std::env::VarError> {
|
||||
std::env::var(name)
|
||||
}
|
||||
|
||||
fn dyn_clone(&self) -> Box<dyn System> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl OsSystem {
|
||||
|
||||
@@ -236,7 +236,7 @@ impl SystemPath {
|
||||
///
|
||||
/// [`CurDir`]: camino::Utf8Component::CurDir
|
||||
#[inline]
|
||||
pub fn components(&self) -> camino::Utf8Components<'_> {
|
||||
pub fn components(&self) -> camino::Utf8Components {
|
||||
self.0.components()
|
||||
}
|
||||
|
||||
@@ -762,7 +762,7 @@ impl SystemVirtualPath {
|
||||
}
|
||||
|
||||
/// An owned, virtual path on [`System`](`super::System`) (akin to [`String`]).
|
||||
#[derive(Eq, PartialEq, Clone, Hash, PartialOrd, Ord, get_size2::GetSize)]
|
||||
#[derive(Eq, PartialEq, Clone, Hash, PartialOrd, Ord)]
|
||||
pub struct SystemVirtualPathBuf(String);
|
||||
|
||||
impl SystemVirtualPathBuf {
|
||||
|
||||
@@ -146,10 +146,6 @@ impl System for TestSystem {
|
||||
fn case_sensitivity(&self) -> CaseSensitivity {
|
||||
self.system().case_sensitivity()
|
||||
}
|
||||
|
||||
fn dyn_clone(&self) -> Box<dyn System> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for TestSystem {
|
||||
@@ -398,13 +394,6 @@ impl System for InMemorySystem {
|
||||
fn case_sensitivity(&self) -> CaseSensitivity {
|
||||
CaseSensitivity::CaseSensitive
|
||||
}
|
||||
|
||||
fn dyn_clone(&self) -> Box<dyn System> {
|
||||
Box::new(Self {
|
||||
user_config_directory: Mutex::new(self.user_config_directory.lock().unwrap().clone()),
|
||||
memory_fs: self.memory_fs.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl WritableSystem for InMemorySystem {
|
||||
|
@@ -21,19 +21,6 @@ type LockedZipArchive<'a> = MutexGuard<'a, VendoredZipArchive>;
///
/// "Files" in the `VendoredFileSystem` are read-only and immutable.
/// Directories are supported, but symlinks and hardlinks cannot exist.
///
/// # Path separators
///
/// At time of writing (2025-07-11), this implementation always uses `/` as a
/// path separator, even in Windows environments where `\` is traditionally
/// used as a file path separator. Namely, this is only currently used with zip
/// files built by `crates/ty_vendored/build.rs`.
///
/// Callers using this may provide paths that use a `\` as a separator. It will
/// be transparently normalized to `/`.
///
/// This is particularly important because the presence of a trailing separator
/// in a zip file is conventionally used to indicate a directory entry.
#[derive(Clone)]
pub struct VendoredFileSystem {
    inner: Arc<Mutex<VendoredZipArchive>>,
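The doc comment above describes two conventions worth seeing concretely: backslashes from Windows-style callers are normalized to `/` before lookup, and a trailing `/` is what marks a directory entry inside the zip. A simplified sketch; the real normalization lives elsewhere in this crate, and `normalize` here is a hypothetical stand-in:

```rust
/// Hypothetical stand-in for the crate's path normalization.
fn normalize(path: &str) -> String {
    // `\` from Windows-style callers becomes the zip separator `/`.
    path.replace('\\', "/")
}

fn main() {
    assert_eq!(normalize(r"stdlib\asyncio"), "stdlib/asyncio");
    // A trailing slash is how zip archives conventionally mark
    // directories, so normalization must not drop it.
    assert_eq!(normalize(r"stdlib\asyncio\"), "stdlib/asyncio/");
}
```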
@@ -128,74 +115,12 @@ impl VendoredFileSystem {
        read_to_string(self, path.as_ref())
    }

    /// Read the direct children of the directory
    /// identified by `path`.
    ///
    /// If `path` is not a directory, then this will
    /// return an empty `Vec`.
    pub fn read_directory(&self, dir: impl AsRef<VendoredPath>) -> Vec<DirectoryEntry> {
        // N.B. We specifically do not return an iterator here to avoid
        // holding a lock for the lifetime of the iterator returned.
        // That is, it seems like a footgun to keep the zip archive
        // locked during iteration, since the unit of work for each
        // item in the iterator could be arbitrarily long. Allocating
        // up front and stuffing all entries into it is probably the
        // simplest solution and what we do here. If this becomes
        // a problem, there are other strategies we could pursue.
        // (Amortizing allocs, using a different synchronization
        // behavior or even exposing additional APIs.) ---AG

        fn read_directory(fs: &VendoredFileSystem, dir: &VendoredPath) -> Vec<DirectoryEntry> {
            let mut normalized = NormalizedVendoredPath::from(dir);
            if !normalized.as_str().ends_with('/') {
                normalized = normalized.with_trailing_slash();
            }
            let archive = fs.lock_archive();
            let mut entries = vec![];
            for name in archive.0.file_names() {
                // Any entry that doesn't have the `path` (with a
                // trailing slash) as a prefix cannot possibly be in
                // the directory referenced by `path`.
                let Some(without_dir_prefix) = name.strip_prefix(normalized.as_str()) else {
                    continue;
                };
                // Filter out an entry equivalent to the path given
                // since we only want children of the directory.
                if without_dir_prefix.is_empty() {
                    continue;
                }
                // We only want *direct* children. Files that are
                // direct children cannot have any slashes (or else
                // they are not direct children). Directories that
                // are direct children can only have one slash and
                // it must be at the end.
                //
                // (We do this manually ourselves to avoid doing a
                // full file lookup and metadata retrieval via the
                // `zip` crate.)
                let file_type = FileType::from_zip_file_name(without_dir_prefix);
                let slash_count = without_dir_prefix.matches('/').count();
                match file_type {
                    FileType::File if slash_count > 0 => continue,
                    FileType::Directory if slash_count > 1 => continue,
                    _ => {}
                }
                entries.push(DirectoryEntry {
                    path: VendoredPathBuf::from(name),
                    file_type,
                });
            }
            entries
        }
        read_directory(self, dir.as_ref())
    }
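The slash-counting rule in `read_directory` can be checked in isolation: after stripping the directory prefix, a direct child file contains no `/`, and a direct child directory contains exactly one, at the end. A small sketch over plain strings, with entry names invented for illustration:

```rust
fn is_direct_child(without_dir_prefix: &str) -> bool {
    if without_dir_prefix.is_empty() {
        return false; // the directory itself, not a child
    }
    let slash_count = without_dir_prefix.matches('/').count();
    if without_dir_prefix.ends_with('/') {
        // Directory entry: only the trailing slash is allowed.
        slash_count == 1
    } else {
        // File entry: no separators at all.
        slash_count == 0
    }
}

fn main() {
    // Children of `stdlib/`, with the `stdlib/` prefix already stripped:
    assert!(is_direct_child("functools.pyi")); // direct file
    assert!(is_direct_child("asyncio/")); // direct subdirectory
    assert!(!is_direct_child("asyncio/tasks.pyi")); // grandchild file
    assert!(!is_direct_child("asyncio/futures/")); // grandchild directory
}
```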
    /// Acquire a lock on the underlying zip archive.
    /// The call will block until it is able to acquire the lock.
    ///
    /// ## Panics:
    /// If the current thread already holds the lock.
    fn lock_archive(&self) -> LockedZipArchive<'_> {
    fn lock_archive(&self) -> LockedZipArchive {
        self.inner.lock().unwrap()
    }
}
@@ -281,14 +206,6 @@ pub enum FileType {
}

impl FileType {
    fn from_zip_file_name(name: &str) -> FileType {
        if name.ends_with('/') {
            FileType::Directory
        } else {
            FileType::File
        }
    }

    pub const fn is_file(self) -> bool {
        matches!(self, Self::File)
    }
@@ -327,30 +244,6 @@ impl Metadata {
    }
}

#[derive(Debug, PartialEq, Eq)]
pub struct DirectoryEntry {
    path: VendoredPathBuf,
    file_type: FileType,
}

impl DirectoryEntry {
    pub fn new(path: VendoredPathBuf, file_type: FileType) -> Self {
        Self { path, file_type }
    }

    pub fn into_path(self) -> VendoredPathBuf {
        self.path
    }

    pub fn path(&self) -> &VendoredPath {
        &self.path
    }

    pub fn file_type(&self) -> FileType {
        self.file_type
    }
}
/// Newtype wrapper around a ZipArchive.
#[derive(Debug)]
struct VendoredZipArchive(ZipArchive<io::Cursor<Cow<'static, [u8]>>>);
@@ -360,7 +253,7 @@ impl VendoredZipArchive {
        Ok(Self(ZipArchive::new(io::Cursor::new(data))?))
    }

    fn lookup_path(&mut self, path: &NormalizedVendoredPath) -> Result<ZipFile<'_>> {
    fn lookup_path(&mut self, path: &NormalizedVendoredPath) -> Result<ZipFile> {
        Ok(self.0.by_name(path.as_str())?)
    }
@@ -605,60 +498,6 @@ pub(crate) mod tests {
        test_directory("./stdlib/asyncio/../asyncio/")
    }

    fn readdir_snapshot(fs: &VendoredFileSystem, path: &str) -> String {
        let mut paths = fs
            .read_directory(VendoredPath::new(path))
            .into_iter()
            .map(|entry| entry.path().to_string())
            .collect::<Vec<String>>();
        paths.sort();
        paths.join("\n")
    }

    #[test]
    fn read_directory_stdlib() {
        let mock_typeshed = mock_typeshed();

        assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib/"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib/"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
    }

    #[test]
    fn read_directory_asyncio() {
        let mock_typeshed = mock_typeshed();

        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "stdlib/asyncio"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "./stdlib/asyncio"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "stdlib/asyncio/"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "./stdlib/asyncio/"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
    }

    fn test_nonexistent_path(path: &str) {
        let mock_typeshed = mock_typeshed();
        let path = VendoredPath::new(path);
@@ -17,10 +17,6 @@ impl VendoredPath {
        unsafe { &*(path as *const Utf8Path as *const VendoredPath) }
    }

    pub fn file_name(&self) -> Option<&str> {
        self.0.file_name()
    }

    pub fn to_path_buf(&self) -> VendoredPathBuf {
        VendoredPathBuf(self.0.to_path_buf())
    }
@@ -37,7 +33,7 @@ impl VendoredPath {
        self.0.as_std_path()
    }

    pub fn components(&self) -> Utf8Components<'_> {
    pub fn components(&self) -> Utf8Components {
        self.0.components()
    }
@@ -13,8 +13,6 @@ license = { workspace = true }
[dependencies]
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ty_python_semantic = { workspace = true }
ty_static = { workspace = true }
ruff = { workspace = true }
ruff_formatter = { workspace = true }
ruff_linter = { workspace = true, features = ["schemars"] }
@@ -348,7 +348,7 @@ fn format_dev_multi_project(
        debug!(parent: None, "Starting {}", project_path.display());

        match format_dev_project(
            std::slice::from_ref(&project_path),
            &[project_path.clone()],
            args.stability_check,
            args.write,
            args.preview,
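The swap between `&[project_path.clone()]` and `std::slice::from_ref(&project_path)` above avoids cloning a `PathBuf` just to produce a one-element slice: `from_ref` views an existing `&T` as a `&[T]` of length one. A minimal sketch:

```rust
use std::path::PathBuf;

fn takes_slice(paths: &[PathBuf]) -> usize {
    paths.len()
}

fn main() {
    let project_path = PathBuf::from("/repos/example");

    // Allocates: clones the PathBuf into a temporary one-element array.
    assert_eq!(takes_slice(&[project_path.clone()]), 1);

    // Borrows: views the existing value as a one-element slice.
    assert_eq!(takes_slice(std::slice::from_ref(&project_path)), 1);
}
```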
@@ -628,7 +628,7 @@ struct CheckRepoResult {
}

impl CheckRepoResult {
    fn display(&self, format: Format) -> DisplayCheckRepoResult<'_> {
    fn display(&self, format: Format) -> DisplayCheckRepoResult {
        DisplayCheckRepoResult {
            result: self,
            format,
@@ -665,7 +665,7 @@ struct Diagnostic {
}

impl Diagnostic {
    fn display(&self, format: Format) -> DisplayDiagnostic<'_> {
    fn display(&self, format: Format) -> DisplayDiagnostic {
        DisplayDiagnostic {
            diagnostic: self,
            format,
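Both `display` methods above follow the same borrowing-wrapper pattern: the returned struct holds `&self` plus a formatting option and implements `Display`, so nothing is allocated until the value is actually formatted, and the `<'_>` in the return type surfaces that borrow. A sketch with invented stand-in types (the real `Format` enum is replaced by a plain `bool` here):

```rust
use std::fmt;

struct Diagnostic {
    message: String,
}

struct DisplayDiagnostic<'a> {
    diagnostic: &'a Diagnostic,
    uppercase: bool, // stand-in for a formatting option
}

impl Diagnostic {
    fn display(&self, uppercase: bool) -> DisplayDiagnostic<'_> {
        DisplayDiagnostic { diagnostic: self, uppercase }
    }
}

impl fmt::Display for DisplayDiagnostic<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.uppercase {
            write!(f, "{}", self.diagnostic.message.to_uppercase())
        } else {
            write!(f, "{}", self.diagnostic.message)
        }
    }
}

fn main() {
    let diag = Diagnostic { message: "unused import".to_string() };
    println!("{}", diag.display(true));
}
```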
@@ -4,7 +4,7 @@ use anyhow::Result;

use crate::{
    generate_cli_help, generate_docs, generate_json_schema, generate_ty_cli_reference,
    generate_ty_env_vars_reference, generate_ty_options, generate_ty_rules, generate_ty_schema,
    generate_ty_options, generate_ty_rules, generate_ty_schema,
};

pub(crate) const REGENERATE_ALL_COMMAND: &str = "cargo dev generate-all";
@@ -44,8 +44,5 @@ pub(crate) fn main(args: &Args) -> Result<()> {
    generate_ty_options::main(&generate_ty_options::Args { mode: args.mode })?;
    generate_ty_rules::main(&generate_ty_rules::Args { mode: args.mode })?;
    generate_ty_cli_reference::main(&generate_ty_cli_reference::Args { mode: args.mode })?;
    generate_ty_env_vars_reference::main(&generate_ty_env_vars_reference::Args {
        mode: args.mode,
    })?;
    Ok(())
}
@@ -1,119 +0,0 @@
//! Generate the environment variables reference from `ty_static::EnvVars`.

use std::collections::BTreeSet;
use std::fs;
use std::path::PathBuf;

use anyhow::bail;
use pretty_assertions::StrComparison;

use ty_static::EnvVars;

use crate::generate_all::Mode;

#[derive(clap::Args)]
pub(crate) struct Args {
    #[arg(long, default_value_t, value_enum)]
    pub(crate) mode: Mode,
}

pub(crate) fn main(args: &Args) -> anyhow::Result<()> {
    let reference_string = generate();
    let filename = "environment.md";
    let reference_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .parent()
        .unwrap()
        .join("crates")
        .join("ty")
        .join("docs")
        .join(filename);

    match args.mode {
        Mode::DryRun => {
            println!("{reference_string}");
        }
        Mode::Check => match fs::read_to_string(&reference_path) {
            Ok(current) => {
                if current == reference_string {
                    println!("Up-to-date: {filename}");
                } else {
                    let comparison = StrComparison::new(&current, &reference_string);
                    bail!(
                        "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{comparison}"
                    );
                }
            }
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                bail!(
                    "{filename} not found, please run `cargo dev generate-ty-env-vars-reference`"
                );
            }
            Err(err) => {
                bail!(
                    "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{err}"
                );
            }
        },
        Mode::Write => {
            // Ensure the docs directory exists
            if let Some(parent) = reference_path.parent() {
                fs::create_dir_all(parent)?;
            }

            match fs::read_to_string(&reference_path) {
                Ok(current) => {
                    if current == reference_string {
                        println!("Up-to-date: {filename}");
                    } else {
                        println!("Updating: {filename}");
                        fs::write(&reference_path, reference_string.as_bytes())?;
                    }
                }
                Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                    println!("Updating: {filename}");
                    fs::write(&reference_path, reference_string.as_bytes())?;
                }
                Err(err) => {
                    bail!(
                        "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{err}"
                    );
                }
            }
        }
    }

    Ok(())
}

fn generate() -> String {
    let mut output = String::new();

    output.push_str("# Environment variables\n\n");

    // Partition and sort environment variables into TY_ and external variables.
    let (ty_vars, external_vars): (BTreeSet<_>, BTreeSet<_>) = EnvVars::metadata()
        .iter()
        .partition(|(var, _)| var.starts_with("TY_"));

    output.push_str("ty defines and respects the following environment variables:\n\n");

    for (var, doc) in ty_vars {
        output.push_str(&render(var, doc));
    }

    output.push_str("## Externally-defined variables\n\n");
    output.push_str("ty also reads the following externally defined environment variables:\n\n");

    for (var, doc) in external_vars {
        output.push_str(&render(var, doc));
    }

    output
}

/// Render an environment variable and its documentation.
fn render(var: &str, doc: &str) -> String {
    format!("### `{var}`\n\n{doc}\n\n")
}
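One detail of `generate()` worth calling out: `Iterator::partition` can collect into any pair of collections implementing `Default + Extend`, so using two `BTreeSet`s both splits the variables by prefix and sorts each side in a single pass. A self-contained sketch; the variable names and docs below are invented for illustration, not taken from `ty_static::EnvVars`:

```rust
use std::collections::BTreeSet;

fn main() {
    let vars = [
        ("TY_LOG", "Controls logging."),
        ("PATH", "Used to locate executables."),
        ("TY_MAX_PARALLELISM", "Caps worker threads."),
        ("XDG_CONFIG_HOME", "Config directory root."),
    ];

    // `partition` routes each item into one of the two collections;
    // `BTreeSet` keeps each side sorted by variable name.
    let (ty_vars, external_vars): (BTreeSet<_>, BTreeSet<_>) = vars
        .iter()
        .partition(|(var, _)| var.starts_with("TY_"));

    assert_eq!(ty_vars.len(), 2);
    assert_eq!(external_vars.len(), 2);
}
```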
@@ -52,7 +52,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
}

fn generate_markdown() -> String {
    let registry = ty_python_semantic::default_lint_registry();
    let registry = &*ty_project::DEFAULT_LINT_REGISTRY;

    let mut output = String::new();
Some files were not shown because too many files have changed in this diff