Compare commits

Comparing `david/enum` ... `zb/dev-dri` (10 commits)

| SHA1 |
|---|
| 40b4aa28f9 |
| ea4bf00c23 |
| 7f4aa4b3fb |
| 34c98361ae |
| 38bb96a6c2 |
| a014d55455 |
| 306f6f17a9 |
| b233888f00 |
| 540cbd9085 |
| 0112f7f0e4 |

.github/workflows/ci.yaml (vendored; 111 lines changed)
@@ -143,12 +143,12 @@ jobs:
        env:
          MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
        run: |
-         # NOTE: Do not exclude all Markdown files here, but rather use
-         # specific exclude patterns like 'docs/**'), because tests for
-         # 'ty' are written in Markdown.
-         if git diff --quiet "${MERGE_BASE}...HEAD" -- \
+         if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
+           ':!**/*.md' \
+           ':crates/ty_python_semantic/resources/mdtest/**/*.md' \
            ':!docs/**' \
            ':!assets/**' \
+           ':.github/workflows/ci.yaml' \
          ; then
            echo "changed=false" >> "$GITHUB_OUTPUT"
          else
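As an aside, a minimal sketch of the pathspec behavior this step relies on, using a hypothetical throwaway repo (`git diff --quiet` exits 0 when the filtered diff is empty, and `':!pattern'` excludes paths from the comparison):

```bash
git init -q demo && cd demo
git commit -q --allow-empty -m "init"
echo "docs" > notes.md
echo "code" > lib.rs
git add . && git commit -qm "change"

# ':!**/*.md' removes Markdown files from consideration, so only lib.rs is
# compared here; a Markdown-only change would report changed=false.
if git diff --quiet HEAD~1...HEAD -- ':**' ':!**/*.md'; then
  echo "changed=false"
else
  echo "changed=true"
fi
```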
@@ -214,7 +214,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Install Rust toolchain"
        run: |
          rustup component add clippy
@@ -234,17 +234,17 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
-       uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
+       uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
      - name: "Install cargo nextest"
-       uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
        with:
          tool: cargo-nextest
      - name: "Install cargo insta"
-       uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
        with:
          tool: cargo-insta
      - name: ty mdtests (GitHub annotations)
@@ -292,17 +292,17 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
-       uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
+       uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
      - name: "Install cargo nextest"
-       uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
        with:
          tool: cargo-nextest
      - name: "Install cargo insta"
-       uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
        with:
          tool: cargo-insta
      - name: "Run tests"
@@ -321,14 +321,30 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - name: Setup Dev Drive
+       run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1
+
+     # actions/checkout does not let us clone into anywhere outside `github.workspace`, so we have to copy the clone
+     - name: Copy Git Repo to Dev Drive
+       env:
+         RUFF_WORKSPACE: ${{ env.RUFF_WORKSPACE }}
+       run: |
+         Copy-Item -Path "${{ github.workspace }}" -Destination "${env:RUFF_WORKSPACE}" -Recurse
+
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+       with:
+         workspaces: ${{ env.RUFF_WORKSPACE }}
+
      - name: "Install Rust toolchain"
+       working-directory: ${{ env.RUFF_WORKSPACE }}
        run: rustup show
+
      - name: "Install cargo nextest"
-       uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
        with:
          tool: cargo-nextest
      - name: "Run tests"
+       working-directory: ${{ env.RUFF_WORKSPACE }}
        shell: bash
        env:
          NEXTEST_PROFILE: "ci"
@@ -348,7 +364,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
@@ -377,11 +393,11 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
-       uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
+       uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
      - name: "Build"
        run: cargo build --release --locked
@@ -400,18 +416,27 @@ jobs:
        with:
          file: "Cargo.toml"
          field: "workspace.package.rust-version"
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Install Rust toolchain"
        env:
          MSRV: ${{ steps.msrv.outputs.value }}
        run: rustup default "${MSRV}"
      - name: "Install mold"
-       uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
-     - name: "Build tests"
+       uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
+     - name: "Install cargo nextest"
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
+       with:
+         tool: cargo-nextest
+     - name: "Install cargo insta"
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
+       with:
+         tool: cargo-insta
+     - name: "Run tests"
        shell: bash
        env:
+         NEXTEST_PROFILE: "ci"
          MSRV: ${{ steps.msrv.outputs.value }}
-       run: cargo "+${MSRV}" test --no-run --all-features
+       run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest

  cargo-fuzz-build:
    name: "cargo fuzz build"
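A short sketch of deriving and using the MSRV locally, mirroring the step above (assumption: the MSRV lives on a flush-left `rust-version = "..."` line in the root `Cargo.toml`, as the `field: "workspace.package.rust-version"` lookup suggests):

```bash
# Extract the first rust-version = "X.Y" entry from Cargo.toml.
MSRV="$(sed -n 's/^rust-version *= *"\([^"]*\)".*/\1/p' Cargo.toml | head -n1)"

# Mirror the workflow: select that exact toolchain, then run the suite with it.
rustup toolchain install "$MSRV"
cargo "+${MSRV}" test --all-features
```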
@@ -423,13 +448,13 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
        with:
          workspaces: "fuzz -> target"
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install cargo-binstall"
-       uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
+       uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
        with:
          tool: cargo-fuzz@0.11.2
      - name: "Install cargo-fuzz"
@@ -451,7 +476,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+     - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        name: Download Ruff binary to test
        id: download-cached-binary
@@ -485,7 +510,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Install Rust toolchain"
        run: rustup component add rustfmt
      # Run all code generation scripts, and verify that the current output is
@@ -652,7 +677,7 @@ jobs:
          branch: ${{ github.event.pull_request.base.ref }}
          workflow: "ci.yaml"
          check_artifacts: true
-     - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+     - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
      - name: Fuzz
        env:
          FORCE_COLOR: 1
@@ -682,7 +707,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
+     - uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
      - run: cargo binstall --no-confirm cargo-shear
      - run: cargo shear

@@ -699,7 +724,7 @@ jobs:
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build wheels"
@@ -722,8 +747,8 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
        with:
          node-version: 22
@@ -756,7 +781,7 @@ jobs:
      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: "3.13"
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
@@ -765,7 +790,7 @@ jobs:
      - name: "Install Rust toolchain"
        run: rustup show
      - name: Install uv
-       uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+       uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
      - name: "Install Insiders dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        run: uv pip install -r docs/requirements-insiders.txt --system
@@ -795,7 +820,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Run checks"
@@ -865,7 +890,7 @@ jobs:
          persist-credentials: false
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
        with:
          node-version: 22
@@ -896,14 +921,14 @@ jobs:
        with:
          persist-credentials: false

-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-     - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+     - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

      - name: "Install Rust toolchain"
        run: rustup show

      - name: "Install codspeed"
-       uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
        with:
          tool: cargo-codspeed

@@ -911,7 +936,7 @@ jobs:
        run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark

      - name: "Run benchmarks"
-       uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
+       uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
        with:
          run: cargo codspeed run
          token: ${{ secrets.CODSPEED_TOKEN }}
@@ -929,14 +954,14 @@ jobs:
        with:
          persist-credentials: false

-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-     - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+     - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

      - name: "Install Rust toolchain"
        run: rustup show

      - name: "Install codspeed"
-       uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+       uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
        with:
          tool: cargo-codspeed

@@ -944,7 +969,7 @@ jobs:
        run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark

      - name: "Run benchmarks"
-       uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
+       uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
        with:
          run: cargo codspeed run
          token: ${{ secrets.CODSPEED_TOKEN }}
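For reference, the same benchmark harness can be exercised locally with the commands these steps run (a sketch; it assumes `cargo-codspeed` is installed, for example via `cargo install cargo-codspeed`, and the CodSpeed token/upload only applies in CI):

```bash
# Build the instrumented benchmark binaries exactly as the workflow does.
cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark

# Execute them; without the CodSpeed action wrapper, results stay local.
cargo codspeed run
```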
.github/workflows/daily_fuzz.yaml (vendored; 6 lines changed)

@@ -34,12 +34,12 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+     - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
-       uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+       uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: Build ruff
        # A debug build means the script runs slower once it gets started,
        # but this is outweighed by the fact that a release build takes *much* longer to compile in CI
.github/workflows/mypy_primer.yaml (vendored; 85 lines changed)

@@ -12,7 +12,6 @@ on:
      - ".github/workflows/mypy_primer.yaml"
      - ".github/workflows/mypy_primer_comment.yaml"
      - "Cargo.lock"
-     - "!**.md"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
@@ -38,9 +37,9 @@ jobs:
          persist-credentials: false

      - name: Install the latest version of uv
-       uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+       uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
        with:
          workspaces: "ruff"
@@ -50,12 +49,46 @@ jobs:
      - name: Run mypy_primer
        shell: bash
-       env:
-         PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
-         DIFF_FILE: mypy_primer.diff
-         TY_MEMORY_REPORT: mypy_primer
        run: |
          cd ruff
-         scripts/mypy_primer.sh
-         echo ${{ github.event.number }} > ../pr-number
+
+         echo "Enabling mypy primer specific configuration overloads (see .github/mypy-primer-ty.toml)"
+         mkdir -p ~/.config/ty
+         cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
+
+         PRIMER_SELECTOR="$(paste -s -d'|' crates/ty_python_semantic/resources/primer/good.txt)"
+
+         echo "new commit"
+         git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
+
+         MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
+         git checkout -b base_commit "$MERGE_BASE"
+         echo "base commit"
+         git rev-list --format=%s --max-count=1 base_commit
+
+         cd ..
+
+         echo "Project selector: $PRIMER_SELECTOR"
+         # Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
+         uvx \
+           --from="git+https://github.com/hauntsaninja/mypy_primer@e5f55447969d33ae3c7ccdb183e2a37101867270" \
+           mypy_primer \
+           --repo ruff \
+           --type-checker ty \
+           --old base_commit \
+           --new "$GITHUB_SHA" \
+           --project-selector "/($PRIMER_SELECTOR)\$" \
+           --output concise \
+           --debug > mypy_primer.diff || [ $? -eq 1 ]
+
+         # Output diff with ANSI color codes
+         cat mypy_primer.diff
+
+         # Remove ANSI color codes before uploading
+         sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff
+
+         echo ${{ github.event.number }} > pr-number

      - name: Upload diff
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
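A small sketch of how the `--project-selector` argument above is assembled (hypothetical project names; `paste -s -d'|'` joins the one-name-per-line selector file into a regex alternation that is matched against the tail of each project URL):

```bash
printf 'black\nmypy\npandas\n' > good.txt   # stand-in for primer/good.txt

PRIMER_SELECTOR="$(paste -s -d'|' good.txt)"
echo "$PRIMER_SELECTOR"        # black|mypy|pandas
echo "/($PRIMER_SELECTOR)\$"   # /(black|mypy|pandas)$ , anchored at the end
```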
@@ -68,41 +101,3 @@ jobs:
        with:
          name: pr-number
          path: pr-number
-
-  memory_usage:
-    name: Run memory statistics
-    runs-on: depot-ubuntu-22.04-32
-    timeout-minutes: 20
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          path: ruff
-          fetch-depth: 0
-          persist-credentials: false
-
-      - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
-        with:
-          workspaces: "ruff"
-
-      - name: Install Rust toolchain
-        run: rustup show
-
-      - name: Run mypy_primer
-        shell: bash
-        env:
-          TY_MAX_PARALLELISM: 1 # for deterministic memory numbers
-          TY_MEMORY_REPORT: mypy_primer
-          PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/memory.txt
-          DIFF_FILE: mypy_primer_memory.diff
-        run: |
-          cd ruff
-          scripts/mypy_primer.sh
-
-      - name: Upload diff
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: mypy_primer_memory_diff
-          path: mypy_primer_memory.diff
.github/workflows/mypy_primer_comment.yaml (vendored; 31 lines changed)

@@ -45,28 +45,15 @@ jobs:
          if_no_artifact_found: ignore
          allow_forks: true

-     - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-       name: "Download mypy_primer memory results"
-       id: download-mypy_primer_memory_diff
-       if: steps.pr-number.outputs.pr-number
-       with:
-         name: mypy_primer_memory_diff
-         workflow: mypy_primer.yaml
-         pr: ${{ steps.pr-number.outputs.pr-number }}
-         path: pr/mypy_primer_memory_diff
-         workflow_conclusion: completed
-         if_no_artifact_found: ignore
-         allow_forks: true
-
      - name: Generate comment content
        id: generate-comment
-       if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
+       if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
        run: |
          # Guard against malicious mypy_primer results that symlink to a secret
          # file on this runner
-         if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
+         if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
          then
-           echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
+           echo "Error: mypy_primer.diff cannot be a symlink"
            exit 1
          fi

@@ -87,18 +74,6 @@ jobs:
            echo 'No ecosystem changes detected ✅' >> comment.txt
          fi

-         if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
-           echo '<details>' >> comment.txt
-           echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
-           echo '' >> comment.txt
-           echo '```diff' >> comment.txt
-           cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
-           echo '```' >> comment.txt
-           echo '</details>' >> comment.txt
-         else
-           echo 'No memory usage changes detected ✅' >> comment.txt
-         fi
-
          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
          cat comment.txt >> "$GITHUB_OUTPUT"
          echo 'EOF' >> "$GITHUB_OUTPUT"
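A sketch of the multi-line output idiom used just above (runnable anywhere by pointing `GITHUB_OUTPUT` at a scratch file): everything between `comment<<EOF` and `EOF` becomes the step's `comment` output.

```bash
GITHUB_OUTPUT=out.txt
printf 'first line\nsecond line\n' > comment.txt

{
  echo 'comment<<EOF'   # start a multi-line value named "comment"
  cat comment.txt       # the value itself, newlines preserved
  echo 'EOF'            # heredoc-style terminator
} >> "$GITHUB_OUTPUT"

cat out.txt
```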
.github/workflows/publish-docs.yml (vendored; 2 lines changed)

@@ -68,7 +68,7 @@ jobs:
      - name: "Install Rust toolchain"
        run: rustup show

-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

      - name: "Install Insiders dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}

.github/workflows/publish-pypi.yml (vendored; 2 lines changed)

@@ -22,7 +22,7 @@ jobs:
      id-token: write
    steps:
      - name: "Install uv"
-       uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+       uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        with:
          pattern: wheels-*
.github/workflows/setup-dev-drive.ps1 (vendored; new file, 93 lines)

@@ -0,0 +1,93 @@
# Configures a drive for testing in CI.
#
# When using standard GitHub Actions runners, a `D:` drive is present and has
# performance characteristics similar to or better than a ReFS dev drive.
# Sometimes using a larger runner is still more performant (e.g., when running
# the test suite) and we need to create a dev drive. This script automatically
# configures the appropriate drive.
#
# When using GitHub Actions' "larger runners", the `D:` drive is not present and
# we create a DevDrive mount on `C:`. This is purported to be more performant
# than a ReFS drive, though we did not see a change when we switched over.
#
# When using Depot runners, the underlying infrastructure is EC2, which does not
# support Hyper-V. The `New-VHD` commandlet only works with Hyper-V, but we can
# create a ReFS drive using `diskpart` and `format` directly. We cannot use a
# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already,
# so we must check if it's a Depot runner first, and we use `V:` as the target
# instead.

if ($env:DEPOT_RUNNER -eq "1") {
    Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..."

    # Create VHD and configure drive using diskpart
    $vhdPath = "C:\ruff_dev_drive.vhdx"
    @"
create vdisk file="$vhdPath" maximum=20480 type=expandable
attach vdisk
create partition primary
active
assign letter=V
"@ | diskpart

    # Format the drive as ReFS
    format V: /fs:ReFS /q /y
    $Drive = "V:"

    Write-Output "Custom dev drive created at $Drive"
} elseif (Test-Path "D:\") {
    # Note `Get-PSDrive` is not sufficient because the drive letter is assigned.
    Write-Output "Using existing drive at D:"
    $Drive = "D:"
} else {
    # The size (20 GB) is chosen empirically to be large enough for our
    # workflows; larger drives can take longer to set up.
    $Volume = New-VHD -Path C:/ruff_dev_drive.vhdx -SizeBytes 20GB |
        Mount-VHD -Passthru |
        Initialize-Disk -Passthru |
        New-Partition -AssignDriveLetter -UseMaximumSize |
        Format-Volume -DevDrive -Confirm:$false -Force

    $Drive = "$($Volume.DriveLetter):"

    # Set the drive as trusted
    # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-designate-a-dev-drive-as-trusted
    fsutil devdrv trust $Drive

    # Disable antivirus filtering on dev drives
    # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-configure-additional-filters-on-dev-drive
    fsutil devdrv enable /disallowAv

    # Remount so the changes take effect
    Dismount-VHD -Path C:/ruff_dev_drive.vhdx
    Mount-VHD -Path C:/ruff_dev_drive.vhdx

    # Show some debug information
    Write-Output $Volume
    fsutil devdrv query $Drive

    Write-Output "Using Dev Drive at $Volume"
}

$Tmp = "$($Drive)\ruff-tmp"

# Create the directory ahead of time in an attempt to avoid race-conditions
New-Item $Tmp -ItemType Directory

# Move Cargo to the dev drive
New-Item -Path "$($Drive)/.cargo/bin" -ItemType Directory -Force
if (Test-Path "C:/Users/runneradmin/.cargo") {
    Copy-Item -Path "C:/Users/runneradmin/.cargo/*" -Destination "$($Drive)/.cargo/" -Recurse -Force
}

Write-Output `
    "DEV_DRIVE=$($Drive)" `
    "TMP=$($Tmp)" `
    "TEMP=$($Tmp)" `
    "UV_INTERNAL__TEST_DIR=$($Tmp)" `
    "RUSTUP_HOME=$($Drive)/.rustup" `
    "CARGO_HOME=$($Drive)/.cargo" `
    "RUFF_WORKSPACE=$($Drive)/ruff" `
    "PATH=$($Drive)/.cargo/bin;$env:PATH" `
    >> $env:GITHUB_ENV
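The script publishes its results by appending `KEY=VALUE` lines to the file named in `GITHUB_ENV`, which GitHub Actions then injects into every later step of the job. A minimal bash sketch of the same mechanism, with hypothetical values:

```bash
# Appending KEY=VALUE lines to $GITHUB_ENV exports them to subsequent steps.
{
  echo "DEV_DRIVE=D:"
  echo "RUFF_WORKSPACE=D:/ruff"
} >> "$GITHUB_ENV"
```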
.github/workflows/sync_typeshed.yaml (vendored; 166 lines changed)

@@ -1,25 +1,5 @@
 name: Sync typeshed

-# How this works:
-#
-# 1. A Linux worker:
-#    a. Checks out Ruff and typeshed
-#    b. Deletes the vendored typeshed stdlib stubs from Ruff
-#    c. Copies the latest versions of the stubs from typeshed
-#    d. Uses docstring-adder to sync all docstrings available on Linux
-#    e. Creates a new branch on the upstream astral-sh/ruff repository
-#    f. Commits the changes it's made and pushes them to the new upstream branch
-# 2. Once the Linux worker is done, a Windows worker:
-#    a. Checks out the branch created by the Linux worker
-#    b. Syncs all docstrings available on Windows that are not available on Linux
-#    c. Commits the changes and pushes them to the same upstream branch
-# 3. Once the Windows worker is done, a MacOS worker:
-#    a. Checks out the branch created by the Linux worker
-#    b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
-#    c. Commits the changes and pushes them to the same upstream branch
-#    d. Creates a PR against the `main` branch using the branch all three workers have pushed to
-# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
-
on:
  workflow_dispatch:
  schedule:
@@ -30,17 +10,7 @@ env:
  FORCE_COLOR: 1
  GH_TOKEN: ${{ github.token }}

-  # The name of the upstream branch that the first worker creates,
-  # and which all three workers push to.
-  UPSTREAM_BRANCH: typeshedbot/sync-typeshed
-
-  # The path to the directory that contains the vendored typeshed stubs,
-  # relative to the root of the Ruff repository.
-  VENDORED_TYPESHED: crates/ty_vendored/vendor/typeshed
-
jobs:
-  # Sync typeshed stubs, and sync all docstrings available on Linux.
-  # Push the changes to a new branch on the upstream repository.
  sync:
    name: Sync typeshed
    runs-on: ubuntu-latest
@@ -49,6 +19,7 @@ jobs:
    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
    permissions:
      contents: write
+     pull-requests: write
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        name: Checkout Ruff
@@ -65,130 +36,37 @@ jobs:
        run: |
          git config --global user.name typeshedbot
          git config --global user.email '<>'
-     - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-     - name: Sync typeshed stubs
+     - name: Sync typeshed
        id: sync
        run: |
-         rm -rf "ruff/${VENDORED_TYPESHED}"
-         mkdir "ruff/${VENDORED_TYPESHED}"
-         cp typeshed/README.md "ruff/${VENDORED_TYPESHED}"
-         cp typeshed/LICENSE "ruff/${VENDORED_TYPESHED}"
-
-         # The pyproject.toml file is needed by a later job for the black configuration.
-         # It's deleted before creating the PR.
-         cp typeshed/pyproject.toml "ruff/${VENDORED_TYPESHED}"
-
-         cp -r typeshed/stdlib "ruff/${VENDORED_TYPESHED}/stdlib"
-         rm -rf "ruff/${VENDORED_TYPESHED}/stdlib/@tests"
-         git -C typeshed rev-parse HEAD > "ruff/${VENDORED_TYPESHED}/source_commit.txt"
-         cd ruff
-         git checkout -b "${UPSTREAM_BRANCH}"
-         git add .
-         git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
-     - name: Sync Linux docstrings
-       if: ${{ success() }}
-       run: |
-         cd ruff
-         ./scripts/codemod_docstrings.sh
-         git commit -am "Sync Linux docstrings" --allow-empty
-     - name: Push the changes
-       id: commit
-       if: ${{ success() }}
-       run: git -C ruff push --force --set-upstream origin "${UPSTREAM_BRANCH}"
-
-  # Checkout the branch created by the sync job,
-  # and sync all docstrings available on Windows that are not available on Linux.
-  # Commit the changes and push them to the same branch.
-  docstrings-windows:
-    runs-on: windows-latest
-    timeout-minutes: 20
-    needs: [sync]
-
-    # Don't run the cron job on forks.
-    # The job will also be skipped if the sync job failed, because it's specified in `needs` above,
-    # and we haven't used `always()` in the `if` condition here
-    # (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
-    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
-
-    permissions:
-      contents: write
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        name: Checkout Ruff
-        with:
-          persist-credentials: true
-          ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-      - name: Setup git
-        run: |
-          git config --global user.name typeshedbot
-          git config --global user.email '<>'
-      - name: Sync Windows docstrings
-        id: docstrings
-        shell: bash
-        run: ./scripts/codemod_docstrings.sh
+         rm -rf ruff/crates/ty_vendored/vendor/typeshed
+         mkdir ruff/crates/ty_vendored/vendor/typeshed
+         cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
+         cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
+         cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
+         rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
+         git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
      - name: Commit the changes
-       if: ${{ steps.docstrings.outcome == 'success' }}
        id: commit
+       if: ${{ steps.sync.outcome == 'success' }}
        run: |
-         git commit -am "Sync Windows docstrings" --allow-empty
-         git push
-
-  # Checkout the branch created by the sync job,
-  # and sync all docstrings available on macOS that are not available on Linux or Windows.
-  # Push the changes to the same branch and create a PR against the `main` branch using that branch.
-  docstrings-macos-and-pr:
-    runs-on: macos-latest
-    timeout-minutes: 20
-    needs: [sync, docstrings-windows]
-
-    # Don't run the cron job on forks.
-    # The job will also be skipped if the sync or docstrings-windows jobs failed,
-    # because they're specified in `needs` above and we haven't used an `always()` condition in the `if` here
-    # (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
-    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
-
-    permissions:
-      contents: write
-      pull-requests: write
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        name: Checkout Ruff
-        with:
-          persist-credentials: true
-          ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-      - name: Setup git
-        run: |
-          git config --global user.name typeshedbot
-          git config --global user.email '<>'
-      - name: Sync macOS docstrings
-        run: ./scripts/codemod_docstrings.sh
-      - name: Commit and push the changes
-        if: ${{ success() }}
-        run: |
-          git commit -am "Sync macOS docstrings" --allow-empty
-
-          # Here we just reformat the codemodded stubs so that they are
-          # consistent with the other typeshed stubs around them.
-          # Typeshed formats code using black in their CI, so we just invoke
-          # black on the stubs the same way that typeshed does.
-          uvx black "${VENDORED_TYPESHED}/stdlib" --config "${VENDORED_TYPESHED}/pyproject.toml" || true
-          git commit -am "Format codemodded docstrings" --allow-empty
-
-          rm "${VENDORED_TYPESHED}/pyproject.toml"
-          git commit -am "Remove pyproject.toml file"
-
-          git push
+         cd ruff
+         git checkout -b typeshedbot/sync-typeshed
+         git add .
+         git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
      - name: Create a PR
-       if: ${{ success() }}
+       if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
        run: |
-         gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
+         cd ruff
+         git push --force origin typeshedbot/sync-typeshed
+         gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
          gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"

  create-issue-on-failure:
    name: Create an issue if the typeshed sync failed
    runs-on: ubuntu-latest
-   needs: [sync, docstrings-windows, docstrings-macos-and-pr]
-   if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
+   needs: [sync]
+   if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
    permissions:
      issues: write
    steps:
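An aside on the `gh pr list ... | grep 1 && exit 0` idiom used by the "Create a PR" step above. `--json id --jq length` prints the number of PRs whose head branch matches, so the step bails out early when a sync PR is already open. A sketch under the same assumptions, using an explicit comparison instead of `grep`:

```bash
# Count open PRs whose head branch matches; prints a bare number.
count="$(gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed \
  --json id --jq length)"

# Equivalent to the workflow's `| grep 1 && exit 0`, but a little stricter.
if [ "$count" -ge 1 ]; then
  exit 0  # a sync PR is already open; nothing to do
fi
gh pr create --title "[ty] Sync vendored typeshed stubs" \
  --body "Close and reopen this PR to trigger CI" --label "ty"
```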
.github/workflows/ty-ecosystem-analyzer.yaml (vendored; 80 lines changed)

@@ -17,7 +17,6 @@ env:
  RUSTUP_MAX_RETRIES: 10
  RUST_BACKTRACE: 1
  REF_NAME: ${{ github.ref_name }}
- CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}

jobs:
  ty-ecosystem-analyzer:
@@ -33,9 +32,9 @@ jobs:
          persist-credentials: false

      - name: Install the latest version of uv
-       uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+       uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1

-     - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+     - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
        with:
          workspaces: "ruff"
@@ -64,75 +63,32 @@ jobs:

          cd ..

-         uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
+         uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@9c34dc514ee9aef6735db1dfebb80f63acbc3440"

          ecosystem-analyzer \
            --repository ruff \
-           diff \
-           --projects-old ruff/projects_old.txt \
-           --projects-new ruff/projects_new.txt \
-           --old old_commit \
-           --new new_commit \
-           --output-old diagnostics-old.json \
-           --output-new diagnostics-new.json
+           analyze \
+           --projects ruff/projects_old.txt \
+           --commit old_commit \
+           --output diagnostics_old.json

-         mkdir dist
+         ecosystem-analyzer \
+           --repository ruff \
+           analyze \
+           --projects ruff/projects_new.txt \
+           --commit new_commit \
+           --output diagnostics_new.json

          ecosystem-analyzer \
            generate-diff \
-           diagnostics-old.json \
-           diagnostics-new.json \
+           diagnostics_old.json \
+           diagnostics_new.json \
            --old-name "main (merge base)" \
            --new-name "$REF_NAME" \
-           --output-html dist/diff.html
+           --output-html diff.html

-         ecosystem-analyzer \
-           generate-diff-statistics \
-           diagnostics-old.json \
-           diagnostics-new.json \
-           --old-name "main (merge base)" \
-           --new-name "$REF_NAME" \
-           --output diff-statistics.md
-
-         echo '## `ecosystem-analyzer` results' > comment.md
-         echo >> comment.md
-         cat diff-statistics.md >> comment.md
-
-         cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"
-
-         echo ${{ github.event.number }} > pr-number
-
-     - name: "Deploy to Cloudflare Pages"
-       if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-       id: deploy
-       uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
-       with:
-         apiToken: ${{ secrets.CF_API_TOKEN }}
-         accountId: ${{ secrets.CF_ACCOUNT_ID }}
-         command: pages deploy dist --project-name=ty-ecosystem --branch ${{ github.head_ref }} --commit-hash ${GITHUB_SHA}
-
-     - name: "Append deployment URL"
-       if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-       env:
-         DEPLOYMENT_URL: ${{ steps.deploy.outputs.pages-deployment-alias-url }}
-       run: |
-         echo >> comment.md
-         echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)**" >> comment.md
-
-     - name: Upload comment
-       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-       with:
-         name: comment.md
-         path: comment.md
-
-     - name: Upload pr-number
-       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-       with:
-         name: pr-number
-         path: pr-number
-
-     - name: Upload diagnostics diff
+     - name: Upload HTML diff report
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: diff.html
-         path: dist/diff.html
+         path: diff.html
Deleted workflow: PR comment (ty ecosystem-analyzer)

@@ -1,85 +0,0 @@
-name: PR comment (ty ecosystem-analyzer)
-
-on: # zizmor: ignore[dangerous-triggers]
-  workflow_run:
-    workflows: [ty ecosystem-analyzer]
-    types: [completed]
-  workflow_dispatch:
-    inputs:
-      workflow_run_id:
-        description: The ty ecosystem-analyzer workflow that triggers the workflow run
-        required: true
-
-jobs:
-  comment:
-    runs-on: ubuntu-24.04
-    permissions:
-      pull-requests: write
-    steps:
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download PR number
-        with:
-          name: pr-number
-          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Parse pull request number
-        id: pr-number
-        run: |
-          if [[ -f pr-number ]]
-          then
-            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
-          fi
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download comment.md"
-        id: download-comment
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: comment.md
-          workflow: ty-ecosystem-analyzer.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/comment
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Generate comment content
-        id: generate-comment
-        if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
-        run: |
-          # Guard against malicious ty ecosystem-analyzer results that symlink to a secret
-          # file on this runner
-          if [[ -L pr/comment/comment.md ]]
-          then
-            echo "Error: comment.md cannot be a symlink"
-            exit 1
-          fi
-
-          # Note: this identifier is used to find the comment to update on subsequent runs
-          echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
-          echo >> comment.md
-          cat pr/comment/comment.md >> comment.md
-
-          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
-          cat comment.md >> "$GITHUB_OUTPUT"
-          echo 'EOF' >> "$GITHUB_OUTPUT"
-
-      - name: Find existing comment
-        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
-        if: steps.generate-comment.outcome == 'success'
-        id: find-comment
-        with:
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          comment-author: "github-actions[bot]"
-          body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"
-
-      - name: Create or update comment
-        if: steps.find-comment.outcome == 'success'
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
-        with:
-          comment-id: ${{ steps.find-comment.outputs.comment-id }}
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          body-path: comment.md
-          edit-mode: replace
.github/workflows/ty-ecosystem-report.yaml (vendored; deleted file, 76 lines)

@@ -1,76 +0,0 @@
-name: ty ecosystem-report
-
-permissions: {}
-
-on:
-  workflow_dispatch:
-  schedule:
-    # Run every Wednesday at 5:00 UTC:
-    - cron: 0 5 * * 3
-
-env:
-  CARGO_INCREMENTAL: 0
-  CARGO_NET_RETRY: 10
-  CARGO_TERM_COLOR: always
-  RUSTUP_MAX_RETRIES: 10
-  RUST_BACKTRACE: 1
-  CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
-
-jobs:
-  ty-ecosystem-report:
-    name: Create ecosystem report
-    runs-on: depot-ubuntu-22.04-32
-    timeout-minutes: 20
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          path: ruff
-          fetch-depth: 0
-          persist-credentials: false
-
-      - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-        with:
-          workspaces: "ruff"
-
-      - name: Install Rust toolchain
-        run: rustup show
-
-      - name: Create report
-        shell: bash
-        run: |
-          cd ruff
-
-          echo "Enabling configuration overloads (see .github/mypy-primer-ty.toml)"
-          mkdir -p ~/.config/ty
-          cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
-
-          cd ..
-
-          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
-
-          ecosystem-analyzer \
-            --verbose \
-            --repository ruff \
-            analyze \
-            --projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
-            --output ecosystem-diagnostics.json
-
-          mkdir dist
-
-          ecosystem-analyzer \
-            generate-report \
-            --max-diagnostics-per-project=1200 \
-            ecosystem-diagnostics.json \
-            --output dist/index.html
-
-      - name: "Deploy to Cloudflare Pages"
-        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-        id: deploy
-        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
-        with:
-          apiToken: ${{ secrets.CF_API_TOKEN }}
-          accountId: ${{ secrets.CF_ACCOUNT_ID }}
-          command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}
.github/workflows/typing_conformance.yaml (vendored; deleted file, 109 lines)

@@ -1,109 +0,0 @@
-name: Run typing conformance
-
-permissions: {}
-
-on:
-  pull_request:
-    paths:
-      - "crates/ty*/**"
-      - "crates/ruff_db"
-      - "crates/ruff_python_ast"
-      - "crates/ruff_python_parser"
-      - ".github/workflows/typing_conformance.yaml"
-      - ".github/workflows/typing_conformance_comment.yaml"
-      - "Cargo.lock"
-      - "!**.md"
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
-  cancel-in-progress: true
-
-env:
-  CARGO_INCREMENTAL: 0
-  CARGO_NET_RETRY: 10
-  CARGO_TERM_COLOR: always
-  RUSTUP_MAX_RETRIES: 10
-  RUST_BACKTRACE: 1
-
-jobs:
-  typing_conformance:
-    name: Compute diagnostic diff
-    runs-on: depot-ubuntu-22.04-32
-    timeout-minutes: 10
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          path: ruff
-          fetch-depth: 0
-          persist-credentials: false
-
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          repository: python/typing
-          ref: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
-          path: typing
-          persist-credentials: false
-
-      - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-        with:
-          workspaces: "ruff"
-
-      - name: Install Rust toolchain
-        run: rustup show
-
-      - name: Compute diagnostic diff
-        shell: bash
-        run: |
-          RUFF_DIR="$GITHUB_WORKSPACE/ruff"
-
-          # Build the executable for the old and new commit
-          (
-            cd ruff
-
-            echo "new commit"
-            git checkout -b new_commit "${{ github.event.pull_request.head.sha }}"
-            git rev-list --format=%s --max-count=1 new_commit
-            cargo build --release --bin ty
-            mv target/release/ty ty-new
-
-            echo "old commit (merge base)"
-            MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
-            git checkout -b old_commit "$MERGE_BASE"
-            git rev-list --format=%s --max-count=1 old_commit
-            cargo build --release --bin ty
-            mv target/release/ty ty-old
-          )
-
-          (
-            cd typing/conformance/tests
-
-            echo "Running ty on old commit (merge base)"
-            "$RUFF_DIR/ty-old" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/old-output.txt" 2>&1 || true
-
-            echo "Running ty on new commit"
-            "$RUFF_DIR/ty-new" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/new-output.txt" 2>&1 || true
-          )
-
-          if ! diff -u old-output.txt new-output.txt > typing_conformance_diagnostics.diff; then
-            echo "Differences found between base and PR"
-          else
-            echo "No differences found"
-            touch typing_conformance_diagnostics.diff
-          fi
-
-          echo ${{ github.event.number }} > pr-number
-
-      - name: Upload diff
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: typing_conformance_diagnostics_diff
-          path: typing_conformance_diagnostics.diff
-
-      - name: Upload pr-number
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: pr-number
-          path: pr-number
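A note on the `if ! diff -u ...` pattern in the "Compute diagnostic diff" step above: `diff` exits 0 when its inputs match and 1 when they differ, so negating it selects the "differences found" branch while the redirect still captures the diff body. A runnable sketch:

```bash
printf 'a\n' > old-output.txt
printf 'b\n' > new-output.txt

# diff exits 1 (differences) here, so the negated test succeeds.
if ! diff -u old-output.txt new-output.txt > typing_conformance_diagnostics.diff; then
  echo "Differences found between base and PR"
else
  echo "No differences found"
  touch typing_conformance_diagnostics.diff
fi
```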
Deleted workflow: PR comment (typing_conformance)

@@ -1,97 +0,0 @@
-name: PR comment (typing_conformance)
-
-on: # zizmor: ignore[dangerous-triggers]
-  workflow_run:
-    workflows: [Run typing conformance]
-    types: [completed]
-  workflow_dispatch:
-    inputs:
-      workflow_run_id:
-        description: The typing_conformance workflow that triggers the workflow run
-        required: true
-
-jobs:
-  comment:
-    runs-on: ubuntu-24.04
-    permissions:
-      pull-requests: write
-    steps:
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download PR number
-        with:
-          name: pr-number
-          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Parse pull request number
-        id: pr-number
-        run: |
-          if [[ -f pr-number ]]
-          then
-            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
-          fi
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download typing_conformance results"
-        id: download-typing_conformance_diff
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: typing_conformance_diagnostics_diff
-          workflow: typing_conformance.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/typing_conformance_diagnostics_diff
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Generate comment content
-        id: generate-comment
-        if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
-        run: |
-          # Guard against malicious typing_conformance results that symlink to a secret
-          # file on this runner
-          if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
-          then
-            echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
-            exit 1
-          fi
-
-          # Note this identifier is used to find the comment to update on
-          # subsequent runs
-          echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt
-
-          echo '## Diagnostic diff on typing conformance tests' >> comment.txt
-          if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
-            echo '<details>' >> comment.txt
-            echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
-            echo '' >> comment.txt
-            echo '```diff' >> comment.txt
-            cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
-            echo '```' >> comment.txt
-            echo '</details>' >> comment.txt
-          else
-            echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
-          fi
-
-          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
-          cat comment.txt >> "$GITHUB_OUTPUT"
-          echo 'EOF' >> "$GITHUB_OUTPUT"
-
-      - name: Find existing comment
-        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
-        if: steps.generate-comment.outcome == 'success'
-        id: find-comment
-        with:
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          comment-author: "github-actions[bot]"
-          body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"
-
-      - name: Create or update comment
-        if: steps.find-comment.outcome == 'success'
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
-        with:
-          comment-id: ${{ steps.find-comment.outputs.comment-id }}
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          body-path: comment.txt
-          edit-mode: replace
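The symlink guard that both comment workflows above use deserves a standalone sketch: `[[ -L path ]]` is true for a symbolic link, so a downloaded artifact that points at a secret file on the runner is rejected before its contents are ever read (the link target here is a simulated stand-in):

```bash
# Simulate a malicious artifact that links to a sensitive file.
ln -s /etc/hostname pr.diff

if [[ -L pr.diff ]]; then
  echo "Error: pr.diff cannot be a symlink"
  exit 1
fi
```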
.github/zizmor.yml (vendored; 2 lines changed)

@@ -10,8 +10,6 @@ rules:
    ignore:
      - build-docker.yml
      - publish-playground.yml
-     - ty-ecosystem-analyzer.yaml
-     - ty-ecosystem-report.yaml
  excessive-permissions:
    # it's hard to test what the impact of removing these ignores would be
    # without actually running the release workflow...
.pre-commit-config.yaml

@@ -6,7 +6,7 @@ exclude: |
    crates/ty_vendored/vendor/.*|
    crates/ty_project/resources/.*|
    crates/ty_python_semantic/resources/corpus/.*|
-   crates/ty/docs/(configuration|rules|cli|environment).md|
+   crates/ty/docs/(configuration|rules|cli).md|
    crates/ruff_benchmark/resources/.*|
    crates/ruff_linter/resources/.*|
    crates/ruff_linter/src/rules/.*/snapshots/.*|
@@ -67,7 +67,7 @@ repos:
          - black==25.1.0

  - repo: https://github.com/crate-ci/typos
-   rev: v1.34.0
+   rev: v1.33.1
    hooks:
      - id: typos

@@ -81,10 +81,10 @@ repos:
        pass_filenames: false # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
-   rev: v0.12.5
+   rev: v0.12.1
    hooks:
      - id: ruff-format
-     - id: ruff-check
+     - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
        types_or: [python, pyi]
        require_serial: true
@@ -99,12 +99,12 @@ repos:
  # zizmor detects security vulnerabilities in GitHub Actions workflows.
  # Additional configuration for the tool is found in `.github/zizmor.yml`
  - repo: https://github.com/woodruffw/zizmor-pre-commit
-   rev: v1.11.0
+   rev: v1.10.0
    hooks:
      - id: zizmor

  - repo: https://github.com/python-jsonschema/check-jsonschema
-   rev: 0.33.2
+   rev: 0.33.1
    hooks:
      - id: check-github-workflows

@@ -128,10 +128,5 @@ repos:
      # but the integration only works if shellcheck is installed
      - "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"

- - repo: https://github.com/shellcheck-py/shellcheck-py
-   rev: v0.10.0.1
-   hooks:
-     - id: shellcheck
-
ci:
  skip: [cargo-fmt, dev-generate-all]
133 CHANGELOG.md
@@ -1,138 +1,5 @@
# Changelog

## 0.12.5

### Preview features

- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))

### Bug fixes

- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))

### Rule changes

- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))

## 0.12.4

### Preview features

- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))

### Bug fixes

- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))

### Rule changes

- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))

### Documentation

- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288),[#19272](https://github.com/astral-sh/ruff/pull/19272),[#19291](https://github.com/astral-sh/ruff/pull/19291),[#19296](https://github.com/astral-sh/ruff/pull/19296),[#19292](https://github.com/astral-sh/ruff/pull/19292),[#19295](https://github.com/astral-sh/ruff/pull/19295),[#19297](https://github.com/astral-sh/ruff/pull/19297),[#19309](https://github.com/astral-sh/ruff/pull/19309))

## 0.12.3

### Preview features

- \[`flake8-bugbear`\] Support non-context-manager calls in `B017` ([#19063](https://github.com/astral-sh/ruff/pull/19063))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH100`, `PTH106`, `PTH107`, `PTH108`, `PTH110`, `PTH111`, `PTH112`, `PTH113`, `PTH114`, `PTH115`, `PTH117`, `PTH119`, `PTH120` ([#19213](https://github.com/astral-sh/ruff/pull/19213))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH203`, `PTH204`, `PTH205` ([#18922](https://github.com/astral-sh/ruff/pull/18922))

### Bug fixes

- \[`flake8-return`\] Fix false-positive for variables used inside nested functions in `RET504` ([#18433](https://github.com/astral-sh/ruff/pull/18433))
- Treat form feed as valid whitespace before a line continuation ([#19220](https://github.com/astral-sh/ruff/pull/19220))
- \[`flake8-type-checking`\] Fix syntax error introduced by fix (`TC008`) ([#19150](https://github.com/astral-sh/ruff/pull/19150))
- \[`pyupgrade`\] Keyword arguments in `super` should suppress the `UP008` fix ([#19131](https://github.com/astral-sh/ruff/pull/19131))

### Documentation

- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI007`, `PYI008`) ([#19103](https://github.com/astral-sh/ruff/pull/19103))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM116`) ([#19111](https://github.com/astral-sh/ruff/pull/19111))
- \[`flake8-type-checking`\] Make example error out-of-the-box (`TC001`) ([#19151](https://github.com/astral-sh/ruff/pull/19151))
- \[`flake8-use-pathlib`\] Make example error out-of-the-box (`PTH210`) ([#19189](https://github.com/astral-sh/ruff/pull/19189))
- \[`pycodestyle`\] Make example error out-of-the-box (`E272`) ([#19191](https://github.com/astral-sh/ruff/pull/19191))
- \[`pycodestyle`\] Make example not raise unnecessary `SyntaxError` (`E114`) ([#19190](https://github.com/astral-sh/ruff/pull/19190))
- \[`pydoclint`\] Make example error out-of-the-box (`DOC501`) ([#19218](https://github.com/astral-sh/ruff/pull/19218))
- \[`pylint`, `pyupgrade`\] Fix syntax errors in examples (`PLW1501`, `UP028`) ([#19127](https://github.com/astral-sh/ruff/pull/19127))
- \[`pylint`\] Update `missing-maxsplit-arg` docs and error to suggest proper usage (`PLC0207`) ([#18949](https://github.com/astral-sh/ruff/pull/18949))
- \[`flake8-bandit`\] Make example error out-of-the-box (`S412`) ([#19241](https://github.com/astral-sh/ruff/pull/19241))

## 0.12.2

### Preview features

- \[`flake8-pyi`\] Expand `Optional[A]` to `A | None` (`PYI016`) ([#18572](https://github.com/astral-sh/ruff/pull/18572))
- \[`pyupgrade`\] Mark `UP008` fix safe if no comments are in range ([#18683](https://github.com/astral-sh/ruff/pull/18683))

### Bug fixes

- \[`flake8-comprehensions`\] Fix `C420` to prepend whitespace when needed ([#18616](https://github.com/astral-sh/ruff/pull/18616))
- \[`perflint`\] Fix `PERF403` panic on attribute or subscription loop variable ([#19042](https://github.com/astral-sh/ruff/pull/19042))
- \[`pydocstyle`\] Fix `D413` infinite loop for parenthesized docstring ([#18930](https://github.com/astral-sh/ruff/pull/18930))
- \[`pylint`\] Fix `PLW0108` autofix introducing a syntax error when the lambda's body contains an assignment expression ([#18678](https://github.com/astral-sh/ruff/pull/18678))
- \[`refurb`\] Fix false positive on empty tuples (`FURB168`) ([#19058](https://github.com/astral-sh/ruff/pull/19058))
- \[`ruff`\] Allow more `field` calls from `attrs` (`RUF009`) ([#19021](https://github.com/astral-sh/ruff/pull/19021))
- \[`ruff`\] Fix syntax error introduced for an empty string followed by a u-prefixed string (`UP025`) ([#18899](https://github.com/astral-sh/ruff/pull/18899))

### Rule changes

- \[`flake8-executable`\] Allow `uvx` in shebang line (`EXE003`) ([#18967](https://github.com/astral-sh/ruff/pull/18967))
- \[`pandas`\] Avoid flagging `PD002` if `pandas` is not imported ([#18963](https://github.com/astral-sh/ruff/pull/18963))
- \[`pyupgrade`\] Avoid PEP-604 unions with `typing.NamedTuple` (`UP007`, `UP045`) ([#18682](https://github.com/astral-sh/ruff/pull/18682))

### Documentation

- Document link between `import-outside-top-level (PLC0415)` and `lint.flake8-tidy-imports.banned-module-level-imports` ([#18733](https://github.com/astral-sh/ruff/pull/18733))
- Fix description of the `format.skip-magic-trailing-comma` example ([#19095](https://github.com/astral-sh/ruff/pull/19095))
- \[`airflow`\] Make `AIR302` example error out-of-the-box ([#18988](https://github.com/astral-sh/ruff/pull/18988))
- \[`airflow`\] Make `AIR312` example error out-of-the-box ([#18989](https://github.com/astral-sh/ruff/pull/18989))
- \[`flake8-annotations`\] Make `ANN401` example error out-of-the-box ([#18974](https://github.com/astral-sh/ruff/pull/18974))
- \[`flake8-async`\] Make `ASYNC100` example error out-of-the-box ([#18993](https://github.com/astral-sh/ruff/pull/18993))
- \[`flake8-async`\] Make `ASYNC105` example error out-of-the-box ([#19002](https://github.com/astral-sh/ruff/pull/19002))
- \[`flake8-async`\] Make `ASYNC110` example error out-of-the-box ([#18975](https://github.com/astral-sh/ruff/pull/18975))
- \[`flake8-async`\] Make `ASYNC210` example error out-of-the-box ([#18977](https://github.com/astral-sh/ruff/pull/18977))
- \[`flake8-async`\] Make `ASYNC220`, `ASYNC221`, and `ASYNC222` examples error out-of-the-box ([#18978](https://github.com/astral-sh/ruff/pull/18978))
- \[`flake8-async`\] Make `ASYNC251` example error out-of-the-box ([#18990](https://github.com/astral-sh/ruff/pull/18990))
- \[`flake8-bandit`\] Make `S201` example error out-of-the-box ([#19017](https://github.com/astral-sh/ruff/pull/19017))
- \[`flake8-bandit`\] Make `S604` and `S609` examples error out-of-the-box ([#19049](https://github.com/astral-sh/ruff/pull/19049))
- \[`flake8-bugbear`\] Make `B028` example error out-of-the-box ([#19054](https://github.com/astral-sh/ruff/pull/19054))
- \[`flake8-bugbear`\] Make `B911` example error out-of-the-box ([#19051](https://github.com/astral-sh/ruff/pull/19051))
- \[`flake8-datetimez`\] Make `DTZ011` example error out-of-the-box ([#19055](https://github.com/astral-sh/ruff/pull/19055))
- \[`flake8-datetimez`\] Make `DTZ901` example error out-of-the-box ([#19056](https://github.com/astral-sh/ruff/pull/19056))
- \[`flake8-pyi`\] Make `PYI032` example error out-of-the-box ([#19061](https://github.com/astral-sh/ruff/pull/19061))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI014`, `PYI015`) ([#19097](https://github.com/astral-sh/ruff/pull/19097))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI042`) ([#19101](https://github.com/astral-sh/ruff/pull/19101))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI059`) ([#19080](https://github.com/astral-sh/ruff/pull/19080))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI062`) ([#19079](https://github.com/astral-sh/ruff/pull/19079))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT023`) ([#19104](https://github.com/astral-sh/ruff/pull/19104))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT030`) ([#19105](https://github.com/astral-sh/ruff/pull/19105))
- \[`flake8-quotes`\] Make example error out-of-the-box (`Q003`) ([#19106](https://github.com/astral-sh/ruff/pull/19106))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM110`) ([#19113](https://github.com/astral-sh/ruff/pull/19113))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM113`) ([#19109](https://github.com/astral-sh/ruff/pull/19109))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM401`) ([#19110](https://github.com/astral-sh/ruff/pull/19110))
- \[`pyflakes`\] Fix backslash in docs (`F621`) ([#19098](https://github.com/astral-sh/ruff/pull/19098))
- \[`pylint`\] Fix `PLC0415` example ([#18970](https://github.com/astral-sh/ruff/pull/18970))

## 0.12.1

### Preview features

@@ -266,13 +266,6 @@ Finally, regenerate the documentation and generated code with `cargo dev generat

## MkDocs

> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.

To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

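The hunk above cuts off after step 1 of the local-preview instructions. As a hedged sketch only (the repository's actual steps may differ), a typical non-insiders preview loop looks like this; `cargo dev generate-all` is the regeneration command referenced in the hunk header, while the MkDocs invocations below are assumptions based on the standard Material for MkDocs workflow:

```bash
# Hedged sketch of a local docs preview, assuming the standard (non-insiders)
# Material for MkDocs setup; insiders-only features will render differently.
cargo dev generate-all            # regenerate generated docs sources (referenced above)
pip install mkdocs mkdocs-material
mkdocs serve                      # preview at http://127.0.0.1:8000
```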
476 Cargo.lock generated
File diff suppressed because it is too large
32 Cargo.toml
@@ -44,7 +44,6 @@ ty_ide = { path = "crates/ty_ide" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }
ty_server = { path = "crates/ty_server" }
ty_static = { path = "crates/ty_static" }
ty_test = { path = "crates/ty_test" }
ty_vendored = { path = "crates/ty_vendored" }

@@ -57,9 +56,6 @@ assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
"alloc",
] }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
@@ -73,7 +69,7 @@ console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
criterion = { version = "0.6.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
@@ -83,11 +79,11 @@ etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.6.0", features = [
get-size2 = { version = "0.5.0", features = [
"derive",
"smallvec",
"hashbrown",
"compact-str",
"compact-str"
] }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
@@ -102,7 +98,7 @@ ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.18.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta-cmd = { version = "0.6.0" }
@@ -141,7 +137,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "dba66f1a37acca014c2402f231ed5b361bd7d8fe" }
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "fc00eba89e5dcaa5edba51c41aa5f309b5cb126b" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -153,7 +149,7 @@ serde_with = { version = "3.6.0", default-features = false, features = [
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2", features = ["union", "const_generics", "const_new"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
"diff",
"term-svg",
@@ -168,16 +164,16 @@ tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.9.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.11" }
tracing-indicatif = { version = "0.3.6" }
tracing-log = { version = "0.2.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
"env-filter",
"fmt",
"ansi",
"smallvec",
"smallvec"
] }
tryfn = { version = "0.2.1" }
typed-arena = { version = "2.0.2" }
@@ -187,7 +183,11 @@ unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
@@ -222,8 +222,8 @@ must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
needless_raw_string_hashes = "allow"
# Disallowed restriction lints

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.5/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.1/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.5
rev: v0.12.1
hooks:
# Run the linter.
- id: ruff-check
@@ -430,7 +430,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- Capital One ([datacompy](https://github.com/capitalone/datacompy))
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
@@ -507,7 +506,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Streamlit](https://github.com/streamlit/streamlit)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- [Weblate](https://weblate.org/)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
- [ZenML](https://github.com/zenml-io/zenml)
- [Zulip](https://github.com/zulip/zulip)

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.5"
version = "0.12.1"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -169,9 +169,6 @@ pub struct AnalyzeGraphCommand {
/// Attempt to detect imports from string literals.
#[clap(long)]
detect_string_imports: bool,
/// The minimum number of dots in a string import to consider it a valid import.
#[clap(long)]
min_dots: Option<usize>,
/// Enable preview mode. Use `--no-preview` to disable.
#[arg(long, overrides_with("no_preview"))]
preview: bool,
@@ -811,7 +808,6 @@ impl AnalyzeGraphCommand {
} else {
None
},
string_imports_min_dots: self.min_dots,
preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
target_version: self.target_version.map(ast::PythonVersion::from),
..ExplicitConfigOverrides::default()
@@ -1309,7 +1305,6 @@ struct ExplicitConfigOverrides {
show_fixes: Option<bool>,
extension: Option<Vec<ExtensionPair>>,
detect_string_imports: Option<bool>,
string_imports_min_dots: Option<usize>,
}

impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1397,9 +1392,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
if let Some(detect_string_imports) = &self.detect_string_imports {
config.analyze.detect_string_imports = Some(*detect_string_imports);
}
if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
}

config
}

@@ -18,15 +18,14 @@ use rustc_hash::FxHashMap;
use tempfile::NamedTempFile;

use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_db::diagnostic::Diagnostic;
use ruff_diagnostics::Fix;
use ruff_linter::message::create_lint_diagnostic;
use ruff_linter::message::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::{VERSION, warn_user};
use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_workspace::Settings;
use ruff_workspace::resolver::Resolver;

@@ -349,7 +348,7 @@ impl FileCache {
lint.messages
.iter()
.map(|msg| {
create_lint_diagnostic(
OldDiagnostic::lint(
&msg.body,
msg.suggestion.as_ref(),
msg.range,
@@ -429,11 +428,11 @@ pub(crate) struct LintCacheData {

impl LintCacheData {
pub(crate) fn from_diagnostics(
diagnostics: &[Diagnostic],
diagnostics: &[OldDiagnostic],
notebook_index: Option<NotebookIndex>,
) -> Self {
let source = if let Some(msg) = diagnostics.first() {
msg.expect_ruff_source_file().source_text().to_owned()
msg.source_file().source_text().to_owned()
} else {
String::new() // No messages, no need to keep the source!
};
@@ -447,16 +446,16 @@ impl LintCacheData {
.map(|(rule, msg)| {
// Make sure that all message use the same source file.
assert_eq!(
msg.expect_ruff_source_file(),
diagnostics.first().unwrap().expect_ruff_source_file(),
msg.source_file(),
diagnostics.first().unwrap().source_file(),
"message uses a different source file"
);
CacheMessage {
rule,
body: msg.body().to_string(),
suggestion: msg.first_help_text().map(ToString::to_string),
range: msg.expect_range(),
parent: msg.parent(),
suggestion: msg.suggestion().map(ToString::to_string),
range: msg.range(),
parent: msg.parent,
fix: msg.fix().cloned(),
noqa_offset: msg.noqa_offset(),
}
@@ -609,12 +608,12 @@ mod tests {
use anyhow::Result;
use filetime::{FileTime, set_file_mtime};
use itertools::Itertools;
use ruff_linter::settings::LinterSettings;
use test_case::test_case;

use ruff_cache::CACHE_DIR_NAME;
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::message::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::settings::LinterSettings;
use ruff_linter::settings::flags;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_python_ast::{PySourceType, PythonVersion};
@@ -681,7 +680,7 @@ mod tests {
UnsafeFixes::Enabled,
)
.unwrap();
if diagnostics.inner.iter().any(Diagnostic::is_invalid_syntax) {
if diagnostics.inner.iter().any(OldDiagnostic::is_syntax_error) {
parse_errors.push(path.clone());
}
paths.push(path);

@@ -102,7 +102,7 @@ pub(crate) fn analyze_graph(

// Resolve the per-file settings.
let settings = resolver.resolve(path);
let string_imports = settings.analyze.string_imports;
let string_imports = settings.analyze.detect_string_imports;
let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();

// Skip excluded files.

@@ -11,13 +11,13 @@ use log::{debug, error, warn};
use rayon::prelude::*;
use rustc_hash::FxHashMap;

use ruff_db::diagnostic::Diagnostic;
use ruff_db::panic::catch_unwind;
use ruff_linter::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::{IOError, Violation, fs, warn_user_once};
use ruff_linter::{IOError, fs, warn_user_once};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::resolver::{
@@ -129,7 +129,11 @@ pub(crate) fn check(
SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();

Diagnostics::new(
vec![IOError { message }.into_diagnostic(TextRange::default(), &dummy)],
vec![OldDiagnostic::new(
IOError { message },
TextRange::default(),
&dummy,
)],
FxHashMap::default(),
)
} else {
@@ -162,9 +166,7 @@ pub(crate) fn check(
|a, b| (a.0 + b.0, a.1 + b.1),
);

all_diagnostics
.inner
.sort_by(Diagnostic::ruff_start_ordering);
all_diagnostics.inner.sort();

// Store the caches.
caches.persist()?;
@@ -279,7 +281,6 @@ mod test {

TextEmitter::default()
.with_show_fix_status(true)
.with_color(false)
.emit(
&mut output,
&diagnostics.inner,

@@ -1,7 +1,6 @@
use std::path::Path;

use anyhow::Result;
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::packaging;
use ruff_linter::settings::flags;
@@ -53,8 +52,6 @@ pub(crate) fn check_stdin(
noqa,
fix_mode,
)?;
diagnostics
.inner
.sort_unstable_by(Diagnostic::ruff_start_ordering);
diagnostics.inner.sort_unstable();
Ok(diagnostics)
}

@@ -10,35 +10,35 @@ use std::path::Path;
use anyhow::{Context, Result};
use colored::Colorize;
use log::{debug, warn};
use ruff_db::diagnostic::Diagnostic;
use rustc_hash::FxHashMap;

use ruff_linter::OldDiagnostic;
use ruff_linter::codes::Rule;
use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
use ruff_linter::message::create_syntax_error_diagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{IOError, Violation, fs};
use ruff_linter::{IOError, fs};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::Settings;
use rustc_hash::FxHashMap;

use crate::cache::{Cache, FileCacheKey, LintCacheData};

#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
pub(crate) inner: Vec<Diagnostic>,
pub(crate) inner: Vec<OldDiagnostic>,
pub(crate) fixed: FixMap,
pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
}

impl Diagnostics {
pub(crate) fn new(
diagnostics: Vec<Diagnostic>,
diagnostics: Vec<OldDiagnostic>,
notebook_indexes: FxHashMap<String, NotebookIndex>,
) -> Self {
Self {
@@ -62,12 +62,13 @@ impl Diagnostics {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let source_file = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![
vec![OldDiagnostic::new(
IOError {
message: err.to_string(),
}
.into_diagnostic(TextRange::default(), &source_file),
],
},
TextRange::default(),
&source_file,
)],
FxHashMap::default(),
)
} else {
@@ -97,10 +98,10 @@ impl Diagnostics {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let dummy = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![create_syntax_error_diagnostic(
dummy,
vec![OldDiagnostic::syntax_error(
err,
TextRange::default(),
dummy,
)],
FxHashMap::default(),
)

@@ -131,7 +131,6 @@ pub fn run(
}: Args,
) -> Result<ExitStatus> {
{
ruff_db::set_program_version(crate::version::version().to_string()).unwrap();
let default_panic_hook = std::panic::take_hook();
std::panic::set_hook(Box::new(move |info| {
#[expect(clippy::print_stderr)]
@@ -440,7 +439,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
if cli.statistics {
printer.write_statistics(&diagnostics, &mut summary_writer)?;
} else {
printer.write_once(&diagnostics, &mut summary_writer, preview)?;
printer.write_once(&diagnostics, &mut summary_writer)?;
}

if !cli.exit_zero {

@@ -9,14 +9,12 @@ use itertools::{Itertools, iterate};
use ruff_linter::linter::FixTable;
use serde::Serialize;

use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, SarifEmitter,
TextEmitter,
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter,
SarifEmitter, SecondaryCode, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@@ -203,7 +201,6 @@ impl Printer {
&self,
diagnostics: &Diagnostics,
writer: &mut dyn Write,
preview: bool,
) -> Result<()> {
if matches!(self.log_level, LogLevel::Silent) {
return Ok(());
@@ -231,32 +228,16 @@ impl Printer {

match self.format {
OutputFormat::Json => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Json)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JsonEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Rdjson => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Rdjson)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
RdjsonEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::JsonLines => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::JsonLines)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JsonLinesEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Junit => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Junit)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JunitEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Concise | OutputFormat::Full => {
TextEmitter::default()
@@ -264,7 +245,6 @@ impl Printer {
.with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
.with_show_source(self.format == OutputFormat::Full)
.with_unsafe_fixes(self.unsafe_fixes)
.with_preview(preview)
.emit(writer, &diagnostics.inner, &context)?;

if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
@@ -299,18 +279,10 @@ impl Printer {
GitlabEmitter::default().emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Pylint => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Pylint)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
PylintEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Azure => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Azure)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
AzureEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.inner, &context)?;
@@ -334,7 +306,8 @@ impl Printer {
.sorted_by_key(|(code, message)| (*code, message.fixable()))
.fold(
vec![],
|mut acc: Vec<((Option<&SecondaryCode>, &Diagnostic), usize)>, (code, message)| {
|mut acc: Vec<((Option<&SecondaryCode>, &OldDiagnostic), usize)>,
(code, message)| {
if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
if *prev_code == code {
*count += 1;

@@ -57,40 +57,33 @@ fn dependencies() -> Result<()> {
.write_str(indoc::indoc! {r#"
def f(): pass
"#})?;
root.child("ruff")
.child("e.pyi")
.write_str(indoc::indoc! {r#"
def f() -> None: ...
"#})?;

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.py"
],
"ruff/d.py": [
"ruff/e.py",
"ruff/e.pyi"
],
"ruff/e.py": [],
"ruff/e.pyi": []
}
assert_cmd_snapshot!(command().current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": [
"ruff/d.py"
],
"ruff/d.py": [
"ruff/e.py"
],
"ruff/e.py": []
}

----- stderr -----
"#);
----- stderr -----
"###);
});

Ok(())
@@ -204,43 +197,23 @@ fn string_detection() -> Result<()> {
insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [],
"ruff/c.py": []
}
assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}

----- stderr -----
"#);
});

insta::with_settings!({
filters => INSTA_FILTERS.to_vec(),
}, {
assert_cmd_snapshot!(command().arg("--detect-string-imports").arg("--min-dots").arg("1").current_dir(&root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"ruff/__init__.py": [],
"ruff/a.py": [
"ruff/b.py"
],
"ruff/b.py": [
"ruff/c.py"
],
"ruff/c.py": []
}

----- stderr -----
"#);
----- stderr -----
"###);
});

Ok(())

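The tests above exercise the `--min-dots` flag that this diff adds to `ruff analyze graph` alongside the existing `--detect-string-imports`. As a hedged sketch of the CLI surface only (both flags appear in the diff; the output shape follows the snapshots):

```bash
# Hedged sketch: import-graph analysis with string-import detection.
# --min-dots lowers the threshold for how many dots a string literal
# needs before it is treated as a module path.
ruff analyze graph --detect-string-imports               # default threshold
ruff analyze graph --detect-string-imports --min-dots 1  # also count "a.b"
```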
@@ -120,7 +120,7 @@ fn nonexistent_config_file() {
#[test]
fn config_override_rejected_if_invalid_toml() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--config", "foo = bar", "."]), @r"
.args(["format", "--config", "foo = bar", "."]), @r#"
success: false
exit_code: 2
----- stdout -----
@@ -137,11 +137,12 @@ fn config_override_rejected_if_invalid_toml() {
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^^^
string values must be quoted, expected literal string
| ^
invalid string
expected `"`, `'`

For more information, try '--help'.
");
"#);
}

#[test]

@@ -1067,7 +1067,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -1080,7 +1080,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -1093,7 +1093,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -2246,7 +2246,8 @@ fn pyproject_toml_stdin_syntax_error() {
success: false
exit_code: 1
----- stdout -----
pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: unclosed table, expected `]`
pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: invalid table header
expected `.`, `]`
|
1 | [project
| ^ RUF200

@@ -534,7 +534,7 @@ fn nonexistent_config_file() {
fn config_override_rejected_if_invalid_toml() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "foo = bar", "."]), @r"
.args(["--config", "foo = bar", "."]), @r#"
success: false
exit_code: 2
----- stdout -----
@@ -551,11 +551,12 @@ fn config_override_rejected_if_invalid_toml() {
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^^^
string values must be quoted, expected literal string
| ^
invalid string
expected `"`, `'`

For more information, try '--help'.
");
"#);
}

#[test]
@@ -732,8 +733,9 @@ select = [E501]
Cause: TOML parse error at line 3, column 11
|
3 | select = [E501]
| ^^^^
string values must be quoted, expected literal string
| ^
invalid array
expected `]`
");
});

@@ -874,7 +876,7 @@ fn each_toml_option_requires_a_new_flag_1() {
|
1 | extend-select=['F841'], line-length=90
| ^
unexpected key or value, expected newline, `#`
expected newline, `#`

For more information, try '--help'.
");
@@ -905,7 +907,7 @@ fn each_toml_option_requires_a_new_flag_2() {
|
1 | extend-select=['F841'] line-length=90
| ^
unexpected key or value, expected newline, `#`
expected newline, `#`

For more information, try '--help'.
");
@@ -993,7 +995,6 @@ fn value_given_to_table_key_is_not_inline_table_2() {
- `lint.exclude`
- `lint.preview`
- `lint.typing-extensions`
- `lint.future-annotations`

For more information, try '--help'.
");
@@ -2422,7 +2423,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -2734,7 +2735,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3098,7 +3099,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3478,7 +3479,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3806,7 +3807,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -4134,7 +4135,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -4419,7 +4420,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -4757,7 +4758,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -5691,82 +5692,3 @@ class Foo:
"
);
}

#[test_case::test_case("concise")]
#[test_case::test_case("full")]
#[test_case::test_case("json")]
#[test_case::test_case("json-lines")]
#[test_case::test_case("junit")]
#[test_case::test_case("grouped")]
#[test_case::test_case("github")]
#[test_case::test_case("gitlab")]
#[test_case::test_case("pylint")]
#[test_case::test_case("rdjson")]
#[test_case::test_case("azure")]
#[test_case::test_case("sarif")]
fn output_format(output_format: &str) -> Result<()> {
const CONTENT: &str = "\
import os # F401
x = y # F821
match 42: # invalid-syntax
case _: ...
";

let tempdir = TempDir::new()?;
let input = tempdir.path().join("input.py");
fs::write(&input, CONTENT)?;

let snapshot = format!("output_format_{output_format}");

let project_dir = dunce::canonicalize(tempdir.path())?;

insta::with_settings!({
filters => vec![
(tempdir_filter(&project_dir).as_str(), "[TMP]/"),
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),
]
}, {
assert_cmd_snapshot!(
snapshot,
Command::new(get_cargo_bin(BIN_NAME))
.args([
"check",
"--no-cache",
"--output-format",
output_format,
"--select",
"F401,F821",
"--target-version",
"py39",
"input.py",
])
.current_dir(&tempdir),
);
});

Ok(())
}

#[test]
fn future_annotations_preview_warning() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "lint.future-annotations = true"])
.args(["--select", "F"])
.arg("--no-preview")
.arg("-")
.pass_stdin("1"),
@r"
success: true
exit_code: 0
----- stdout -----
All checks passed!

----- stderr -----
warning: The `lint.future-annotations` setting will have no effect because `preview` is disabled
",
);
}

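The deleted snapshot files below were all recorded from the same parameterized test above, varying only the `--output-format` value. As a hedged sketch, the equivalent manual invocation (taken directly from the `args:` recorded in each snapshot) is:

```bash
# Reproduces one of the recorded snapshots by hand; swap "github" for any
# format in the test-case list above (json, junit, sarif, ...).
ruff check --no-cache --output-format github \
  --select F401,F821 --target-version py39 input.py
```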
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- azure
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y`
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;]SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----
@@ -1,25 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- concise
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
input.py:2:5: F821 Undefined name `y`
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,49 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- full
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
|
1 | import os # F401
| ^^ F401
2 | x = y # F821
3 | match 42: # invalid-syntax
|
= help: Remove unused import: `os`

input.py:2:5: F821 Undefined name `y`
|
1 | import os # F401
2 | x = y # F821
| ^ F821
3 | match 42: # invalid-syntax
4 | case _: ...
|

input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
1 | import os # F401
2 | x = y # F821
3 | match 42: # invalid-syntax
| ^^^^^
4 | case _: ...
|

Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- github
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused
::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y`
::error title=Ruff,file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----
@@ -1,60 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- gitlab
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"check_name": "F401",
"description": "`os` imported but unused",
"fingerprint": "4dbad37161e65c72",
"location": {
"lines": {
"begin": 1,
"end": 1
},
"path": "input.py"
},
"severity": "major"
},
{
"check_name": "F821",
"description": "Undefined name `y`",
"fingerprint": "7af59862a085230",
"location": {
"lines": {
"begin": 2,
"end": 2
},
"path": "input.py"
},
"severity": "major"
},
{
"check_name": "syntax-error",
"description": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"fingerprint": "e558cec859bb66e8",
"location": {
"lines": {
"begin": 3,
"end": 3
},
"path": "input.py"
},
"severity": "major"
}
]
----- stderr -----
@@ -1,27 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- grouped
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:
1:8 F401 [*] `os` imported but unused
2:5 F821 Undefined name `y`
3:1 SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- json-lines
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
{"cell":null,"code":null,"end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}

----- stderr -----
@@ -1,88 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- json
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"cell": null,
"code": "F401",
"end_location": {
"column": 10,
"row": 1
},
"filename": "[TMP]/input.py",
"fix": {
"applicability": "safe",
"edits": [
{
"content": "",
"end_location": {
"column": 1,
"row": 2
},
"location": {
"column": 1,
"row": 1
}
}
],
"message": "Remove unused import: `os`"
},
"location": {
"column": 8,
"row": 1
},
"message": "`os` imported but unused",
"noqa_row": 1,
"url": "https://docs.astral.sh/ruff/rules/unused-import"
},
{
"cell": null,
"code": "F821",
"end_location": {
"column": 6,
"row": 2
},
"filename": "[TMP]/input.py",
"fix": null,
"location": {
"column": 5,
"row": 2
},
"message": "Undefined name `y`",
"noqa_row": 2,
"url": "https://docs.astral.sh/ruff/rules/undefined-name"
},
{
"cell": null,
"code": null,
"end_location": {
"column": 6,
"row": 3
},
"filename": "[TMP]/input.py",
"fix": null,
"location": {
"column": 1,
"row": 3
},
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"noqa_row": null,
"url": null
}
]
----- stderr -----
@@ -1,34 +0,0 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- junit
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites name="ruff" tests="3" failures="3" errors="0">
|
||||
<testsuite name="[TMP]/input.py" tests="3" disabled="0" errors="0" failures="3" package="org.ruff">
|
||||
<testcase name="org.ruff.F401" classname="[TMP]/input" line="1" column="8">
|
||||
<failure message="`os` imported but unused">line 1, col 8, `os` imported but unused</failure>
|
||||
</testcase>
|
||||
<testcase name="org.ruff.F821" classname="[TMP]/input" line="2" column="5">
|
||||
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
|
||||
</testcase>
|
||||
<testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1">
|
||||
<failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
|
||||
----- stderr -----
|
||||
@@ -1,23 +0,0 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- pylint
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
input.py:1: [F401] `os` imported but unused
|
||||
input.py:2: [F821] Undefined name `y`
|
||||
input.py:3: [invalid-syntax] SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
|
||||
----- stderr -----
|
||||
@@ -1,102 +0,0 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- rdjson
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{
|
||||
"diagnostics": [
|
||||
{
|
||||
"code": {
|
||||
"url": "https://docs.astral.sh/ruff/rules/unused-import",
|
||||
"value": "F401"
|
||||
},
|
||||
"location": {
|
||||
"path": "[TMP]/input.py",
|
||||
"range": {
|
||||
"end": {
|
||||
"column": 10,
|
||||
"line": 1
|
||||
},
|
||||
"start": {
|
||||
"column": 8,
|
||||
"line": 1
|
||||
}
|
||||
}
|
||||
},
|
||||
"message": "`os` imported but unused",
|
||||
"suggestions": [
|
||||
{
|
||||
"range": {
|
||||
"end": {
|
||||
"column": 1,
|
||||
"line": 2
|
||||
},
|
||||
"start": {
|
||||
"column": 1,
|
||||
"line": 1
|
||||
}
|
||||
},
|
||||
"text": ""
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"code": {
|
||||
"url": "https://docs.astral.sh/ruff/rules/undefined-name",
|
||||
"value": "F821"
|
||||
},
|
||||
"location": {
|
||||
"path": "[TMP]/input.py",
|
||||
"range": {
|
||||
"end": {
|
||||
"column": 6,
|
||||
"line": 2
|
||||
},
|
||||
"start": {
|
||||
"column": 5,
|
||||
"line": 2
|
||||
}
|
||||
}
|
||||
},
|
||||
"message": "Undefined name `y`"
|
||||
},
|
||||
{
|
||||
"code": {
|
||||
"value": "invalid-syntax"
|
||||
},
|
||||
"location": {
|
||||
"path": "[TMP]/input.py",
|
||||
"range": {
|
||||
"end": {
|
||||
"column": 6,
|
||||
"line": 3
|
||||
},
|
||||
"start": {
|
||||
"column": 1,
|
||||
"line": 3
|
||||
}
|
||||
}
|
||||
},
|
||||
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
}
|
||||
],
|
||||
"severity": "WARNING",
|
||||
"source": {
|
||||
"name": "ruff",
|
||||
"url": "https://docs.astral.sh/ruff"
|
||||
}
|
||||
}
|
||||
----- stderr -----
|
||||
@@ -1,142 +0,0 @@
|
||||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- sarif
|
||||
- "--select"
|
||||
- "F401,F821"
|
||||
- "--target-version"
|
||||
- py39
|
||||
- input.py
|
||||
---
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
|
||||
"runs": [
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"level": "error",
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": "[TMP]/input.py"
|
||||
},
|
||||
"region": {
|
||||
"endColumn": 10,
|
||||
"endLine": 1,
|
||||
"startColumn": 8,
|
||||
"startLine": 1
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "`os` imported but unused"
|
||||
},
|
||||
"ruleId": "F401"
|
||||
},
|
||||
{
|
||||
"level": "error",
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": "[TMP]/input.py"
|
||||
},
|
||||
"region": {
|
||||
"endColumn": 6,
|
||||
"endLine": 2,
|
||||
"startColumn": 5,
|
||||
"startLine": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "Undefined name `y`"
|
||||
},
|
||||
"ruleId": "F821"
|
||||
},
|
||||
{
|
||||
"level": "error",
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": "[TMP]/input.py"
|
||||
},
|
||||
"region": {
|
||||
"endColumn": 6,
|
||||
"endLine": 3,
|
||||
"startColumn": 1,
|
||||
"startLine": 3
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
},
|
||||
"ruleId": null
|
||||
}
|
||||
],
|
||||
"tool": {
|
||||
"driver": {
|
||||
"informationUri": "https://github.com/astral-sh/ruff",
|
||||
"name": "ruff",
|
||||
"rules": [
|
||||
{
|
||||
"fullDescription": {
|
||||
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)\n"
|
||||
},
|
||||
"help": {
|
||||
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
|
||||
},
|
||||
"helpUri": "https://docs.astral.sh/ruff/rules/unused-import",
|
||||
"id": "F401",
|
||||
"properties": {
|
||||
"id": "F401",
|
||||
"kind": "Pyflakes",
|
||||
"name": "unused-import",
|
||||
"problem.severity": "error"
|
||||
},
|
||||
"shortDescription": {
|
||||
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
|
||||
}
|
||||
},
|
||||
{
|
||||
"fullDescription": {
|
||||
"text": "## What it does\nChecks for uses of undefined names.\n\n## Why is this bad?\nAn undefined name is likely to raise `NameError` at runtime.\n\n## Example\n```python\ndef double():\n return n * 2 # raises `NameError` if `n` is undefined when `double` is called\n```\n\nUse instead:\n```python\ndef double(n):\n return n * 2\n```\n\n## Options\n- [`target-version`]: Can be used to configure which symbols Ruff will understand\n as being available in the `builtins` namespace.\n\n## References\n- [Python documentation: Naming and binding](https://docs.python.org/3/reference/executionmodel.html#naming-and-binding)\n"
|
||||
},
|
||||
"help": {
|
||||
"text": "Undefined name `{name}`. {tip}"
|
||||
},
|
||||
"helpUri": "https://docs.astral.sh/ruff/rules/undefined-name",
|
||||
"id": "F821",
|
||||
"properties": {
|
||||
"id": "F821",
|
||||
"kind": "Pyflakes",
|
||||
"name": "undefined-name",
|
||||
"problem.severity": "error"
|
||||
},
|
||||
"shortDescription": {
|
||||
"text": "Undefined name `{name}`. {tip}"
|
||||
}
|
||||
}
|
||||
],
|
||||
"version": "[VERSION]"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"version": "2.1.0"
|
||||
}
|
||||
----- stderr -----
|
||||
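The deleted snapshots above all record the same three diagnostics (F401, F821, and a `match`-on-3.9 syntax error) rendered through ruff's different `--output-format` values. A minimal sketch of how such a CLI snapshot test is typically written follows; the `ruff_check` helper name is hypothetical, while the argument list mirrors the invocations recorded in the snapshot headers.

```rust
// Sketch only: mirrors the invocations recorded in the snapshot headers above.
// `insta_cmd` provides `assert_cmd_snapshot!` and `get_cargo_bin`; the helper
// name `ruff_check` is an assumption for illustration.
use std::process::Command;

use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};

fn ruff_check(output_format: &str) -> Command {
    let mut cmd = Command::new(get_cargo_bin("ruff"));
    cmd.args(["check", "--no-cache", "--output-format", output_format])
        .args(["--select", "F401,F821"])
        .args(["--target-version", "py39"])
        .arg("input.py");
    cmd
}

#[test]
fn output_format_sarif() {
    // insta records exit status, stdout, and stderr into a snapshot file.
    assert_cmd_snapshot!(ruff_check("sarif"));
}
```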
@@ -392,7 +392,7 @@ formatter.docstring_code_line_width = dynamic
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.7
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}


@@ -60,7 +60,7 @@ fn config_option_ignored_but_validated() {
    assert_cmd_snapshot!(
        Command::new(get_cargo_bin(BIN_NAME))
            .arg("version")
            .args(["--config", "foo = bar"]), @r"
            .args(["--config", "foo = bar"]), @r#"
    success: false
    exit_code: 2
    ----- stdout -----

@@ -77,11 +77,12 @@ fn config_option_ignored_but_validated() {
    TOML parse error at line 1, column 7
      |
    1 | foo = bar
      |       ^^^
    string values must be quoted, expected literal string
      |       ^
    invalid string
    expected `"`, `'`

    For more information, try '--help'.
    "
    "#
    );
});
}

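Aside from the new TOML error text, the only change in this hunk is the snapshot delimiter: the message now contains a double quote (in ``expected `"`, `'` ``), so the inline snapshot switches from `@r"…"` to `@r#"…"#`. A self-contained illustration of the raw-string rule:

```rust
// Rust raw strings cannot contain their own closing quote unless the literal
// is hash-delimited; adding `#` lets the snapshot text embed a `"` verbatim.
fn main() {
    let old_style = r"a snapshot without quotes";
    let new_style = r#"expected `"`, `'`"#;
    println!("{old_style} / {new_style}");
}
```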
@@ -2,7 +2,6 @@
use ruff_benchmark::criterion;
use ruff_benchmark::real_world_projects::{InstalledProject, RealWorldProject};

use std::fmt::Write;
use std::ops::Range;

use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
@@ -18,7 +17,7 @@ use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ChangedKind};
use ty_project::{CheckMode, Db, ProjectDatabase, ProjectMetadata};
use ty_project::{Db, ProjectDatabase, ProjectMetadata};

struct Case {
    db: ProjectDatabase,
@@ -102,7 +101,6 @@ fn setup_tomllib_case() -> Case {

    let re = re.unwrap();

    db.set_check_mode(CheckMode::OpenFiles);
    db.project().set_open_files(&mut db, tomllib_files);

    let re_path = re.path(&db).as_system_path().unwrap().to_owned();
@@ -238,7 +236,6 @@ fn setup_micro_case(code: &str) -> Case {
    let mut db = ProjectDatabase::new(metadata, system).unwrap();
    let file = system_path_to_file(&db, SystemPathBuf::from(file_path)).unwrap();

    db.set_check_mode(CheckMode::OpenFiles);
    db.project()
        .set_open_files(&mut db, FxHashSet::from_iter([file]));

@@ -444,37 +441,6 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
    });
}

fn benchmark_many_enum_members(criterion: &mut Criterion) {
    const NUM_ENUM_MEMBERS: usize = 512;

    setup_rayon();

    let mut code = String::new();
    writeln!(&mut code, "from enum import Enum").ok();

    writeln!(&mut code, "class E(Enum):").ok();
    for i in 0..NUM_ENUM_MEMBERS {
        writeln!(&mut code, " m{i} = {i}").ok();
    }
    writeln!(&mut code).ok();

    for i in 0..NUM_ENUM_MEMBERS {
        writeln!(&mut code, "print(E.m{i})").ok();
    }

    criterion.bench_function("ty_micro[many_enum_members]", |b| {
        b.iter_batched_ref(
            || setup_micro_case(&code),
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}

struct ProjectBenchmark<'a> {
    project: InstalledProject<'a>,
    fs: MemoryFileSystem,
@@ -527,21 +493,17 @@ impl<'a> ProjectBenchmark<'a> {

#[track_caller]
fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
    fn check_project(db: &mut ProjectDatabase, project_name: &str, max_diagnostics: usize) {
    fn check_project(db: &mut ProjectDatabase, max_diagnostics: usize) {
        let result = db.check();
        let diagnostics = result.len();

        if diagnostics > max_diagnostics {
            let details = result
                .into_iter()
                .map(|diagnostic| diagnostic.concise_message().to_string())
                .collect::<Vec<_>>()
                .join("\n ");
            assert!(
                diagnostics <= max_diagnostics,
                "{project_name}: Expected <={max_diagnostics} diagnostics but got {diagnostics}:\n {details}",
            );
        }
        assert!(
            diagnostics > 1 && diagnostics <= max_diagnostics,
            "Expected between {} and {} diagnostics but got {}",
            1,
            max_diagnostics,
            diagnostics
        );
    }

    setup_rayon();
@@ -551,7 +513,7 @@ fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
    group.bench_function(benchmark.project.config.name, |b| {
        b.iter_batched_ref(
            || benchmark.setup_iteration(),
            |db| check_project(db, benchmark.project.config.name, benchmark.max_diagnostics),
            |db| check_project(db, benchmark.max_diagnostics),
            BatchSize::SmallInput,
        );
    });
@@ -608,23 +570,6 @@ fn anyio(criterion: &mut Criterion) {
    bench_project(&benchmark, criterion);
}

fn datetype(criterion: &mut Criterion) {
    let benchmark = ProjectBenchmark::new(
        RealWorldProject {
            name: "DateType",
            repository: "https://github.com/glyph/DateType",
            commit: "57c9c93cf2468069f72945fc04bf27b64100dad8",
            paths: vec![SystemPath::new("src")],
            dependencies: vec![],
            max_dep_date: "2025-07-04",
            python_version: PythonVersion::PY313,
        },
        2,
    );

    bench_project(&benchmark, criterion);
}

criterion_group!(check_file, benchmark_cold, benchmark_incremental);
criterion_group!(
    micro,
@@ -632,7 +577,6 @@ criterion_group!(
    benchmark_many_tuple_assignments,
    benchmark_complex_constrained_attributes_1,
    benchmark_complex_constrained_attributes_2,
    benchmark_many_enum_members,
);
criterion_group!(project, anyio, attrs, hydra, datetype);
criterion_group!(project, anyio, attrs, hydra);
criterion_main!(check_file, micro, project);

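The removed benchmarks all rely on criterion's `iter_batched_ref` pattern, where an untimed setup closure rebuilds fresh state for each batch so the timed closure never measures setup cost. A minimal, self-contained sketch of that pattern, with a `Vec` standing in for the real `Case`/`ProjectDatabase` setup:

```rust
// Sketch of the `iter_batched_ref` pattern used by the ty benchmarks above.
// The setup closure runs outside the timed region; the routine receives a
// mutable reference to the batch-local state.
use criterion::{BatchSize, Criterion, criterion_group, criterion_main};

fn bench_sum(criterion: &mut Criterion) {
    criterion.bench_function("micro[sum]", |b| {
        b.iter_batched_ref(
            || (0..1024u64).collect::<Vec<_>>(), // per-batch setup, untimed
            |data| {
                // Timed section: consume the batch-local state.
                let total: u64 = data.iter().sum();
                assert_eq!(total, 1024 * 1023 / 2);
            },
            BatchSize::SmallInput,
        );
    });
}

criterion_group!(benches, bench_sum);
criterion_main!(benches);
```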
@@ -242,7 +242,7 @@ fn large(bencher: Bencher, benchmark: &Benchmark) {
    run_single_threaded(bencher, benchmark);
}

#[bench(args=[&*PYDANTIC], sample_size=3, sample_count=8)]
#[bench(args=[&*PYDANTIC], sample_size=3, sample_count=3)]
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
    let thread_pool = ThreadPoolBuilder::new().build().unwrap();


@@ -13,18 +13,17 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true, optional = true }
ruff_diagnostics = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true, features = ["get-size"] }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true, features = ["get-size"] }
ruff_text_size = { workspace = true }
ty_static = { workspace = true }

anstyle = { workspace = true }
arc-swap = { workspace = true }
camino = { workspace = true }
countme = { workspace = true }
dashmap = { workspace = true }
dunce = { workspace = true }
filetime = { workspace = true }
@@ -33,16 +32,13 @@ glob = { workspace = true }
ignore = { workspace = true, optional = true }
matchit = { workspace = true }
path-slash = { workspace = true }
quick-junit = { workspace = true, optional = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
unicode-width = { workspace = true }
zip = { workspace = true }

[target.'cfg(target_arch="wasm32")'.dependencies]
@@ -57,13 +53,7 @@ tempfile = { workspace = true }

[features]
cache = ["ruff_cache"]
junit = ["dep:quick-junit"]
os = ["ignore", "dep:etcetera"]
serde = [
    "camino/serde1",
    "dep:serde",
    "dep:serde_json",
    "ruff_diagnostics/serde",
]
serde = ["dep:serde", "camino/serde1"]
# Exposes testing utilities.
testing = ["tracing-subscriber"]

@@ -1,14 +1,12 @@
use std::{fmt::Formatter, path::Path, sync::Arc};
use std::{fmt::Formatter, sync::Arc};

use ruff_diagnostics::{Applicability, Fix};
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
use render::{FileResolver, Input};
use ruff_source_file::{SourceCode, SourceFile};

use ruff_annotate_snippets::Level as AnnotateLevel;
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_text_size::{Ranged, TextRange};

pub use self::render::{
    DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary,
};
pub use self::render::DisplayDiagnostic;
use crate::{Db, files::File};

mod render;
@@ -64,37 +62,10 @@ impl Diagnostic {
            message: message.into_diagnostic_message(),
            annotations: vec![],
            subs: vec![],
            fix: None,
            parent: None,
            noqa_offset: None,
            secondary_code: None,
        });
        Diagnostic { inner }
    }

    /// Creates a `Diagnostic` for a syntax error.
    ///
    /// Unlike the more general [`Diagnostic::new`], this requires a [`Span`] and a [`TextRange`]
    /// attached to it.
    ///
    /// This should _probably_ be a method on the syntax errors, but
    /// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
    /// the other way around. And since we want to do this conversion in a couple
    /// places, it makes sense to centralize it _somewhere_. So it's here for now.
    ///
    /// Note that `message` is stored in the primary annotation, _not_ in the primary diagnostic
    /// message.
    pub fn invalid_syntax(
        span: impl Into<Span>,
        message: impl IntoDiagnosticMessage,
        range: impl Ranged,
    ) -> Diagnostic {
        let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
        let span = span.into().with_range(range.range());
        diag.annotate(Annotation::primary(span).message(message));
        diag
    }

    /// Add an annotation to this diagnostic.
    ///
    /// Annotations for a diagnostic are optional, but if any are added,
@@ -124,14 +95,7 @@ impl Diagnostic {
    /// directly. If callers want or need to avoid cloning the diagnostic
    /// message, then they can also pass a `DiagnosticMessage` directly.
    pub fn info<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
        self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Info, message));
    }

    /// Adds a "help" sub-diagnostic with the given message.
    ///
    /// See the closely related [`Diagnostic::info`] method for more details.
    pub fn help<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
        self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Help, message));
        self.sub(SubDiagnostic::new(Severity::Info, message));
    }

    /// Adds a "sub" diagnostic to this diagnostic.
@@ -262,11 +226,6 @@ impl Diagnostic {
        self.primary_annotation().map(|ann| ann.span.clone())
    }

    /// Returns a reference to the primary span of this diagnostic.
    pub fn primary_span_ref(&self) -> Option<&Span> {
        self.primary_annotation().map(|ann| &ann.span)
    }

    /// Returns the tags from the primary annotation of this diagnostic if it exists.
    pub fn primary_tags(&self) -> Option<&[DiagnosticTag]> {
        self.primary_annotation().map(|ann| ann.tags.as_slice())
@@ -309,174 +268,6 @@ impl Diagnostic {
    pub fn sub_diagnostics(&self) -> &[SubDiagnostic] {
        &self.inner.subs
    }

    /// Returns the fix for this diagnostic if it exists.
    pub fn fix(&self) -> Option<&Fix> {
        self.inner.fix.as_ref()
    }

    /// Set the fix for this diagnostic.
    pub fn set_fix(&mut self, fix: Fix) {
        debug_assert!(
            self.primary_span().is_some(),
            "Expected a source file for a diagnostic with a fix"
        );
        Arc::make_mut(&mut self.inner).fix = Some(fix);
    }

    /// Remove the fix for this diagnostic.
    pub fn remove_fix(&mut self) {
        Arc::make_mut(&mut self.inner).fix = None;
    }

    /// Returns `true` if the diagnostic contains a [`Fix`].
    pub fn fixable(&self) -> bool {
        self.fix().is_some()
    }

    /// Returns the offset of the parent statement for this diagnostic if it exists.
    ///
    /// This is primarily used for checking noqa/secondary code suppressions.
    pub fn parent(&self) -> Option<TextSize> {
        self.inner.parent
    }

    /// Set the offset of the diagnostic's parent statement.
    pub fn set_parent(&mut self, parent: TextSize) {
        Arc::make_mut(&mut self.inner).parent = Some(parent);
    }

    /// Returns the remapped offset for a suppression comment if it exists.
    ///
    /// Like [`Diagnostic::parent`], this is used for noqa code suppression comments in Ruff.
    pub fn noqa_offset(&self) -> Option<TextSize> {
        self.inner.noqa_offset
    }

    /// Set the remapped offset for a suppression comment.
    pub fn set_noqa_offset(&mut self, noqa_offset: TextSize) {
        Arc::make_mut(&mut self.inner).noqa_offset = Some(noqa_offset);
    }

    /// Returns the secondary code for the diagnostic if it exists.
    ///
    /// The "primary" code for the diagnostic is its lint name. Diagnostics in ty don't have
    /// secondary codes (yet), but in Ruff the noqa code is used.
    pub fn secondary_code(&self) -> Option<&SecondaryCode> {
        self.inner.secondary_code.as_ref()
    }

    /// Set the secondary code for this diagnostic.
    pub fn set_secondary_code(&mut self, code: SecondaryCode) {
        Arc::make_mut(&mut self.inner).secondary_code = Some(code);
    }

    /// Returns the name used to represent the diagnostic.
    pub fn name(&self) -> &'static str {
        self.id().as_str()
    }

    /// Returns `true` if `self` is a syntax error message.
    pub fn is_invalid_syntax(&self) -> bool {
        self.id().is_invalid_syntax()
    }

    /// Returns the message body to display to the user.
    pub fn body(&self) -> &str {
        self.primary_message()
    }

    /// Returns the message of the first sub-diagnostic with a `Help` severity.
    ///
    /// Note that this is used as the fix title/suggestion for some of Ruff's output formats, but in
    /// general this is not the guaranteed meaning of such a message.
    pub fn first_help_text(&self) -> Option<&str> {
        self.sub_diagnostics()
            .iter()
            .find(|sub| matches!(sub.inner.severity, SubDiagnosticSeverity::Help))
            .map(|sub| sub.inner.message.as_str())
    }

    /// Returns the URL for the rule documentation, if it exists.
    pub fn to_ruff_url(&self) -> Option<String> {
        if self.is_invalid_syntax() {
            None
        } else {
            Some(format!(
                "{}/rules/{}",
                env!("CARGO_PKG_HOMEPAGE"),
                self.name()
            ))
        }
    }

    /// Returns the filename for the message.
    ///
    /// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
    pub fn expect_ruff_filename(&self) -> String {
        self.expect_primary_span()
            .expect_ruff_file()
            .name()
            .to_string()
    }

    /// Computes the start source location for the message.
    ///
    /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
    /// span has no range.
    pub fn expect_ruff_start_location(&self) -> LineColumn {
        self.expect_primary_span()
            .expect_ruff_file()
            .to_source_code()
            .line_column(self.expect_range().start())
    }

    /// Computes the end source location for the message.
    ///
    /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
    /// span has no range.
    pub fn expect_ruff_end_location(&self) -> LineColumn {
        self.expect_primary_span()
            .expect_ruff_file()
            .to_source_code()
            .line_column(self.expect_range().end())
    }

    /// Returns the [`SourceFile`] which the message belongs to.
    pub fn ruff_source_file(&self) -> Option<&SourceFile> {
        self.primary_span_ref()?.as_ruff_file()
    }

    /// Returns the [`SourceFile`] which the message belongs to.
    ///
    /// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
    pub fn expect_ruff_source_file(&self) -> &SourceFile {
        self.ruff_source_file()
            .expect("Expected a ruff source file")
    }

    /// Returns the [`TextRange`] for the diagnostic.
    pub fn range(&self) -> Option<TextRange> {
        self.primary_span()?.range()
    }

    /// Returns the [`TextRange`] for the diagnostic.
    ///
    /// Panics if the diagnostic has no primary span or if the span has no range.
    pub fn expect_range(&self) -> TextRange {
        self.range().expect("Expected a range for the primary span")
    }

    /// Returns the ordering of diagnostics based on the start of their ranges, if they have any.
    ///
    /// Panics if either diagnostic has no primary span, if the span has no range, or if its file is
    /// not a `SourceFile`.
    pub fn ruff_start_ordering(&self, other: &Self) -> std::cmp::Ordering {
        (self.expect_ruff_source_file(), self.expect_range().start()).cmp(&(
            other.expect_ruff_source_file(),
            other.expect_range().start(),
        ))
    }
}

#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
@@ -486,10 +277,6 @@ struct DiagnosticInner {
    message: DiagnosticMessage,
    annotations: Vec<Annotation>,
    subs: Vec<SubDiagnostic>,
    fix: Option<Fix>,
    parent: Option<TextSize>,
    noqa_offset: Option<TextSize>,
    secondary_code: Option<SecondaryCode>,
}

struct RenderingSortKey<'a> {
@@ -580,10 +367,7 @@ impl SubDiagnostic {
    /// Callers can pass anything that implements `std::fmt::Display`
    /// directly. If callers want or need to avoid cloning the diagnostic
    /// message, then they can also pass a `DiagnosticMessage` directly.
    pub fn new<'a>(
        severity: SubDiagnosticSeverity,
        message: impl IntoDiagnosticMessage + 'a,
    ) -> SubDiagnostic {
    pub fn new<'a>(severity: Severity, message: impl IntoDiagnosticMessage + 'a) -> SubDiagnostic {
        let inner = Box::new(SubDiagnosticInner {
            severity,
            message: message.into_diagnostic_message(),
@@ -661,7 +445,7 @@ impl SubDiagnostic {

#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
struct SubDiagnosticInner {
    severity: SubDiagnosticSeverity,
    severity: Severity,
    message: DiagnosticMessage,
    annotations: Vec<Annotation>,
}
@@ -1030,18 +814,6 @@ impl UnifiedFile {
        }
    }

    /// Return the file's path relative to the current working directory.
    pub fn relative_path<'a>(&'a self, resolver: &'a dyn FileResolver) -> &'a Path {
        let cwd = resolver.current_directory();
        let path = Path::new(self.path(resolver));

        if let Ok(path) = path.strip_prefix(cwd) {
            return path;
        }

        path
    }

    fn diagnostic_source(&self, resolver: &dyn FileResolver) -> DiagnosticSource {
        match self {
            UnifiedFile::Ty(file) => DiagnosticSource::Ty(resolver.input(*file)),
@@ -1125,15 +897,9 @@ impl Span {
    ///
    /// Panics if the file is a [`UnifiedFile::Ty`] instead of a [`UnifiedFile::Ruff`].
    pub fn expect_ruff_file(&self) -> &SourceFile {
        self.as_ruff_file()
            .expect("Expected a ruff `SourceFile`, found a ty `File`")
    }

    /// Returns the [`SourceFile`] attached to this [`Span`].
    pub fn as_ruff_file(&self) -> Option<&SourceFile> {
        match &self.file {
            UnifiedFile::Ty(_) => None,
            UnifiedFile::Ruff(file) => Some(file),
            UnifiedFile::Ty(_) => panic!("Expected a ruff `SourceFile`, found a ty `File`"),
            UnifiedFile::Ruff(file) => file,
        }
    }
}
@@ -1188,32 +954,6 @@ impl Severity {
    }
}

/// Like [`Severity`] but exclusively for sub-diagnostics.
///
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
/// deleted and the two combined again.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
pub enum SubDiagnosticSeverity {
    Help,
    Info,
    Warning,
    Error,
    Fatal,
}

impl SubDiagnosticSeverity {
    fn to_annotate(self) -> AnnotateLevel {
        match self {
            SubDiagnosticSeverity::Help => AnnotateLevel::Help,
            SubDiagnosticSeverity::Info => AnnotateLevel::Info,
            SubDiagnosticSeverity::Warning => AnnotateLevel::Warning,
            SubDiagnosticSeverity::Error => AnnotateLevel::Error,
            SubDiagnosticSeverity::Fatal => AnnotateLevel::Error,
        }
    }
}

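The removed `SubDiagnosticSeverity` illustrates a general design: sub-diagnostics get a severity enum that is a superset of the main one (adding `Help`), with a lossy mapping back down to the shared annotation level. A self-contained sketch of that pattern, with illustrative names rather than the crate's real API:

```rust
// Sketch of the "superset severity" pattern removed above; `Level` stands in
// for `Severity`/`AnnotateLevel`, and `SubLevel` for `SubDiagnosticSeverity`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Level {
    Info,
    Warning,
    Error,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SubLevel {
    Help,
    Info,
    Warning,
    Error,
}

impl SubLevel {
    fn to_level(self) -> Level {
        match self {
            // `Help` has no main-diagnostic equivalent; render it like `Info`.
            SubLevel::Help | SubLevel::Info => Level::Info,
            SubLevel::Warning => Level::Warning,
            SubLevel::Error => Level::Error,
        }
    }
}

fn main() {
    assert_eq!(SubLevel::Help.to_level(), Level::Info);
}
```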
/// Configuration for rendering diagnostics.
#[derive(Clone, Debug)]
pub struct DisplayDiagnosticConfig {
@@ -1234,21 +974,6 @@ pub struct DisplayDiagnosticConfig {
    /// here for now as the most "sensible" place for it to live until
    /// we had more concrete use cases. ---AG
    context: usize,
    /// Whether to use preview formatting for Ruff diagnostics.
    #[allow(
        dead_code,
        reason = "This is currently only used for JSON but will be needed soon for other formats"
    )]
    preview: bool,
    /// Whether to hide the real `Severity` of diagnostics.
    ///
    /// This is intended for temporary use by Ruff, which only has a single `error` severity at the
    /// moment. We should be able to remove this option when Ruff gets more severities.
    hide_severity: bool,
    /// Whether to show the availability of a fix in a diagnostic.
    show_fix_status: bool,
    /// The lowest applicability that should be shown when reporting diagnostics.
    fix_applicability: Applicability,
}

impl DisplayDiagnosticConfig {
@@ -1269,43 +994,6 @@ impl DisplayDiagnosticConfig {
            ..self
        }
    }

    /// Whether to enable preview behavior or not.
    pub fn preview(self, yes: bool) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            preview: yes,
            ..self
        }
    }

    /// Whether to hide a diagnostic's severity or not.
    pub fn hide_severity(self, yes: bool) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            hide_severity: yes,
            ..self
        }
    }

    /// Whether to show a fix's availability or not.
    pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            show_fix_status: yes,
            ..self
        }
    }

    /// Set the lowest fix applicability that should be shown.
    ///
    /// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
    /// availability for unsafe or display-only fixes.
    ///
    /// Note that this option is currently ignored when `hide_severity` is false.
    pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
        DisplayDiagnosticConfig {
            fix_applicability: applicability,
            ..self
        }
    }
}

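The setters removed above all follow the same by-value builder shape: each method consumes `self` and rebuilds the struct with struct-update syntax. A self-contained sketch of the pattern, with illustrative names:

```rust
// Sketch of the `..self` builder pattern used by DisplayDiagnosticConfig.
#[derive(Clone, Debug, Default)]
struct RenderConfig {
    preview: bool,
    show_fix_status: bool,
}

impl RenderConfig {
    fn preview(self, yes: bool) -> RenderConfig {
        RenderConfig { preview: yes, ..self }
    }

    fn show_fix_status(self, yes: bool) -> RenderConfig {
        RenderConfig { show_fix_status: yes, ..self }
    }
}

fn main() {
    // Calls chain naturally because each setter returns the updated value.
    let config = RenderConfig::default().preview(true).show_fix_status(true);
    println!("{config:?}");
}
```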
impl Default for DisplayDiagnosticConfig {
@@ -1314,10 +1002,6 @@ impl Default for DisplayDiagnosticConfig {
            format: DiagnosticFormat::default(),
            color: false,
            context: 2,
            preview: false,
            hide_severity: false,
            show_fix_status: false,
            fix_applicability: Applicability::Safe,
        }
    }
}
@@ -1345,31 +1029,6 @@ pub enum DiagnosticFormat {
    ///
    /// This may use color when printing to a `tty`.
    Concise,
    /// Print diagnostics in the [Azure Pipelines] format.
    ///
    /// [Azure Pipelines]: https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning
    Azure,
    /// Print diagnostics in JSON format.
    ///
    /// Unlike `json-lines`, this prints all of the diagnostics as a JSON array.
    #[cfg(feature = "serde")]
    Json,
    /// Print diagnostics in JSON format, one per line.
    ///
    /// This will print each diagnostic as a separate JSON object on its own line. See the `json`
    /// format for an array of all diagnostics. See <https://jsonlines.org/> for more details.
    #[cfg(feature = "serde")]
    JsonLines,
    /// Print diagnostics in the JSON format expected by [reviewdog].
    ///
    /// [reviewdog]: https://github.com/reviewdog/reviewdog
    #[cfg(feature = "serde")]
    Rdjson,
    /// Print diagnostics in the format emitted by Pylint.
    Pylint,
    /// Print diagnostics in the format expected by JUnit.
    #[cfg(feature = "junit")]
    Junit,
}

/// A representation of the kinds of messages inside a diagnostic.
@@ -1488,52 +1147,41 @@ impl<T: std::fmt::Display> IntoDiagnosticMessage for T {
    }
}

/// A secondary identifier for a lint diagnostic.
/// Creates a `Diagnostic` from a parse error.
///
/// For Ruff rules this means the noqa code.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, get_size2::GetSize)]
#[cfg_attr(feature = "serde", derive(serde::Serialize), serde(transparent))]
pub struct SecondaryCode(String);

impl SecondaryCode {
    pub fn new(code: String) -> Self {
        Self(code)
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }
/// This should _probably_ be a method on `ruff_python_parser::ParseError`, but
/// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
/// the other way around. And since we want to do this conversion in a couple
/// places, it makes sense to centralize it _somewhere_. So it's here for now.
pub fn create_parse_diagnostic(file: File, err: &ruff_python_parser::ParseError) -> Diagnostic {
    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
    let span = Span::from(file).with_range(err.location);
    diag.annotate(Annotation::primary(span).message(&err.error));
    diag
}

impl std::fmt::Display for SecondaryCode {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
/// Creates a `Diagnostic` from an unsupported syntax error.
///
/// See [`create_parse_diagnostic`] for more details.
pub fn create_unsupported_syntax_diagnostic(
    file: File,
    err: &ruff_python_parser::UnsupportedSyntaxError,
) -> Diagnostic {
    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
    let span = Span::from(file).with_range(err.range);
    diag.annotate(Annotation::primary(span).message(err.to_string()));
    diag
}

impl std::ops::Deref for SecondaryCode {
    type Target = str;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl PartialEq<&str> for SecondaryCode {
    fn eq(&self, other: &&str) -> bool {
        self.0 == *other
    }
}

impl PartialEq<SecondaryCode> for &str {
    fn eq(&self, other: &SecondaryCode) -> bool {
        other.eq(self)
    }
}

// for `hashbrown::EntryRef`
impl From<&SecondaryCode> for SecondaryCode {
    fn from(value: &SecondaryCode) -> Self {
        value.clone()
    }
/// Creates a `Diagnostic` from a semantic syntax error.
///
/// See [`create_parse_diagnostic`] for more details.
pub fn create_semantic_syntax_diagnostic(
    file: File,
    err: &ruff_python_parser::semantic_errors::SemanticSyntaxError,
) -> Diagnostic {
    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
    let span = Span::from(file).with_range(err.range);
    diag.annotate(Annotation::primary(span).message(err.to_string()));
    diag
}

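A hedged call-site sketch for the new free functions above, collecting a module's syntax errors into diagnostics. `Parsed::errors()` is assumed to match the `ruff_python_parser` accessor used elsewhere in the repo; the function shape itself is illustrative.

```rust
// Sketch only: assumes `File`, `Diagnostic`, and `create_parse_diagnostic`
// from the module above are in scope, and that `Parsed::errors()` yields
// `&[ParseError]` as in ruff_python_parser.
fn syntax_diagnostics(
    file: File,
    parsed: &ruff_python_parser::Parsed<ruff_python_ast::ModModule>,
) -> Vec<Diagnostic> {
    parsed
        .errors()
        .iter()
        .map(|err| create_parse_diagnostic(file, err))
        .collect()
}
```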
File diff suppressed because it is too large
@@ -1,83 +0,0 @@
use ruff_source_file::LineColumn;

use crate::diagnostic::{Diagnostic, Severity};

use super::FileResolver;

pub(super) struct AzureRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> AzureRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }
}

impl AzureRenderer<'_> {
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diag in diagnostics {
            let severity = match diag.severity() {
                Severity::Info | Severity::Warning => "warning",
                Severity::Error | Severity::Fatal => "error",
            };
            write!(f, "##vso[task.logissue type={severity};")?;
            if let Some(span) = diag.primary_span() {
                let filename = span.file().path(self.resolver);
                write!(f, "sourcepath={filename};")?;
                if let Some(range) = span.range() {
                    let location = if self.resolver.notebook_index(span.file()).is_some() {
                        // We can't give a reasonable location for the structured formats,
                        // so we show one that's clearly a fallback
                        LineColumn::default()
                    } else {
                        span.file()
                            .diagnostic_source(self.resolver)
                            .as_source_code()
                            .line_column(range.start())
                    };
                    write!(
                        f,
                        "linenumber={line};columnnumber={col};",
                        line = location.line,
                        col = location.column,
                    )?;
                }
            }
            writeln!(
                f,
                "{code}]{body}",
                code = diag
                    .secondary_code()
                    .map_or_else(String::new, |code| format!("code={code};")),
                body = diag.body(),
            )?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Azure);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Azure);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }
}
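The deleted Azure renderer writes one `##vso[task.logissue]` logging command per diagnostic, which Azure Pipelines picks up from build output. A self-contained sketch of the line it assembles, with illustrative values:

```rust
// Sketch of the Azure Pipelines logging command emitted per diagnostic by the
// renderer above: type, then optional sourcepath/linenumber/columnnumber/code
// properties, then the message body after the closing bracket.
fn main() {
    let (severity, path, line, col, code, body) =
        ("error", "input.py", 2, 5, "F821", "Undefined name `y`");
    println!(
        "##vso[task.logissue type={severity};sourcepath={path};linenumber={line};columnnumber={col};code={code};]{body}"
    );
}
```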
@@ -1,195 +0,0 @@
use crate::diagnostic::{
    Diagnostic, DisplayDiagnosticConfig, Severity,
    stylesheet::{DiagnosticStylesheet, fmt_styled},
};

use super::FileResolver;

pub(super) struct ConciseRenderer<'a> {
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
}

impl<'a> ConciseRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
        Self { resolver, config }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        let stylesheet = if self.config.color {
            DiagnosticStylesheet::styled()
        } else {
            DiagnosticStylesheet::plain()
        };

        let sep = fmt_styled(":", stylesheet.separator);
        for diag in diagnostics {
            if let Some(span) = diag.primary_span() {
                write!(
                    f,
                    "{path}",
                    path = fmt_styled(
                        span.file().relative_path(self.resolver).to_string_lossy(),
                        stylesheet.emphasis
                    )
                )?;
                if let Some(range) = span.range() {
                    let diagnostic_source = span.file().diagnostic_source(self.resolver);
                    let start = diagnostic_source
                        .as_source_code()
                        .line_column(range.start());

                    if let Some(notebook_index) = self.resolver.notebook_index(span.file()) {
                        write!(
                            f,
                            "{sep}cell {cell}{sep}{line}{sep}{col}",
                            cell = notebook_index.cell(start.line).unwrap_or_default(),
                            line = notebook_index.cell_row(start.line).unwrap_or_default(),
                            col = start.column,
                        )?;
                    } else {
                        write!(
                            f,
                            "{sep}{line}{sep}{col}",
                            line = start.line,
                            col = start.column,
                        )?;
                    }
                }
                write!(f, "{sep} ")?;
            }
            if self.config.hide_severity {
                if let Some(code) = diag.secondary_code() {
                    write!(
                        f,
                        "{code} ",
                        code = fmt_styled(code, stylesheet.secondary_code)
                    )?;
                }
                if self.config.show_fix_status {
                    if let Some(fix) = diag.fix() {
                        // Do not display an indicator for inapplicable fixes
                        if fix.applies(self.config.fix_applicability) {
                            write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
                        }
                    }
                }
            } else {
                let (severity, severity_style) = match diag.severity() {
                    Severity::Info => ("info", stylesheet.info),
                    Severity::Warning => ("warning", stylesheet.warning),
                    Severity::Error => ("error", stylesheet.error),
                    Severity::Fatal => ("fatal", stylesheet.error),
                };
                write!(
                    f,
                    "{severity}[{id}] ",
                    severity = fmt_styled(severity, severity_style),
                    id = fmt_styled(diag.id(), stylesheet.emphasis)
                )?;
            }

            writeln!(f, "{message}", message = diag.concise_message())?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use ruff_diagnostics::Applicability;

    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{
            TestEnvironment, create_diagnostics, create_notebook_diagnostics,
            create_syntax_error_diagnostics,
        },
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        fib.py:1:8: error[unused-import] `os` imported but unused
        fib.py:6:5: error[unused-variable] Local variable `x` is assigned to but never used
        undef.py:1:4: error[undefined-name] Undefined name `a`
        ");
    }

    #[test]
    fn show_fixes() {
        let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
        env.hide_severity(true);
        env.show_fix_status(true);
        env.fix_applicability(Applicability::DisplayOnly);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        fib.py:1:8: F401 [*] `os` imported but unused
        fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
        undef.py:1:4: F821 Undefined name `a`
        ");
    }

    #[test]
    fn show_fixes_preview() {
        let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
        env.hide_severity(true);
        env.show_fix_status(true);
        env.fix_applicability(Applicability::DisplayOnly);
        env.preview(true);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        fib.py:1:8: F401 [*] `os` imported but unused
        fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
        undef.py:1:4: F821 Undefined name `a`
        ");
    }

    #[test]
    fn show_fixes_syntax_errors() {
        let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
        env.hide_severity(true);
        env.show_fix_status(true);
        env.fix_applicability(Applicability::DisplayOnly);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
        syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
        ");
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        syntax_errors.py:1:15: error[invalid-syntax] SyntaxError: Expected one or more symbol names after import
        syntax_errors.py:3:12: error[invalid-syntax] SyntaxError: Expected ')', found newline
        ");
    }

    #[test]
    fn notebook_output() {
        let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Concise);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        notebook.ipynb:cell 1:2:8: error[unused-import] `os` imported but unused
        notebook.ipynb:cell 2:2:8: error[unused-import] `math` imported but unused
        notebook.ipynb:cell 3:4:5: error[unused-variable] Local variable `x` is assigned to but never used
        ");
    }

    #[test]
    fn missing_file() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Concise);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @"error[test-diagnostic] main diagnostic message",
        );
    }
}
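As the tests above show, the concise format is a single line per diagnostic: `path:line:col: severity[id] message`, with a `cell N` segment spliced in for notebooks and the `severity[id]` prefix replaced by the secondary code when severity is hidden. A self-contained sketch of the default shape, with illustrative values:

```rust
// Sketch of the one-line concise format assembled by the renderer above.
fn main() {
    let (path, line, col, severity, id, message) =
        ("fib.py", 1, 8, "error", "unused-import", "`os` imported but unused");
    println!("{path}:{line}:{col}: {severity}[{id}] {message}");
}
```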
@@ -1,180 +0,0 @@
#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat, Severity,
        render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
        error[unused-import]: `os` imported but unused
         --> fib.py:1:8
          |
        1 | import os
          |        ^^
          |
        help: Remove unused import: `os`

        error[unused-variable]: Local variable `x` is assigned to but never used
         --> fib.py:6:5
          |
        4 | def fibonacci(n):
        5 |     """Compute the nth number in the Fibonacci sequence."""
        6 |     x = 1
          |     ^
        7 |     if n == 0:
        8 |         return 0
          |
        help: Remove assignment to unused variable `x`

        error[undefined-name]: Undefined name `a`
         --> undef.py:1:4
          |
        1 | if a == 1: pass
          |    ^
          |
        "#);
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        error[invalid-syntax]: SyntaxError: Expected one or more symbol names after import
         --> syntax_errors.py:1:15
          |
        1 | from os import
          |               ^
        2 |
        3 | if call(foo
          |

        error[invalid-syntax]: SyntaxError: Expected ')', found newline
         --> syntax_errors.py:3:12
          |
        1 | from os import
        2 |
        3 | if call(foo
          |            ^
        4 | def bar():
        5 |     pass
          |
        ");
    }

    /// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit c9b99e4.
    ///
    /// For example, without the fix, we get diagnostics like this:
    ///
    /// ```
    /// error[no-indented-block]: Expected an indented block
    ///  --> example.py:3:1
    ///   |
    /// 2 | if False:
    ///   |          ^
    /// 3 | print()
    ///   |
    /// ```
    ///
    /// where the caret points to the end of the previous line instead of the start of the next.
    #[test]
    fn empty_span_after_line_terminator() {
        let mut env = TestEnvironment::new();
        env.add(
            "example.py",
            r#"
if False:
print()
"#,
        );
        env.format(DiagnosticFormat::Full);

        let diagnostic = env
            .builder(
                "no-indented-block",
                Severity::Error,
                "Expected an indented block",
            )
            .primary("example.py", "3:0", "3:0", "")
            .build();

        insta::assert_snapshot!(env.render(&diagnostic), @r"
        error[no-indented-block]: Expected an indented block
         --> example.py:3:1
          |
        2 | if False:
        3 | print()
          | ^
          |
        ");
    }

    /// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit 2922490.
    ///
    /// For example, without the fix, we get diagnostics like this:
    ///
    /// ```
    /// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
    ///  --> example.py:1:25
    ///   |
    /// 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
    ///   |                         ^
    ///   |
    /// ```
    ///
    /// where the caret points to the `f` in the f-string instead of the start of the invalid
    /// character (`^Z`).
    #[test]
    fn unprintable_characters() {
        let mut env = TestEnvironment::new();
        env.add("example.py", "nested_fstrings = f'{f'{f''}'}'");
        env.format(DiagnosticFormat::Full);

        let diagnostic = env
            .builder(
                "invalid-character-sub",
                Severity::Error,
                r#"Invalid unescaped character SUB, use "\x1A" instead"#,
            )
            .primary("example.py", "1:24", "1:24", "")
            .build();

        insta::assert_snapshot!(env.render(&diagnostic), @r#"
        error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
         --> example.py:1:25
          |
        1 | nested_fstrings = f'␈{f'{f'␛'}'}'
          |                         ^
          |
        "#);
    }

    #[test]
    fn multiple_unprintable_characters() -> std::io::Result<()> {
        let mut env = TestEnvironment::new();
        env.add("example.py", "");
        env.format(DiagnosticFormat::Full);

        let diagnostic = env
            .builder(
                "invalid-character-sub",
                Severity::Error,
                r#"Invalid unescaped character SUB, use "\x1A" instead"#,
            )
            .primary("example.py", "1:1", "1:1", "")
            .build();

        insta::assert_snapshot!(env.render(&diagnostic), @r#"
        error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
         --> example.py:1:2
          |
        1 | ␈␛
          |  ^
          |
        "#);

        Ok(())
    }
}
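The `empty_span_after_line_terminator` test above guards a caret-placement rule: an empty span that sits just past a line terminator should be reported at the start of the next line, not at the end of the previous one. A self-contained sketch of that rule (the helper is illustrative, not the crate's real implementation):

```rust
// Sketch: map a byte offset to (line, column), so that an offset immediately
// after '\n' reports column 1 of the following line.
fn caret_line_col(source: &str, offset: usize) -> (usize, usize) {
    let upto = &source[..offset];
    // Lines are 1-based; each '\n' before the offset starts a new line.
    let line = upto.matches('\n').count() + 1;
    // Column is measured from the byte after the last '\n' (or the start).
    let col = offset - upto.rfind('\n').map_or(0, |i| i + 1) + 1;
    (line, col)
}

fn main() {
    let src = "if False:\nprint()\n";
    // Offset 10 is the first byte after the newline -> line 2, column 1.
    assert_eq!(caret_line_col(src, 10), (2, 1));
}
```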
@@ -1,352 +0,0 @@
|
||||
use serde::{Serialize, Serializer, ser::SerializeSeq};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use ruff_diagnostics::{Applicability, Edit};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig, SecondaryCode};
|
||||
|
||||
use super::FileResolver;
|
||||
|
||||
pub(super) struct JsonRenderer<'a> {
|
||||
resolver: &'a dyn FileResolver,
|
||||
config: &'a DisplayDiagnosticConfig,
|
||||
}
|
||||
|
||||
impl<'a> JsonRenderer<'a> {
|
||||
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
|
||||
Self { resolver, config }
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonRenderer<'_> {
|
||||
pub(super) fn render(
|
||||
&self,
|
||||
f: &mut std::fmt::Formatter,
|
||||
diagnostics: &[Diagnostic],
|
||||
) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{:#}",
|
||||
diagnostics_to_json_value(diagnostics, self.resolver, self.config)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn diagnostics_to_json_value<'a>(
    diagnostics: impl IntoIterator<Item = &'a Diagnostic>,
    resolver: &dyn FileResolver,
    config: &DisplayDiagnosticConfig,
) -> Value {
    let values: Vec<_> = diagnostics
        .into_iter()
        .map(|diag| diagnostic_to_json(diag, resolver, config))
        .collect();
    json!(values)
}

pub(super) fn diagnostic_to_json<'a>(
    diagnostic: &'a Diagnostic,
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
) -> JsonDiagnostic<'a> {
    let span = diagnostic.primary_span_ref();
    let filename = span.map(|span| span.file().path(resolver));
    let range = span.and_then(|span| span.range());
    let diagnostic_source = span.map(|span| span.file().diagnostic_source(resolver));
    let source_code = diagnostic_source
        .as_ref()
        .map(|diagnostic_source| diagnostic_source.as_source_code());
    let notebook_index = span.and_then(|span| resolver.notebook_index(span.file()));

    let mut start_location = None;
    let mut end_location = None;
    let mut noqa_location = None;
    let mut notebook_cell_index = None;
    if let Some(source_code) = source_code {
        noqa_location = diagnostic
            .noqa_offset()
            .map(|offset| source_code.line_column(offset));
        if let Some(range) = range {
            let mut start = source_code.line_column(range.start());
            let mut end = source_code.line_column(range.end());
            if let Some(notebook_index) = &notebook_index {
                notebook_cell_index =
                    Some(notebook_index.cell(start.line).unwrap_or(OneIndexed::MIN));
                start = notebook_index.translate_line_column(&start);
                end = notebook_index.translate_line_column(&end);
                noqa_location =
                    noqa_location.map(|location| notebook_index.translate_line_column(&location));
            }
            start_location = Some(start);
            end_location = Some(end);
        }
    }

    let fix = diagnostic.fix().map(|fix| JsonFix {
        applicability: fix.applicability(),
        message: diagnostic.first_help_text(),
        edits: ExpandedEdits {
            edits: fix.edits(),
            notebook_index,
            config,
            diagnostic_source,
        },
    });

    // In preview, the locations and filename can be optional.
    if config.preview {
        JsonDiagnostic {
            code: diagnostic.secondary_code(),
            url: diagnostic.to_ruff_url(),
            message: diagnostic.body(),
            fix,
            cell: notebook_cell_index,
            location: start_location.map(JsonLocation::from),
            end_location: end_location.map(JsonLocation::from),
            filename,
            noqa_row: noqa_location.map(|location| location.line),
        }
    } else {
        JsonDiagnostic {
            code: diagnostic.secondary_code(),
            url: diagnostic.to_ruff_url(),
            message: diagnostic.body(),
            fix,
            cell: notebook_cell_index,
            location: Some(start_location.unwrap_or_default().into()),
            end_location: Some(end_location.unwrap_or_default().into()),
            filename: Some(filename.unwrap_or_default()),
            noqa_row: noqa_location.map(|location| location.line),
        }
    }
}

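/// The edits of a fix, serialized as a JSON array with byte offsets translated into
/// line/column locations (adjusted for notebook cells when a `NotebookIndex` is present).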
struct ExpandedEdits<'a> {
    edits: &'a [Edit],
    notebook_index: Option<NotebookIndex>,
    config: &'a DisplayDiagnosticConfig,
    diagnostic_source: Option<DiagnosticSource>,
}

impl Serialize for ExpandedEdits<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(self.edits.len()))?;

        for edit in self.edits {
            let (location, end_location) = if let Some(diagnostic_source) = &self.diagnostic_source
            {
                let source_code = diagnostic_source.as_source_code();
                let mut location = source_code.line_column(edit.start());
                let mut end_location = source_code.line_column(edit.end());

                if let Some(notebook_index) = &self.notebook_index {
                    // There exists a newline between each cell's source code in the
                    // concatenated source code in Ruff. This newline doesn't actually
                    // exist in the JSON source field.
                    //
                    // Now, certain edits may try to remove this newline, which means
                    // the edit will spill over to the first character of the next cell.
                    // If it does, we need to translate the end location to the last
                    // character of the previous cell.
                    match (
                        notebook_index.cell(location.line),
                        notebook_index.cell(end_location.line),
                    ) {
                        (Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
                            debug_assert_eq!(end_location.column.get(), 1);

                            let prev_row = end_location.line.saturating_sub(1);
                            end_location = LineColumn {
                                line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
                                column: source_code
                                    .line_column(source_code.line_end_exclusive(prev_row))
                                    .column,
                            };
                        }
                        (Some(_), None) => {
                            debug_assert_eq!(end_location.column.get(), 1);

                            let prev_row = end_location.line.saturating_sub(1);
                            end_location = LineColumn {
                                line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
                                column: source_code
                                    .line_column(source_code.line_end_exclusive(prev_row))
                                    .column,
                            };
                        }
                        _ => {
                            end_location = notebook_index.translate_line_column(&end_location);
                        }
                    }
                    location = notebook_index.translate_line_column(&location);
                }

                (Some(location), Some(end_location))
            } else {
                (None, None)
            };

            // In preview, the locations can be optional.
            let value = if self.config.preview {
                JsonEdit {
                    content: edit.content().unwrap_or_default(),
                    location: location.map(JsonLocation::from),
                    end_location: end_location.map(JsonLocation::from),
                }
            } else {
                JsonEdit {
                    content: edit.content().unwrap_or_default(),
                    location: Some(location.unwrap_or_default().into()),
                    end_location: Some(end_location.unwrap_or_default().into()),
                }
            };

            s.serialize_element(&value)?;
        }

        s.end()
    }
}

/// A serializable version of `Diagnostic`.
///
/// The `Old` variant only exists to preserve backwards compatibility. Both this and `JsonEdit`
/// should become structs with the `New` definitions in a future Ruff release.
#[derive(Serialize)]
pub(crate) struct JsonDiagnostic<'a> {
    cell: Option<OneIndexed>,
    code: Option<&'a SecondaryCode>,
    end_location: Option<JsonLocation>,
    filename: Option<&'a str>,
    fix: Option<JsonFix<'a>>,
    location: Option<JsonLocation>,
    message: &'a str,
    noqa_row: Option<OneIndexed>,
    url: Option<String>,
}

#[derive(Serialize)]
struct JsonFix<'a> {
    applicability: Applicability,
    edits: ExpandedEdits<'a>,
    message: Option<&'a str>,
}

#[derive(Serialize)]
struct JsonLocation {
    column: OneIndexed,
    row: OneIndexed,
}

impl From<LineColumn> for JsonLocation {
    fn from(location: LineColumn) -> Self {
        JsonLocation {
            row: location.line,
            column: location.column,
        }
    }
}

#[derive(Serialize)]
struct JsonEdit<'a> {
    content: &'a str,
    end_location: Option<JsonLocation>,
    location: Option<JsonLocation>,
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{
            TestEnvironment, create_diagnostics, create_notebook_diagnostics,
            create_syntax_error_diagnostics,
        },
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Json);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Json);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn notebook_output() {
        let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Json);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn missing_file_stable() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Json);
        env.preview(false);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @r#"
        [
          {
            "cell": null,
            "code": null,
            "end_location": {
              "column": 1,
              "row": 1
            },
            "filename": "",
            "fix": null,
            "location": {
              "column": 1,
              "row": 1
            },
            "message": "main diagnostic message",
            "noqa_row": null,
            "url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
          }
        ]
        "#,
        );
    }

    #[test]
    fn missing_file_preview() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Json);
        env.preview(true);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @r#"
        [
          {
            "cell": null,
            "code": null,
            "end_location": null,
            "filename": null,
            "fix": null,
            "location": null,
            "message": "main diagnostic message",
            "noqa_row": null,
            "url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
          }
        ]
        "#,
        );
    }
}
@@ -1,59 +0,0 @@
use crate::diagnostic::{Diagnostic, DisplayDiagnosticConfig, render::json::diagnostic_to_json};

use super::FileResolver;

pub(super) struct JsonLinesRenderer<'a> {
    resolver: &'a dyn FileResolver,
    config: &'a DisplayDiagnosticConfig,
}

impl<'a> JsonLinesRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
        Self { resolver, config }
    }
}

impl JsonLinesRenderer<'_> {
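    /// Write each diagnostic as a single JSON object on its own line.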
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diag in diagnostics {
            writeln!(
                f,
                "{}",
                serde_json::json!(diagnostic_to_json(diag, self.resolver, self.config))
            )?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{
            create_diagnostics, create_notebook_diagnostics, create_syntax_error_diagnostics,
        },
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn notebook_output() {
        let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::JsonLines);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }
}
@@ -1,195 +0,0 @@
use std::{collections::BTreeMap, ops::Deref, path::Path};

use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};

use ruff_source_file::LineColumn;

use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};

/// A renderer for diagnostics in the [JUnit] format.
///
/// See [`junit.xsd`] for the specification in the JUnit repository and an annotated [version]
/// linked from the [`quick_junit`] docs.
///
/// [JUnit]: https://junit.org/
/// [`junit.xsd`]: https://github.com/junit-team/junit-framework/blob/2870b7d8fd5bf7c1efe489d3991d3ed3900e82bb/platform-tests/src/test/resources/jenkins-junit.xsd
/// [version]: https://llg.cubic.org/docs/junit/
/// [`quick_junit`]: https://docs.rs/quick-junit/latest/quick_junit/
pub struct JunitRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> JunitRenderer<'a> {
    pub fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        let mut report = Report::new("ruff");

        if diagnostics.is_empty() {
            let mut test_suite = TestSuite::new("ruff");
            test_suite
                .extra
                .insert(XmlString::new("package"), XmlString::new("org.ruff"));
            let mut case = TestCase::new("No errors found", TestCaseStatus::success());
            case.set_classname("ruff");
            test_suite.add_test_case(case);
            report.add_test_suite(test_suite);
        } else {
            for (filename, diagnostics) in group_diagnostics_by_filename(diagnostics, self.resolver)
            {
                let mut test_suite = TestSuite::new(filename);
                test_suite
                    .extra
                    .insert(XmlString::new("package"), XmlString::new("org.ruff"));

                let classname = Path::new(filename).with_extension("");

                for diagnostic in diagnostics {
                    let DiagnosticWithLocation {
                        diagnostic,
                        start_location: location,
                    } = diagnostic;
                    let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
                    status.set_message(diagnostic.body());

                    if let Some(location) = location {
                        status.set_description(format!(
                            "line {row}, col {col}, {body}",
                            row = location.line,
                            col = location.column,
                            body = diagnostic.body()
                        ));
                    } else {
                        status.set_description(diagnostic.body());
                    }

                    let code = diagnostic
                        .secondary_code()
                        .map_or_else(|| diagnostic.name(), SecondaryCode::as_str);
                    let mut case = TestCase::new(format!("org.ruff.{code}"), status);
                    case.set_classname(classname.to_str().unwrap());

                    if let Some(location) = location {
                        case.extra.insert(
                            XmlString::new("line"),
                            XmlString::new(location.line.to_string()),
                        );
                        case.extra.insert(
                            XmlString::new("column"),
                            XmlString::new(location.column.to_string()),
                        );
                    }

                    test_suite.add_test_case(case);
                }
                report.add_test_suite(test_suite);
            }
        }

        let adapter = FmtAdapter { fmt: f };
        report.serialize(adapter).map_err(|_| std::fmt::Error)
    }
}

// TODO(brent) this and `group_diagnostics_by_filename` are also used by the `grouped` output
// format. I think they'd make more sense in that file, but I started here first. I'll move them to
// that module when adding the `grouped` output format.
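/// A diagnostic paired with the resolved start location of its primary span, if any.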
struct DiagnosticWithLocation<'a> {
    diagnostic: &'a Diagnostic,
    start_location: Option<LineColumn>,
}

impl Deref for DiagnosticWithLocation<'_> {
    type Target = Diagnostic;

    fn deref(&self) -> &Self::Target {
        self.diagnostic
    }
}

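/// Group diagnostics by the path of their primary file, falling back to an empty path
/// (and no start location) for diagnostics without a primary span.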
fn group_diagnostics_by_filename<'a>(
    diagnostics: &'a [Diagnostic],
    resolver: &'a dyn FileResolver,
) -> BTreeMap<&'a str, Vec<DiagnosticWithLocation<'a>>> {
    let mut grouped_diagnostics = BTreeMap::default();
    for diagnostic in diagnostics {
        let (filename, start_location) = diagnostic
            .primary_span_ref()
            .map(|span| {
                let file = span.file();
                let start_location =
                    span.range()
                        .filter(|_| !resolver.is_notebook(file))
                        .map(|range| {
                            file.diagnostic_source(resolver)
                                .as_source_code()
                                .line_column(range.start())
                        });

                (span.file().path(resolver), start_location)
            })
            .unwrap_or_default();

        grouped_diagnostics
            .entry(filename)
            .or_insert_with(Vec::new)
            .push(DiagnosticWithLocation {
                diagnostic,
                start_location,
            });
    }
    grouped_diagnostics
}

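/// Bridges the `std::fmt::Formatter` used for rendering to the `std::io::Write` sink
/// that `quick_junit`'s serializer expects.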
struct FmtAdapter<'a> {
    fmt: &'a mut dyn std::fmt::Write,
}

impl std::io::Write for FmtAdapter<'_> {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.fmt
            .write_str(std::str::from_utf8(buf).map_err(|_| {
                std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    "Invalid UTF-8 in JUnit report",
                )
            })?)
            .map_err(std::io::Error::other)?;

        Ok(buf.len())
    }

    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }

    fn write_fmt(&mut self, args: std::fmt::Arguments<'_>) -> std::io::Result<()> {
        self.fmt.write_fmt(args).map_err(std::io::Error::other)
    }
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Junit);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Junit);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }
}
@@ -1,97 +0,0 @@
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};

/// Generate violations in Pylint format.
///
/// The format is given by this string:
///
/// ```python
/// "%(path)s:%(row)d: [%(code)s] %(text)s"
/// ```
///
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
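///
/// For example, a hypothetical diagnostic might render as:
/// `example.py:3: [F401] 'os' imported but unused`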
pub(super) struct PylintRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> PylintRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }
}

impl PylintRenderer<'_> {
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diagnostic in diagnostics {
            let (filename, row) = diagnostic
                .primary_span_ref()
                .map(|span| {
                    let file = span.file();

                    let row = span
                        .range()
                        .filter(|_| !self.resolver.is_notebook(file))
                        .map(|range| {
                            file.diagnostic_source(self.resolver)
                                .as_source_code()
                                .line_column(range.start())
                                .line
                        });

                    (file.relative_path(self.resolver).to_string_lossy(), row)
                })
                .unwrap_or_default();

            let code = diagnostic
                .secondary_code()
                .map_or_else(|| diagnostic.name(), SecondaryCode::as_str);

            let row = row.unwrap_or_default();

            writeln!(
                f,
                "{path}:{row}: [{code}] {body}",
                path = filename,
                body = diagnostic.body()
            )?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Pylint);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Pylint);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn missing_file() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Pylint);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @":1: [test-diagnostic] main diagnostic message",
        );
    }
}
@@ -1,235 +0,0 @@
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};

use ruff_diagnostics::{Edit, Fix};
use ruff_source_file::{LineColumn, SourceCode};
use ruff_text_size::Ranged;

use crate::diagnostic::Diagnostic;

use super::FileResolver;

pub struct RdjsonRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> RdjsonRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        write!(
            f,
            "{:#}",
            serde_json::json!(RdjsonDiagnostics::new(diagnostics, self.resolver))
        )
    }
}

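/// The diagnostics of an RDJSON report, serialized one element at a time via `serialize_seq`.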
struct ExpandedDiagnostics<'a> {
    resolver: &'a dyn FileResolver,
    diagnostics: &'a [Diagnostic],
}

impl Serialize for ExpandedDiagnostics<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;

        for diagnostic in self.diagnostics {
            let value = diagnostic_to_rdjson(diagnostic, self.resolver);
            s.serialize_element(&value)?;
        }

        s.end()
    }
}

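/// Build the RDJSON representation of a single diagnostic.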
fn diagnostic_to_rdjson<'a>(
    diagnostic: &'a Diagnostic,
    resolver: &'a dyn FileResolver,
) -> RdjsonDiagnostic<'a> {
    let span = diagnostic.primary_span_ref();
    let source_file = span.map(|span| {
        let file = span.file();
        (file.path(resolver), file.diagnostic_source(resolver))
    });

    let location = source_file.as_ref().map(|(path, source)| {
        let range = diagnostic.range().map(|range| {
            let source_code = source.as_source_code();
            let start = source_code.line_column(range.start());
            let end = source_code.line_column(range.end());
            RdjsonRange::new(start, end)
        });

        RdjsonLocation { path, range }
    });

    let edits = diagnostic.fix().map(Fix::edits).unwrap_or_default();

    RdjsonDiagnostic {
        message: diagnostic.body(),
        location,
        code: RdjsonCode {
            value: diagnostic
                .secondary_code()
                .map_or_else(|| diagnostic.name(), |code| code.as_str()),
            url: diagnostic.to_ruff_url(),
        },
        suggestions: rdjson_suggestions(
            edits,
            source_file
                .as_ref()
                .map(|(_, source)| source.as_source_code()),
        ),
    }
}

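/// Convert a fix's edits into RDJSON suggestions; returns an empty list when there is no
/// source file to resolve line/column locations against.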
fn rdjson_suggestions<'a>(
    edits: &'a [Edit],
    source_code: Option<SourceCode>,
) -> Vec<RdjsonSuggestion<'a>> {
    if edits.is_empty() {
        return Vec::new();
    }

    let Some(source_code) = source_code else {
        debug_assert!(false, "Expected a source file for a diagnostic with a fix");
        return Vec::new();
    };

    edits
        .iter()
        .map(|edit| {
            let start = source_code.line_column(edit.start());
            let end = source_code.line_column(edit.end());
            let range = RdjsonRange::new(start, end);

            RdjsonSuggestion {
                range,
                text: edit.content().unwrap_or_default(),
            }
        })
        .collect()
}

#[derive(Serialize)]
struct RdjsonDiagnostics<'a> {
    diagnostics: ExpandedDiagnostics<'a>,
    severity: &'static str,
    source: RdjsonSource,
}

impl<'a> RdjsonDiagnostics<'a> {
    fn new(diagnostics: &'a [Diagnostic], resolver: &'a dyn FileResolver) -> Self {
        Self {
            source: RdjsonSource {
                name: "ruff",
                url: env!("CARGO_PKG_HOMEPAGE"),
            },
            severity: "WARNING",
            diagnostics: ExpandedDiagnostics {
                diagnostics,
                resolver,
            },
        }
    }
}

#[derive(Serialize)]
struct RdjsonSource {
    name: &'static str,
    url: &'static str,
}

#[derive(Serialize)]
struct RdjsonDiagnostic<'a> {
    code: RdjsonCode<'a>,
    #[serde(skip_serializing_if = "Option::is_none")]
    location: Option<RdjsonLocation<'a>>,
    message: &'a str,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    suggestions: Vec<RdjsonSuggestion<'a>>,
}

#[derive(Serialize)]
struct RdjsonLocation<'a> {
    path: &'a str,
    #[serde(skip_serializing_if = "Option::is_none")]
    range: Option<RdjsonRange>,
}

#[derive(Default, Serialize)]
struct RdjsonRange {
    end: LineColumn,
    start: LineColumn,
}

impl RdjsonRange {
    fn new(start: LineColumn, end: LineColumn) -> Self {
        Self { start, end }
    }
}

#[derive(Serialize)]
struct RdjsonCode<'a> {
    #[serde(skip_serializing_if = "Option::is_none")]
    url: Option<String>,
    value: &'a str,
}

#[derive(Serialize)]
struct RdjsonSuggestion<'a> {
    range: RdjsonRange,
    text: &'a str,
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Rdjson);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Rdjson);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn missing_file_stable() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Rdjson);
        env.preview(false);

        let diag = env.err().build();

        insta::assert_snapshot!(env.render(&diag));
    }

    #[test]
    fn missing_file_preview() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Rdjson);
        env.preview(true);

        let diag = env.err().build();

        insta::assert_snapshot!(env.render(&diag));
    }
}
@@ -1,6 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
---
syntax_errors.py:1: [invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] SyntaxError: Expected ')', found newline
@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
  "diagnostics": [
    {
      "code": {
        "url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
        "value": "test-diagnostic"
      },
      "message": "main diagnostic message"
    }
  ],
  "severity": "WARNING",
  "source": {
    "name": "ruff",
    "url": "https://docs.astral.sh/ruff"
  }
}
@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
  "diagnostics": [
    {
      "code": {
        "url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
        "value": "test-diagnostic"
      },
      "message": "main diagnostic message"
    }
  ],
  "severity": "WARNING",
  "source": {
    "name": "ruff",
    "url": "https://docs.astral.sh/ruff"
  }
}
@@ -41,8 +41,6 @@ pub struct DiagnosticStylesheet {
    pub(crate) line_no: Style,
    pub(crate) emphasis: Style,
    pub(crate) none: Style,
    pub(crate) separator: Style,
    pub(crate) secondary_code: Style,
}

impl Default for DiagnosticStylesheet {
@@ -64,8 +62,6 @@ impl DiagnosticStylesheet {
            line_no: bright_blue.effects(Effects::BOLD),
            emphasis: Style::new().effects(Effects::BOLD),
            none: Style::new(),
            separator: AnsiColor::Cyan.on_default(),
            secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
        }
    }

@@ -79,8 +75,6 @@ impl DiagnosticStylesheet {
            line_no: Style::new(),
            emphasis: Style::new(),
            none: Style::new(),
            separator: Style::new(),
            secondary_code: Style::new(),
        }
    }
}
@@ -1,6 +1,7 @@
use std::fmt;
use std::sync::Arc;

use countme::Count;
use dashmap::mapref::entry::Entry;
pub use file_root::{FileRoot, FileRootKind};
pub use path::FilePath;
@@ -231,7 +232,7 @@ impl Files {
        let roots = inner.roots.read().unwrap();

        for root in roots.all() {
            if path.starts_with(root.path(db)) {
            if root.path(db).starts_with(&path) {
                root.set_revision(db).to(FileRevision::now());
            }
        }
@@ -311,6 +312,11 @@ pub struct File {
    /// the file has been deleted is to change the status to `Deleted`.
    #[default]
    status: FileStatus,

    /// Counter that counts the number of created file instances and active file instances.
    /// Only enabled in debug builds.
    #[default]
    count: Count<File>,
}

// The Salsa heap is tracked separately.
@@ -369,25 +375,12 @@ impl File {
    }

    /// Refreshes the file metadata by querying the file system if needed.
    ///
    /// This also "touches" the file root associated with the given path.
    /// This means that any Salsa queries that depend on the corresponding
    /// root's revision will become invalidated.
    pub fn sync_path(db: &mut dyn Db, path: &SystemPath) {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        Files::touch_root(db, &absolute);
        Self::sync_system_path(db, &absolute, None);
    }

    /// Refreshes *only* the file metadata by querying the file system if needed.
    ///
    /// This specifically does not touch any file root associated with the
    /// given file path.
    pub fn sync_path_only(db: &mut dyn Db, path: &SystemPath) {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        Self::sync_system_path(db, &absolute, None);
    }

    /// Increments the revision for the virtual file at `path`.
    pub fn sync_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath) {
        if let Some(virtual_file) = db.files().try_virtual_file(path) {
@@ -493,7 +486,7 @@ impl fmt::Debug for File {
///
/// This is a wrapper around a [`File`] that provides additional methods to interact with a virtual
/// file.
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone)]
pub struct VirtualFile(File);

impl VirtualFile {
@@ -23,7 +23,7 @@ pub struct FileRoot {
    pub path: SystemPathBuf,

    /// The kind of the root at the time of its creation.
    pub kind_at_time_of_creation: FileRootKind,
    kind_at_time_of_creation: FileRootKind,

    /// A revision that changes when the contents of the source root change.
    ///
@@ -5,7 +5,6 @@ use ruff_python_ast::PythonVersion;
use rustc_hash::FxHasher;
use std::hash::BuildHasherDefault;
use std::num::NonZeroUsize;
use ty_static::EnvVars;

pub mod diagnostic;
pub mod display;
@@ -28,21 +27,6 @@ pub use web_time::{Instant, SystemTime, SystemTimeError};
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxDashSet<K> = dashmap::DashSet<K, BuildHasherDefault<FxHasher>>;

static VERSION: std::sync::OnceLock<String> = std::sync::OnceLock::new();

/// Returns the version of the executing program if set.
pub fn program_version() -> Option<&'static str> {
    VERSION.get().map(|version| version.as_str())
}

/// Sets the version of the executing program.
///
/// ## Errors
/// If the version has already been initialized (can only be set once).
pub fn set_program_version(version: String) -> Result<(), String> {
    VERSION.set(version)
}

/// Most basic database that gives access to files, the host system, source code, and parsed AST.
#[salsa::db]
pub trait Db: salsa::Database {
@@ -66,8 +50,8 @@ pub trait Db: salsa::Database {
/// ty can still spawn more threads for other tasks, e.g. to wait for a Ctrl+C signal or
/// watching the files for changes.
pub fn max_parallelism() -> NonZeroUsize {
    std::env::var(EnvVars::TY_MAX_PARALLELISM)
        .or_else(|_| std::env::var(EnvVars::RAYON_NUM_THREADS))
    std::env::var("TY_MAX_PARALLELISM")
        .or_else(|_| std::env::var("RAYON_NUM_THREADS"))
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or_else(|| {
@@ -1,6 +1,8 @@
use std::ops::Deref;
use std::sync::Arc;

use countme::Count;

use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
use ruff_source_file::LineIndex;
@@ -36,7 +38,11 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    };

    SourceText {
        inner: Arc::new(SourceTextInner { kind, read_error }),
        inner: Arc::new(SourceTextInner {
            kind,
            read_error,
            count: Count::new(),
        }),
    }
}
@@ -119,6 +125,8 @@ impl std::fmt::Debug for SourceText {

#[derive(Eq, PartialEq, get_size2::GetSize)]
struct SourceTextInner {
    #[get_size(ignore)]
    count: Count<SourceText>,
    kind: SourceTextKind,
    read_error: Option<SourceTextError>,
}
@@ -21,19 +21,6 @@ type LockedZipArchive<'a> = MutexGuard<'a, VendoredZipArchive>;
///
/// "Files" in the `VendoredFileSystem` are read-only and immutable.
/// Directories are supported, but symlinks and hardlinks cannot exist.
///
/// # Path separators
///
/// At time of writing (2025-07-11), this implementation always uses `/` as a
/// path separator, even in Windows environments where `\` is traditionally
/// used as a file path separator. Namely, this is only currently used with zip
/// files built by `crates/ty_vendored/build.rs`.
///
/// Callers using this may provide paths that use a `\` as a separator. It will
/// be transparently normalized to `/`.
///
/// This is particularly important because the presence of a trailing separator
/// in a zip file is conventionally used to indicate a directory entry.
#[derive(Clone)]
pub struct VendoredFileSystem {
    inner: Arc<Mutex<VendoredZipArchive>>,
@@ -128,68 +115,6 @@ impl VendoredFileSystem {
        read_to_string(self, path.as_ref())
    }

    /// Read the direct children of the directory
    /// identified by `path`.
    ///
    /// If `path` is not a directory, then this will
    /// return an empty `Vec`.
    pub fn read_directory(&self, dir: impl AsRef<VendoredPath>) -> Vec<DirectoryEntry> {
        // N.B. We specifically do not return an iterator here to avoid
        // holding a lock for the lifetime of the iterator returned.
        // That is, it seems like a footgun to keep the zip archive
        // locked during iteration, since the unit of work for each
        // item in the iterator could be arbitrarily long. Allocating
        // up front and stuffing all entries into it is probably the
        // simplest solution and what we do here. If this becomes
        // a problem, there are other strategies we could pursue.
        // (Amortizing allocs, using a different synchronization
        // behavior or even exposing additional APIs.) ---AG
        fn read_directory(fs: &VendoredFileSystem, dir: &VendoredPath) -> Vec<DirectoryEntry> {
            let mut normalized = NormalizedVendoredPath::from(dir);
            if !normalized.as_str().ends_with('/') {
                normalized = normalized.with_trailing_slash();
            }
            let archive = fs.lock_archive();
            let mut entries = vec![];
            for name in archive.0.file_names() {
                // Any entry that doesn't have the `path` (with a
                // trailing slash) as a prefix cannot possibly be in
                // the directory referenced by `path`.
                let Some(without_dir_prefix) = name.strip_prefix(normalized.as_str()) else {
                    continue;
                };
                // Filter out an entry equivalent to the path given
                // since we only want children of the directory.
                if without_dir_prefix.is_empty() {
                    continue;
                }
                // We only want *direct* children. Files that are
                // direct children cannot have any slashes (or else
                // they are not direct children). Directories that
                // are direct children can only have one slash and
                // it must be at the end.
                //
                // (We do this manually ourselves to avoid doing a
                // full file lookup and metadata retrieval via the
                // `zip` crate.)
                let file_type = FileType::from_zip_file_name(without_dir_prefix);
                let slash_count = without_dir_prefix.matches('/').count();
                match file_type {
                    FileType::File if slash_count > 0 => continue,
                    FileType::Directory if slash_count > 1 => continue,
                    _ => {}
                }
                entries.push(DirectoryEntry {
                    path: VendoredPathBuf::from(name),
                    file_type,
                });
            }
            entries
        }
        read_directory(self, dir.as_ref())
    }

    /// Acquire a lock on the underlying zip archive.
    /// The call will block until it is able to acquire the lock.
    ///
@@ -281,14 +206,6 @@ pub enum FileType {
}

impl FileType {
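    /// In zip archives, a trailing `/` in an entry name conventionally marks a directory.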
    fn from_zip_file_name(name: &str) -> FileType {
        if name.ends_with('/') {
            FileType::Directory
        } else {
            FileType::File
        }
    }

    pub const fn is_file(self) -> bool {
        matches!(self, Self::File)
    }
@@ -327,30 +244,6 @@ impl Metadata {
    }
}

#[derive(Debug, PartialEq, Eq)]
pub struct DirectoryEntry {
    path: VendoredPathBuf,
    file_type: FileType,
}

impl DirectoryEntry {
    pub fn new(path: VendoredPathBuf, file_type: FileType) -> Self {
        Self { path, file_type }
    }

    pub fn into_path(self) -> VendoredPathBuf {
        self.path
    }

    pub fn path(&self) -> &VendoredPath {
        &self.path
    }

    pub fn file_type(&self) -> FileType {
        self.file_type
    }
}

/// Newtype wrapper around a ZipArchive.
#[derive(Debug)]
struct VendoredZipArchive(ZipArchive<io::Cursor<Cow<'static, [u8]>>>);
@@ -605,60 +498,6 @@ pub(crate) mod tests {
        test_directory("./stdlib/asyncio/../asyncio/")
    }

    fn readdir_snapshot(fs: &VendoredFileSystem, path: &str) -> String {
        let mut paths = fs
            .read_directory(VendoredPath::new(path))
            .into_iter()
            .map(|entry| entry.path().to_string())
            .collect::<Vec<String>>();
        paths.sort();
        paths.join("\n")
    }

    #[test]
    fn read_directory_stdlib() {
        let mock_typeshed = mock_typeshed();

        assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib/"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib/"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
    }

    #[test]
    fn read_directory_asyncio() {
        let mock_typeshed = mock_typeshed();

        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "stdlib/asyncio"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "./stdlib/asyncio"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "stdlib/asyncio/"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "./stdlib/asyncio/"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
    }

    fn test_nonexistent_path(path: &str) {
        let mock_typeshed = mock_typeshed();
        let path = VendoredPath::new(path);
@@ -17,10 +17,6 @@ impl VendoredPath {
        unsafe { &*(path as *const Utf8Path as *const VendoredPath) }
    }

    pub fn file_name(&self) -> Option<&str> {
        self.0.file_name()
    }

    pub fn to_path_buf(&self) -> VendoredPathBuf {
        VendoredPathBuf(self.0.to_path_buf())
    }
@@ -13,7 +13,6 @@ license = { workspace = true }
[dependencies]
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ty_static = { workspace = true }
ruff = { workspace = true }
ruff_formatter = { workspace = true }
ruff_linter = { workspace = true, features = ["schemars"] }
@@ -4,7 +4,7 @@ use anyhow::Result;

use crate::{
    generate_cli_help, generate_docs, generate_json_schema, generate_ty_cli_reference,
    generate_ty_env_vars_reference, generate_ty_options, generate_ty_rules, generate_ty_schema,
    generate_ty_options, generate_ty_rules, generate_ty_schema,
};

pub(crate) const REGENERATE_ALL_COMMAND: &str = "cargo dev generate-all";
@@ -44,8 +44,5 @@ pub(crate) fn main(args: &Args) -> Result<()> {
    generate_ty_options::main(&generate_ty_options::Args { mode: args.mode })?;
    generate_ty_rules::main(&generate_ty_rules::Args { mode: args.mode })?;
    generate_ty_cli_reference::main(&generate_ty_cli_reference::Args { mode: args.mode })?;
    generate_ty_env_vars_reference::main(&generate_ty_env_vars_reference::Args {
        mode: args.mode,
    })?;
    Ok(())
}
@@ -1,119 +0,0 @@
//! Generate the environment variables reference from `ty_static::EnvVars`.

use std::collections::BTreeSet;
use std::fs;
use std::path::PathBuf;

use anyhow::bail;
use pretty_assertions::StrComparison;

use ty_static::EnvVars;

use crate::generate_all::Mode;

#[derive(clap::Args)]
pub(crate) struct Args {
    #[arg(long, default_value_t, value_enum)]
    pub(crate) mode: Mode,
}

pub(crate) fn main(args: &Args) -> anyhow::Result<()> {
    let reference_string = generate();
    let filename = "environment.md";
    let reference_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .parent()
        .unwrap()
        .join("crates")
        .join("ty")
        .join("docs")
        .join(filename);

    match args.mode {
        Mode::DryRun => {
            println!("{reference_string}");
        }
        Mode::Check => match fs::read_to_string(&reference_path) {
            Ok(current) => {
                if current == reference_string {
                    println!("Up-to-date: {filename}");
                } else {
                    let comparison = StrComparison::new(&current, &reference_string);
                    bail!(
                        "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{comparison}"
                    );
                }
            }
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                bail!(
                    "{filename} not found, please run `cargo dev generate-ty-env-vars-reference`"
                );
            }
            Err(err) => {
                bail!(
                    "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{err}"
                );
            }
        },
        Mode::Write => {
            // Ensure the docs directory exists
            if let Some(parent) = reference_path.parent() {
                fs::create_dir_all(parent)?;
            }

            match fs::read_to_string(&reference_path) {
                Ok(current) => {
                    if current == reference_string {
                        println!("Up-to-date: {filename}");
                    } else {
                        println!("Updating: {filename}");
                        fs::write(&reference_path, reference_string.as_bytes())?;
                    }
                }
                Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                    println!("Updating: {filename}");
                    fs::write(&reference_path, reference_string.as_bytes())?;
                }
                Err(err) => {
                    bail!(
                        "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{err}"
                    );
                }
            }
        }
    }

    Ok(())
}

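/// Render the environment variables reference as a Markdown document.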
fn generate() -> String {
    let mut output = String::new();

    output.push_str("# Environment variables\n\n");

    // Partition and sort environment variables into TY_ and external variables.
    let (ty_vars, external_vars): (BTreeSet<_>, BTreeSet<_>) = EnvVars::metadata()
        .iter()
        .partition(|(var, _)| var.starts_with("TY_"));

    output.push_str("ty defines and respects the following environment variables:\n\n");

    for (var, doc) in ty_vars {
        output.push_str(&render(var, doc));
    }

    output.push_str("## Externally-defined variables\n\n");
    output.push_str("ty also reads the following externally defined environment variables:\n\n");

    for (var, doc) in external_vars {
        output.push_str(&render(var, doc));
    }

    output
}

/// Render an environment variable and its documentation.
fn render(var: &str, doc: &str) -> String {
    format!("### `{var}`\n\n{doc}\n\n")
}
@@ -18,7 +18,6 @@ mod generate_json_schema;
mod generate_options;
mod generate_rules_table;
mod generate_ty_cli_reference;
mod generate_ty_env_vars_reference;
mod generate_ty_options;
mod generate_ty_rules;
mod generate_ty_schema;
@@ -54,8 +53,6 @@ enum Command {
    /// Generate a Markdown-compatible listing of configuration options.
    GenerateOptions,
    GenerateTyOptions(generate_ty_options::Args),
    /// Generate environment variables reference for ty.
    GenerateTyEnvVarsReference(generate_ty_env_vars_reference::Args),
    /// Generate CLI help.
    GenerateCliHelp(generate_cli_help::Args),
    /// Generate Markdown docs.
@@ -101,7 +98,6 @@ fn main() -> Result<ExitCode> {
        Command::GenerateTyRules(args) => generate_ty_rules::main(&args)?,
        Command::GenerateOptions => println!("{}", generate_options::generate()),
        Command::GenerateTyOptions(args) => generate_ty_options::main(&args)?,
        Command::GenerateTyEnvVarsReference(args) => generate_ty_env_vars_reference::main(&args)?,
        Command::GenerateCliHelp(args) => generate_cli_help::main(&args)?,
        Command::GenerateDocs(args) => generate_docs::main(&args)?,
        Command::PrintAST(args) => print_ast::main(&args)?,
@@ -16,6 +16,5 @@ doctest = false
[dependencies]
ruff_text_size = { workspace = true }

get-size2 = { workspace = true }
is-macro = { workspace = true }
serde = { workspace = true, optional = true, features = [] }
@@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};

/// A text edit to be applied to a source file. Inserts, deletes, or replaces
/// content at a given location.
#[derive(Clone, Debug, PartialEq, Eq, Hash, get_size2::GetSize)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Edit {
    /// The start location of the edit.

@@ -6,9 +6,7 @@ use ruff_text_size::{Ranged, TextSize};
use crate::edit::Edit;

/// Indicates if a fix can be applied.
#[derive(
    Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is, get_size2::GetSize,
)]
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))]
pub enum Applicability {
@@ -32,7 +30,7 @@ pub enum Applicability {
}

/// Indicates the level of isolation required to apply a fix.
#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, get_size2::GetSize)]
#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum IsolationLevel {
    /// The fix should be applied as long as no other fixes in the same group have been applied.
@@ -43,7 +41,7 @@ pub enum IsolationLevel {
}

/// A collection of [`Edit`] elements to be applied to a source file.
#[derive(Debug, PartialEq, Eq, Clone, get_size2::GetSize)]
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Fix {
    /// The [`Edit`] elements to be applied, sorted by [`Edit::start`] in ascending order.
@@ -20,7 +20,6 @@ ty_python_semantic = { workspace = true }

anyhow = { workspace = true }
clap = { workspace = true, optional = true }
memchr = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
@@ -1,4 +1,3 @@
use crate::StringImports;
use ruff_python_ast::visitor::source_order::{
    SourceOrderVisitor, walk_expr, walk_module, walk_stmt,
};
@@ -11,13 +10,13 @@ pub(crate) struct Collector<'a> {
    /// The path to the current module.
    module_path: Option<&'a [String]>,
    /// Whether to detect imports from string literals.
    string_imports: StringImports,
    string_imports: bool,
    /// The collected imports from the Python AST.
    imports: Vec<CollectedImport>,
}

impl<'a> Collector<'a> {
    pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
    pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: bool) -> Self {
        Self {
            module_path,
            string_imports,
@@ -119,7 +118,7 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
            | Stmt::Continue(_)
            | Stmt::IpyEscapeCommand(_) => {
                // Only traverse simple statements when string import detection is enabled.
                if self.string_imports.enabled {
                if self.string_imports {
                    walk_stmt(self, stmt);
                }
            }
@@ -127,26 +126,20 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
    }

    fn visit_expr(&mut self, expr: &'ast Expr) {
        if self.string_imports.enabled {
        if self.string_imports {
            if let Expr::StringLiteral(ast::ExprStringLiteral {
                value,
                range: _,
                node_index: _,
            }) = expr
            {
                let value = value.to_str();
                // Determine whether the string literal "looks like" an import statement: contains
                // the requisite number of dots, and consists solely of valid Python identifiers.
                if self.string_imports.min_dots == 0
                    || memchr::memchr_iter(b'.', value.as_bytes()).count()
                        >= self.string_imports.min_dots
                {
                    if let Some(module_name) = ModuleName::new(value) {
                        self.imports.push(CollectedImport::Import(module_name));
                    }
                // a dot, and consists solely of valid Python identifiers.
                let value = value.to_str();
                if let Some(module_name) = ModuleName::new(value) {
                    self.imports.push(CollectedImport::Import(module_name));
                }
            }

            walk_expr(self, expr);
        }
    }
@@ -87,7 +87,7 @@ impl SourceDb for ModuleDb {

#[salsa::db]
impl Db for ModuleDb {
    fn should_check_file(&self, file: File) -> bool {
    fn is_file_open(&self, file: File) -> bool {
        !file.path(self).is_vendored_path()
    }
@@ -9,7 +9,7 @@ use ruff_python_parser::{Mode, ParseOptions, parse};
use crate::collector::Collector;
pub use crate::db::ModuleDb;
use crate::resolver::Resolver;
pub use crate::settings::{AnalyzeSettings, Direction, StringImports};
pub use crate::settings::{AnalyzeSettings, Direction};

mod collector;
mod db;
@@ -26,7 +26,7 @@ impl ModuleImports {
        db: &ModuleDb,
        path: &SystemPath,
        package: Option<&SystemPath>,
        string_imports: StringImports,
        string_imports: bool,
    ) -> Result<Self> {
        // Read and parse the source code.
        let source = std::fs::read_to_string(path)?;
@@ -42,11 +42,13 @@ impl ModuleImports {
        // Resolve the imports.
        let mut resolved_imports = ModuleImports::default();
        for import in imports {
            for resolved in Resolver::new(db).resolve(import) {
                if let Some(path) = resolved.as_system_path() {
                    resolved_imports.insert(path.to_path_buf());
                }
            }
            let Some(resolved) = Resolver::new(db).resolve(import) else {
                continue;
            };
            let Some(path) = resolved.as_system_path() else {
                continue;
            };
            resolved_imports.insert(path.to_path_buf());
        }

        Ok(resolved_imports)
@@ -1,5 +1,5 @@
use ruff_db::files::FilePath;
use ty_python_semantic::{ModuleName, resolve_module, resolve_real_module};
use ty_python_semantic::resolve_module;

use crate::ModuleDb;
use crate::collector::CollectedImport;
@@ -16,67 +16,24 @@ impl<'a> Resolver<'a> {
    }

    /// Resolve the [`CollectedImport`] into a [`FilePath`].
    pub(crate) fn resolve(&self, import: CollectedImport) -> impl Iterator<Item = &'a FilePath> {
    pub(crate) fn resolve(&self, import: CollectedImport) -> Option<&'a FilePath> {
        match import {
            CollectedImport::Import(import) => {
                // Attempt to resolve the module (e.g., given `import foo`, look for `foo`).
                let file = self.resolve_module(&import);

                // If the file is a stub, look for the corresponding source file.
                let source_file = file
                    .is_some_and(|file| file.extension() == Some("pyi"))
                    .then(|| self.resolve_real_module(&import))
                    .flatten();

                std::iter::once(file)
                    .chain(std::iter::once(source_file))
                    .flatten()
                let module = resolve_module(self.db, &import)?;
                Some(module.file()?.path(self.db))
            }
            CollectedImport::ImportFrom(import) => {
                // Attempt to resolve the member (e.g., given `from foo import bar`, look for `foo.bar`).
                if let Some(file) = self.resolve_module(&import) {
                    // If the file is a stub, look for the corresponding source file.
                    let source_file = (file.extension() == Some("pyi"))
                        .then(|| self.resolve_real_module(&import))
                        .flatten();

                    return std::iter::once(Some(file))
                        .chain(std::iter::once(source_file))
                        .flatten();
                }

                // Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).
                let parent = import.parent();
                let file = parent
                    .as_ref()
                    .and_then(|parent| self.resolve_module(parent));

                // If the file is a stub, look for the corresponding source file.
                let source_file = file
                    .is_some_and(|file| file.extension() == Some("pyi"))
                    .then(|| {
                        parent
                            .as_ref()
                            .and_then(|parent| self.resolve_real_module(parent))
                    })
                    .flatten();
                let module = resolve_module(self.db, &import).or_else(|| {
                    // Attempt to resolve the module (e.g., given `from foo import bar`, look for `foo`).

                std::iter::once(file)
                    .chain(std::iter::once(source_file))
                    .flatten()
                    resolve_module(self.db, &parent?)
                })?;

                Some(module.file()?.path(self.db))
            }
        }
    }

    /// Resolves a module name to a module.
    fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
        let module = resolve_module(self.db, module_name)?;
        Some(module.file(self.db)?.path(self.db))
    }

    /// Resolves a module name to a module (stubs not allowed).
    fn resolve_real_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
        let module = resolve_real_module(self.db, module_name)?;
        Some(module.file(self.db)?.path(self.db))
    }
}
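The old `resolve` above returned `impl Iterator<Item = &FilePath>`, yielding up to two paths per import: the resolved module and, when that module was a `.pyi` stub, the corresponding source file. The new version returns `Option<&FilePath>`, so the stub-to-source fallback is dropped (note that `resolve_real_module` also disappears from the `use` line). The deleted iterator idiom is worth recording; a self-contained sketch with plain strings standing in for `FilePath`:

```rust
// `once(a).chain(once(b)).flatten()` turns two Options into an iterator
// that yields zero, one, or two items: the old `resolve` return shape.
fn stub_and_source(
    stub: Option<&'static str>,
    source: Option<&'static str>,
) -> impl Iterator<Item = &'static str> {
    std::iter::once(stub)
        .chain(std::iter::once(source))
        .flatten()
}

fn main() {
    // Stub plus source: both are yielded, stub first.
    let both: Vec<_> = stub_and_source(Some("foo.pyi"), Some("foo.py")).collect();
    assert_eq!(both, ["foo.pyi", "foo.py"]);

    // Nothing resolved: the iterator is simply empty.
    assert_eq!(stub_and_source(None, None).count(), 0);
}
```
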
@@ -11,7 +11,7 @@ pub struct AnalyzeSettings {
    pub exclude: FilePatternSet,
    pub preview: PreviewMode,
    pub target_version: PythonVersion,
    pub string_imports: StringImports,
    pub detect_string_imports: bool,
    pub include_dependencies: BTreeMap<PathBuf, (PathBuf, Vec<String>)>,
    pub extension: ExtensionMapping,
}
@@ -26,7 +26,7 @@ impl fmt::Display for AnalyzeSettings {
            self.exclude,
            self.preview,
            self.target_version,
            self.string_imports,
            self.detect_string_imports,
            self.extension | debug,
            self.include_dependencies | debug,
        ]
@@ -35,31 +35,6 @@ impl fmt::Display for AnalyzeSettings {
    }
}

#[derive(Debug, Copy, Clone, CacheKey)]
pub struct StringImports {
    pub enabled: bool,
    pub min_dots: usize,
}

impl Default for StringImports {
    fn default() -> Self {
        Self {
            enabled: false,
            min_dots: 2,
        }
    }
}

impl fmt::Display for StringImports {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.enabled {
            write!(f, "enabled (min_dots: {})", self.min_dots)
        } else {
            write!(f, "disabled")
        }
    }
}

#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, CacheKey)]
#[cfg_attr(
    feature = "serde",
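The settings hunk replaces the `string_imports: StringImports` field with `detect_string_imports: bool`, deleting the struct together with its `Default` (disabled, `min_dots: 2`) and `Display` impls. For reference, a standalone reproduction of the deleted `Display` behavior (no ruff imports; the `CacheKey` derive is omitted since it lives in ruff's internals):

```rust
use std::fmt;

#[derive(Debug, Copy, Clone)]
struct StringImports {
    enabled: bool,
    min_dots: usize,
}

impl Default for StringImports {
    fn default() -> Self {
        // Matches the deleted impl: detection off, threshold of two dots.
        Self { enabled: false, min_dots: 2 }
    }
}

impl fmt::Display for StringImports {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.enabled {
            write!(f, "enabled (min_dots: {})", self.min_dots)
        } else {
            write!(f, "disabled")
        }
    }
}

fn main() {
    assert_eq!(StringImports::default().to_string(), "disabled");
    let on = StringImports { enabled: true, min_dots: 2 };
    assert_eq!(on.to_string(), "enabled (min_dots: 2)");
}
```
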
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.12.5"
version = "0.12.1"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,7 +15,7 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["junit", "serde"] }
ruff_db = { workspace = true }
ruff_diagnostics = { workspace = true, features = ["serde"] }
ruff_notebook = { workspace = true }
ruff_macros = { workspace = true }
@@ -55,6 +55,7 @@ path-absolutize = { workspace = true, features = [
pathdiff = { workspace = true }
pep440_rs = { workspace = true }
pyproject-toml = { workspace = true }
quick-junit = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }

@@ -25,5 +25,5 @@ def my_func():

# t-strings - all ok
t"0.0.0.0"
t"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
t"0.0.0.0" t"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"

@@ -1,30 +1,29 @@
for _ in []:
    try:
        pass
    except Exception:
        continue
    try:
        pass
    except Exception:
        continue

    try:
        pass
    except:
        continue
    try:
        pass
    except:
        continue

    try:
        pass
    except (Exception,):
        continue
    try:
        pass
    except (Exception,):
        continue

    try:
        pass
    except (Exception, ValueError):
        continue
    try:
        pass
    except (Exception, ValueError):
        continue

    try:
        pass
    except ValueError:
        continue
    try:
        pass
    except ValueError:
        continue

    try:
        pass
    except (ValueError,):
        continue
    try:
        pass
    except (ValueError,):
        continue

@@ -94,7 +94,7 @@ except Exception:
    logging.error("...", exc_info=True)


from logging import critical, error, exception
from logging import error, exception

try:
    pass
@@ -114,23 +114,6 @@ except Exception:
    error("...", exc_info=None)


try:
    pass
except Exception:
    critical("...")


try:
    pass
except Exception:
    critical("...", exc_info=False)


try:
    pass
except Exception:
    critical("...", exc_info=None)

try:
    pass
except Exception:
@@ -142,13 +125,6 @@ try:
except Exception:
    error("...", exc_info=True)


try:
    pass
except Exception:
    critical("...", exc_info=True)


try:
    ...
except Exception as e:

@@ -1,10 +1,10 @@
"""
Should emit:
B017 - on lines 24, 28, 46, 49, 52, 58, 62, 68, and 71
B017 - on lines 23 and 41
"""
import asyncio
import unittest
import pytest, contextlib
import pytest

CONSTANT = True

@@ -56,17 +56,3 @@ def test_pytest_raises():

    with contextlib.nullcontext(), pytest.raises(Exception):
        raise ValueError("Multiple context managers")


def test_pytest_raises_keyword():
    with pytest.raises(expected_exception=Exception):
        raise ValueError("Should be flagged")

def test_assert_raises_keyword():
    class TestKwargs(unittest.TestCase):
        def test_method(self):
            with self.assertRaises(exception=Exception):
                raise ValueError("Should be flagged")

            with self.assertRaises(exception=BaseException):
                raise ValueError("Should be flagged")
@@ -1,28 +0,0 @@
"""
Should emit:
B017 - on lines 20, 21, 25, and 26
"""
import unittest
import pytest


def something_else() -> None:
    for i in (1, 2, 3):
        print(i)


class Foo:
    pass


class Foobar(unittest.TestCase):
    def call_form_raises(self) -> None:
        self.assertRaises(Exception, something_else)
        self.assertRaises(BaseException, something_else)


def test_pytest_call_form() -> None:
    pytest.raises(Exception, something_else)
    pytest.raises(BaseException, something_else)

    pytest.raises(Exception, something_else, match="hello")
@@ -185,45 +185,38 @@ for _section, section_items in groupby(items, key=lambda p: p[1]):
    collect_shop_items(shopper, section_items)

# Shouldn't trigger the warning when there is a return statement.
def foo():
    for _section, section_items in groupby(items, key=lambda p: p[1]):
        if _section == "greens":
            collect_shop_items(shopper, section_items)
            return
        elif _section == "frozen items":
            return section_items
    for _section, section_items in groupby(items, key=lambda p: p[1]):
        if _section == "greens":
            collect_shop_items(shopper, section_items)
            return
        elif _section == "frozen items":
            return section_items
        collect_shop_items(shopper, section_items)

# Should trigger the warning for duplicate access, even if is a return statement after.
def foo():
    from itertools import groupby
    for _section, section_items in groupby(items, key=lambda p: p[1]):
        if _section == "greens":
            collect_shop_items(shopper, section_items)
            collect_shop_items(shopper, section_items)
            return
    for _section, section_items in groupby(items, key=lambda p: p[1]):
        if _section == "greens":
            collect_shop_items(shopper, section_items)
            collect_shop_items(shopper, section_items)
            return

# Should trigger the warning for duplicate access, even if is a return in another branch.
def foo():
    from itertools import groupby
    for _section, section_items in groupby(items, key=lambda p: p[1]):
        if _section == "greens":
            collect_shop_items(shopper, section_items)
            return
        elif _section == "frozen items":
            collect_shop_items(shopper, section_items)
            collect_shop_items(shopper, section_items)
    for _section, section_items in groupby(items, key=lambda p: p[1]):
        if _section == "greens":
            collect_shop_items(shopper, section_items)
            return
        elif _section == "frozen items":
            collect_shop_items(shopper, section_items)
            collect_shop_items(shopper, section_items)

# Should trigger, since only one branch has a return statement.
def foo():
    from itertools import groupby
    for _section, section_items in groupby(items, key=lambda p: p[1]):
        if _section == "greens":
            collect_shop_items(shopper, section_items)
            return
        elif _section == "frozen items":
            collect_shop_items(shopper, section_items)
        collect_shop_items(shopper, section_items)  # B031
    for _section, section_items in groupby(items, key=lambda p: p[1]):
        if _section == "greens":
            collect_shop_items(shopper, section_items)
            return
        elif _section == "frozen items":
            collect_shop_items(shopper, section_items)
        collect_shop_items(shopper, section_items)  # B031

# Let's redefine the `groupby` function to make sure we pick up the correct one.
# NOTE: This should always be at the end of the file.

@@ -650,17 +650,3 @@ f"""This is a test. {
    if True else
    "Don't add a trailing comma here ->"
}"""

type X[
    T
] = T
def f[
    T
](): pass
class C[
    T
]: pass

type X[T,] = T
def f[T,](): pass
class C[T,]: pass
@@ -181,51 +181,3 @@ class SubclassTestModel2(TestModel4):
# Subclass without __str__
class SubclassTestModel3(TestModel1):
    pass


# Test cases for type-annotated abstract models - these should NOT trigger DJ008
from typing import ClassVar
from django_stubs_ext.db.models import TypedModelMeta


class TypeAnnotatedAbstractModel1(models.Model):
    """Model with type-annotated abstract = True - should not trigger DJ008"""
    new_field = models.CharField(max_length=10)

    class Meta(TypedModelMeta):
        abstract: ClassVar[bool] = True


class TypeAnnotatedAbstractModel2(models.Model):
    """Model with type-annotated abstract = True using regular Meta - should not trigger DJ008"""
    new_field = models.CharField(max_length=10)

    class Meta:
        abstract: ClassVar[bool] = True


class TypeAnnotatedAbstractModel3(models.Model):
    """Model with type-annotated abstract = True but without ClassVar - should not trigger DJ008"""
    new_field = models.CharField(max_length=10)

    class Meta:
        abstract: bool = True


class TypeAnnotatedNonAbstractModel(models.Model):
    """Model with type-annotated abstract = False - should trigger DJ008"""
    new_field = models.CharField(max_length=10)

    class Meta:
        abstract: ClassVar[bool] = False


class TypeAnnotatedAbstractModelWithStr(models.Model):
    """Model with type-annotated abstract = True and __str__ method - should not trigger DJ008"""
    new_field = models.CharField(max_length=10)

    class Meta(TypedModelMeta):
        abstract: ClassVar[bool] = True

    def __str__(self):
        return self.new_field

@@ -1,4 +1 @@
_(f"{'value'}")

# Don't trigger for t-strings
_(t"{'value'}")

@@ -13,7 +13,3 @@ from logging import info

info(f"{name}")
info(f"{__name__}")

# Don't trigger for t-strings
info(t"{name}")
info(t"{__name__}")

@@ -26,9 +26,8 @@ abc(**{'a': b}, **{'a': c})  # PIE804
abc(a=1, **{'a': c}, **{'b': c})  # PIE804

# Some values need to be parenthesized.
def foo():
    abc(foo=1, **{'bar': (bar := 1)})  # PIE804
    abc(foo=1, **{'bar': (yield 1)})  # PIE804
    abc(foo=1, **{'bar': (bar := 1)})  # PIE804
    abc(foo=1, **{'bar': (yield 1)})  # PIE804

# https://github.com/astral-sh/ruff/issues/18036
# The autofix for this is unsafe due to the comments inside the dictionary.

@@ -142,7 +142,3 @@ field47: typing.Optional[int] | typing.Optional[dict]
# avoid reporting twice
field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
field49: typing.Optional[complex | complex] | complex

# Regression test for https://github.com/astral-sh/ruff/issues/19403
# Should throw duplicate union member but not fix
isinstance(None, typing.Union[None, None])
@@ -47,7 +47,3 @@ def test_error_match_is_empty():

    with pytest.raises(ValueError, match=f""):
        raise ValueError("Can't divide 1 by 0")

def test_ok_t_string_match():
    with pytest.raises(ValueError, match=t""):
        raise ValueError("Can't divide 1 by 0")

@@ -23,9 +23,3 @@ def f():
    pytest.fail(msg=f"")
    pytest.fail(reason="")
    pytest.fail(reason=f"")

# Skip for t-strings
def g():
    pytest.fail(t"")
    pytest.fail(msg=t"")
    pytest.fail(reason=t"")

@@ -32,7 +32,3 @@ def test_error_match_is_empty():

    with pytest.warns(UserWarning, match=f""):
        pass

def test_ok_match_t_string():
    with pytest.warns(UserWarning, match=t""):
        pass

@@ -422,35 +422,6 @@ def func(a: dict[str, int]) -> list[dict[str, int]]:
    services = a["services"]
    return services


# See: https://github.com/astral-sh/ruff/issues/14052
def outer() -> list[object]:
    @register
    async def inner() -> None:
        print(layout)

    layout = [...]
    return layout

def outer() -> list[object]:
    with open("") as f:
        async def inner() -> None:
            print(layout)

    layout = [...]
    return layout


def outer() -> list[object]:
    def inner():
        with open("") as f:
            async def inner_inner() -> None:
                print(layout)

    layout = [...]
    return layout


# See: https://github.com/astral-sh/ruff/issues/18411
def f():
    (#=

Some files were not shown because too many files have changed in this diff.