Compare commits

Comparing david/enum ... micha/add- (2 commits; the second branch name is truncated in the capture, and author/date columns were lost)

| Author | SHA1 | Date |
|---|---|---|
|  | daa385c1a9 |  |
|  | 01d9312529 |  |
.github/renovate.json5 (29 changes, vendored)
@@ -16,7 +16,7 @@
   pep621: {
     // The default for this package manager is to only search for `pyproject.toml` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
-    fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
+    managerFilePatterns: ["/^(python|scripts)/.*pyproject\\.toml$/"],
   },
   pip_requirements: {
     // The default for this package manager is to run on all requirements.txt files:
@@ -34,12 +34,32 @@
   npm: {
     // The default for this package manager is to only search for `package.json` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
-    fileMatch: ["^playground/.*package\\.json$"],
+    managerFilePatterns: ["/^playground/.*package\\.json$/"],
   },
+  customManagers: [
+    {
+      customType: "regex",
+      managerFilePatterns: ["/^dist-workspace\\.toml$/"],
+      matchStrings: [
+        '"(?<depName>actions/[^"]+)" = "(?<currentDigest>[a-f0-9]{40})"\\s*#\\s*(?<currentValue>v[\\d\\.]+).*'
+      ],
+      datasourceTemplate: "github-tags",
+      autoReplaceStringTemplate: '"{{depName}}" = "{{newDigest}}" # {{newValue}}"',
+      extractVersionTemplate: "^(?<version>v[\\d\\.]+)$",
+      versioningTemplate: "semver"
+    }
+  ],
   "pre-commit": {
     enabled: true,
   },
   packageRules: [
+    // Ignore GitHub Actions in generated release.yml (managed by cargo-dist)
+    {
+      matchManagers: ["github-actions"],
+      matchFileNames: [".github/workflows/release.yml"],
+      enabled: false,
+      description: "Ignore GitHub Actions in release.yml as it's generated by cargo-dist",
+    },
     // Pin GitHub Actions to immutable SHAs.
     {
       matchDepTypes: ["action"],
@@ -106,6 +126,11 @@
       matchManagers: ["cargo"],
       matchPackageNames: ["strum"],
       description: "Weekly update of strum dependencies",
     },
+    {
+      groupName: "cargo-dist GitHub Actions",
+      matchManagers: ["custom.regex"],
+      description: "Weekly update of GitHub Actions dependencies managed by cargo-dist",
+    }
   ],
   vulnerabilityAlerts: {
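The custom manager added above lets Renovate update the SHA-pinned actions that cargo-dist records in dist-workspace.toml. A rough sketch of what its `matchStrings` regex captures, using bash's ERE engine (numbered groups stand in for the named `depName`/`currentDigest`/`currentValue` groups; the sample line is hypothetical):

```bash
# A pinned entry in dist-workspace.toml has the shape `"owner/repo" = "<sha>" # vX.Y.Z`.
line='"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4.2.2'
re='"(actions/[^"]+)" = "([a-f0-9]{40})"[[:space:]]*#[[:space:]]*(v[0-9.]+)'
if [[ $line =~ $re ]]; then
  echo "depName:       ${BASH_REMATCH[1]}"  # looked up against the github-tags datasource
  echo "currentDigest: ${BASH_REMATCH[2]}"  # the pinned commit SHA Renovate will replace
  echo "currentValue:  ${BASH_REMATCH[3]}"  # the human-readable tag kept in the comment
fi
```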
.github/workflows/build-binaries.yml (16 changes, vendored)
@@ -49,7 +49,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build sdist"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
           command: sdist
           args: --out dist
@@ -79,7 +79,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels - x86_64"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
           target: x86_64
           args: --release --locked --out dist
@@ -121,7 +121,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels - aarch64"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
           target: aarch64
           args: --release --locked --out dist
@@ -177,7 +177,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
           target: ${{ matrix.platform.target }}
           args: --release --locked --out dist
@@ -230,7 +230,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
           target: ${{ matrix.target }}
           manylinux: auto
@@ -304,7 +304,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -370,7 +370,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
           target: ${{ matrix.target }}
           manylinux: musllinux_1_2
@@ -435,7 +435,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
          target: ${{ matrix.platform.target }}
          manylinux: musllinux_1_2
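Every hunk in this file is the same change: the PyO3/maturin-action pin moving between two commit SHAs, with the release tag kept in a trailing comment, which is the convention the Renovate rule above ("Pin GitHub Actions to immutable SHAs") maintains. One way to resolve the SHA behind a tag when pinning by hand is the gh CLI (a sketch; for annotated tags the returned object is the tag itself and needs one more dereference):

```bash
# Look up the object a tag points at, e.g. to pin `uses:` by commit SHA.
gh api repos/PyO3/maturin-action/git/ref/tags/v1.49.2 --jq '.object.sha'
```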
.github/workflows/ci.yaml (97 changes, vendored)
@@ -143,12 +143,12 @@ jobs:
         env:
           MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
         run: |
           # NOTE: Do not exclude all Markdown files here, but rather use
           # specific exclude patterns like 'docs/**'), because tests for
           # 'ty' are written in Markdown.
-          if git diff --quiet "${MERGE_BASE}...HEAD" -- \
+          if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
             ':!**/*.md' \
             ':crates/ty_python_semantic/resources/mdtest/**/*.md' \
             ':!docs/**' \
             ':!assets/**' \
             ':.github/workflows/ci.yaml' \
           ; then
             echo "changed=false" >> "$GITHUB_OUTPUT"
           else
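The hunk above tweaks the pathspec that decides whether code-relevant files changed. The surrounding pattern is simple: `git diff --quiet` communicates "anything changed?" through its exit status, which then drives a step output. A minimal, repo-agnostic sketch (the ref and pathspec here are illustrative):

```bash
# Exit status 0 = no matching changes, non-zero = something changed.
GITHUB_OUTPUT="${GITHUB_OUTPUT:-/dev/stdout}"  # set by the runner in real CI
if git diff --quiet "origin/main...HEAD" -- ':!docs/**'; then
  echo "changed=false" >> "$GITHUB_OUTPUT"
else
  echo "changed=true" >> "$GITHUB_OUTPUT"
fi
```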
@@ -214,7 +214,7 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: |
           rustup component add clippy
@@ -234,17 +234,17 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
+        uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
         with:
           tool: cargo-insta
       - name: ty mdtests (GitHub annotations)
@@ -292,17 +292,17 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
+        uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
         with:
           tool: cargo-insta
       - name: "Run tests"
@@ -321,11 +321,11 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
         with:
           tool: cargo-nextest
       - name: "Run tests"
@@ -348,7 +348,7 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
       - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
@@ -377,11 +377,11 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
+        uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
       - name: "Build"
         run: cargo build --release --locked

@@ -400,18 +400,27 @@
         with:
           file: "Cargo.toml"
           field: "workspace.package.rust-version"
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         env:
           MSRV: ${{ steps.msrv.outputs.value }}
         run: rustup default "${MSRV}"
       - name: "Install mold"
-        uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
-      - name: "Build tests"
+        uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
+      - name: "Install cargo nextest"
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
+        with:
+          tool: cargo-nextest
+      - name: "Install cargo insta"
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
+        with:
+          tool: cargo-insta
+      - name: "Run tests"
         shell: bash
         env:
+          NEXTEST_PROFILE: "ci"
           MSRV: ${{ steps.msrv.outputs.value }}
-        run: cargo "+${MSRV}" test --no-run --all-features
+        run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest

   cargo-fuzz-build:
     name: "cargo fuzz build"
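The MSRV job changes from only compiling the tests (`--no-run`) to actually running the snapshot suite under the minimum supported Rust version. A local approximation (a sketch; assumes rustup, cargo-insta, and cargo-nextest are installed, and extracts the MSRV crudely rather than via the `file`/`field` lookup the job uses):

```bash
# Read rust-version from Cargo.toml (crude extraction, sketch only).
MSRV="$(sed -n 's/^rust-version *= *"\(.*\)"/\1/p' Cargo.toml | head -n1)"
rustup toolchain install "$MSRV"
# Same invocation as the new "Run tests" step above:
cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest
```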
@@ -423,13 +432,13 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
         with:
           workspaces: "fuzz -> target"
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo-binstall"
-        uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
+        uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
         with:
           tool: cargo-fuzz@0.11.2
       - name: "Install cargo-fuzz"
@@ -451,7 +460,7 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+      - uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         name: Download Ruff binary to test
         id: download-cached-binary
@@ -485,7 +494,7 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: rustup component add rustfmt
       # Run all code generation scripts, and verify that the current output is
@@ -652,7 +661,7 @@
           branch: ${{ github.event.pull_request.base.ref }}
           workflow: "ci.yaml"
           check_artifacts: true
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+      - uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
       - name: Fuzz
         env:
           FORCE_COLOR: 1
@@ -682,7 +691,7 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: cargo-bins/cargo-binstall@808dcb1b503398677d089d3216c51ac7cc11e7ab # v1.14.2
+      - uses: cargo-bins/cargo-binstall@8aac5aa2bf0dfaa2863eccad9f43c68fe40e5ec8 # v1.14.1
       - run: cargo binstall --no-confirm cargo-shear
       - run: cargo shear

@@ -699,11 +708,11 @@
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
-        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
         with:
           args: --out dist
       - name: "Test wheel"
@@ -722,8 +731,8 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: 22
@@ -756,7 +765,7 @@
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: "3.13"
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
@@ -765,7 +774,7 @@
       - name: "Install Rust toolchain"
         run: rustup show
       - name: Install uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
       - name: "Install Insiders dependencies"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         run: uv pip install -r docs/requirements-insiders.txt --system
@@ -795,7 +804,7 @@
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Run checks"
@@ -865,7 +874,7 @@
           persist-credentials: false
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: 22
@@ -896,14 +905,14 @@
         with:
           persist-credentials: false

-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0

       - name: "Install Rust toolchain"
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
         with:
           tool: cargo-codspeed

@@ -911,7 +920,7 @@
         run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
+        uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
         with:
           run: cargo codspeed run
           token: ${{ secrets.CODSPEED_TOKEN }}
@@ -929,14 +938,14 @@
         with:
           persist-credentials: false

-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0

       - name: "Install Rust toolchain"
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
+        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb # v2.53.2
         with:
           tool: cargo-codspeed

@@ -944,7 +953,7 @@
         run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
+        uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
         with:
           run: cargo codspeed run
           token: ${{ secrets.CODSPEED_TOKEN }}
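Both benchmark jobs share the same two-phase CodSpeed shape: build the benchmark binaries with a feature set (instrumented vs. walltime), then let the action invoke `cargo codspeed run` with the upload token. Locally the same two commands work without the token (a sketch; assumes cargo-codspeed is installed):

```bash
cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark
cargo codspeed run   # in CI the CodSpeedHQ action wraps this and uploads the results
```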
.github/workflows/daily_fuzz.yaml (6 changes, vendored)
@@ -34,12 +34,12 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+      - uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@702b1908b5edf30d71a8d1666b724e0f0c6fa035 # v1
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: rui314/setup-mold@85c79d00377f0d32cdbae595a46de6f7c2fa6599 # v1
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: Build ruff
         # A debug build means the script runs slower once it gets started,
         # but this is outweighed by the fact that a release build takes *much* longer to compile in CI
.github/workflows/mypy_primer.yaml (85 changes, vendored)
@@ -12,7 +12,6 @@ on:
       - ".github/workflows/mypy_primer.yaml"
-      - ".github/workflows/mypy_primer_comment.yaml"
       - "Cargo.lock"
       - "!**.md"

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
@@ -38,9 +37,9 @@
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0

-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
         with:
           workspaces: "ruff"

@@ -49,13 +48,45 @@
       - name: Run mypy_primer
         shell: bash
-        env:
-          PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
-          DIFF_FILE: mypy_primer.diff
         run: |
           cd ruff
-          scripts/mypy_primer.sh
-          echo ${{ github.event.number }} > ../pr-number
+
+          echo "Enabling mypy primer specific configuration overloads (see .github/mypy-primer-ty.toml)"
+          mkdir -p ~/.config/ty
+          cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
+
+          PRIMER_SELECTOR="$(paste -s -d'|' crates/ty_python_semantic/resources/primer/good.txt)"
+
+          echo "new commit"
+          git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
+
+          MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
+          git checkout -b base_commit "$MERGE_BASE"
+          echo "base commit"
+          git rev-list --format=%s --max-count=1 base_commit
+
+          cd ..
+
+          echo "Project selector: $PRIMER_SELECTOR"
+          # Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
+          uvx \
+            --from="git+https://github.com/hauntsaninja/mypy_primer@01a7ca325f674433c58e02416a867178d1571128" \
+            mypy_primer \
+            --repo ruff \
+            --type-checker ty \
+            --old base_commit \
+            --new "$GITHUB_SHA" \
+            --project-selector "/($PRIMER_SELECTOR)\$" \
+            --output concise \
+            --debug > mypy_primer.diff || [ $? -eq 1 ]
+
+          # Output diff with ANSI color codes
+          cat mypy_primer.diff
+
+          # Remove ANSI color codes before uploading
+          sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff
+
+          echo ${{ github.event.number }} > pr-number

       - name: Upload diff
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -68,41 +99,3 @@
         with:
           name: pr-number
           path: pr-number
-
-  memory_usage:
-    name: Run memory statistics
-    runs-on: depot-ubuntu-22.04-32
-    timeout-minutes: 20
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          path: ruff
-          fetch-depth: 0
-          persist-credentials: false
-
-      - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
-        with:
-          workspaces: "ruff"
-
-      - name: Install Rust toolchain
-        run: rustup show
-
-      - name: Run mypy_primer
-        shell: bash
-        env:
-          TY_MAX_PARALLELISM: 1 # for deterministic memory numbers
-          TY_MEMORY_REPORT: mypy_primer
-          PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/memory.txt
-          DIFF_FILE: mypy_primer_memory.diff
-        run: |
-          cd ruff
-          scripts/mypy_primer.sh
-
-      - name: Upload diff
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: mypy_primer_memory_diff
-          path: mypy_primer_memory.diff
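The inline script on the added side tolerates mypy_primer exiting 1 (a non-empty diff) while still failing on crashes, via `|| [ $? -eq 1 ]`. A self-contained demonstration of that idiom:

```bash
# `diff` exits 1 when its inputs differ; the guard keeps that from failing the
# script under `set -e`, while exit codes greater than 1 still abort.
set -e
diff <(echo a) <(echo b) > /dev/null || [ $? -eq 1 ]
echo "still running: exit 1 was treated as success"
```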
.github/workflows/mypy_primer_comment.yaml (31 changes, vendored)
@@ -45,28 +45,15 @@
           if_no_artifact_found: ignore
           allow_forks: true

-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download mypy_primer memory results"
-        id: download-mypy_primer_memory_diff
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: mypy_primer_memory_diff
-          workflow: mypy_primer.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/mypy_primer_memory_diff
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
       - name: Generate comment content
         id: generate-comment
-        if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
+        if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
         run: |
           # Guard against malicious mypy_primer results that symlink to a secret
           # file on this runner
-          if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
+          if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
           then
-            echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
+            echo "Error: mypy_primer.diff cannot be a symlink"
             exit 1
           fi

@@ -87,18 +74,6 @@
             echo 'No ecosystem changes detected ✅' >> comment.txt
           fi

-          if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
-            echo '<details>' >> comment.txt
-            echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
-            echo '' >> comment.txt
-            echo '```diff' >> comment.txt
-            cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
-            echo '```' >> comment.txt
-            echo '</details>' >> comment.txt
-          else
-            echo 'No memory usage changes detected ✅' >> comment.txt
-          fi
-
           echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
           cat comment.txt >> "$GITHUB_OUTPUT"
           echo 'EOF' >> "$GITHUB_OUTPUT"
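Both versions of this workflow hand the assembled comment to later steps through the `name<<EOF` multiline-output syntax that GitHub Actions reads from `$GITHUB_OUTPUT`. A self-contained sketch of the pattern (a temp file stands in for the runner-provided path):

```bash
GITHUB_OUTPUT="$(mktemp)"           # the runner provides this path in real CI
printf 'line one\nline two\n' > comment.txt
{
  echo 'comment<<EOF'
  cat comment.txt
  echo 'EOF'
} >> "$GITHUB_OUTPUT"
cat "$GITHUB_OUTPUT"                # comment=<multiline value> becomes a step output
```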
.github/workflows/publish-docs.yml (2 changes, vendored)
@@ -68,7 +68,7 @@
       - name: "Install Rust toolchain"
         run: rustup show

-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8

       - name: "Install Insiders dependencies"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
.github/workflows/publish-pypi.yml (2 changes, vendored)
@@ -22,7 +22,7 @@
       id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels-*
.github/workflows/release.yml (18 changes, vendored)
@@ -61,7 +61,7 @@
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
-      - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           persist-credentials: false
           submodules: recursive
@@ -69,9 +69,9 @@
         # we specify bash to get pipefail; it guards against the `curl` command
         # failing. otherwise `sh` won't catch that `curl` returned non-0
         shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.5-prerelease.1/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.5-prerelease.3/cargo-dist-installer.sh | sh"
       - name: Cache dist
-        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
         with:
           name: cargo-dist-cache
           path: ~/.cargo/bin/dist
@@ -87,7 +87,7 @@
           cat plan-dist-manifest.json
           echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
       - name: "Upload dist-manifest.json"
-        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
         with:
           name: artifacts-plan-dist-manifest
           path: plan-dist-manifest.json
@@ -124,7 +124,7 @@
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
     steps:
-      - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           persist-credentials: false
           submodules: recursive
@@ -154,7 +154,7 @@

           cp dist-manifest.json "$BUILD_MANIFEST_NAME"
       - name: "Upload artifacts"
-        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
         with:
           name: artifacts-build-global
           path: |
@@ -175,7 +175,7 @@
     outputs:
       val: ${{ steps.host.outputs.manifest }}
     steps:
-      - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           persist-credentials: false
           submodules: recursive
@@ -201,7 +201,7 @@
           cat dist-manifest.json
           echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
       - name: "Upload dist-manifest.json"
-        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
         with:
           # Overwrite the previous copy
           name: artifacts-dist-manifest
@@ -251,7 +251,7 @@
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
-      - uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           persist-credentials: false
           submodules: recursive
.github/workflows/sync_typeshed.yaml (166 changes, vendored)
@@ -1,25 +1,5 @@
 name: Sync typeshed

-# How this works:
-#
-# 1. A Linux worker:
-#    a. Checks out Ruff and typeshed
-#    b. Deletes the vendored typeshed stdlib stubs from Ruff
-#    c. Copies the latest versions of the stubs from typeshed
-#    d. Uses docstring-adder to sync all docstrings available on Linux
-#    e. Creates a new branch on the upstream astral-sh/ruff repository
-#    f. Commits the changes it's made and pushes them to the new upstream branch
-# 2. Once the Linux worker is done, a Windows worker:
-#    a. Checks out the branch created by the Linux worker
-#    b. Syncs all docstrings available on Windows that are not available on Linux
-#    c. Commits the changes and pushes them to the same upstream branch
-# 3. Once the Windows worker is done, a MacOS worker:
-#    a. Checks out the branch created by the Linux worker
-#    b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
-#    c. Commits the changes and pushes them to the same upstream branch
-#    d. Creates a PR against the `main` branch using the branch all three workers have pushed to
-# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
-
 on:
   workflow_dispatch:
   schedule:
@@ -30,17 +10,7 @@ env:
   FORCE_COLOR: 1
   GH_TOKEN: ${{ github.token }}

-  # The name of the upstream branch that the first worker creates,
-  # and which all three workers push to.
-  UPSTREAM_BRANCH: typeshedbot/sync-typeshed
-
-  # The path to the directory that contains the vendored typeshed stubs,
-  # relative to the root of the Ruff repository.
-  VENDORED_TYPESHED: crates/ty_vendored/vendor/typeshed
-
 jobs:
-  # Sync typeshed stubs, and sync all docstrings available on Linux.
-  # Push the changes to a new branch on the upstream repository.
   sync:
     name: Sync typeshed
     runs-on: ubuntu-latest
@@ -49,6 +19,7 @@
     if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
     permissions:
       contents: write
+      pull-requests: write
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         name: Checkout Ruff
@@ -65,130 +36,37 @@
         run: |
           git config --global user.name typeshedbot
           git config --global user.email '<>'
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-      - name: Sync typeshed stubs
+      - name: Sync typeshed
         id: sync
         run: |
-          rm -rf "ruff/${VENDORED_TYPESHED}"
-          mkdir "ruff/${VENDORED_TYPESHED}"
-          cp typeshed/README.md "ruff/${VENDORED_TYPESHED}"
-          cp typeshed/LICENSE "ruff/${VENDORED_TYPESHED}"
-
-          # The pyproject.toml file is needed by a later job for the black configuration.
-          # It's deleted before creating the PR.
-          cp typeshed/pyproject.toml "ruff/${VENDORED_TYPESHED}"
-
-          cp -r typeshed/stdlib "ruff/${VENDORED_TYPESHED}/stdlib"
-          rm -rf "ruff/${VENDORED_TYPESHED}/stdlib/@tests"
-          git -C typeshed rev-parse HEAD > "ruff/${VENDORED_TYPESHED}/source_commit.txt"
-          cd ruff
-          git checkout -b "${UPSTREAM_BRANCH}"
-          git add .
-          git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
+          rm -rf ruff/crates/ty_vendored/vendor/typeshed
+          mkdir ruff/crates/ty_vendored/vendor/typeshed
+          cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
+          cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
+          cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
+          rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
+          git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
+      - name: Commit the changes
+        id: commit
+        if: ${{ steps.sync.outcome == 'success' }}
+        run: |
+          cd ruff
+          git checkout -b typeshedbot/sync-typeshed
+          git add .
+          git diff --staged --quiet || git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)"
-      - name: Sync Linux docstrings
-        if: ${{ success() }}
-        run: |
-          cd ruff
-          ./scripts/codemod_docstrings.sh
-          git commit -am "Sync Linux docstrings" --allow-empty
-      - name: Push the changes
-        id: commit
-        if: ${{ success() }}
-        run: git -C ruff push --force --set-upstream origin "${UPSTREAM_BRANCH}"
-
-  # Checkout the branch created by the sync job,
-  # and sync all docstrings available on Windows that are not available on Linux.
-  # Commit the changes and push them to the same branch.
-  docstrings-windows:
-    runs-on: windows-latest
-    timeout-minutes: 20
-    needs: [sync]
-
-    # Don't run the cron job on forks.
-    # The job will also be skipped if the sync job failed, because it's specified in `needs` above,
-    # and we haven't used `always()` in the `if` condition here
-    # (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
-    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
-
-    permissions:
-      contents: write
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        name: Checkout Ruff
-        with:
-          persist-credentials: true
-          ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-      - name: Setup git
-        run: |
-          git config --global user.name typeshedbot
-          git config --global user.email '<>'
-      - name: Sync Windows docstrings
-        id: docstrings
-        shell: bash
-        run: ./scripts/codemod_docstrings.sh
-      - name: Commit the changes
-        if: ${{ steps.docstrings.outcome == 'success' }}
-        id: commit
-        run: |
-          git commit -am "Sync Windows docstrings" --allow-empty
-          git push
-
-  # Checkout the branch created by the sync job,
-  # and sync all docstrings available on macOS that are not available on Linux or Windows.
-  # Push the changes to the same branch and create a PR against the `main` branch using that branch.
-  docstrings-macos-and-pr:
-    runs-on: macos-latest
-    timeout-minutes: 20
-    needs: [sync, docstrings-windows]
-
-    # Don't run the cron job on forks.
-    # The job will also be skipped if the sync or docstrings-windows jobs failed,
-    # because they're specified in `needs` above and we haven't used an `always()` condition in the `if` here
-    # (https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-requiring-successful-dependent-jobs)
-    if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
-
-    permissions:
-      contents: write
-      pull-requests: write
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        name: Checkout Ruff
-        with:
-          persist-credentials: true
-          ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-      - name: Setup git
-        run: |
-          git config --global user.name typeshedbot
-          git config --global user.email '<>'
-      - name: Sync macOS docstrings
-        run: ./scripts/codemod_docstrings.sh
-      - name: Commit and push the changes
-        if: ${{ success() }}
-        run: |
-          git commit -am "Sync macOS docstrings" --allow-empty
-
-          # Here we just reformat the codemodded stubs so that they are
-          # consistent with the other typeshed stubs around them.
-          # Typeshed formats code using black in their CI, so we just invoke
-          # black on the stubs the same way that typeshed does.
-          uvx black "${VENDORED_TYPESHED}/stdlib" --config "${VENDORED_TYPESHED}/pyproject.toml" || true
-          git commit -am "Format codemodded docstrings" --allow-empty
-
-          rm "${VENDORED_TYPESHED}/pyproject.toml"
-          git commit -am "Remove pyproject.toml file"
-
-          git push
       - name: Create a PR
-        if: ${{ success() }}
+        if: ${{ steps.sync.outcome == 'success' && steps.commit.outcome == 'success' }}
         run: |
-          gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
+          cd ruff
+          git push --force origin typeshedbot/sync-typeshed
+          gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
           gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"

   create-issue-on-failure:
     name: Create an issue if the typeshed sync failed
     runs-on: ubuntu-latest
-    needs: [sync, docstrings-windows, docstrings-macos-and-pr]
-    if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && (needs.sync.result == 'failure' || needs.docstrings-windows.result == 'failure' || needs.docstrings-macos-and-pr.result == 'failure') }}
+    needs: [sync]
+    if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.sync.result == 'failure' }}
     permissions:
       issues: write
     steps:
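Both versions of the "Create a PR" step use the same guard to avoid opening duplicate PRs: `gh pr list --json id --jq length` prints how many PRs already exist for the head branch, and `grep 1` short-circuits the script when one is found. A slightly more explicit rendering of the same idea (a sketch; assumes the gh CLI is authenticated):

```bash
existing="$(gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length)"
if [ "$existing" -ge 1 ]; then
  echo "PR already open; nothing to do"
  exit 0
fi
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"
```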
.github/workflows/ty-ecosystem-analyzer.yaml (80 changes, vendored)
@@ -17,7 +17,6 @@ env:
   RUSTUP_MAX_RETRIES: 10
   RUST_BACKTRACE: 1
   REF_NAME: ${{ github.ref_name }}
-  CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}

 jobs:
   ty-ecosystem-analyzer:
@@ -33,9 +32,9 @@
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0

-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
         with:
           workspaces: "ruff"

@@ -64,75 +63,32 @@

           cd ..

-          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
+          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@9c34dc514ee9aef6735db1dfebb80f63acbc3440"

-          ecosystem-analyzer \
-            --repository ruff \
-            diff \
-            --projects-old ruff/projects_old.txt \
-            --projects-new ruff/projects_new.txt \
-            --old old_commit \
-            --new new_commit \
-            --output-old diagnostics-old.json \
-            --output-new diagnostics-new.json
+          ecosystem-analyzer \
+            --repository ruff \
+            analyze \
+            --projects ruff/projects_old.txt \
+            --commit old_commit \
+            --output diagnostics_old.json

-          mkdir dist
+          ecosystem-analyzer \
+            --repository ruff \
+            analyze \
+            --projects ruff/projects_new.txt \
+            --commit new_commit \
+            --output diagnostics_new.json

           ecosystem-analyzer \
             generate-diff \
-            diagnostics-old.json \
-            diagnostics-new.json \
+            diagnostics_old.json \
+            diagnostics_new.json \
             --old-name "main (merge base)" \
             --new-name "$REF_NAME" \
-            --output-html dist/diff.html
+            --output-html diff.html

-          ecosystem-analyzer \
-            generate-diff-statistics \
-            diagnostics-old.json \
-            diagnostics-new.json \
-            --old-name "main (merge base)" \
-            --new-name "$REF_NAME" \
-            --output diff-statistics.md
-
-          echo '## `ecosystem-analyzer` results' > comment.md
-          echo >> comment.md
-          cat diff-statistics.md >> comment.md
-
-          cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"
-
-          echo ${{ github.event.number }} > pr-number
-
-      - name: "Deploy to Cloudflare Pages"
-        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-        id: deploy
-        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
-        with:
-          apiToken: ${{ secrets.CF_API_TOKEN }}
-          accountId: ${{ secrets.CF_ACCOUNT_ID }}
-          command: pages deploy dist --project-name=ty-ecosystem --branch ${{ github.head_ref }} --commit-hash ${GITHUB_SHA}
-
-      - name: "Append deployment URL"
-        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-        env:
-          DEPLOYMENT_URL: ${{ steps.deploy.outputs.pages-deployment-alias-url }}
-        run: |
-          echo >> comment.md
-          echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)**" >> comment.md
-
-      - name: Upload comment
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: comment.md
-          path: comment.md
-
-      - name: Upload pr-number
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: pr-number
-          path: pr-number
-
-      - name: Upload diagnostics diff
+      - name: Upload HTML diff report
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: diff.html
-          path: dist/diff.html
+          path: diff.html
(deleted workflow, 85 lines: "PR comment (ty ecosystem-analyzer)"; its file name was not captured)

@@ -1,85 +0,0 @@
-name: PR comment (ty ecosystem-analyzer)
-
-on: # zizmor: ignore[dangerous-triggers]
-  workflow_run:
-    workflows: [ty ecosystem-analyzer]
-    types: [completed]
-  workflow_dispatch:
-    inputs:
-      workflow_run_id:
-        description: The ty ecosystem-analyzer workflow that triggers the workflow run
-        required: true
-
-jobs:
-  comment:
-    runs-on: ubuntu-24.04
-    permissions:
-      pull-requests: write
-    steps:
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download PR number
-        with:
-          name: pr-number
-          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Parse pull request number
-        id: pr-number
-        run: |
-          if [[ -f pr-number ]]
-          then
-            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
-          fi
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download comment.md"
-        id: download-comment
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: comment.md
-          workflow: ty-ecosystem-analyzer.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/comment
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Generate comment content
-        id: generate-comment
-        if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
-        run: |
-          # Guard against malicious ty ecosystem-analyzer results that symlink to a secret
-          # file on this runner
-          if [[ -L pr/comment/comment.md ]]
-          then
-            echo "Error: comment.md cannot be a symlink"
-            exit 1
-          fi
-
-          # Note: this identifier is used to find the comment to update on subsequent runs
-          echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
-          echo >> comment.md
-          cat pr/comment/comment.md >> comment.md
-
-          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
-          cat comment.md >> "$GITHUB_OUTPUT"
-          echo 'EOF' >> "$GITHUB_OUTPUT"
-
-      - name: Find existing comment
-        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
-        if: steps.generate-comment.outcome == 'success'
-        id: find-comment
-        with:
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          comment-author: "github-actions[bot]"
-          body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"
-
-      - name: Create or update comment
-        if: steps.find-comment.outcome == 'success'
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
-        with:
-          comment-id: ${{ steps.find-comment.outputs.comment-id }}
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          body-path: comment.md
-          edit-mode: replace
.github/workflows/ty-ecosystem-report.yaml (76 changes, vendored)
@@ -1,76 +0,0 @@
-name: ty ecosystem-report
-
-permissions: {}
-
-on:
-  workflow_dispatch:
-  schedule:
-    # Run every Wednesday at 5:00 UTC:
-    - cron: 0 5 * * 3
-
-env:
-  CARGO_INCREMENTAL: 0
-  CARGO_NET_RETRY: 10
-  CARGO_TERM_COLOR: always
-  RUSTUP_MAX_RETRIES: 10
-  RUST_BACKTRACE: 1
-  CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
-
-jobs:
-  ty-ecosystem-report:
-    name: Create ecosystem report
-    runs-on: depot-ubuntu-22.04-32
-    timeout-minutes: 20
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          path: ruff
-          fetch-depth: 0
-          persist-credentials: false
-
-      - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-        with:
-          workspaces: "ruff"
-
-      - name: Install Rust toolchain
-        run: rustup show
-
-      - name: Create report
-        shell: bash
-        run: |
-          cd ruff
-
-          echo "Enabling configuration overloads (see .github/mypy-primer-ty.toml)"
-          mkdir -p ~/.config/ty
-          cp .github/mypy-primer-ty.toml ~/.config/ty/ty.toml
-
-          cd ..
-
-          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
-
-          ecosystem-analyzer \
-            --verbose \
-            --repository ruff \
-            analyze \
-            --projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
-            --output ecosystem-diagnostics.json
-
-          mkdir dist
-
-          ecosystem-analyzer \
-            generate-report \
-            --max-diagnostics-per-project=1200 \
-            ecosystem-diagnostics.json \
-            --output dist/index.html
-
-      - name: "Deploy to Cloudflare Pages"
-        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-        id: deploy
-        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
-        with:
-          apiToken: ${{ secrets.CF_API_TOKEN }}
-          accountId: ${{ secrets.CF_ACCOUNT_ID }}
-          command: pages deploy dist --project-name=ty-ecosystem --branch main --commit-hash ${GITHUB_SHA}
.github/workflows/typing_conformance.yaml (109 changes, vendored)
@@ -1,109 +0,0 @@
-name: Run typing conformance
-
-permissions: {}
-
-on:
-  pull_request:
-    paths:
-      - "crates/ty*/**"
-      - "crates/ruff_db"
-      - "crates/ruff_python_ast"
-      - "crates/ruff_python_parser"
-      - ".github/workflows/typing_conformance.yaml"
-      - ".github/workflows/typing_conformance_comment.yaml"
-      - "Cargo.lock"
-      - "!**.md"
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
-  cancel-in-progress: true
-
-env:
-  CARGO_INCREMENTAL: 0
-  CARGO_NET_RETRY: 10
-  CARGO_TERM_COLOR: always
-  RUSTUP_MAX_RETRIES: 10
-  RUST_BACKTRACE: 1
-
-jobs:
-  typing_conformance:
-    name: Compute diagnostic diff
-    runs-on: depot-ubuntu-22.04-32
-    timeout-minutes: 10
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          path: ruff
-          fetch-depth: 0
-          persist-credentials: false
-
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          repository: python/typing
-          ref: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc
-          path: typing
-          persist-credentials: false
-
-      - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
-
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-        with:
-          workspaces: "ruff"
-
-      - name: Install Rust toolchain
-        run: rustup show
-
-      - name: Compute diagnostic diff
-        shell: bash
-        run: |
-          RUFF_DIR="$GITHUB_WORKSPACE/ruff"
-
-          # Build the executable for the old and new commit
-          (
-            cd ruff
-
-            echo "new commit"
-            git checkout -b new_commit "${{ github.event.pull_request.head.sha }}"
-            git rev-list --format=%s --max-count=1 new_commit
-            cargo build --release --bin ty
-            mv target/release/ty ty-new
-
-            echo "old commit (merge base)"
-            MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
-            git checkout -b old_commit "$MERGE_BASE"
-            git rev-list --format=%s --max-count=1 old_commit
-            cargo build --release --bin ty
-            mv target/release/ty ty-old
-          )
-
-          (
-            cd typing/conformance/tests
-
-            echo "Running ty on old commit (merge base)"
-            "$RUFF_DIR/ty-old" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/old-output.txt" 2>&1 || true
-
-            echo "Running ty on new commit"
-            "$RUFF_DIR/ty-new" check --color=never --output-format=concise . > "$GITHUB_WORKSPACE/new-output.txt" 2>&1 || true
-          )
-
-          if ! diff -u old-output.txt new-output.txt > typing_conformance_diagnostics.diff; then
-            echo "Differences found between base and PR"
-          else
-            echo "No differences found"
-            touch typing_conformance_diagnostics.diff
-          fi
-
-          echo ${{ github.event.number }} > pr-number
-
-      - name: Upload diff
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: typing_conformance_diagnostics_diff
-          path: typing_conformance_diagnostics.diff
-
-      - name: Upload pr-number
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: pr-number
-          path: pr-number
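The deleted job above leans on `diff -u`'s exit-status contract: 0 means identical, 1 means differences were found, and anything greater signals an actual error. A minimal demonstration:

```bash
printf 'a\n' > old-output.txt
printf 'b\n' > new-output.txt
if ! diff -u old-output.txt new-output.txt > changes.diff; then
  echo "differences captured in changes.diff (exit 1 here is data, not an error)"
else
  echo "no differences"
  touch changes.diff   # keep the artifact path present either way
fi
```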
(deleted workflow, 97 lines: "PR comment (typing_conformance)"; file name not captured, though the paths filter above references .github/workflows/typing_conformance_comment.yaml, which matches this content)

@@ -1,97 +0,0 @@
-name: PR comment (typing_conformance)
-
-on: # zizmor: ignore[dangerous-triggers]
-  workflow_run:
-    workflows: [Run typing conformance]
-    types: [completed]
-  workflow_dispatch:
-    inputs:
-      workflow_run_id:
-        description: The typing_conformance workflow that triggers the workflow run
-        required: true
-
-jobs:
-  comment:
-    runs-on: ubuntu-24.04
-    permissions:
-      pull-requests: write
-    steps:
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download PR number
-        with:
-          name: pr-number
-          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Parse pull request number
-        id: pr-number
-        run: |
-          if [[ -f pr-number ]]
-          then
-            echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
-          fi
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download typing_conformance results"
-        id: download-typing_conformance_diff
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: typing_conformance_diagnostics_diff
-          workflow: typing_conformance.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/typing_conformance_diagnostics_diff
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Generate comment content
-        id: generate-comment
-        if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
-        run: |
-          # Guard against malicious typing_conformance results that symlink to a secret
-          # file on this runner
-          if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
-          then
-            echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
-            exit 1
-          fi
-
-          # Note this identifier is used to find the comment to update on
-          # subsequent runs
-          echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt
-
-          echo '## Diagnostic diff on typing conformance tests' >> comment.txt
-          if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
-            echo '<details>' >> comment.txt
-            echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
-            echo '' >> comment.txt
-            echo '```diff' >> comment.txt
-            cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
-            echo '```' >> comment.txt
-            echo '</details>' >> comment.txt
-          else
-            echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
-          fi
-
-          echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
-          cat comment.txt >> "$GITHUB_OUTPUT"
-          echo 'EOF' >> "$GITHUB_OUTPUT"
-
-      - name: Find existing comment
-        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
-        if: steps.generate-comment.outcome == 'success'
-        id: find-comment
-        with:
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          comment-author: "github-actions[bot]"
-          body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"
-
-      - name: Create or update comment
-        if: steps.find-comment.outcome == 'success'
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
-        with:
-          comment-id: ${{ steps.find-comment.outputs.comment-id }}
-          issue-number: ${{ steps.pr-number.outputs.pr-number }}
-          body-path: comment.txt
-          edit-mode: replace
2 .github/zizmor.yml vendored
@@ -10,8 +10,6 @@ rules:
  ignore:
    - build-docker.yml
    - publish-playground.yml
    - ty-ecosystem-analyzer.yaml
    - ty-ecosystem-report.yaml
  excessive-permissions:
    # it's hard to test what the impact of removing these ignores would be
    # without actually running the release workflow...
@@ -6,7 +6,7 @@ exclude: |
    crates/ty_vendored/vendor/.*|
    crates/ty_project/resources/.*|
    crates/ty_python_semantic/resources/corpus/.*|
    crates/ty/docs/(configuration|rules|cli|environment).md|
    crates/ty/docs/(configuration|rules|cli).md|
    crates/ruff_benchmark/resources/.*|
    crates/ruff_linter/resources/.*|
    crates/ruff_linter/src/rules/.*/snapshots/.*|
@@ -67,7 +67,7 @@ repos:
      - black==25.1.0

  - repo: https://github.com/crate-ci/typos
    rev: v1.34.0
    rev: v1.33.1
    hooks:
      - id: typos

@@ -81,17 +81,17 @@ repos:
        pass_filenames: false # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.12.5
    rev: v0.11.13
    hooks:
      - id: ruff-format
      - id: ruff-check
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
        types_or: [python, pyi]
        require_serial: true

  # Prettier
  - repo: https://github.com/rbubley/mirrors-prettier
    rev: v3.6.2
    rev: v3.5.3
    hooks:
      - id: prettier
        types: [yaml]
@@ -99,12 +99,12 @@ repos:
  # zizmor detects security vulnerabilities in GitHub Actions workflows.
  # Additional configuration for the tool is found in `.github/zizmor.yml`
  - repo: https://github.com/woodruffw/zizmor-pre-commit
    rev: v1.11.0
    rev: v1.9.0
    hooks:
      - id: zizmor

  - repo: https://github.com/python-jsonschema/check-jsonschema
    rev: 0.33.2
    rev: 0.33.0
    hooks:
      - id: check-github-workflows

@@ -128,10 +128,5 @@ repos:
    # but the integration only works if shellcheck is installed
    - "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"

  - repo: https://github.com/shellcheck-py/shellcheck-py
    rev: v0.10.0.1
    hooks:
      - id: shellcheck

ci:
  skip: [cargo-fmt, dev-generate-all]
213 CHANGELOG.md
@@ -1,214 +1,5 @@
# Changelog

## 0.12.5

### Preview features

- \[`flake8-use-pathlib`\] Add autofix for `PTH101`, `PTH104`, `PTH105`, `PTH121` ([#19404](https://github.com/astral-sh/ruff/pull/19404))
- \[`ruff`\] Support byte strings (`RUF055`) ([#18926](https://github.com/astral-sh/ruff/pull/18926))

### Bug fixes

- Fix `unreachable` panic in parser ([#19183](https://github.com/astral-sh/ruff/pull/19183))
- \[`flake8-pyi`\] Skip fix if all `Union` members are `None` (`PYI016`) ([#19416](https://github.com/astral-sh/ruff/pull/19416))
- \[`perflint`\] Parenthesize generator expressions (`PERF401`) ([#19325](https://github.com/astral-sh/ruff/pull/19325))
- \[`pylint`\] Handle empty comments after line continuation (`PLR2044`) ([#19405](https://github.com/astral-sh/ruff/pull/19405))

### Rule changes

- \[`pep8-naming`\] Fix `N802` false positives for `CGIHTTPRequestHandler` and `SimpleHTTPRequestHandler` ([#19432](https://github.com/astral-sh/ruff/pull/19432))

## 0.12.4

### Preview features

- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))

### Bug fixes

- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))

### Rule changes

- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))

### Documentation

- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288),[#19272](https://github.com/astral-sh/ruff/pull/19272),[#19291](https://github.com/astral-sh/ruff/pull/19291),[#19296](https://github.com/astral-sh/ruff/pull/19296),[#19292](https://github.com/astral-sh/ruff/pull/19292),[#19295](https://github.com/astral-sh/ruff/pull/19295),[#19297](https://github.com/astral-sh/ruff/pull/19297),[#19309](https://github.com/astral-sh/ruff/pull/19309))

## 0.12.3

### Preview features

- \[`flake8-bugbear`\] Support non-context-manager calls in `B017` ([#19063](https://github.com/astral-sh/ruff/pull/19063))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH100`, `PTH106`, `PTH107`, `PTH108`, `PTH110`, `PTH111`, `PTH112`, `PTH113`, `PTH114`, `PTH115`, `PTH117`, `PTH119`, `PTH120` ([#19213](https://github.com/astral-sh/ruff/pull/19213))
- \[`flake8-use-pathlib`\] Add autofixes for `PTH203`, `PTH204`, `PTH205` ([#18922](https://github.com/astral-sh/ruff/pull/18922))

### Bug fixes

- \[`flake8-return`\] Fix false-positive for variables used inside nested functions in `RET504` ([#18433](https://github.com/astral-sh/ruff/pull/18433))
- Treat form feed as valid whitespace before a line continuation ([#19220](https://github.com/astral-sh/ruff/pull/19220))
- \[`flake8-type-checking`\] Fix syntax error introduced by fix (`TC008`) ([#19150](https://github.com/astral-sh/ruff/pull/19150))
- \[`pyupgrade`\] Keyword arguments in `super` should suppress the `UP008` fix ([#19131](https://github.com/astral-sh/ruff/pull/19131))

### Documentation

- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI007`, `PYI008`) ([#19103](https://github.com/astral-sh/ruff/pull/19103))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM116`) ([#19111](https://github.com/astral-sh/ruff/pull/19111))
- \[`flake8-type-checking`\] Make example error out-of-the-box (`TC001`) ([#19151](https://github.com/astral-sh/ruff/pull/19151))
- \[`flake8-use-pathlib`\] Make example error out-of-the-box (`PTH210`) ([#19189](https://github.com/astral-sh/ruff/pull/19189))
- \[`pycodestyle`\] Make example error out-of-the-box (`E272`) ([#19191](https://github.com/astral-sh/ruff/pull/19191))
- \[`pycodestyle`\] Make example not raise unnecessary `SyntaxError` (`E114`) ([#19190](https://github.com/astral-sh/ruff/pull/19190))
- \[`pydoclint`\] Make example error out-of-the-box (`DOC501`) ([#19218](https://github.com/astral-sh/ruff/pull/19218))
- \[`pylint`, `pyupgrade`\] Fix syntax errors in examples (`PLW1501`, `UP028`) ([#19127](https://github.com/astral-sh/ruff/pull/19127))
- \[`pylint`\] Update `missing-maxsplit-arg` docs and error to suggest proper usage (`PLC0207`) ([#18949](https://github.com/astral-sh/ruff/pull/18949))
- \[`flake8-bandit`\] Make example error out-of-the-box (`S412`) ([#19241](https://github.com/astral-sh/ruff/pull/19241))

## 0.12.2

### Preview features

- \[`flake8-pyi`\] Expand `Optional[A]` to `A | None` (`PYI016`) ([#18572](https://github.com/astral-sh/ruff/pull/18572))
- \[`pyupgrade`\] Mark `UP008` fix safe if no comments are in range ([#18683](https://github.com/astral-sh/ruff/pull/18683))

### Bug fixes

- \[`flake8-comprehensions`\] Fix `C420` to prepend whitespace when needed ([#18616](https://github.com/astral-sh/ruff/pull/18616))
- \[`perflint`\] Fix `PERF403` panic on attribute or subscription loop variable ([#19042](https://github.com/astral-sh/ruff/pull/19042))
- \[`pydocstyle`\] Fix `D413` infinite loop for parenthesized docstring ([#18930](https://github.com/astral-sh/ruff/pull/18930))
- \[`pylint`\] Fix `PLW0108` autofix introducing a syntax error when the lambda's body contains an assignment expression ([#18678](https://github.com/astral-sh/ruff/pull/18678))
- \[`refurb`\] Fix false positive on empty tuples (`FURB168`) ([#19058](https://github.com/astral-sh/ruff/pull/19058))
- \[`ruff`\] Allow more `field` calls from `attrs` (`RUF009`) ([#19021](https://github.com/astral-sh/ruff/pull/19021))
- \[`ruff`\] Fix syntax error introduced for an empty string followed by a u-prefixed string (`UP025`) ([#18899](https://github.com/astral-sh/ruff/pull/18899))

### Rule changes

- \[`flake8-executable`\] Allow `uvx` in shebang line (`EXE003`) ([#18967](https://github.com/astral-sh/ruff/pull/18967))
- \[`pandas`\] Avoid flagging `PD002` if `pandas` is not imported ([#18963](https://github.com/astral-sh/ruff/pull/18963))
- \[`pyupgrade`\] Avoid PEP-604 unions with `typing.NamedTuple` (`UP007`, `UP045`) ([#18682](https://github.com/astral-sh/ruff/pull/18682))

### Documentation

- Document link between `import-outside-top-level (PLC0415)` and `lint.flake8-tidy-imports.banned-module-level-imports` ([#18733](https://github.com/astral-sh/ruff/pull/18733))
- Fix description of the `format.skip-magic-trailing-comma` example ([#19095](https://github.com/astral-sh/ruff/pull/19095))
- \[`airflow`\] Make `AIR302` example error out-of-the-box ([#18988](https://github.com/astral-sh/ruff/pull/18988))
- \[`airflow`\] Make `AIR312` example error out-of-the-box ([#18989](https://github.com/astral-sh/ruff/pull/18989))
- \[`flake8-annotations`\] Make `ANN401` example error out-of-the-box ([#18974](https://github.com/astral-sh/ruff/pull/18974))
- \[`flake8-async`\] Make `ASYNC100` example error out-of-the-box ([#18993](https://github.com/astral-sh/ruff/pull/18993))
- \[`flake8-async`\] Make `ASYNC105` example error out-of-the-box ([#19002](https://github.com/astral-sh/ruff/pull/19002))
- \[`flake8-async`\] Make `ASYNC110` example error out-of-the-box ([#18975](https://github.com/astral-sh/ruff/pull/18975))
- \[`flake8-async`\] Make `ASYNC210` example error out-of-the-box ([#18977](https://github.com/astral-sh/ruff/pull/18977))
- \[`flake8-async`\] Make `ASYNC220`, `ASYNC221`, and `ASYNC222` examples error out-of-the-box ([#18978](https://github.com/astral-sh/ruff/pull/18978))
- \[`flake8-async`\] Make `ASYNC251` example error out-of-the-box ([#18990](https://github.com/astral-sh/ruff/pull/18990))
- \[`flake8-bandit`\] Make `S201` example error out-of-the-box ([#19017](https://github.com/astral-sh/ruff/pull/19017))
- \[`flake8-bandit`\] Make `S604` and `S609` examples error out-of-the-box ([#19049](https://github.com/astral-sh/ruff/pull/19049))
- \[`flake8-bugbear`\] Make `B028` example error out-of-the-box ([#19054](https://github.com/astral-sh/ruff/pull/19054))
- \[`flake8-bugbear`\] Make `B911` example error out-of-the-box ([#19051](https://github.com/astral-sh/ruff/pull/19051))
- \[`flake8-datetimez`\] Make `DTZ011` example error out-of-the-box ([#19055](https://github.com/astral-sh/ruff/pull/19055))
- \[`flake8-datetimez`\] Make `DTZ901` example error out-of-the-box ([#19056](https://github.com/astral-sh/ruff/pull/19056))
- \[`flake8-pyi`\] Make `PYI032` example error out-of-the-box ([#19061](https://github.com/astral-sh/ruff/pull/19061))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI014`, `PYI015`) ([#19097](https://github.com/astral-sh/ruff/pull/19097))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI042`) ([#19101](https://github.com/astral-sh/ruff/pull/19101))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI059`) ([#19080](https://github.com/astral-sh/ruff/pull/19080))
- \[`flake8-pyi`\] Make example error out-of-the-box (`PYI062`) ([#19079](https://github.com/astral-sh/ruff/pull/19079))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT023`) ([#19104](https://github.com/astral-sh/ruff/pull/19104))
- \[`flake8-pytest-style`\] Make example error out-of-the-box (`PT030`) ([#19105](https://github.com/astral-sh/ruff/pull/19105))
- \[`flake8-quotes`\] Make example error out-of-the-box (`Q003`) ([#19106](https://github.com/astral-sh/ruff/pull/19106))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM110`) ([#19113](https://github.com/astral-sh/ruff/pull/19113))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM113`) ([#19109](https://github.com/astral-sh/ruff/pull/19109))
- \[`flake8-simplify`\] Make example error out-of-the-box (`SIM401`) ([#19110](https://github.com/astral-sh/ruff/pull/19110))
- \[`pyflakes`\] Fix backslash in docs (`F621`) ([#19098](https://github.com/astral-sh/ruff/pull/19098))
- \[`pylint`\] Fix `PLC0415` example ([#18970](https://github.com/astral-sh/ruff/pull/18970))

## 0.12.1

### Preview features

- \[`flake8-errmsg`\] Extend `EM101` to support byte strings ([#18867](https://github.com/astral-sh/ruff/pull/18867))
- \[`flake8-use-pathlib`\] Add autofix for `PTH202` ([#18763](https://github.com/astral-sh/ruff/pull/18763))
- \[`pygrep-hooks`\] Add `AsyncMock` methods to `invalid-mock-access` (`PGH005`) ([#18547](https://github.com/astral-sh/ruff/pull/18547))
- \[`pylint`\] Ignore `__init__.py` files in (`PLC0414`) ([#18400](https://github.com/astral-sh/ruff/pull/18400))
- \[`ruff`\] Trigger `RUF037` for empty string and byte strings ([#18862](https://github.com/astral-sh/ruff/pull/18862))
- [formatter] Fix missing blank lines before decorated classes in `.pyi` files ([#18888](https://github.com/astral-sh/ruff/pull/18888))

### Bug fixes

- Avoid generating diagnostics with per-file ignores ([#18801](https://github.com/astral-sh/ruff/pull/18801))
- Handle parenthesized arguments in `remove_argument` ([#18805](https://github.com/astral-sh/ruff/pull/18805))
- \[`flake8-logging`\] Avoid false positive for `exc_info=True` outside `logger.exception` (`LOG014`) ([#18737](https://github.com/astral-sh/ruff/pull/18737))
- \[`flake8-pytest-style`\] Enforce `pytest` import for decorators ([#18779](https://github.com/astral-sh/ruff/pull/18779))
- \[`flake8-pytest-style`\] Mark autofix for `PT001` and `PT023` as unsafe if there's comments in the decorator ([#18792](https://github.com/astral-sh/ruff/pull/18792))
- \[`flake8-pytest-style`\] `PT001`/`PT023` fix makes syntax error on parenthesized decorator ([#18782](https://github.com/astral-sh/ruff/pull/18782))
- \[`flake8-raise`\] Make fix unsafe if it deletes comments (`RSE102`) ([#18788](https://github.com/astral-sh/ruff/pull/18788))
- \[`flake8-simplify`\] Fix `SIM911` autofix creating a syntax error ([#18793](https://github.com/astral-sh/ruff/pull/18793))
- \[`flake8-simplify`\] Fix false negatives for shadowed bindings (`SIM910`, `SIM911`) ([#18794](https://github.com/astral-sh/ruff/pull/18794))
- \[`flake8-simplify`\] Preserve original behavior for `except ()` and bare `except` (`SIM105`) ([#18213](https://github.com/astral-sh/ruff/pull/18213))
- \[`flake8-pyi`\] Fix `PYI041`'s fix causing `TypeError` with `None | None | ...` ([#18637](https://github.com/astral-sh/ruff/pull/18637))
- \[`perflint`\] Fix `PERF101` autofix creating a syntax error and mark autofix as unsafe if there are comments in the `list` call expr ([#18803](https://github.com/astral-sh/ruff/pull/18803))
- \[`perflint`\] Fix false negative in `PERF401` ([#18866](https://github.com/astral-sh/ruff/pull/18866))
- \[`pylint`\] Avoid flattening nested `min`/`max` when outer call has single argument (`PLW3301`) ([#16885](https://github.com/astral-sh/ruff/pull/16885))
- \[`pylint`\] Fix `PLC2801` autofix creating a syntax error ([#18857](https://github.com/astral-sh/ruff/pull/18857))
- \[`pylint`\] Mark `PLE0241` autofix as unsafe if there's comments in the base classes ([#18832](https://github.com/astral-sh/ruff/pull/18832))
- \[`pylint`\] Suppress `PLE2510`/`PLE2512`/`PLE2513`/`PLE2514`/`PLE2515` autofix if the text contains an odd number of backslashes ([#18856](https://github.com/astral-sh/ruff/pull/18856))
- \[`refurb`\] Detect more exotic float literals in `FURB164` ([#18925](https://github.com/astral-sh/ruff/pull/18925))
- \[`refurb`\] Fix `FURB163` autofix creating a syntax error for `yield` expressions ([#18756](https://github.com/astral-sh/ruff/pull/18756))
- \[`refurb`\] Mark `FURB129` autofix as unsafe if there's comments in the `readlines` call ([#18858](https://github.com/astral-sh/ruff/pull/18858))
- \[`ruff`\] Fix false positives and negatives in `RUF010` ([#18690](https://github.com/astral-sh/ruff/pull/18690))
- Fix casing of `analyze.direction` variant names ([#18892](https://github.com/astral-sh/ruff/pull/18892))

### Rule changes

- Fix f-string interpolation escaping in generated fixes ([#18882](https://github.com/astral-sh/ruff/pull/18882))
- \[`flake8-return`\] Mark `RET501` fix unsafe if comments are inside ([#18780](https://github.com/astral-sh/ruff/pull/18780))
- \[`flake8-async`\] Fix detection for large integer sleep durations in `ASYNC116` rule ([#18767](https://github.com/astral-sh/ruff/pull/18767))
- \[`flake8-async`\] Mark autofix for `ASYNC115` as unsafe if the call expression contains comments ([#18753](https://github.com/astral-sh/ruff/pull/18753))
- \[`flake8-bugbear`\] Mark autofix for `B004` as unsafe if the `hasattr` call expr contains comments ([#18755](https://github.com/astral-sh/ruff/pull/18755))
- \[`flake8-comprehension`\] Mark autofix for `C420` as unsafe if there's comments inside the dict comprehension ([#18768](https://github.com/astral-sh/ruff/pull/18768))
- \[`flake8-comprehensions`\] Handle template strings for comprehension fixes ([#18710](https://github.com/astral-sh/ruff/pull/18710))
- \[`flake8-future-annotations`\] Add autofix (`FA100`) ([#18903](https://github.com/astral-sh/ruff/pull/18903))
- \[`pyflakes`\] Mark `F504`/`F522`/`F523` autofix as unsafe if there's a call with side effect ([#18839](https://github.com/astral-sh/ruff/pull/18839))
- \[`pylint`\] Allow fix with comments and document performance implications (`PLW3301`) ([#18936](https://github.com/astral-sh/ruff/pull/18936))
- \[`pylint`\] Detect more exotic `NaN` literals in `PLW0177` ([#18630](https://github.com/astral-sh/ruff/pull/18630))
- \[`pylint`\] Fix `PLC1802` autofix creating a syntax error and mark autofix as unsafe if there's comments in the `len` call ([#18836](https://github.com/astral-sh/ruff/pull/18836))
- \[`pyupgrade`\] Extend version detection to include `sys.version_info.major` (`UP036`) ([#18633](https://github.com/astral-sh/ruff/pull/18633))
- \[`ruff`\] Add lint rule `RUF064` for calling `chmod` with non-octal integers ([#18541](https://github.com/astral-sh/ruff/pull/18541))
- \[`ruff`\] Added `cls.__dict__.get('__annotations__')` check (`RUF063`) ([#18233](https://github.com/astral-sh/ruff/pull/18233))
- \[`ruff`\] Frozen `dataclass` default should be valid (`RUF009`) ([#18735](https://github.com/astral-sh/ruff/pull/18735))

### Server

- Consider virtual path for various server actions ([#18910](https://github.com/astral-sh/ruff/pull/18910))

### Documentation

- Add fix safety sections ([#18940](https://github.com/astral-sh/ruff/pull/18940),[#18841](https://github.com/astral-sh/ruff/pull/18841),[#18802](https://github.com/astral-sh/ruff/pull/18802),[#18837](https://github.com/astral-sh/ruff/pull/18837),[#18800](https://github.com/astral-sh/ruff/pull/18800),[#18415](https://github.com/astral-sh/ruff/pull/18415),[#18853](https://github.com/astral-sh/ruff/pull/18853),[#18842](https://github.com/astral-sh/ruff/pull/18842))
- Use updated pre-commit id ([#18718](https://github.com/astral-sh/ruff/pull/18718))
- \[`perflint`\] Small docs improvement to `PERF401` ([#18786](https://github.com/astral-sh/ruff/pull/18786))
- \[`pyupgrade`\]: Use `super()`, not `__super__` in error messages (`UP008`) ([#18743](https://github.com/astral-sh/ruff/pull/18743))
- \[`flake8-pie`\] Small docs fix to `PIE794` ([#18829](https://github.com/astral-sh/ruff/pull/18829))
- \[`flake8-pyi`\] Correct `collections-named-tuple` example to use PascalCase assignment ([#16884](https://github.com/astral-sh/ruff/pull/16884))
- \[`flake8-pie`\] Add note on type checking benefits to `unnecessary-dict-kwargs` (`PIE804`) ([#18666](https://github.com/astral-sh/ruff/pull/18666))
- \[`pycodestyle`\] Clarify PEP 8 relationship to `whitespace-around-operator` rules ([#18870](https://github.com/astral-sh/ruff/pull/18870))

### Other changes

- Disallow newlines in format specifiers of single quoted f- or t-strings ([#18708](https://github.com/astral-sh/ruff/pull/18708))
- \[`flake8-logging`\] Add fix safety section to `LOG002` ([#18840](https://github.com/astral-sh/ruff/pull/18840))
- \[`pyupgrade`\] Add fix safety section to `UP010` ([#18838](https://github.com/astral-sh/ruff/pull/18838))

## 0.12.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.12.0) for a migration
@@ -225,7 +16,7 @@ guide and overview of the changes!

- **New default Python version handling for syntax errors**

  Ruff will default to the *latest* supported Python version (3.13) when
  Ruff will default to the _latest_ supported Python version (3.13) when
  checking for the version-related syntax errors mentioned above to prevent
  false positives in projects without a Python version configured. The default
  in all other cases, like applying lint rules, is unchanged and remains at the
@@ -280,7 +71,7 @@ The following rules have been stabilized and are no longer in preview:
- [`class-with-mixed-type-vars`](https://docs.astral.sh/ruff/rules/class-with-mixed-type-vars) (`RUF053`)
- [`unnecessary-round`](https://docs.astral.sh/ruff/rules/unnecessary-round) (`RUF057`)
- [`starmap-zip`](https://docs.astral.sh/ruff/rules/starmap-zip) (`RUF058`)
- [`non-pep604-annotation-optional`] (`UP045`)
- [`non-pep604-annotation-optional`](https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional) (`UP045`)
- [`non-pep695-generic-class`](https://docs.astral.sh/ruff/rules/non-pep695-generic-class) (`UP046`)
- [`non-pep695-generic-function`](https://docs.astral.sh/ruff/rules/non-pep695-generic-function) (`UP047`)
- [`private-type-parameter`](https://docs.astral.sh/ruff/rules/private-type-parameter) (`UP049`)
@@ -266,13 +266,6 @@ Finally, regenerate the documentation and generated code with `cargo dev generat

## MkDocs

> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.

To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
710 Cargo.lock generated
File diff suppressed because it is too large
42 Cargo.toml
@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.86"
rust-version = "1.85"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -44,7 +44,6 @@ ty_ide = { path = "crates/ty_ide" }
ty_project = { path = "crates/ty_project", default-features = false }
ty_python_semantic = { path = "crates/ty_python_semantic" }
ty_server = { path = "crates/ty_server" }
ty_static = { path = "crates/ty_static" }
ty_test = { path = "crates/ty_test" }
ty_vendored = { path = "crates/ty_vendored" }

@@ -57,38 +56,30 @@ assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "2.0.0" }
bitflags = { version = "2.5.0" }
bitvec = { version = "1.0.1", default-features = false, features = [
    "alloc",
] }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "4.0.0" }
divan = { package = "codspeed-divan-compat", version = "3.0.2" }
codspeed-criterion-compat = { version = "3.0.2", default-features = false }
divan = { package = "codspeed-divan-compat", version = "2.10.1" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
colored = { version = "3.0.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
criterion = { version = "0.6.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.10.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.6.0", features = [
    "derive",
    "smallvec",
    "hashbrown",
    "compact-str",
] }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
@@ -102,7 +93,7 @@ ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.18.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta-cmd = { version = "0.6.0" }
@@ -141,7 +132,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "dba66f1a37acca014c2402f231ed5b361bd7d8fe" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "09627e450566f894956710a3fd923dc80462ae6d" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -153,7 +144,7 @@ serde_with = { version = "3.6.0", default-features = false, features = [
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2", features = ["union", "const_generics", "const_new"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
    "diff",
    "term-svg",
@@ -168,16 +159,16 @@ tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.9.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.11" }
tracing-indicatif = { version = "0.3.6" }
tracing-log = { version = "0.2.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
    "env-filter",
    "fmt",
    "ansi",
    "smallvec",
    "smallvec"
] }
tryfn = { version = "0.2.1" }
typed-arena = { version = "2.0.2" }
@@ -187,7 +178,11 @@ unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
uuid = { version = "1.6.1", features = [
    "v4",
    "fast-rng",
    "macro-diagnostics",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
@@ -222,12 +217,11 @@ must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
ignore_without_reason = "allow" # Too many existing instances, and there's no auto fix.
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"
@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.5/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.0/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.0/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.12.5
  rev: v0.12.0
  hooks:
    # Run the linter.
    - id: ruff-check
@@ -423,14 +423,12 @@ Ruff is used by a number of major open-source projects and companies, including:

- [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- [Anki](https://apps.ankiweb.net/)
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- Capital One ([datacompy](https://github.com/capitalone/datacompy))
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
@@ -507,7 +505,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Streamlit](https://github.com/streamlit/streamlit)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- [Weblate](https://weblate.org/)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
- [ZenML](https://github.com/zenml-io/zenml)
- [Zulip](https://github.com/zulip/zulip)
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.5"
version = "0.12.0"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -68,7 +68,6 @@ ruff_linter = { workspace = true, features = ["clap", "test-rules"] }
assert_fs = { workspace = true }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"] }
dunce = { workspace = true }
indoc = { workspace = true }
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }
@@ -169,9 +169,6 @@ pub struct AnalyzeGraphCommand {
    /// Attempt to detect imports from string literals.
    #[clap(long)]
    detect_string_imports: bool,
    /// The minimum number of dots in a string import to consider it a valid import.
    #[clap(long)]
    min_dots: Option<usize>,
    /// Enable preview mode. Use `--no-preview` to disable.
    #[arg(long, overrides_with("no_preview"))]
    preview: bool,
@@ -811,7 +808,6 @@ impl AnalyzeGraphCommand {
            } else {
                None
            },
            string_imports_min_dots: self.min_dots,
            preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
            target_version: self.target_version.map(ast::PythonVersion::from),
            ..ExplicitConfigOverrides::default()
@@ -1309,7 +1305,6 @@ struct ExplicitConfigOverrides {
    show_fixes: Option<bool>,
    extension: Option<Vec<ExtensionPair>>,
    detect_string_imports: Option<bool>,
    string_imports_min_dots: Option<usize>,
}

impl ConfigurationTransformer for ExplicitConfigOverrides {
@@ -1397,9 +1392,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
        if let Some(detect_string_imports) = &self.detect_string_imports {
            config.analyze.detect_string_imports = Some(*detect_string_imports);
        }
        if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
            config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
        }

        config
    }
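The removed `min_dots` flag illustrates a pattern worth noting: an `Option`-typed CLI flag only overrides the resolved configuration when it was explicitly passed. A minimal sketch of that flag-to-override flow, using stand-in types rather than ruff's actual `ExplicitConfigOverrides`:

```rust
// Stand-in for the resolved configuration.
#[derive(Debug, Default)]
struct Config {
    min_dots: Option<usize>,
}

// Stand-in for the CLI overrides; `None` means "flag not passed",
// so the value from the configuration file wins.
#[derive(Debug, Default)]
struct Overrides {
    min_dots: Option<usize>,
}

impl Overrides {
    fn transform(&self, mut config: Config) -> Config {
        // Only an explicitly provided flag overrides the configuration,
        // mirroring `if let Some(...) = &self.string_imports_min_dots`.
        if let Some(min_dots) = self.min_dots {
            config.min_dots = Some(min_dots);
        }
        config
    }
}

fn main() {
    let no_flag = Overrides::default().transform(Config { min_dots: Some(2) });
    assert_eq!(no_flag.min_dots, Some(2)); // config file value untouched

    let from_file = Config { min_dots: Some(2) };
    let with_flag = Overrides { min_dots: Some(1) }.transform(from_file);
    assert_eq!(with_flag.min_dots, Some(1)); // explicit flag wins
}
```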
@@ -18,15 +18,14 @@ use rustc_hash::FxHashMap;
use tempfile::NamedTempFile;

use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_db::diagnostic::Diagnostic;
use ruff_diagnostics::Fix;
use ruff_linter::message::create_lint_diagnostic;
use ruff_linter::message::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::{VERSION, warn_user};
use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_workspace::Settings;
use ruff_workspace::resolver::Resolver;

@@ -349,7 +348,7 @@ impl FileCache {
        lint.messages
            .iter()
            .map(|msg| {
                create_lint_diagnostic(
                OldDiagnostic::lint(
                    &msg.body,
                    msg.suggestion.as_ref(),
                    msg.range,
@@ -429,11 +428,11 @@ pub(crate) struct LintCacheData {

impl LintCacheData {
    pub(crate) fn from_diagnostics(
        diagnostics: &[Diagnostic],
        diagnostics: &[OldDiagnostic],
        notebook_index: Option<NotebookIndex>,
    ) -> Self {
        let source = if let Some(msg) = diagnostics.first() {
            msg.expect_ruff_source_file().source_text().to_owned()
            msg.source_file().source_text().to_owned()
        } else {
            String::new() // No messages, no need to keep the source!
        };
@@ -443,20 +442,20 @@ impl LintCacheData {
            // Parse the kebab-case rule name into a `Rule`. This will fail for syntax errors, so
            // this also serves to filter them out, but we shouldn't be caching files with syntax
            // errors anyway.
            .filter_map(|msg| Some((msg.name().parse().ok()?, msg)))
            .filter_map(|msg| Some((msg.noqa_code().and_then(|code| code.rule())?, msg)))
            .map(|(rule, msg)| {
                // Make sure that all messages use the same source file.
                assert_eq!(
                    msg.expect_ruff_source_file(),
                    diagnostics.first().unwrap().expect_ruff_source_file(),
                    msg.source_file(),
                    diagnostics.first().unwrap().source_file(),
                    "message uses a different source file"
                );
                CacheMessage {
                    rule,
                    body: msg.body().to_string(),
                    suggestion: msg.first_help_text().map(ToString::to_string),
                    range: msg.expect_range(),
                    parent: msg.parent(),
                    suggestion: msg.suggestion().map(ToString::to_string),
                    range: msg.range(),
                    parent: msg.parent,
                    fix: msg.fix().cloned(),
                    noqa_offset: msg.noqa_offset(),
                }
@@ -609,12 +608,12 @@ mod tests {
    use anyhow::Result;
    use filetime::{FileTime, set_file_mtime};
    use itertools::Itertools;
    use ruff_linter::settings::LinterSettings;
    use test_case::test_case;

    use ruff_cache::CACHE_DIR_NAME;
    use ruff_db::diagnostic::Diagnostic;
    use ruff_linter::message::OldDiagnostic;
    use ruff_linter::package::PackageRoot;
    use ruff_linter::settings::LinterSettings;
    use ruff_linter::settings::flags;
    use ruff_linter::settings::types::UnsafeFixes;
    use ruff_python_ast::{PySourceType, PythonVersion};
@@ -681,7 +680,7 @@ mod tests {
            UnsafeFixes::Enabled,
        )
        .unwrap();
        if diagnostics.inner.iter().any(Diagnostic::is_invalid_syntax) {
        if diagnostics.inner.iter().any(OldDiagnostic::is_syntax_error) {
            parse_errors.push(path.clone());
        }
        paths.push(path);
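On the new side of this hunk, the cache round-trip leans on a parse-and-filter idiom: `filter_map` with `parse().ok()?` keeps only messages whose kebab-case name maps back to a rule, silently dropping the rest. A self-contained sketch of that idiom (the `Rule` enum below is a stand-in, not ruff's `Rule`):

```rust
use std::str::FromStr;

// Stand-in for ruff's `Rule`; only the `FromStr` round-trip matters here.
#[derive(Debug, PartialEq)]
enum Rule {
    UnusedImport,
}

impl FromStr for Rule {
    type Err = ();
    fn from_str(name: &str) -> Result<Self, Self::Err> {
        match name {
            "unused-import" => Ok(Rule::UnusedImport),
            _ => Err(()), // unknown names and syntax errors fall out here
        }
    }
}

fn main() {
    let names = ["unused-import", "invalid-syntax"];
    // `filter_map` + `parse().ok()?` keeps only names that map to a rule,
    // mirroring how the cache conversion drops syntax-error diagnostics.
    let rules: Vec<Rule> = names
        .iter()
        .filter_map(|name| name.parse().ok())
        .collect();
    assert_eq!(rules, vec![Rule::UnusedImport]);
}
```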
@@ -102,7 +102,7 @@ pub(crate) fn analyze_graph(

        // Resolve the per-file settings.
        let settings = resolver.resolve(path);
        let string_imports = settings.analyze.string_imports;
        let string_imports = settings.analyze.detect_string_imports;
        let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();

        // Skip excluded files.
@@ -11,13 +11,13 @@ use log::{debug, error, warn};
use rayon::prelude::*;
use rustc_hash::FxHashMap;

use ruff_db::diagnostic::Diagnostic;
use ruff_db::panic::catch_unwind;
use ruff_linter::OldDiagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::{IOError, Violation, fs, warn_user_once};
use ruff_linter::{IOError, fs, warn_user_once};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::resolver::{
@@ -129,7 +129,11 @@ pub(crate) fn check(
            SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();

        Diagnostics::new(
            vec![IOError { message }.into_diagnostic(TextRange::default(), &dummy)],
            vec![OldDiagnostic::new(
                IOError { message },
                TextRange::default(),
                &dummy,
            )],
            FxHashMap::default(),
        )
    } else {
@@ -162,9 +166,7 @@ pub(crate) fn check(
        |a, b| (a.0 + b.0, a.1 + b.1),
    );

    all_diagnostics
        .inner
        .sort_by(Diagnostic::ruff_start_ordering);
    all_diagnostics.inner.sort();

    // Store the caches.
    caches.persist()?;
@@ -279,7 +281,6 @@ mod test {

        TextEmitter::default()
            .with_show_fix_status(true)
            .with_color(false)
            .emit(
                &mut output,
                &diagnostics.inner,
@@ -1,7 +1,6 @@
use std::path::Path;

use anyhow::Result;
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::packaging;
use ruff_linter::settings::flags;
@@ -53,8 +52,6 @@ pub(crate) fn check_stdin(
        noqa,
        fix_mode,
    )?;
    diagnostics
        .inner
        .sort_unstable_by(Diagnostic::ruff_start_ordering);
    diagnostics.inner.sort_unstable();
    Ok(diagnostics)
}
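Both sort call sites above show the same migration: from a derived `Ord` (`sort_unstable()`) to an explicit comparator (`sort_unstable_by(Diagnostic::ruff_start_ordering)`). A minimal sketch of the comparator style, with a hypothetical `Diag` type standing in for the real diagnostic:

```rust
use std::cmp::Ordering;

// Hypothetical stand-in for a diagnostic: a start offset plus a message.
#[derive(Debug)]
struct Diag {
    start: usize,
    message: &'static str,
}

impl Diag {
    // An explicit comparator, analogous to `Diagnostic::ruff_start_ordering`:
    // order by start offset, then by message as a deterministic tie-break.
    fn start_ordering(a: &Diag, b: &Diag) -> Ordering {
        a.start.cmp(&b.start).then_with(|| a.message.cmp(b.message))
    }
}

fn main() {
    let mut diags = vec![
        Diag { start: 10, message: "unused import" },
        Diag { start: 3, message: "undefined name" },
    ];
    // No `Ord` impl is needed on `Diag`; the ordering lives in one named
    // function that can be passed to any `sort_*_by` method.
    diags.sort_unstable_by(Diag::start_ordering);
    assert_eq!(diags[0].start, 3);
}
```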
@@ -10,35 +10,35 @@ use std::path::Path;
use anyhow::{Context, Result};
use colored::Colorize;
use log::{debug, warn};
use ruff_db::diagnostic::Diagnostic;
use rustc_hash::FxHashMap;

use ruff_linter::OldDiagnostic;
use ruff_linter::codes::Rule;
use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
use ruff_linter::message::create_syntax_error_diagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{IOError, Violation, fs};
use ruff_linter::{IOError, fs};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::TextRange;
use ruff_workspace::Settings;
use rustc_hash::FxHashMap;

use crate::cache::{Cache, FileCacheKey, LintCacheData};

#[derive(Debug, Default, PartialEq)]
pub(crate) struct Diagnostics {
    pub(crate) inner: Vec<Diagnostic>,
    pub(crate) inner: Vec<OldDiagnostic>,
    pub(crate) fixed: FixMap,
    pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
}

impl Diagnostics {
    pub(crate) fn new(
        diagnostics: Vec<Diagnostic>,
        diagnostics: Vec<OldDiagnostic>,
        notebook_indexes: FxHashMap<String, NotebookIndex>,
    ) -> Self {
        Self {
@@ -62,12 +62,13 @@ impl Diagnostics {
            let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
            let source_file = SourceFileBuilder::new(name, "").finish();
            Self::new(
                vec![
                vec![OldDiagnostic::new(
                    IOError {
                        message: err.to_string(),
                    }
                    .into_diagnostic(TextRange::default(), &source_file),
                ],
                    },
                    TextRange::default(),
                    &source_file,
                )],
                FxHashMap::default(),
            )
        } else {
@@ -97,10 +98,10 @@ impl Diagnostics {
            let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
            let dummy = SourceFileBuilder::new(name, "").finish();
            Self::new(
                vec![create_syntax_error_diagnostic(
                    dummy,
                vec![OldDiagnostic::syntax_error(
                    err,
                    TextRange::default(),
                    dummy,
                )],
                FxHashMap::default(),
            )
@@ -131,7 +131,6 @@ pub fn run(
    }: Args,
) -> Result<ExitStatus> {
    {
        ruff_db::set_program_version(crate::version::version().to_string()).unwrap();
        let default_panic_hook = std::panic::take_hook();
        std::panic::set_hook(Box::new(move |info| {
            #[expect(clippy::print_stderr)]
@@ -440,7 +439,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
    if cli.statistics {
        printer.write_statistics(&diagnostics, &mut summary_writer)?;
    } else {
        printer.write_once(&diagnostics, &mut summary_writer, preview)?;
        printer.write_once(&diagnostics, &mut summary_writer)?;
    }

    if !cli.exit_zero {
@@ -6,17 +6,16 @@ use anyhow::Result;
use bitflags::bitflags;
use colored::Colorize;
use itertools::{Itertools, iterate};
use ruff_linter::codes::NoqaCode;
use ruff_linter::linter::FixTable;
use serde::Serialize;

use ruff_db::diagnostic::{
    Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
    Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, SarifEmitter,
    TextEmitter,
    AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
    JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter,
    SarifEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@@ -37,8 +36,8 @@ bitflags! {
}

#[derive(Serialize)]
struct ExpandedStatistics<'a> {
    code: Option<&'a SecondaryCode>,
struct ExpandedStatistics {
    code: Option<NoqaCode>,
    name: &'static str,
    count: usize,
    fixable: bool,
@@ -203,7 +202,6 @@ impl Printer {
        &self,
        diagnostics: &Diagnostics,
        writer: &mut dyn Write,
        preview: bool,
    ) -> Result<()> {
        if matches!(self.log_level, LogLevel::Silent) {
            return Ok(());
@@ -231,32 +229,16 @@ impl Printer {

        match self.format {
            OutputFormat::Json => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Json)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                JsonEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Rdjson => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Rdjson)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                RdjsonEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::JsonLines => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::JsonLines)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                JsonLinesEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Junit => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Junit)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                JunitEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Concise | OutputFormat::Full => {
                TextEmitter::default()
@@ -264,7 +246,6 @@ impl Printer {
                    .with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
                    .with_show_source(self.format == OutputFormat::Full)
                    .with_unsafe_fixes(self.unsafe_fixes)
                    .with_preview(preview)
                    .emit(writer, &diagnostics.inner, &context)?;

                if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
@@ -299,18 +280,10 @@ impl Printer {
                GitlabEmitter::default().emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Pylint => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Pylint)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                PylintEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Azure => {
                let config = DisplayDiagnosticConfig::default()
                    .format(DiagnosticFormat::Azure)
                    .preview(preview);
                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
                write!(writer, "{value}")?;
                AzureEmitter.emit(writer, &diagnostics.inner, &context)?;
            }
            OutputFormat::Sarif => {
                SarifEmitter.emit(writer, &diagnostics.inner, &context)?;
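One side of these hunks replaces a dedicated emitter type per output format with a single config-driven renderer: build a `DisplayDiagnosticConfig`, wrap the diagnostics in a `DisplayDiagnostics` value, and write it with one `write!`. A sketch of that design using stand-in types (only the shape, not ruff's actual API, is claimed here):

```rust
use std::fmt;

// Stand-in for `DiagnosticFormat`.
#[derive(Clone, Copy)]
enum Format {
    Json,
    Pylint,
}

// Stand-in for `DisplayDiagnosticConfig`: a builder holding render options.
struct Config {
    format: Format,
    preview: bool,
}

impl Config {
    fn new() -> Self {
        Config { format: Format::Json, preview: false }
    }
    // Builder-style setters, mirroring `.format(...)` and `.preview(...)`.
    fn format(mut self, format: Format) -> Self {
        self.format = format;
        self
    }
    fn preview(mut self, preview: bool) -> Self {
        self.preview = preview;
        self
    }
}

// Stand-in for `DisplayDiagnostics`: borrows the config and the messages and
// renders lazily through `Display`, so every format funnels through `write!`.
struct DisplayDiags<'a> {
    config: &'a Config,
    messages: &'a [&'a str],
}

impl fmt::Display for DisplayDiags<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for msg in self.messages {
            match self.config.format {
                Format::Json => writeln!(
                    f,
                    "{{\"message\": \"{msg}\", \"preview\": {}}}",
                    self.config.preview
                )?,
                Format::Pylint => writeln!(f, "file.py:1: {msg}")?,
            }
        }
        Ok(())
    }
}

fn main() {
    let messages = ["unused import"];
    let config = Config::new().format(Format::Pylint).preview(true);
    let value = DisplayDiags { config: &config, messages: &messages };
    // One `print!`/`write!` call covers every format the config can express.
    print!("{value}");
}
```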
@@ -330,11 +303,11 @@ impl Printer {
        let statistics: Vec<ExpandedStatistics> = diagnostics
            .inner
            .iter()
            .map(|message| (message.secondary_code(), message))
            .map(|message| (message.noqa_code(), message))
            .sorted_by_key(|(code, message)| (*code, message.fixable()))
            .fold(
                vec![],
                |mut acc: Vec<((Option<&SecondaryCode>, &Diagnostic), usize)>, (code, message)| {
                |mut acc: Vec<((Option<NoqaCode>, &OldDiagnostic), usize)>, (code, message)| {
                    if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
                        if *prev_code == code {
                            *count += 1;
@@ -376,7 +349,12 @@ impl Printer {
        );
        let code_width = statistics
            .iter()
            .map(|statistic| statistic.code.map_or(0, |s| s.len()))
            .map(|statistic| {
                statistic
                    .code
                    .map_or_else(String::new, |rule| rule.to_string())
                    .len()
            })
            .max()
            .unwrap();
        let any_fixable = statistics.iter().any(|statistic| statistic.fixable);
@@ -392,8 +370,7 @@ impl Printer {
                statistic.count.to_string().bold(),
                statistic
                    .code
                    .map(SecondaryCode::as_str)
                    .unwrap_or_default()
                    .map_or_else(String::new, |rule| rule.to_string())
                    .red()
                    .bold(),
                if any_fixable {
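The statistics computation on both sides uses the same sort-then-fold idiom: sorting brings equal codes together, so the fold only has to compare each element against the last accumulated entry to produce per-code counts. A self-contained sketch with plain strings in place of diagnostic codes:

```rust
fn main() {
    // Pretend these are the per-diagnostic codes after `.map(...)`.
    let mut codes = ["E501", "F401", "E501", "F401", "F401"];

    // Sorting first guarantees equal codes are adjacent, so the fold only
    // ever compares against the last accumulated entry.
    codes.sort_unstable();

    let counts = codes.iter().fold(Vec::<(&str, usize)>::new(), |mut acc, &code| {
        if let Some((prev, count)) = acc.last_mut() {
            if *prev == code {
                *count += 1;
                return acc;
            }
        }
        acc.push((code, 1));
        acc
    });

    assert_eq!(counts, vec![("E501", 2), ("F401", 3)]);
}
```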
@@ -17,7 +17,6 @@ fn command() -> Command {
    command.arg("analyze");
    command.arg("graph");
    command.arg("--preview");
    command.env_clear();
    command
}

@@ -57,40 +56,33 @@ fn dependencies() -> Result<()> {
        .write_str(indoc::indoc! {r#"
            def f(): pass
        "#})?;
    root.child("ruff")
        .child("e.pyi")
        .write_str(indoc::indoc! {r#"
            def f() -> None: ...
        "#})?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().current_dir(&root), @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [
            "ruff/c.py"
          ],
          "ruff/c.py": [
            "ruff/d.py"
          ],
          "ruff/d.py": [
            "ruff/e.py",
            "ruff/e.pyi"
          ],
          "ruff/e.py": [],
          "ruff/e.pyi": []
        }
        assert_cmd_snapshot!(command().current_dir(&root), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [
            "ruff/c.py"
          ],
          "ruff/c.py": [
            "ruff/d.py"
          ],
          "ruff/d.py": [
            "ruff/e.py"
          ],
          "ruff/e.py": []
        }

        ----- stderr -----
        "#);
        ----- stderr -----
        "###);
    });

    Ok(())
@@ -204,43 +196,23 @@ fn string_detection() -> Result<()> {
    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [],
          "ruff/c.py": []
        }
        assert_cmd_snapshot!(command().arg("--detect-string-imports").current_dir(&root), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [
            "ruff/c.py"
          ],
          "ruff/c.py": []
        }

        ----- stderr -----
        "#);
    });

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(command().arg("--detect-string-imports").arg("--min-dots").arg("1").current_dir(&root), @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "ruff/__init__.py": [],
          "ruff/a.py": [
            "ruff/b.py"
          ],
          "ruff/b.py": [
            "ruff/c.py"
          ],
          "ruff/c.py": []
        }

        ----- stderr -----
        "#);
        ----- stderr -----
        "###);
    });

    Ok(())
@@ -593,8 +565,8 @@ fn venv() -> Result<()> {
|
||||
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: Invalid `--python` argument `none`: does not point to a Python executable or a directory on disk
|
||||
Cause: No such file or directory (os error 2)
|
||||
Cause: Invalid search path settings
|
||||
Cause: Failed to discover the site-packages directory: Invalid `--python` argument `none`: does not point to a Python executable or a directory on disk
|
||||
");
|
||||
});
|
||||
|
||||
|
||||
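Aside: the layered `Cause:` lines in the stderr snapshot above are the shape anyhow gives an error chain. A minimal sketch, assuming anyhow and with hypothetical message text; each `.context(...)` adds one `Cause:` layer when the CLI reports the chain from outermost to innermost:

use anyhow::{Context, Result};

fn find_python(python: &str) -> Result<()> {
    std::fs::metadata(python)
        .with_context(|| {
            format!("Invalid `--python` argument `{python}`: does not point to a Python executable or a directory on disk")
        })
        .context("Invalid search path settings")?;
    Ok(())
}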
@@ -120,7 +120,7 @@ fn nonexistent_config_file() {
#[test]
fn config_override_rejected_if_invalid_toml() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--config", "foo = bar", "."]), @r"
.args(["format", "--config", "foo = bar", "."]), @r#"
success: false
exit_code: 2
----- stdout -----
@@ -137,11 +137,12 @@ fn config_override_rejected_if_invalid_toml() {
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^^^
string values must be quoted, expected literal string
| ^
invalid string
expected `"`, `'`

For more information, try '--help'.
");
"#);
}

#[test]

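Aside: the updated error text above ("invalid string / expected `"`, `'`") is what the `toml` crate reports for a bare word on the value side. A minimal sketch of reproducing it, assuming the `toml` crate:

fn main() {
    let err = toml::from_str::<toml::Value>("foo = bar").unwrap_err();
    // Prints a message starting with: TOML parse error at line 1, column 7
    println!("{err}");
}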
@@ -1067,7 +1067,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -1080,7 +1080,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -1093,7 +1093,7 @@ fn show_statistics_syntax_errors() {
success: false
exit_code: 1
----- stdout -----
1 invalid-syntax
1 syntax-error
Found 1 error.

----- stderr -----
@@ -2246,7 +2246,8 @@ fn pyproject_toml_stdin_syntax_error() {
success: false
exit_code: 1
----- stdout -----
pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: unclosed table, expected `]`
pyproject.toml:1:9: RUF200 Failed to parse pyproject.toml: invalid table header
expected `.`, `]`
|
1 | [project
| ^ RUF200

@@ -534,7 +534,7 @@ fn nonexistent_config_file() {
fn config_override_rejected_if_invalid_toml() {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "foo = bar", "."]), @r"
.args(["--config", "foo = bar", "."]), @r#"
success: false
exit_code: 2
----- stdout -----
@@ -551,11 +551,12 @@ fn config_override_rejected_if_invalid_toml() {
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^^^
string values must be quoted, expected literal string
| ^
invalid string
expected `"`, `'`

For more information, try '--help'.
");
"#);
}

#[test]
@@ -611,7 +612,7 @@ fn extend_passed_via_config_argument() {
#[test]
fn nonexistent_extend_file() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
fs::write(
project_dir.join("ruff.toml"),
r#"
@@ -652,7 +653,7 @@ extend = "ruff3.toml"
#[test]
fn circular_extend() -> Result<()> {
let tempdir = TempDir::new()?;
let project_path = dunce::canonicalize(tempdir.path())?;
let project_path = tempdir.path().canonicalize()?;

fs::write(
project_path.join("ruff.toml"),
@@ -697,7 +698,7 @@ extend = "ruff.toml"
#[test]
fn parse_error_extends() -> Result<()> {
let tempdir = TempDir::new()?;
let project_path = dunce::canonicalize(tempdir.path())?;
let project_path = tempdir.path().canonicalize()?;

fs::write(
project_path.join("ruff.toml"),
@@ -732,8 +733,9 @@ select = [E501]
Cause: TOML parse error at line 3, column 11
|
3 | select = [E501]
| ^^^^
string values must be quoted, expected literal string
| ^
invalid array
expected `]`
");
});

@@ -874,7 +876,7 @@ fn each_toml_option_requires_a_new_flag_1() {
|
1 | extend-select=['F841'], line-length=90
| ^
unexpected key or value, expected newline, `#`
expected newline, `#`

For more information, try '--help'.
");
@@ -905,7 +907,7 @@ fn each_toml_option_requires_a_new_flag_2() {
|
1 | extend-select=['F841'] line-length=90
| ^
unexpected key or value, expected newline, `#`
expected newline, `#`

For more information, try '--help'.
");
@@ -993,7 +995,6 @@ fn value_given_to_table_key_is_not_inline_table_2() {
- `lint.exclude`
- `lint.preview`
- `lint.typing-extensions`
- `lint.future-annotations`

For more information, try '--help'.
");
@@ -2129,7 +2130,7 @@ select = ["UP006"]
#[test]
fn requires_python_no_tool() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@@ -2422,7 +2423,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -2440,7 +2441,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_no_tool_target_version_override() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@@ -2734,7 +2735,7 @@ requires-python = ">= 3.11"
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -2751,7 +2752,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_no_tool_with_check() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@@ -2796,7 +2797,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_ruff_toml_no_target_fallback() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@@ -3098,7 +3099,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3117,7 +3118,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_ruff_toml_no_target_fallback_check() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@@ -3172,7 +3173,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_pyproject_toml_above() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let outer_pyproject = tempdir.path().join("pyproject.toml");
fs::write(
&outer_pyproject,
@@ -3199,7 +3200,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;

let testpy_canon = dunce::canonicalize(testpy)?;
let testpy_canon = testpy.canonicalize()?;

insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/"),(r"(?m)^foo\\test","foo/test")]
@@ -3478,7 +3479,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3498,7 +3499,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_pyproject_toml_above_with_tool() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let outer_pyproject = tempdir.path().join("pyproject.toml");
fs::write(
&outer_pyproject,
@@ -3527,7 +3528,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;

let testpy_canon = dunce::canonicalize(testpy)?;
let testpy_canon = testpy.canonicalize()?;

insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/"),(r"foo\\","foo/")]
@@ -3806,7 +3807,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -3826,7 +3827,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_ruff_toml_above() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@@ -3855,7 +3856,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;

let testpy_canon = dunce::canonicalize(testpy)?;
let testpy_canon = testpy.canonicalize()?;

insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/")]
@@ -4134,7 +4135,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -4419,7 +4420,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.9
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -4440,7 +4441,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_extend_from_shared_config() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let project_dir = tempdir.path().canonicalize()?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@@ -4478,7 +4479,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;

let testpy_canon = dunce::canonicalize(testpy)?;
let testpy_canon = testpy.canonicalize()?;

insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/")]
@@ -4757,7 +4758,7 @@ from typing import Union;foo: Union[int, str] = 1
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.10
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}

@@ -5691,82 +5692,3 @@ class Foo:
"
);
}

#[test_case::test_case("concise")]
#[test_case::test_case("full")]
#[test_case::test_case("json")]
#[test_case::test_case("json-lines")]
#[test_case::test_case("junit")]
#[test_case::test_case("grouped")]
#[test_case::test_case("github")]
#[test_case::test_case("gitlab")]
#[test_case::test_case("pylint")]
#[test_case::test_case("rdjson")]
#[test_case::test_case("azure")]
#[test_case::test_case("sarif")]
fn output_format(output_format: &str) -> Result<()> {
const CONTENT: &str = "\
import os # F401
x = y # F821
match 42: # invalid-syntax
case _: ...
";

let tempdir = TempDir::new()?;
let input = tempdir.path().join("input.py");
fs::write(&input, CONTENT)?;

let snapshot = format!("output_format_{output_format}");

let project_dir = dunce::canonicalize(tempdir.path())?;

insta::with_settings!({
filters => vec![
(tempdir_filter(&project_dir).as_str(), "[TMP]/"),
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),
]
}, {
assert_cmd_snapshot!(
snapshot,
Command::new(get_cargo_bin(BIN_NAME))
.args([
"check",
"--no-cache",
"--output-format",
output_format,
"--select",
"F401,F821",
"--target-version",
"py39",
"input.py",
])
.current_dir(&tempdir),
);
});

Ok(())
}

#[test]
fn future_annotations_preview_warning() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.args(["--config", "lint.future-annotations = true"])
.args(["--select", "F"])
.arg("--no-preview")
.arg("-")
.pass_stdin("1"),
@r"
success: true
exit_code: 0
----- stdout -----
All checks passed!

----- stderr -----
warning: The `lint.future-annotations` setting will have no effect because `preview` is disabled
",
);
}

@@ -12,8 +12,10 @@ fn display_default_settings() -> anyhow::Result<()> {

// Tempdir paths on macOS are symlinks, which doesn't play nicely with
// our snapshot filtering.
let project_dir =
dunce::canonicalize(tempdir.path()).context("Failed to canonical tempdir path.")?;
let project_dir = tempdir
.path()
.canonicalize()
.context("Failed to canonical tempdir path.")?;

std::fs::write(
project_dir.join("pyproject.toml"),

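Aside: the swap between `dunce::canonicalize` and `Path::canonicalize` above matters for snapshot stability. A minimal sketch, assuming the dunce crate: it canonicalizes like std (resolving the macOS `/tmp` symlink the comment mentions) but avoids the Windows `\\?\` verbatim prefix that std's version can introduce into paths:

use anyhow::Context;
use std::path::{Path, PathBuf};

fn canonical_project_dir(tempdir: &Path) -> anyhow::Result<PathBuf> {
    // Resolves symlinks without producing a `\\?\C:\...` path on Windows.
    dunce::canonicalize(tempdir).context("Failed to canonicalize tempdir path")
}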
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- azure
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=2;columnnumber=5;code=F821;]Undefined name `y`
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=3;columnnumber=1;]SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----
@@ -1,25 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- concise
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
input.py:2:5: F821 Undefined name `y`
input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,49 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- full
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:8: F401 [*] `os` imported but unused
|
1 | import os # F401
| ^^ F401
2 | x = y # F821
3 | match 42: # invalid-syntax
|
= help: Remove unused import: `os`

input.py:2:5: F821 Undefined name `y`
|
1 | import os # F401
2 | x = y # F821
| ^ F821
3 | match 42: # invalid-syntax
4 | case _: ...
|

input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
1 | import os # F401
2 | x = y # F821
3 | match 42: # invalid-syntax
| ^^^^^
4 | case _: ...
|

Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- github
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
::error title=Ruff (F401),file=[TMP]/input.py,line=1,col=8,endLine=1,endColumn=10::input.py:1:8: F401 `os` imported but unused
::error title=Ruff (F821),file=[TMP]/input.py,line=2,col=5,endLine=2,endColumn=6::input.py:2:5: F821 Undefined name `y`
::error title=Ruff,file=[TMP]/input.py,line=3,col=1,endLine=3,endColumn=6::input.py:3:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----
@@ -1,60 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- gitlab
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"check_name": "F401",
"description": "`os` imported but unused",
"fingerprint": "4dbad37161e65c72",
"location": {
"lines": {
"begin": 1,
"end": 1
},
"path": "input.py"
},
"severity": "major"
},
{
"check_name": "F821",
"description": "Undefined name `y`",
"fingerprint": "7af59862a085230",
"location": {
"lines": {
"begin": 2,
"end": 2
},
"path": "input.py"
},
"severity": "major"
},
{
"check_name": "syntax-error",
"description": "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"fingerprint": "e558cec859bb66e8",
"location": {
"lines": {
"begin": 3,
"end": 3
},
"path": "input.py"
},
"severity": "major"
}
]
----- stderr -----
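Aside: the `fingerprint` values in the GitLab output above are stable per-diagnostic hashes, so re-runs of the same check produce the same identifiers. A minimal sketch of how such a fingerprint can be derived; the hashed fields and the hasher here are assumptions, not ruff's actual implementation:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn fingerprint(check_name: &str, message: &str, path: &str) -> String {
    let mut hasher = DefaultHasher::new();
    // Hash the fields that identify the finding independently of line numbers.
    (check_name, message, path).hash(&mut hasher);
    format!("{:x}", hasher.finish())
}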
@@ -1,27 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- grouped
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:
1:8 F401 [*] `os` imported but unused
2:5 F821 Undefined name `y`
3:1 SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

Found 3 errors.
[*] 1 fixable with the `--fix` option.

----- stderr -----
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- json-lines
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F821","end_location":{"column":6,"row":2},"filename":"[TMP]/input.py","fix":null,"location":{"column":5,"row":2},"message":"Undefined name `y`","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
{"cell":null,"code":null,"end_location":{"column":6,"row":3},"filename":"[TMP]/input.py","fix":null,"location":{"column":1,"row":3},"message":"SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)","noqa_row":null,"url":null}

----- stderr -----
@@ -1,88 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- json
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"cell": null,
"code": "F401",
"end_location": {
"column": 10,
"row": 1
},
"filename": "[TMP]/input.py",
"fix": {
"applicability": "safe",
"edits": [
{
"content": "",
"end_location": {
"column": 1,
"row": 2
},
"location": {
"column": 1,
"row": 1
}
}
],
"message": "Remove unused import: `os`"
},
"location": {
"column": 8,
"row": 1
},
"message": "`os` imported but unused",
"noqa_row": 1,
"url": "https://docs.astral.sh/ruff/rules/unused-import"
},
{
"cell": null,
"code": "F821",
"end_location": {
"column": 6,
"row": 2
},
"filename": "[TMP]/input.py",
"fix": null,
"location": {
"column": 5,
"row": 2
},
"message": "Undefined name `y`",
"noqa_row": 2,
"url": "https://docs.astral.sh/ruff/rules/undefined-name"
},
{
"cell": null,
"code": null,
"end_location": {
"column": 6,
"row": 3
},
"filename": "[TMP]/input.py",
"fix": null,
"location": {
"column": 1,
"row": 3
},
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
"noqa_row": null,
"url": null
}
]
----- stderr -----
@@ -1,34 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- junit
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="3" failures="3" errors="0">
<testsuite name="[TMP]/input.py" tests="3" disabled="0" errors="0" failures="3" package="org.ruff">
<testcase name="org.ruff.F401" classname="[TMP]/input" line="1" column="8">
<failure message="`os` imported but unused">line 1, col 8, `os` imported but unused</failure>
</testcase>
<testcase name="org.ruff.F821" classname="[TMP]/input" line="2" column="5">
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
</testcase>
<testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1">
<failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
</testcase>
</testsuite>
</testsuites>

----- stderr -----
@@ -1,23 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- pylint
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1: [F401] `os` imported but unused
input.py:2: [F821] Undefined name `y`
input.py:3: [invalid-syntax] SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)

----- stderr -----
@@ -1,102 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- rdjson
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{
"diagnostics": [
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/unused-import",
"value": "F401"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 10,
"line": 1
},
"start": {
"column": 8,
"line": 1
}
}
},
"message": "`os` imported but unused",
"suggestions": [
{
"range": {
"end": {
"column": 1,
"line": 2
},
"start": {
"column": 1,
"line": 1
}
},
"text": ""
}
]
},
{
"code": {
"url": "https://docs.astral.sh/ruff/rules/undefined-name",
"value": "F821"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 6,
"line": 2
},
"start": {
"column": 5,
"line": 2
}
}
},
"message": "Undefined name `y`"
},
{
"code": {
"value": "invalid-syntax"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 6,
"line": 3
},
"start": {
"column": 1,
"line": 3
}
}
},
"message": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}
----- stderr -----
@@ -1,142 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- sarif
- "--select"
- "F401,F821"
- "--target-version"
- py39
- input.py
---
success: false
exit_code: 1
----- stdout -----
{
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
"runs": [
{
"results": [
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 10,
"endLine": 1,
"startColumn": 8,
"startLine": 1
}
}
}
],
"message": {
"text": "`os` imported but unused"
},
"ruleId": "F401"
},
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 6,
"endLine": 2,
"startColumn": 5,
"startLine": 2
}
}
}
],
"message": {
"text": "Undefined name `y`"
},
"ruleId": "F821"
},
{
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 6,
"endLine": 3,
"startColumn": 1,
"startLine": 3
}
}
}
],
"message": {
"text": "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
},
"ruleId": null
}
],
"tool": {
"driver": {
"informationUri": "https://github.com/astral-sh/ruff",
"name": "ruff",
"rules": [
{
"fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/source/libraries.html#library-interface-public-and-private-symbols)\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
},
"helpUri": "https://docs.astral.sh/ruff/rules/unused-import",
"id": "F401",
"properties": {
"id": "F401",
"kind": "Pyflakes",
"name": "unused-import",
"problem.severity": "error"
},
"shortDescription": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
}
},
{
"fullDescription": {
"text": "## What it does\nChecks for uses of undefined names.\n\n## Why is this bad?\nAn undefined name is likely to raise `NameError` at runtime.\n\n## Example\n```python\ndef double():\n return n * 2 # raises `NameError` if `n` is undefined when `double` is called\n```\n\nUse instead:\n```python\ndef double(n):\n return n * 2\n```\n\n## Options\n- [`target-version`]: Can be used to configure which symbols Ruff will understand\n as being available in the `builtins` namespace.\n\n## References\n- [Python documentation: Naming and binding](https://docs.python.org/3/reference/executionmodel.html#naming-and-binding)\n"
},
"help": {
"text": "Undefined name `{name}`. {tip}"
},
"helpUri": "https://docs.astral.sh/ruff/rules/undefined-name",
"id": "F821",
"properties": {
"id": "F821",
"kind": "Pyflakes",
"name": "undefined-name",
"problem.severity": "error"
},
"shortDescription": {
"text": "Undefined name `{name}`. {tip}"
}
}
],
"version": "[VERSION]"
}
}
}
],
"version": "2.1.0"
}
----- stderr -----
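Aside: the SARIF snapshot above follows the published 2.1.0 schema. A minimal sketch of the document skeleton using serde_json's `json!` macro (field names per the public schema; the per-rule metadata is elided here):

fn sarif_skeleton() -> serde_json::Value {
    serde_json::json!({
        "$schema": "https://json.schemastore.org/sarif-2.1.0.json",
        "version": "2.1.0",
        "runs": [{
            // One run per tool invocation; rules and results are filled in
            // from the collected diagnostics.
            "tool": { "driver": { "name": "ruff", "rules": [] } },
            "results": [],
        }],
    })
}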
@@ -392,7 +392,7 @@ formatter.docstring_code_line_width = dynamic
analyze.exclude = []
analyze.preview = disabled
analyze.target_version = 3.7
analyze.string_imports = disabled
analyze.detect_string_imports = false
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}


@@ -60,7 +60,7 @@ fn config_option_ignored_but_validated() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.arg("version")
.args(["--config", "foo = bar"]), @r"
.args(["--config", "foo = bar"]), @r#"
success: false
exit_code: 2
----- stdout -----
@@ -77,11 +77,12 @@ fn config_option_ignored_but_validated() {
TOML parse error at line 1, column 7
|
1 | foo = bar
| ^^^
string values must be quoted, expected literal string
| ^
invalid string
expected `"`, `'`

For more information, try '--help'.
"
"#
);
});
}

@@ -352,7 +352,7 @@ impl DisplaySet<'_> {
// FIXME: `unicode_width` sometimes disagrees with terminals on how wide a `char`
// is. For now, just accept that sometimes the code line will be longer than
// desired.
let next = char_width(ch).unwrap_or(1);
let next = unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1);
if taken + next > right - left {
was_cut_right = true;
break;
@@ -377,7 +377,7 @@ impl DisplaySet<'_> {
let left: usize = text
.chars()
.take(left)
.map(|ch| char_width(ch).unwrap_or(1))
.map(|ch| unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1))
.sum();

let mut annotations = annotations.clone();
@@ -821,7 +821,11 @@ impl DisplaySourceAnnotation<'_> {
// Length of this annotation as displayed in the stderr output
fn len(&self) -> usize {
// Account for usize underflows
self.range.1.abs_diff(self.range.0)
if self.range.1 > self.range.0 {
self.range.1 - self.range.0
} else {
self.range.0 - self.range.1
}
}

fn takes_space(&self) -> bool {
@@ -1389,7 +1393,6 @@ fn format_body<'m>(
let line_length: usize = line.len();
let line_range = (current_index, current_index + line_length);
let end_line_size = end_line.len();

body.push(DisplayLine::Source {
lineno: Some(current_line),
inline_marks: vec![],
@@ -1449,12 +1452,12 @@ fn format_body<'m>(
let annotation_start_col = line
[0..(start - line_start_index).min(line_length)]
.chars()
.map(|c| char_width(c).unwrap_or(0))
.map(|c| unicode_width::UnicodeWidthChar::width(c).unwrap_or(0))
.sum::<usize>();
let mut annotation_end_col = line
[0..(end - line_start_index).min(line_length)]
.chars()
.map(|c| char_width(c).unwrap_or(0))
.map(|c| unicode_width::UnicodeWidthChar::width(c).unwrap_or(0))
.sum::<usize>();
if annotation_start_col == annotation_end_col {
// At least highlight something
@@ -1496,7 +1499,7 @@ fn format_body<'m>(
let annotation_start_col = line
[0..(start - line_start_index).min(line_length)]
.chars()
.map(|c| char_width(c).unwrap_or(0))
.map(|c| unicode_width::UnicodeWidthChar::width(c).unwrap_or(0))
.sum::<usize>();
let annotation_end_col = annotation_start_col + 1;

@@ -1555,7 +1558,7 @@ fn format_body<'m>(
{
let end_mark = line[0..(end - line_start_index).min(line_length)]
.chars()
.map(|c| char_width(c).unwrap_or(0))
.map(|c| unicode_width::UnicodeWidthChar::width(c).unwrap_or(0))
.sum::<usize>()
.saturating_sub(1);
// If the annotation ends on a line-end character, we
@@ -1751,11 +1754,3 @@ fn format_inline_marks(
}
Ok(())
}

fn char_width(c: char) -> Option<usize> {
if c == '\t' {
Some(4)
} else {
unicode_width::UnicodeWidthChar::width(c)
}
}

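Aside: both variants of `DisplaySourceAnnotation::len` above compute |range.1 - range.0| without underflowing. `usize::abs_diff` (stable since Rust 1.60) expresses the manual branch in one call:

fn len(range: (usize, usize)) -> usize {
    // Absolute difference; never underflows, whichever endpoint is larger.
    range.1.abs_diff(range.0)
}

fn main() {
    assert_eq!(len((3, 10)), 7);
    assert_eq!(len((10, 3)), 7); // no debug-mode underflow panic
}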
@@ -1,38 +0,0 @@
<svg width="740px" height="146px" xmlns="http://www.w3.org/2000/svg">
<style>
.fg { fill: #AAAAAA }
.bg { background: #000000 }
.fg-bright-blue { fill: #5555FF }
.fg-bright-red { fill: #FF5555 }
.container {
padding: 0 10px;
line-height: 18px;
}
.bold { font-weight: bold; }
tspan {
font: 14px SFMono-Regular, Consolas, Liberation Mono, Menlo, monospace;
white-space: pre;
line-height: 18px;
}
</style>

<rect width="100%" height="100%" y="0" rx="4.5" class="bg" />

<text xml:space="preserve" class="container fg">
<tspan x="10px" y="28px"><tspan class="fg-bright-red bold">error[E0308]</tspan><tspan>: </tspan><tspan class="bold">call-non-callable</tspan>
</tspan>
<tspan x="10px" y="46px"><tspan> </tspan><tspan class="fg-bright-blue bold">--></tspan><tspan> $DIR/main.py:5:9</tspan>
</tspan>
<tspan x="10px" y="64px"><tspan> </tspan><tspan class="fg-bright-blue bold">|</tspan>
</tspan>
<tspan x="10px" y="82px"><tspan class="fg-bright-blue bold">4 |</tspan><tspan> def f():</tspan>
</tspan>
<tspan x="10px" y="100px"><tspan class="fg-bright-blue bold">5 |</tspan><tspan> return (1 == '2')() # Tab indented</tspan>
</tspan>
<tspan x="10px" y="118px"><tspan> </tspan><tspan class="fg-bright-blue bold">|</tspan><tspan> </tspan><tspan class="fg-bright-red bold">^^^^^^^^^^^^</tspan>
</tspan>
<tspan x="10px" y="136px"><tspan> </tspan><tspan class="fg-bright-blue bold">|</tspan>
</tspan>
</text>

</svg>

@@ -1,45 +0,0 @@

# [crates/ruff_db/src/diagnostic/render.rs:123:47] diag.to_annotate() = Message {
# level: Error,
# id: Some(
# "call-non-callable",
# ),
# title: "Object of type `bool` is not callable",
# snippets: [
# Snippet {
# origin: Some(
# "main.py",
# ),
# line_start: 1,
# source: "def f():\n\treturn (1 == '2')() # Tab indented\n",
# annotations: [
# Annotation {
# range: 17..29,
# label: None,
# level: Error,
# },
# ],
# fold: false,
# },
# ],
# footer: [],
# }

[message]
level = "Error"
id = "E0308"
title = "call-non-callable"

[[message.snippets]]
source = "def f():\n\treturn (1 == '2')() # Tab indented\n"
line_start = 4
origin = "$DIR/main.py"

[[message.snippets.annotations]]
label = ""
level = "Error"
range = [17, 29]

[renderer]
# anonymized_line_numbers = true
color = true
@@ -1,36 +0,0 @@
<svg width="1196px" height="128px" xmlns="http://www.w3.org/2000/svg">
<style>
.fg { fill: #AAAAAA }
.bg { background: #000000 }
.fg-bright-blue { fill: #5555FF }
.fg-bright-red { fill: #FF5555 }
.container {
padding: 0 10px;
line-height: 18px;
}
.bold { font-weight: bold; }
tspan {
font: 14px SFMono-Regular, Consolas, Liberation Mono, Menlo, monospace;
white-space: pre;
line-height: 18px;
}
</style>

<rect width="100%" height="100%" y="0" rx="4.5" class="bg" />

<text xml:space="preserve" class="container fg">
<tspan x="10px" y="28px"><tspan class="fg-bright-red bold">error[E0308]</tspan><tspan>: </tspan><tspan class="bold">mismatched types</tspan>
</tspan>
<tspan x="10px" y="46px"><tspan> </tspan><tspan class="fg-bright-blue bold">--></tspan><tspan> $DIR/non-whitespace-trimming.rs:4:6</tspan>
</tspan>
<tspan x="10px" y="64px"><tspan> </tspan><tspan class="fg-bright-blue bold">|</tspan>
</tspan>
<tspan x="10px" y="82px"><tspan class="fg-bright-blue bold">4 |</tspan><tspan> </tspan><tspan class="fg-bright-blue bold">...</tspan><tspan> s_data['d_dails'] = bb['contacted'][hostip]['ansible_facts']['ansible_devices']['vda']['vendor'] + " " + bb['contacted'][hostip</tspan><tspan class="fg-bright-blue bold">...</tspan>
</tspan>
<tspan x="10px" y="100px"><tspan> </tspan><tspan class="fg-bright-blue bold">|</tspan><tspan> </tspan><tspan class="fg-bright-red bold">^^^^^^</tspan>
</tspan>
<tspan x="10px" y="118px"><tspan> </tspan><tspan class="fg-bright-blue bold">|</tspan>
</tspan>
</text>

</svg>

@@ -1,20 +0,0 @@
[message]
level = "Error"
id = "E0308"
title = "mismatched types"

[[message.snippets]]
source = """
s_data['d_dails'] = bb['contacted'][hostip]['ansible_facts']['ansible_devices']['vda']['vendor'] + " " + bb['contacted'][hostip]['an
"""
line_start = 4
origin = "$DIR/non-whitespace-trimming.rs"

[[message.snippets.annotations]]
label = ""
level = "Error"
range = [5, 11]

[renderer]
# anonymized_line_numbers = true
color = true
@@ -21,7 +21,7 @@
<text xml:space="preserve" class="container fg">
<tspan x="10px" y="28px"><tspan class="fg-bright-red bold">error[E0308]</tspan><tspan>: </tspan><tspan class="bold">mismatched types</tspan>
</tspan>
<tspan x="10px" y="46px"><tspan> </tspan><tspan class="fg-bright-blue bold">--></tspan><tspan> $DIR/non-whitespace-trimming.rs:4:238</tspan>
<tspan x="10px" y="46px"><tspan> </tspan><tspan class="fg-bright-blue bold">--></tspan><tspan> $DIR/non-whitespace-trimming.rs:4:242</tspan>
</tspan>
<tspan x="10px" y="64px"><tspan> </tspan><tspan class="fg-bright-blue bold">|</tspan>
</tspan>

@@ -13,12 +13,12 @@ origin = "$DIR/non-whitespace-trimming.rs"
[[message.snippets.annotations]]
label = "expected `()`, found integer"
level = "Error"
range = [237, 239]
range = [241, 243]

[[message.snippets.annotations]]
label = "expected due to this"
level = "Error"
range = [232, 234]
range = [236, 238]

[renderer]

@@ -2,7 +2,6 @@
use ruff_benchmark::criterion;
use ruff_benchmark::real_world_projects::{InstalledProject, RealWorldProject};

use std::fmt::Write;
use std::ops::Range;

use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
@@ -18,7 +17,7 @@ use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ChangedKind};
use ty_project::{CheckMode, Db, ProjectDatabase, ProjectMetadata};
use ty_project::{Db, ProjectDatabase, ProjectMetadata};

struct Case {
db: ProjectDatabase,
@@ -102,7 +101,6 @@ fn setup_tomllib_case() -> Case {

let re = re.unwrap();

db.set_check_mode(CheckMode::OpenFiles);
db.project().set_open_files(&mut db, tomllib_files);

let re_path = re.path(&db).as_system_path().unwrap().to_owned();
@@ -238,7 +236,6 @@ fn setup_micro_case(code: &str) -> Case {
let mut db = ProjectDatabase::new(metadata, system).unwrap();
let file = system_path_to_file(&db, SystemPathBuf::from(file_path)).unwrap();

db.set_check_mode(CheckMode::OpenFiles);
db.project()
.set_open_files(&mut db, FxHashSet::from_iter([file]));

@@ -351,130 +348,6 @@ fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
});
}

fn benchmark_complex_constrained_attributes_1(criterion: &mut Criterion) {
setup_rayon();

criterion.bench_function("ty_micro[complex_constrained_attributes_1]", |b| {
b.iter_batched_ref(
|| {
// This is a regression benchmark for https://github.com/astral-sh/ty/issues/627.
// Before this was fixed, the following sample would take >1s to type check.
setup_micro_case(
r#"
class C:
def f(self: "C"):
if isinstance(self.a, str):
return

if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
"#,
)
},
|case| {
let Case { db, .. } = case;
let result = db.check();
assert!(!result.is_empty());
},
BatchSize::SmallInput,
);
});
}

fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
setup_rayon();

criterion.bench_function("ty_micro[complex_constrained_attributes_2]", |b| {
b.iter_batched_ref(
|| {
// This is similar to the case above, but now the attributes are actually defined.
// https://github.com/astral-sh/ty/issues/711
setup_micro_case(
r#"
class C:
def f(self: "C"):
self.a = ""
self.b = ""

if isinstance(self.a, str):
return

if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
"#,
)
},
|case| {
let Case { db, .. } = case;
let result = db.check();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
);
});
}

fn benchmark_many_enum_members(criterion: &mut Criterion) {
const NUM_ENUM_MEMBERS: usize = 512;

setup_rayon();

let mut code = String::new();
writeln!(&mut code, "from enum import Enum").ok();

writeln!(&mut code, "class E(Enum):").ok();
for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, " m{i} = {i}").ok();
}
writeln!(&mut code).ok();

for i in 0..NUM_ENUM_MEMBERS {
writeln!(&mut code, "print(E.m{i})").ok();
}

criterion.bench_function("ty_micro[many_enum_members]", |b| {
b.iter_batched_ref(
|| setup_micro_case(&code),
|case| {
let Case { db, .. } = case;
let result = db.check();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
);
});
}

struct ProjectBenchmark<'a> {
project: InstalledProject<'a>,
fs: MemoryFileSystem,
@@ -504,7 +377,8 @@ impl<'a> ProjectBenchmark<'a> {
metadata.apply_options(Options {
environment: Some(EnvironmentOptions {
python_version: Some(RangedValue::cli(self.project.config.python_version)),
python: Some(RelativePathBuf::cli(SystemPath::new(".venv"))),
python: (!self.project.config().dependencies.is_empty())
.then_some(RelativePathBuf::cli(SystemPath::new(".venv"))),
..EnvironmentOptions::default()
}),
..Options::default()
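Aside: the `then_some` form above maps the emptiness check onto an `Option` in one step, so `.venv` is only configured when the project actually has dependencies. A minimal sketch of the semantics:

fn venv_path(has_dependencies: bool) -> Option<&'static str> {
    // `bool::then_some`: true -> Some(value), false -> None.
    has_dependencies.then_some(".venv")
}

fn main() {
    assert_eq!(venv_path(true), Some(".venv"));
    assert_eq!(venv_path(false), None);
}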
@@ -527,21 +401,17 @@ impl<'a> ProjectBenchmark<'a> {
|
||||
|
||||
#[track_caller]
|
||||
fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
|
||||
fn check_project(db: &mut ProjectDatabase, project_name: &str, max_diagnostics: usize) {
|
||||
fn check_project(db: &mut ProjectDatabase, max_diagnostics: usize) {
|
||||
let result = db.check();
|
||||
let diagnostics = result.len();
|
||||
|
||||
if diagnostics > max_diagnostics {
|
||||
let details = result
|
||||
.into_iter()
|
||||
.map(|diagnostic| diagnostic.concise_message().to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n ");
|
||||
assert!(
|
||||
diagnostics <= max_diagnostics,
|
||||
"{project_name}: Expected <={max_diagnostics} diagnostics but got {diagnostics}:\n {details}",
|
||||
);
|
||||
}
|
||||
assert!(
|
||||
diagnostics > 1 && diagnostics <= max_diagnostics,
|
||||
"Expected between {} and {} diagnostics but got {}",
|
||||
1,
|
||||
max_diagnostics,
|
||||
diagnostics
|
||||
);
|
||||
}
|
||||
|
||||
setup_rayon();
|
||||
@@ -551,7 +421,7 @@ fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
|
||||
group.bench_function(benchmark.project.config.name, |b| {
|
||||
b.iter_batched_ref(
|
||||
|| benchmark.setup_iteration(),
|
||||
|db| check_project(db, benchmark.project.config.name, benchmark.max_diagnostics),
|
||||
|db| check_project(db, benchmark.max_diagnostics),
|
||||
BatchSize::SmallInput,
|
||||
);
|
||||
});
|
||||
@@ -608,31 +478,11 @@ fn anyio(criterion: &mut Criterion) {
|
||||
bench_project(&benchmark, criterion);
|
||||
}
|
||||
|
||||
fn datetype(criterion: &mut Criterion) {
|
||||
let benchmark = ProjectBenchmark::new(
|
||||
RealWorldProject {
|
||||
name: "DateType",
|
||||
repository: "https://github.com/glyph/DateType",
|
||||
commit: "57c9c93cf2468069f72945fc04bf27b64100dad8",
|
||||
paths: vec![SystemPath::new("src")],
|
||||
dependencies: vec![],
|
||||
max_dep_date: "2025-07-04",
|
||||
python_version: PythonVersion::PY313,
|
||||
},
|
||||
2,
|
||||
);
|
||||
|
||||
bench_project(&benchmark, criterion);
|
||||
}
|
||||
|
||||
criterion_group!(check_file, benchmark_cold, benchmark_incremental);
|
||||
criterion_group!(
|
||||
micro,
|
||||
benchmark_many_string_assignments,
|
||||
benchmark_many_tuple_assignments,
|
||||
benchmark_complex_constrained_attributes_1,
|
||||
benchmark_complex_constrained_attributes_2,
|
||||
benchmark_many_enum_members,
|
||||
);
|
||||
criterion_group!(project, anyio, attrs, hydra, datetype);
|
||||
criterion_group!(project, anyio, attrs, hydra);
|
||||
criterion_main!(check_file, micro, project);
|
||||
|
||||
@@ -36,7 +36,8 @@ impl<'a> Benchmark<'a> {
|
||||
metadata.apply_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: Some(RangedValue::cli(self.project.config.python_version)),
|
||||
python: Some(RelativePathBuf::cli(SystemPath::new(".venv"))),
|
||||
python: (!self.project.config().dependencies.is_empty())
|
||||
.then_some(RelativePathBuf::cli(SystemPath::new(".venv"))),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
@@ -242,7 +243,7 @@ fn large(bencher: Bencher, benchmark: &Benchmark) {
|
||||
run_single_threaded(bencher, benchmark);
|
||||
}
|
||||
|
||||
#[bench(args=[&*PYDANTIC], sample_size=3, sample_count=8)]
|
||||
#[bench(args=[&*PYDANTIC], sample_size=3, sample_count=3)]
|
||||
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
|
||||
let thread_pool = ThreadPoolBuilder::new().build().unwrap();
|
||||
|
||||
|
||||
@@ -74,17 +74,19 @@ impl<'a> RealWorldProject<'a> {
};

// Install dependencies if specified
tracing::debug!(
"Installing {} dependencies for project '{}'...",
checkout.project().dependencies.len(),
checkout.project().name
);
let start_install = std::time::Instant::now();
install_dependencies(&checkout)?;
tracing::debug!(
"Dependency installation completed in {:.2}s",
start_install.elapsed().as_secs_f64()
);
if !checkout.project().dependencies.is_empty() {
tracing::debug!(
"Installing {} dependencies for project '{}'...",
checkout.project().dependencies.len(),
checkout.project().name
);
let start = std::time::Instant::now();
install_dependencies(&checkout)?;
tracing::debug!(
"Dependency installation completed in {:.2}s",
start.elapsed().as_secs_f64()
);
}

tracing::debug!("Project setup took: {:.2}s", start.elapsed().as_secs_f64());

@@ -279,14 +281,6 @@ fn install_dependencies(checkout: &Checkout) -> Result<()> {
String::from_utf8_lossy(&output.stderr)
);

if checkout.project().dependencies.is_empty() {
tracing::debug!(
"No dependencies to install for project '{}'",
checkout.project().name
);
return Ok(());
}

// Install dependencies with date constraint in the isolated environment
let mut cmd = Command::new("uv");
cmd.args([
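For context, a hedged sketch of spawning `uv` from Rust the way `install_dependencies` does above; the `--exclude-newer` flag and the package name are illustrative assumptions, not the harness's exact arguments:

```rust
use std::process::Command;

fn main() {
    // `--exclude-newer` asks uv to ignore distributions published after the
    // given date, which is how a `max_dep_date`-style constraint can be applied.
    let status = Command::new("uv")
        .args(["pip", "install", "--exclude-newer", "2025-07-04", "anyio"])
        .status()
        .expect("failed to spawn uv");
    assert!(status.success());
}
```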
@@ -13,36 +13,31 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true, optional = true }
ruff_diagnostics = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true, features = ["get-size"] }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true, features = ["get-size"] }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ty_static = { workspace = true }

anstyle = { workspace = true }
arc-swap = { workspace = true }
camino = { workspace = true }
countme = { workspace = true }
dashmap = { workspace = true }
dunce = { workspace = true }
filetime = { workspace = true }
get-size2 = { workspace = true }
glob = { workspace = true }
ignore = { workspace = true, optional = true }
matchit = { workspace = true }
path-slash = { workspace = true }
quick-junit = { workspace = true, optional = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
path-slash = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
unicode-width = { workspace = true }
rustc-hash = { workspace = true }
zip = { workspace = true }

[target.'cfg(target_arch="wasm32")'.dependencies]
@@ -57,13 +52,7 @@ tempfile = { workspace = true }

[features]
cache = ["ruff_cache"]
junit = ["dep:quick-junit"]
os = ["ignore", "dep:etcetera"]
serde = [
"camino/serde1",
"dep:serde",
"dep:serde_json",
"ruff_diagnostics/serde",
]
serde = ["dep:serde", "camino/serde1"]
# Exposes testing utilities.
testing = ["tracing-subscriber"]
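As a reminder of how these optional dependencies connect to code, a minimal sketch (the function name is hypothetical): a `junit = ["dep:quick-junit"]` feature pulls in the crate only when enabled, and the corresponding code is compiled behind a matching `cfg` gate:

```rust
// Only compiled when the crate is built with `--features junit`.
#[cfg(feature = "junit")]
pub fn render_junit_report() {
    // `quick_junit` is only linked in under this feature.
    let _report = quick_junit::Report::new("ruff");
}
```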
@@ -1,14 +1,12 @@
use std::{fmt::Formatter, path::Path, sync::Arc};
use std::{fmt::Formatter, sync::Arc};

use ruff_diagnostics::{Applicability, Fix};
use ruff_source_file::{LineColumn, SourceCode, SourceFile};
use render::{FileResolver, Input};
use ruff_source_file::{SourceCode, SourceFile};

use ruff_annotate_snippets::Level as AnnotateLevel;
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_text_size::{Ranged, TextRange};

pub use self::render::{
DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary,
};
pub use self::render::DisplayDiagnostic;
use crate::{Db, files::File};

mod render;
@@ -21,7 +19,7 @@ mod stylesheet;
/// characteristics in the inputs given to the tool. Typically, but not always,
/// a characteristic is a deficiency. An example of a characteristic that is
/// _not_ a deficiency is the `reveal_type` diagnostic for our type checker.
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Diagnostic {
/// The actual diagnostic.
///
@@ -64,37 +62,10 @@ impl Diagnostic {
message: message.into_diagnostic_message(),
annotations: vec![],
subs: vec![],
fix: None,
parent: None,
noqa_offset: None,
secondary_code: None,
});
Diagnostic { inner }
}

/// Creates a `Diagnostic` for a syntax error.
///
/// Unlike the more general [`Diagnostic::new`], this requires a [`Span`] and a [`TextRange`]
/// attached to it.
///
/// This should _probably_ be a method on the syntax errors, but
/// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
/// the other way around. And since we want to do this conversion in a couple
/// places, it makes sense to centralize it _somewhere_. So it's here for now.
///
/// Note that `message` is stored in the primary annotation, _not_ in the primary diagnostic
/// message.
pub fn invalid_syntax(
span: impl Into<Span>,
message: impl IntoDiagnosticMessage,
range: impl Ranged,
) -> Diagnostic {
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
let span = span.into().with_range(range.range());
diag.annotate(Annotation::primary(span).message(message));
diag
}

/// Add an annotation to this diagnostic.
///
/// Annotations for a diagnostic are optional, but if any are added,
@@ -124,14 +95,7 @@ impl Diagnostic {
/// directly. If callers want or need to avoid cloning the diagnostic
/// message, then they can also pass a `DiagnosticMessage` directly.
pub fn info<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Info, message));
}

/// Adds a "help" sub-diagnostic with the given message.
///
/// See the closely related [`Diagnostic::info`] method for more details.
pub fn help<'a>(&mut self, message: impl IntoDiagnosticMessage + 'a) {
self.sub(SubDiagnostic::new(SubDiagnosticSeverity::Help, message));
self.sub(SubDiagnostic::new(Severity::Info, message));
}

/// Adds a "sub" diagnostic to this diagnostic.
@@ -262,11 +226,6 @@ impl Diagnostic {
self.primary_annotation().map(|ann| ann.span.clone())
}

/// Returns a reference to the primary span of this diagnostic.
pub fn primary_span_ref(&self) -> Option<&Span> {
self.primary_annotation().map(|ann| &ann.span)
}

/// Returns the tags from the primary annotation of this diagnostic if it exists.
pub fn primary_tags(&self) -> Option<&[DiagnosticTag]> {
self.primary_annotation().map(|ann| ann.tags.as_slice())
@@ -309,187 +268,15 @@ impl Diagnostic {
pub fn sub_diagnostics(&self) -> &[SubDiagnostic] {
&self.inner.subs
}

/// Returns the fix for this diagnostic if it exists.
pub fn fix(&self) -> Option<&Fix> {
self.inner.fix.as_ref()
}

/// Set the fix for this diagnostic.
pub fn set_fix(&mut self, fix: Fix) {
debug_assert!(
self.primary_span().is_some(),
"Expected a source file for a diagnostic with a fix"
);
Arc::make_mut(&mut self.inner).fix = Some(fix);
}

/// Remove the fix for this diagnostic.
pub fn remove_fix(&mut self) {
Arc::make_mut(&mut self.inner).fix = None;
}

/// Returns `true` if the diagnostic contains a [`Fix`].
pub fn fixable(&self) -> bool {
self.fix().is_some()
}

/// Returns the offset of the parent statement for this diagnostic if it exists.
///
/// This is primarily used for checking noqa/secondary code suppressions.
pub fn parent(&self) -> Option<TextSize> {
self.inner.parent
}

/// Set the offset of the diagnostic's parent statement.
pub fn set_parent(&mut self, parent: TextSize) {
Arc::make_mut(&mut self.inner).parent = Some(parent);
}

/// Returns the remapped offset for a suppression comment if it exists.
///
/// Like [`Diagnostic::parent`], this is used for noqa code suppression comments in Ruff.
pub fn noqa_offset(&self) -> Option<TextSize> {
self.inner.noqa_offset
}

/// Set the remapped offset for a suppression comment.
pub fn set_noqa_offset(&mut self, noqa_offset: TextSize) {
Arc::make_mut(&mut self.inner).noqa_offset = Some(noqa_offset);
}

/// Returns the secondary code for the diagnostic if it exists.
///
/// The "primary" code for the diagnostic is its lint name. Diagnostics in ty don't have
/// secondary codes (yet), but in Ruff the noqa code is used.
pub fn secondary_code(&self) -> Option<&SecondaryCode> {
self.inner.secondary_code.as_ref()
}

/// Set the secondary code for this diagnostic.
pub fn set_secondary_code(&mut self, code: SecondaryCode) {
Arc::make_mut(&mut self.inner).secondary_code = Some(code);
}

/// Returns the name used to represent the diagnostic.
pub fn name(&self) -> &'static str {
self.id().as_str()
}

/// Returns `true` if `self` is a syntax error message.
pub fn is_invalid_syntax(&self) -> bool {
self.id().is_invalid_syntax()
}

/// Returns the message body to display to the user.
pub fn body(&self) -> &str {
self.primary_message()
}

/// Returns the message of the first sub-diagnostic with a `Help` severity.
///
/// Note that this is used as the fix title/suggestion for some of Ruff's output formats, but in
/// general this is not the guaranteed meaning of such a message.
pub fn first_help_text(&self) -> Option<&str> {
self.sub_diagnostics()
.iter()
.find(|sub| matches!(sub.inner.severity, SubDiagnosticSeverity::Help))
.map(|sub| sub.inner.message.as_str())
}

/// Returns the URL for the rule documentation, if it exists.
pub fn to_ruff_url(&self) -> Option<String> {
if self.is_invalid_syntax() {
None
} else {
Some(format!(
"{}/rules/{}",
env!("CARGO_PKG_HOMEPAGE"),
self.name()
))
}
}

/// Returns the filename for the message.
///
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
pub fn expect_ruff_filename(&self) -> String {
self.expect_primary_span()
.expect_ruff_file()
.name()
.to_string()
}

/// Computes the start source location for the message.
///
/// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
/// span has no range.
pub fn expect_ruff_start_location(&self) -> LineColumn {
self.expect_primary_span()
.expect_ruff_file()
.to_source_code()
.line_column(self.expect_range().start())
}

/// Computes the end source location for the message.
///
/// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
/// span has no range.
pub fn expect_ruff_end_location(&self) -> LineColumn {
self.expect_primary_span()
.expect_ruff_file()
.to_source_code()
.line_column(self.expect_range().end())
}

/// Returns the [`SourceFile`] which the message belongs to.
pub fn ruff_source_file(&self) -> Option<&SourceFile> {
self.primary_span_ref()?.as_ruff_file()
}

/// Returns the [`SourceFile`] which the message belongs to.
///
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
pub fn expect_ruff_source_file(&self) -> &SourceFile {
self.ruff_source_file()
.expect("Expected a ruff source file")
}

/// Returns the [`TextRange`] for the diagnostic.
pub fn range(&self) -> Option<TextRange> {
self.primary_span()?.range()
}

/// Returns the [`TextRange`] for the diagnostic.
///
/// Panics if the diagnostic has no primary span or if the span has no range.
pub fn expect_range(&self) -> TextRange {
self.range().expect("Expected a range for the primary span")
}

/// Returns the ordering of diagnostics based on the start of their ranges, if they have any.
///
/// Panics if either diagnostic has no primary span, if the span has no range, or if its file is
/// not a `SourceFile`.
pub fn ruff_start_ordering(&self, other: &Self) -> std::cmp::Ordering {
(self.expect_ruff_source_file(), self.expect_range().start()).cmp(&(
other.expect_ruff_source_file(),
other.expect_range().start(),
))
}
}

#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq)]
struct DiagnosticInner {
id: DiagnosticId,
severity: Severity,
message: DiagnosticMessage,
annotations: Vec<Annotation>,
subs: Vec<SubDiagnostic>,
fix: Option<Fix>,
parent: Option<TextSize>,
noqa_offset: Option<TextSize>,
secondary_code: Option<SecondaryCode>,
}

struct RenderingSortKey<'a> {
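Taken together, the methods above form a small builder-style API on `Diagnostic`. A hedged usage sketch (the id, messages, and span are illustrative only):

```rust
fn example(span: Span) -> Diagnostic {
    let mut diag = Diagnostic::new(
        DiagnosticId::InvalidSyntax,
        Severity::Error,
        "something went wrong",
    );
    // The primary annotation carries the span the renderer will highlight.
    diag.annotate(Annotation::primary(span).message("the problem is here"));
    // Sub-diagnostics render after the main message, in insertion order.
    diag.info("extra context as an `Info` sub-diagnostic");
    diag.help("a `Help` sub-diagnostic; `first_help_text` returns this message");
    diag
}
```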
@@ -555,7 +342,7 @@ impl Eq for RenderingSortKey<'_> {}
/// Currently, the order in which sub-diagnostics are rendered relative to one
/// another (for a single parent diagnostic) is the order in which they were
/// attached to the diagnostic.
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct SubDiagnostic {
/// Like with `Diagnostic`, we box the `SubDiagnostic` to make it
/// pointer-sized.
@@ -580,10 +367,7 @@ impl SubDiagnostic {
/// Callers can pass anything that implements `std::fmt::Display`
/// directly. If callers want or need to avoid cloning the diagnostic
/// message, then they can also pass a `DiagnosticMessage` directly.
pub fn new<'a>(
severity: SubDiagnosticSeverity,
message: impl IntoDiagnosticMessage + 'a,
) -> SubDiagnostic {
pub fn new<'a>(severity: Severity, message: impl IntoDiagnosticMessage + 'a) -> SubDiagnostic {
let inner = Box::new(SubDiagnosticInner {
severity,
message: message.into_diagnostic_message(),
@@ -659,9 +443,9 @@ impl SubDiagnostic {
}
}

#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq)]
struct SubDiagnosticInner {
severity: SubDiagnosticSeverity,
severity: Severity,
message: DiagnosticMessage,
annotations: Vec<Annotation>,
}
@@ -687,7 +471,7 @@ struct SubDiagnosticInner {
///
/// Messages attached to annotations should also be as brief and specific as
/// possible. Long messages could negatively impact the quality of rendering.
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Annotation {
/// The span of this annotation, corresponding to some subsequence of the
/// user's input that we want to highlight.
@@ -807,7 +591,7 @@ impl Annotation {
///
/// These tags are used to provide additional information about the annotation
/// and are passed through to the language server protocol.
#[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum DiagnosticTag {
/// Unused or unnecessary code. Used for unused parameters, unreachable code, etc.
Unnecessary,
@@ -821,7 +605,7 @@ pub enum DiagnosticTag {
/// be in kebab case, e.g. `no-foo` (all lower case).
///
/// Rules use kebab case, e.g. `no-foo`.
#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, get_size2::GetSize)]
#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct LintName(&'static str);

impl LintName {
@@ -861,7 +645,7 @@ impl PartialEq<&str> for LintName {
}

/// Uniquely identifies the kind of a diagnostic.
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash, get_size2::GetSize)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
pub enum DiagnosticId {
Panic,

@@ -951,9 +735,6 @@ pub enum DiagnosticId {
/// # no `[overrides.rules]`
/// ```
UselessOverridesSection,

/// Use of a deprecated setting.
DeprecatedSetting,
}

impl DiagnosticId {
@@ -992,7 +773,6 @@ impl DiagnosticId {
DiagnosticId::EmptyInclude => "empty-include",
DiagnosticId::UnnecessaryOverridesSection => "unnecessary-overrides-section",
DiagnosticId::UselessOverridesSection => "useless-overrides-section",
DiagnosticId::DeprecatedSetting => "deprecated-setting",
}
}

@@ -1016,7 +796,7 @@ impl std::fmt::Display for DiagnosticId {
///
/// This enum presents a unified interface to these two types for the sake of creating [`Span`]s and
/// emitting diagnostics from both ty and ruff.
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum UnifiedFile {
Ty(File),
Ruff(SourceFile),
@@ -1030,18 +810,6 @@ impl UnifiedFile {
}
}

/// Return the file's path relative to the current working directory.
pub fn relative_path<'a>(&'a self, resolver: &'a dyn FileResolver) -> &'a Path {
let cwd = resolver.current_directory();
let path = Path::new(self.path(resolver));

if let Ok(path) = path.strip_prefix(cwd) {
return path;
}

path
}

fn diagnostic_source(&self, resolver: &dyn FileResolver) -> DiagnosticSource {
match self {
UnifiedFile::Ty(file) => DiagnosticSource::Ty(resolver.input(*file)),
@@ -1080,7 +848,7 @@ impl DiagnosticSource {
/// It consists of a `File` and an optional range into that file. When the
/// range isn't present, it semantically implies that the diagnostic refers to
/// the entire file. For example, when the file should be executable but isn't.
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Span {
file: UnifiedFile,
range: Option<TextRange>,
@@ -1125,15 +893,9 @@ impl Span {
///
/// Panics if the file is a [`UnifiedFile::Ty`] instead of a [`UnifiedFile::Ruff`].
pub fn expect_ruff_file(&self) -> &SourceFile {
self.as_ruff_file()
.expect("Expected a ruff `SourceFile`, found a ty `File`")
}

/// Returns the [`SourceFile`] attached to this [`Span`].
pub fn as_ruff_file(&self) -> Option<&SourceFile> {
match &self.file {
UnifiedFile::Ty(_) => None,
UnifiedFile::Ruff(file) => Some(file),
UnifiedFile::Ty(_) => panic!("Expected a ruff `SourceFile`, found a ty `File`"),
UnifiedFile::Ruff(file) => file,
}
}
}
@@ -1158,7 +920,7 @@ impl From<crate::files::FileRange> for Span {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
pub enum Severity {
Info,
Warning,
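The `Span` hunk above changes `as_ruff_file` from panicking on a ty `File` to returning an `Option`, with `expect_ruff_file` retaining the panicking behavior. A small sketch of how a caller can now branch instead of crashing:

```rust
fn ruff_filename(span: &Span) -> Option<String> {
    // `None` for a ty `File`; previously this path would have panicked.
    let file = span.as_ruff_file()?;
    Some(file.name().to_string())
}
```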
@@ -1188,32 +950,6 @@ impl Severity {
}
}

/// Like [`Severity`] but exclusively for sub-diagnostics.
///
/// This type only exists to add an additional `Help` severity that isn't present in `Severity` or
/// used for main diagnostics. If we want to add `Severity::Help` in the future, this type could be
/// deleted and the two combined again.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, get_size2::GetSize)]
pub enum SubDiagnosticSeverity {
Help,
Info,
Warning,
Error,
Fatal,
}

impl SubDiagnosticSeverity {
fn to_annotate(self) -> AnnotateLevel {
match self {
SubDiagnosticSeverity::Help => AnnotateLevel::Help,
SubDiagnosticSeverity::Info => AnnotateLevel::Info,
SubDiagnosticSeverity::Warning => AnnotateLevel::Warning,
SubDiagnosticSeverity::Error => AnnotateLevel::Error,
SubDiagnosticSeverity::Fatal => AnnotateLevel::Error,
}
}
}

/// Configuration for rendering diagnostics.
#[derive(Clone, Debug)]
pub struct DisplayDiagnosticConfig {
@@ -1234,21 +970,6 @@ pub struct DisplayDiagnosticConfig {
/// here for now as the most "sensible" place for it to live until
/// we had more concrete use cases. ---AG
context: usize,
/// Whether to use preview formatting for Ruff diagnostics.
#[allow(
dead_code,
reason = "This is currently only used for JSON but will be needed soon for other formats"
)]
preview: bool,
/// Whether to hide the real `Severity` of diagnostics.
///
/// This is intended for temporary use by Ruff, which only has a single `error` severity at the
/// moment. We should be able to remove this option when Ruff gets more severities.
hide_severity: bool,
/// Whether to show the availability of a fix in a diagnostic.
show_fix_status: bool,
/// The lowest applicability that should be shown when reporting diagnostics.
fix_applicability: Applicability,
}

impl DisplayDiagnosticConfig {
@@ -1269,43 +990,6 @@ impl DisplayDiagnosticConfig {
..self
}
}

/// Whether to enable preview behavior or not.
pub fn preview(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
preview: yes,
..self
}
}

/// Whether to hide a diagnostic's severity or not.
pub fn hide_severity(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
hide_severity: yes,
..self
}
}

/// Whether to show a fix's availability or not.
pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
show_fix_status: yes,
..self
}
}

/// Set the lowest fix applicability that should be shown.
///
/// In other words, an applicability of `Safe` (the default) would suppress showing fixes or fix
/// availability for unsafe or display-only fixes.
///
/// Note that this option is currently ignored when `hide_severity` is false.
pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
fix_applicability: applicability,
..self
}
}
}

impl Default for DisplayDiagnosticConfig {
@@ -1314,10 +998,6 @@ impl Default for DisplayDiagnosticConfig {
format: DiagnosticFormat::default(),
color: false,
context: 2,
preview: false,
hide_severity: false,
show_fix_status: false,
fix_applicability: Applicability::Safe,
}
}
}
@@ -1345,31 +1025,6 @@ pub enum DiagnosticFormat {
///
/// This may use color when printing to a `tty`.
Concise,
/// Print diagnostics in the [Azure Pipelines] format.
///
/// [Azure Pipelines]: https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning
Azure,
/// Print diagnostics in JSON format.
///
/// Unlike `json-lines`, this prints all of the diagnostics as a JSON array.
#[cfg(feature = "serde")]
Json,
/// Print diagnostics in JSON format, one per line.
///
/// This will print each diagnostic as a separate JSON object on its own line. See the `json`
/// format for an array of all diagnostics. See <https://jsonlines.org/> for more details.
#[cfg(feature = "serde")]
JsonLines,
/// Print diagnostics in the JSON format expected by [reviewdog].
///
/// [reviewdog]: https://github.com/reviewdog/reviewdog
#[cfg(feature = "serde")]
Rdjson,
/// Print diagnostics in the format emitted by Pylint.
Pylint,
/// Print diagnostics in the format expected by JUnit.
#[cfg(feature = "junit")]
Junit,
}

/// A representation of the kinds of messages inside a diagnostic.
@@ -1428,7 +1083,7 @@ impl std::fmt::Display for ConciseMessage<'_> {
/// In most cases, callers shouldn't need to use this. Instead, there is
/// a blanket trait implementation for `IntoDiagnosticMessage` for
/// anything that implements `std::fmt::Display`.
#[derive(Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DiagnosticMessage(Box<str>);

impl DiagnosticMessage {
@@ -1488,52 +1143,41 @@ impl<T: std::fmt::Display> IntoDiagnosticMessage for T {
}
}

/// A secondary identifier for a lint diagnostic.
/// Creates a `Diagnostic` from a parse error.
///
/// For Ruff rules this means the noqa code.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, get_size2::GetSize)]
#[cfg_attr(feature = "serde", derive(serde::Serialize), serde(transparent))]
pub struct SecondaryCode(String);

impl SecondaryCode {
pub fn new(code: String) -> Self {
Self(code)
}

pub fn as_str(&self) -> &str {
&self.0
}
/// This should _probably_ be a method on `ruff_python_parser::ParseError`, but
/// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
/// the other way around. And since we want to do this conversion in a couple
/// places, it makes sense to centralize it _somewhere_. So it's here for now.
pub fn create_parse_diagnostic(file: File, err: &ruff_python_parser::ParseError) -> Diagnostic {
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
let span = Span::from(file).with_range(err.location);
diag.annotate(Annotation::primary(span).message(&err.error));
diag
}

impl std::fmt::Display for SecondaryCode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
/// Creates a `Diagnostic` from an unsupported syntax error.
///
/// See [`create_parse_diagnostic`] for more details.
pub fn create_unsupported_syntax_diagnostic(
file: File,
err: &ruff_python_parser::UnsupportedSyntaxError,
) -> Diagnostic {
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
let span = Span::from(file).with_range(err.range);
diag.annotate(Annotation::primary(span).message(err.to_string()));
diag
}

impl std::ops::Deref for SecondaryCode {
type Target = str;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl PartialEq<&str> for SecondaryCode {
fn eq(&self, other: &&str) -> bool {
self.0 == *other
}
}

impl PartialEq<SecondaryCode> for &str {
fn eq(&self, other: &SecondaryCode) -> bool {
other.eq(self)
}
}

// for `hashbrown::EntryRef`
impl From<&SecondaryCode> for SecondaryCode {
fn from(value: &SecondaryCode) -> Self {
value.clone()
}
/// Creates a `Diagnostic` from a semantic syntax error.
///
/// See [`create_parse_diagnostic`] for more details.
pub fn create_semantic_syntax_diagnostic(
file: File,
err: &ruff_python_parser::semantic_errors::SemanticSyntaxError,
) -> Diagnostic {
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
let span = Span::from(file).with_range(err.range);
diag.annotate(Annotation::primary(span).message(err.to_string()));
diag
}
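A short sketch chaining the consuming `DisplayDiagnosticConfig` builder methods shown above (the chosen values are illustrative):

```rust
fn example_config() -> DisplayDiagnosticConfig {
    DisplayDiagnosticConfig::default()
        .preview(true)
        .hide_severity(true)
        .show_fix_status(true)
        // Only fixes at least as applicable as this threshold are shown.
        .fix_applicability(Applicability::DisplayOnly)
}
```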
File diff suppressed because it is too large
@@ -1,83 +0,0 @@
use ruff_source_file::LineColumn;

use crate::diagnostic::{Diagnostic, Severity};

use super::FileResolver;

pub(super) struct AzureRenderer<'a> {
resolver: &'a dyn FileResolver,
}

impl<'a> AzureRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}
}

impl AzureRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
for diag in diagnostics {
let severity = match diag.severity() {
Severity::Info | Severity::Warning => "warning",
Severity::Error | Severity::Fatal => "error",
};
write!(f, "##vso[task.logissue type={severity};")?;
if let Some(span) = diag.primary_span() {
let filename = span.file().path(self.resolver);
write!(f, "sourcepath={filename};")?;
if let Some(range) = span.range() {
let location = if self.resolver.notebook_index(span.file()).is_some() {
// We can't give a reasonable location for the structured formats,
// so we show one that's clearly a fallback
LineColumn::default()
} else {
span.file()
.diagnostic_source(self.resolver)
.as_source_code()
.line_column(range.start())
};
write!(
f,
"linenumber={line};columnnumber={col};",
line = location.line,
col = location.column,
)?;
}
}
writeln!(
f,
"{code}]{body}",
code = diag
.secondary_code()
.map_or_else(String::new, |code| format!("code={code};")),
body = diag.body(),
)?;
}

Ok(())
}
}

#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{create_diagnostics, create_syntax_error_diagnostics},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Azure);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Azure);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
}
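For reference, a sketch of the logging command the deleted Azure renderer produced; the concrete values are illustrative:

```rust
fn example_azure_line() -> String {
    // Matches the `##vso[task.logissue ...]` shape written by `render` above.
    format!(
        "##vso[task.logissue type={ty};sourcepath={path};linenumber={line};columnnumber={col};{code}]{body}",
        ty = "error",
        path = "fib.py",
        line = 1,
        col = 8,
        code = "code=F401;",
        body = "`os` imported but unused",
    )
}
```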
@@ -1,195 +0,0 @@
use crate::diagnostic::{
Diagnostic, DisplayDiagnosticConfig, Severity,
stylesheet::{DiagnosticStylesheet, fmt_styled},
};

use super::FileResolver;

pub(super) struct ConciseRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}

impl<'a> ConciseRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}

pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
let stylesheet = if self.config.color {
DiagnosticStylesheet::styled()
} else {
DiagnosticStylesheet::plain()
};

let sep = fmt_styled(":", stylesheet.separator);
for diag in diagnostics {
if let Some(span) = diag.primary_span() {
write!(
f,
"{path}",
path = fmt_styled(
span.file().relative_path(self.resolver).to_string_lossy(),
stylesheet.emphasis
)
)?;
if let Some(range) = span.range() {
let diagnostic_source = span.file().diagnostic_source(self.resolver);
let start = diagnostic_source
.as_source_code()
.line_column(range.start());

if let Some(notebook_index) = self.resolver.notebook_index(span.file()) {
write!(
f,
"{sep}cell {cell}{sep}{line}{sep}{col}",
cell = notebook_index.cell(start.line).unwrap_or_default(),
line = notebook_index.cell_row(start.line).unwrap_or_default(),
col = start.column,
)?;
} else {
write!(
f,
"{sep}{line}{sep}{col}",
line = start.line,
col = start.column,
)?;
}
}
write!(f, "{sep} ")?;
}
if self.config.hide_severity {
if let Some(code) = diag.secondary_code() {
write!(
f,
"{code} ",
code = fmt_styled(code, stylesheet.secondary_code)
)?;
}
if self.config.show_fix_status {
if let Some(fix) = diag.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.config.fix_applicability) {
write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
}
}
}
} else {
let (severity, severity_style) = match diag.severity() {
Severity::Info => ("info", stylesheet.info),
Severity::Warning => ("warning", stylesheet.warning),
Severity::Error => ("error", stylesheet.error),
Severity::Fatal => ("fatal", stylesheet.error),
};
write!(
f,
"{severity}[{id}] ",
severity = fmt_styled(severity, severity_style),
id = fmt_styled(diag.id(), stylesheet.emphasis)
)?;
}

writeln!(f, "{message}", message = diag.concise_message())?;
}

Ok(())
}
}

#[cfg(test)]
mod tests {
use ruff_diagnostics::Applicability;

use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
create_syntax_error_diagnostics,
},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: error[unused-import] `os` imported but unused
fib.py:6:5: error[unused-variable] Local variable `x` is assigned to but never used
undef.py:1:4: error[undefined-name] Undefined name `a`
");
}

#[test]
fn show_fixes() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
");
}

#[test]
fn show_fixes_preview() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
env.preview(true);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
fib.py:1:8: F401 [*] `os` imported but unused
fib.py:6:5: F841 [*] Local variable `x` is assigned to but never used
undef.py:1:4: F821 Undefined name `a`
");
}

#[test]
fn show_fixes_syntax_errors() {
let (mut env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
");
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
syntax_errors.py:1:15: error[invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3:12: error[invalid-syntax] SyntaxError: Expected ')', found newline
");
}

#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Concise);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
notebook.ipynb:cell 1:2:8: error[unused-import] `os` imported but unused
notebook.ipynb:cell 2:2:8: error[unused-import] `math` imported but unused
notebook.ipynb:cell 3:4:5: error[unused-variable] Local variable `x` is assigned to but never used
");
}

#[test]
fn missing_file() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Concise);

let diag = env.err().build();

insta::assert_snapshot!(
env.render(&diag),
@"error[test-diagnostic] main diagnostic message",
);
}
}
@@ -1,180 +0,0 @@
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat, Severity,
render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
error[unused-import]: `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`

error[unused-variable]: Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`

error[undefined-name]: Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|
"#);
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[invalid-syntax]: SyntaxError: Expected one or more symbol names after import
--> syntax_errors.py:1:15
|
1 | from os import
| ^
2 |
3 | if call(foo
|

error[invalid-syntax]: SyntaxError: Expected ')', found newline
--> syntax_errors.py:3:12
|
1 | from os import
2 |
3 | if call(foo
| ^
4 | def bar():
5 | pass
|
");
}

/// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit c9b99e4.
///
/// For example, without the fix, we get diagnostics like this:
///
/// ```
/// error[no-indented-block]: Expected an indented block
/// --> example.py:3:1
/// |
/// 2 | if False:
/// | ^
/// 3 | print()
/// |
/// ```
///
/// where the caret points to the end of the previous line instead of the start of the next.
#[test]
fn empty_span_after_line_terminator() {
let mut env = TestEnvironment::new();
env.add(
"example.py",
r#"
if False:
print()
"#,
);
env.format(DiagnosticFormat::Full);

let diagnostic = env
.builder(
"no-indented-block",
Severity::Error,
"Expected an indented block",
)
.primary("example.py", "3:0", "3:0", "")
.build();

insta::assert_snapshot!(env.render(&diagnostic), @r"
error[no-indented-block]: Expected an indented block
--> example.py:3:1
|
2 | if False:
3 | print()
| ^
|
");
}

/// Check that the new `full` rendering code in `ruff_db` handles cases fixed by commit 2922490.
///
/// For example, without the fix, we get diagnostics like this:
///
/// ```
/// error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
/// --> example.py:1:25
/// |
/// 1 | nested_fstrings = f'␈{f'{f'␛'}'}'
/// | ^
/// |
/// ```
///
/// where the caret points to the `f` in the f-string instead of the start of the invalid
/// character (`^Z`).
#[test]
fn unprintable_characters() {
let mut env = TestEnvironment::new();
env.add("example.py", "nested_fstrings = f'{f'{f''}'}'");
env.format(DiagnosticFormat::Full);

let diagnostic = env
.builder(
"invalid-character-sub",
Severity::Error,
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
)
.primary("example.py", "1:24", "1:24", "")
.build();

insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:25
|
1 | nested_fstrings = f'␈{f'{f'␛'}'}'
| ^
|
"#);
}

#[test]
fn multiple_unprintable_characters() -> std::io::Result<()> {
let mut env = TestEnvironment::new();
env.add("example.py", "");
env.format(DiagnosticFormat::Full);

let diagnostic = env
.builder(
"invalid-character-sub",
Severity::Error,
r#"Invalid unescaped character SUB, use "\x1A" instead"#,
)
.primary("example.py", "1:1", "1:1", "")
.build();

insta::assert_snapshot!(env.render(&diagnostic), @r#"
error[invalid-character-sub]: Invalid unescaped character SUB, use "\x1A" instead
--> example.py:1:2
|
1 | ␈␛
| ^
|
"#);

Ok(())
}
}
@@ -1,352 +0,0 @@
use serde::{Serialize, Serializer, ser::SerializeSeq};
use serde_json::{Value, json};

use ruff_diagnostics::{Applicability, Edit};
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed};
use ruff_text_size::Ranged;

use crate::diagnostic::{Diagnostic, DiagnosticSource, DisplayDiagnosticConfig, SecondaryCode};

use super::FileResolver;

pub(super) struct JsonRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}

impl<'a> JsonRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}
}

impl JsonRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
write!(
f,
"{:#}",
diagnostics_to_json_value(diagnostics, self.resolver, self.config)
)
}
}

fn diagnostics_to_json_value<'a>(
diagnostics: impl IntoIterator<Item = &'a Diagnostic>,
resolver: &dyn FileResolver,
config: &DisplayDiagnosticConfig,
) -> Value {
let values: Vec<_> = diagnostics
.into_iter()
.map(|diag| diagnostic_to_json(diag, resolver, config))
.collect();
json!(values)
}

pub(super) fn diagnostic_to_json<'a>(
diagnostic: &'a Diagnostic,
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
) -> JsonDiagnostic<'a> {
let span = diagnostic.primary_span_ref();
let filename = span.map(|span| span.file().path(resolver));
let range = span.and_then(|span| span.range());
let diagnostic_source = span.map(|span| span.file().diagnostic_source(resolver));
let source_code = diagnostic_source
.as_ref()
.map(|diagnostic_source| diagnostic_source.as_source_code());
let notebook_index = span.and_then(|span| resolver.notebook_index(span.file()));

let mut start_location = None;
let mut end_location = None;
let mut noqa_location = None;
let mut notebook_cell_index = None;
if let Some(source_code) = source_code {
noqa_location = diagnostic
.noqa_offset()
.map(|offset| source_code.line_column(offset));
if let Some(range) = range {
let mut start = source_code.line_column(range.start());
let mut end = source_code.line_column(range.end());
if let Some(notebook_index) = &notebook_index {
notebook_cell_index =
Some(notebook_index.cell(start.line).unwrap_or(OneIndexed::MIN));
start = notebook_index.translate_line_column(&start);
end = notebook_index.translate_line_column(&end);
noqa_location =
noqa_location.map(|location| notebook_index.translate_line_column(&location));
}
start_location = Some(start);
end_location = Some(end);
}
}

let fix = diagnostic.fix().map(|fix| JsonFix {
applicability: fix.applicability(),
message: diagnostic.first_help_text(),
edits: ExpandedEdits {
edits: fix.edits(),
notebook_index,
config,
diagnostic_source,
},
});

// In preview, the locations and filename can be optional.
if config.preview {
JsonDiagnostic {
code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(),
message: diagnostic.body(),
fix,
cell: notebook_cell_index,
location: start_location.map(JsonLocation::from),
end_location: end_location.map(JsonLocation::from),
filename,
noqa_row: noqa_location.map(|location| location.line),
}
} else {
JsonDiagnostic {
code: diagnostic.secondary_code(),
url: diagnostic.to_ruff_url(),
message: diagnostic.body(),
fix,
cell: notebook_cell_index,
location: Some(start_location.unwrap_or_default().into()),
end_location: Some(end_location.unwrap_or_default().into()),
filename: Some(filename.unwrap_or_default()),
noqa_row: noqa_location.map(|location| location.line),
}
}
}

struct ExpandedEdits<'a> {
edits: &'a [Edit],
notebook_index: Option<NotebookIndex>,
config: &'a DisplayDiagnosticConfig,
diagnostic_source: Option<DiagnosticSource>,
}

impl Serialize for ExpandedEdits<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut s = serializer.serialize_seq(Some(self.edits.len()))?;

for edit in self.edits {
let (location, end_location) = if let Some(diagnostic_source) = &self.diagnostic_source
{
let source_code = diagnostic_source.as_source_code();
let mut location = source_code.line_column(edit.start());
let mut end_location = source_code.line_column(edit.end());

if let Some(notebook_index) = &self.notebook_index {
// There exists a newline between each cell's source code in the
// concatenated source code in Ruff. This newline doesn't actually
// exist in the JSON source field.
//
// Now, certain edits may try to remove this newline, which means
// the edit will spill over to the first character of the next cell.
// If it does, we need to translate the end location to the last
// character of the previous cell.
match (
notebook_index.cell(location.line),
notebook_index.cell(end_location.line),
) {
(Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
debug_assert_eq!(end_location.column.get(), 1);

let prev_row = end_location.line.saturating_sub(1);
end_location = LineColumn {
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
column: source_code
.line_column(source_code.line_end_exclusive(prev_row))
.column,
};
}
(Some(_), None) => {
debug_assert_eq!(end_location.column.get(), 1);

let prev_row = end_location.line.saturating_sub(1);
end_location = LineColumn {
line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
column: source_code
.line_column(source_code.line_end_exclusive(prev_row))
.column,
};
}
_ => {
end_location = notebook_index.translate_line_column(&end_location);
}
}
location = notebook_index.translate_line_column(&location);
}

(Some(location), Some(end_location))
} else {
(None, None)
};

// In preview, the locations can be optional.
let value = if self.config.preview {
JsonEdit {
content: edit.content().unwrap_or_default(),
location: location.map(JsonLocation::from),
end_location: end_location.map(JsonLocation::from),
}
} else {
JsonEdit {
content: edit.content().unwrap_or_default(),
location: Some(location.unwrap_or_default().into()),
end_location: Some(end_location.unwrap_or_default().into()),
}
};

s.serialize_element(&value)?;
}

s.end()
}
}

/// A serializable version of `Diagnostic`.
///
/// The `Old` variant only exists to preserve backwards compatibility. Both this and `JsonEdit`
/// should become structs with the `New` definitions in a future Ruff release.
#[derive(Serialize)]
pub(crate) struct JsonDiagnostic<'a> {
cell: Option<OneIndexed>,
code: Option<&'a SecondaryCode>,
end_location: Option<JsonLocation>,
filename: Option<&'a str>,
fix: Option<JsonFix<'a>>,
location: Option<JsonLocation>,
message: &'a str,
noqa_row: Option<OneIndexed>,
url: Option<String>,
}

#[derive(Serialize)]
struct JsonFix<'a> {
applicability: Applicability,
edits: ExpandedEdits<'a>,
message: Option<&'a str>,
}

#[derive(Serialize)]
struct JsonLocation {
column: OneIndexed,
row: OneIndexed,
}

impl From<LineColumn> for JsonLocation {
fn from(location: LineColumn) -> Self {
JsonLocation {
row: location.line,
column: location.column,
}
}
}

#[derive(Serialize)]
struct JsonEdit<'a> {
content: &'a str,
end_location: Option<JsonLocation>,
location: Option<JsonLocation>,
}

#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
TestEnvironment, create_diagnostics, create_notebook_diagnostics,
create_syntax_error_diagnostics,
},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Json);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn missing_file_stable() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Json);
env.preview(false);

let diag = env.err().build();

insta::assert_snapshot!(
env.render(&diag),
@r#"
[
{
"cell": null,
"code": null,
"end_location": {
"column": 1,
"row": 1
},
"filename": "",
"fix": null,
"location": {
"column": 1,
"row": 1
},
"message": "main diagnostic message",
"noqa_row": null,
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
}
]
"#,
);
}

#[test]
fn missing_file_preview() {
let mut env = TestEnvironment::new();
env.format(DiagnosticFormat::Json);
env.preview(true);

let diag = env.err().build();

insta::assert_snapshot!(
env.render(&diag),
@r#"
[
{
"cell": null,
"code": null,
"end_location": null,
"filename": null,
"fix": null,
"location": null,
"message": "main diagnostic message",
"noqa_row": null,
"url": "https://docs.astral.sh/ruff/rules/test-diagnostic"
}
]
"#,
);
}
}
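The stable-versus-preview split above boils down to whether missing locations serialize as `null` or fall back to a default `1:1` position. A minimal sketch of that fallback, with a hypothetical `Loc` type standing in for `JsonLocation`:

```rust
#[derive(Debug, PartialEq)]
struct Loc {
    row: u32,
    column: u32,
}

impl Default for Loc {
    fn default() -> Self {
        // Mirrors `LineColumn::default()`, which is one-indexed.
        Loc { row: 1, column: 1 }
    }
}

fn stable_location(loc: Option<Loc>) -> Loc {
    // Stable output always emits a location object.
    loc.unwrap_or_default()
}
```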
@@ -1,59 +0,0 @@
use crate::diagnostic::{Diagnostic, DisplayDiagnosticConfig, render::json::diagnostic_to_json};

use super::FileResolver;

pub(super) struct JsonLinesRenderer<'a> {
resolver: &'a dyn FileResolver,
config: &'a DisplayDiagnosticConfig,
}

impl<'a> JsonLinesRenderer<'a> {
pub(super) fn new(resolver: &'a dyn FileResolver, config: &'a DisplayDiagnosticConfig) -> Self {
Self { resolver, config }
}
}

impl JsonLinesRenderer<'_> {
pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
for diag in diagnostics {
writeln!(
f,
"{}",
serde_json::json!(diagnostic_to_json(diag, self.resolver, self.config))
)?;
}

Ok(())
}
}
#[cfg(test)]
mod tests {
use crate::diagnostic::{
DiagnosticFormat,
render::tests::{
create_diagnostics, create_notebook_diagnostics, create_syntax_error_diagnostics,
},
};

#[test]
fn output() {
let (env, diagnostics) = create_diagnostics(DiagnosticFormat::JsonLines);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn syntax_errors() {
let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::JsonLines);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}

#[test]
fn notebook_output() {
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::JsonLines);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
}
}
@@ -1,195 +0,0 @@
use std::{collections::BTreeMap, ops::Deref, path::Path};

use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};

use ruff_source_file::LineColumn;

use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};

/// A renderer for diagnostics in the [JUnit] format.
///
/// See [`junit.xsd`] for the specification in the JUnit repository and an annotated [version]
/// linked from the [`quick_junit`] docs.
///
/// [JUnit]: https://junit.org/
/// [`junit.xsd`]: https://github.com/junit-team/junit-framework/blob/2870b7d8fd5bf7c1efe489d3991d3ed3900e82bb/platform-tests/src/test/resources/jenkins-junit.xsd
/// [version]: https://llg.cubic.org/docs/junit/
/// [`quick_junit`]: https://docs.rs/quick-junit/latest/quick_junit/
pub struct JunitRenderer<'a> {
resolver: &'a dyn FileResolver,
}

impl<'a> JunitRenderer<'a> {
pub fn new(resolver: &'a dyn FileResolver) -> Self {
Self { resolver }
}

pub(super) fn render(
&self,
f: &mut std::fmt::Formatter,
diagnostics: &[Diagnostic],
) -> std::fmt::Result {
let mut report = Report::new("ruff");

if diagnostics.is_empty() {
let mut test_suite = TestSuite::new("ruff");
test_suite
.extra
.insert(XmlString::new("package"), XmlString::new("org.ruff"));
let mut case = TestCase::new("No errors found", TestCaseStatus::success());
case.set_classname("ruff");
test_suite.add_test_case(case);
report.add_test_suite(test_suite);
} else {
for (filename, diagnostics) in group_diagnostics_by_filename(diagnostics, self.resolver)
{
let mut test_suite = TestSuite::new(filename);
test_suite
.extra
.insert(XmlString::new("package"), XmlString::new("org.ruff"));

let classname = Path::new(filename).with_extension("");

for diagnostic in diagnostics {
let DiagnosticWithLocation {
diagnostic,
start_location: location,
} = diagnostic;
let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
status.set_message(diagnostic.body());

if let Some(location) = location {
status.set_description(format!(
"line {row}, col {col}, {body}",
row = location.line,
col = location.column,
body = diagnostic.body()
));
} else {
status.set_description(diagnostic.body());
}

let code = diagnostic
.secondary_code()
.map_or_else(|| diagnostic.name(), SecondaryCode::as_str);
let mut case = TestCase::new(format!("org.ruff.{code}"), status);
case.set_classname(classname.to_str().unwrap());

if let Some(location) = location {
case.extra.insert(
XmlString::new("line"),
XmlString::new(location.line.to_string()),
);
case.extra.insert(
XmlString::new("column"),
XmlString::new(location.column.to_string()),
);
}

test_suite.add_test_case(case);
}
report.add_test_suite(test_suite);
}
}

let adapter = FmtAdapter { fmt: f };
report.serialize(adapter).map_err(|_| std::fmt::Error)
}
}

// TODO(brent) this and `group_diagnostics_by_filename` are also used by the `grouped` output
// format. I think they'd make more sense in that file, but I started here first. I'll move them to
// that module when adding the `grouped` output format.
struct DiagnosticWithLocation<'a> {
|
||||
diagnostic: &'a Diagnostic,
|
||||
start_location: Option<LineColumn>,
|
||||
}
|
||||
|
||||
impl Deref for DiagnosticWithLocation<'_> {
|
||||
type Target = Diagnostic;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.diagnostic
|
||||
}
|
||||
}
|
||||
|
||||
fn group_diagnostics_by_filename<'a>(
|
||||
diagnostics: &'a [Diagnostic],
|
||||
resolver: &'a dyn FileResolver,
|
||||
) -> BTreeMap<&'a str, Vec<DiagnosticWithLocation<'a>>> {
|
||||
let mut grouped_diagnostics = BTreeMap::default();
|
||||
for diagnostic in diagnostics {
|
||||
let (filename, start_location) = diagnostic
|
||||
.primary_span_ref()
|
||||
.map(|span| {
|
||||
let file = span.file();
|
||||
let start_location =
|
||||
span.range()
|
||||
.filter(|_| !resolver.is_notebook(file))
|
||||
.map(|range| {
|
||||
file.diagnostic_source(resolver)
|
||||
.as_source_code()
|
||||
.line_column(range.start())
|
||||
});
|
||||
|
||||
(span.file().path(resolver), start_location)
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
grouped_diagnostics
|
||||
.entry(filename)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(DiagnosticWithLocation {
|
||||
diagnostic,
|
||||
start_location,
|
||||
});
|
||||
}
|
||||
grouped_diagnostics
|
||||
}
|
||||
|
||||
struct FmtAdapter<'a> {
|
||||
fmt: &'a mut dyn std::fmt::Write,
|
||||
}
|
||||
|
||||
impl std::io::Write for FmtAdapter<'_> {
|
||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
self.fmt
|
||||
.write_str(std::str::from_utf8(buf).map_err(|_| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"Invalid UTF-8 in JUnit report",
|
||||
)
|
||||
})?)
|
||||
.map_err(std::io::Error::other)?;
|
||||
|
||||
Ok(buf.len())
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> std::io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_fmt(&mut self, args: std::fmt::Arguments<'_>) -> std::io::Result<()> {
|
||||
self.fmt.write_fmt(args).map_err(std::io::Error::other)
|
||||
}
|
||||
}
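`FmtAdapter` exists because `quick_junit`'s `Report::serialize` writes into an `std::io::Write`, while `Display`-based rendering only has a `std::fmt::Formatter` (i.e. `fmt::Write`). A minimal sketch of the same bridging pattern, writing into a plain `String` within this module (illustrative only, not part of the diff):

    // Route io::Write output into a fmt::Write sink via the adapter above.
    let mut buf = String::new();
    let mut adapter = FmtAdapter { fmt: &mut buf };
    std::io::Write::write_all(&mut adapter, b"<testsuite/>").unwrap();
    assert_eq!(buf, "<testsuite/>");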

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Junit);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Junit);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }
}
@@ -1,97 +0,0 @@
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};

/// Generate violations in Pylint format.
///
/// The format is given by this string:
///
/// ```python
/// "%(path)s:%(row)d: [%(code)s] %(text)s"
/// ```
///
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
pub(super) struct PylintRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> PylintRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }
}

impl PylintRenderer<'_> {
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        for diagnostic in diagnostics {
            let (filename, row) = diagnostic
                .primary_span_ref()
                .map(|span| {
                    let file = span.file();

                    let row = span
                        .range()
                        .filter(|_| !self.resolver.is_notebook(file))
                        .map(|range| {
                            file.diagnostic_source(self.resolver)
                                .as_source_code()
                                .line_column(range.start())
                                .line
                        });

                    (file.relative_path(self.resolver).to_string_lossy(), row)
                })
                .unwrap_or_default();

            let code = diagnostic
                .secondary_code()
                .map_or_else(|| diagnostic.name(), SecondaryCode::as_str);

            let row = row.unwrap_or_default();

            writeln!(
                f,
                "{path}:{row}: [{code}] {body}",
                path = filename,
                body = diagnostic.body()
            )?;
        }

        Ok(())
    }
}
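Concretely, a diagnostic with secondary code `E501` on line 3 of `example.py` would come out of this renderer as a line of the following shape (the message text here is illustrative):

    example.py:3: [E501] Line too long (92 > 88)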

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Pylint);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Pylint);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn missing_file() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Pylint);

        let diag = env.err().build();

        insta::assert_snapshot!(
            env.render(&diag),
            @":1: [test-diagnostic] main diagnostic message",
        );
    }
}
@@ -1,235 +0,0 @@
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};

use ruff_diagnostics::{Edit, Fix};
use ruff_source_file::{LineColumn, SourceCode};
use ruff_text_size::Ranged;

use crate::diagnostic::Diagnostic;

use super::FileResolver;

pub struct RdjsonRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> RdjsonRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }

    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        write!(
            f,
            "{:#}",
            serde_json::json!(RdjsonDiagnostics::new(diagnostics, self.resolver))
        )
    }
}

struct ExpandedDiagnostics<'a> {
    resolver: &'a dyn FileResolver,
    diagnostics: &'a [Diagnostic],
}

impl Serialize for ExpandedDiagnostics<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;

        for diagnostic in self.diagnostics {
            let value = diagnostic_to_rdjson(diagnostic, self.resolver);
            s.serialize_element(&value)?;
        }

        s.end()
    }
}

fn diagnostic_to_rdjson<'a>(
    diagnostic: &'a Diagnostic,
    resolver: &'a dyn FileResolver,
) -> RdjsonDiagnostic<'a> {
    let span = diagnostic.primary_span_ref();
    let source_file = span.map(|span| {
        let file = span.file();
        (file.path(resolver), file.diagnostic_source(resolver))
    });

    let location = source_file.as_ref().map(|(path, source)| {
        let range = diagnostic.range().map(|range| {
            let source_code = source.as_source_code();
            let start = source_code.line_column(range.start());
            let end = source_code.line_column(range.end());
            RdjsonRange::new(start, end)
        });

        RdjsonLocation { path, range }
    });

    let edits = diagnostic.fix().map(Fix::edits).unwrap_or_default();

    RdjsonDiagnostic {
        message: diagnostic.body(),
        location,
        code: RdjsonCode {
            value: diagnostic
                .secondary_code()
                .map_or_else(|| diagnostic.name(), |code| code.as_str()),
            url: diagnostic.to_ruff_url(),
        },
        suggestions: rdjson_suggestions(
            edits,
            source_file
                .as_ref()
                .map(|(_, source)| source.as_source_code()),
        ),
    }
}

fn rdjson_suggestions<'a>(
    edits: &'a [Edit],
    source_code: Option<SourceCode>,
) -> Vec<RdjsonSuggestion<'a>> {
    if edits.is_empty() {
        return Vec::new();
    }

    let Some(source_code) = source_code else {
        debug_assert!(false, "Expected a source file for a diagnostic with a fix");
        return Vec::new();
    };

    edits
        .iter()
        .map(|edit| {
            let start = source_code.line_column(edit.start());
            let end = source_code.line_column(edit.end());
            let range = RdjsonRange::new(start, end);

            RdjsonSuggestion {
                range,
                text: edit.content().unwrap_or_default(),
            }
        })
        .collect()
}

#[derive(Serialize)]
struct RdjsonDiagnostics<'a> {
    diagnostics: ExpandedDiagnostics<'a>,
    severity: &'static str,
    source: RdjsonSource,
}

impl<'a> RdjsonDiagnostics<'a> {
    fn new(diagnostics: &'a [Diagnostic], resolver: &'a dyn FileResolver) -> Self {
        Self {
            source: RdjsonSource {
                name: "ruff",
                url: env!("CARGO_PKG_HOMEPAGE"),
            },
            severity: "WARNING",
            diagnostics: ExpandedDiagnostics {
                diagnostics,
                resolver,
            },
        }
    }
}

#[derive(Serialize)]
struct RdjsonSource {
    name: &'static str,
    url: &'static str,
}

#[derive(Serialize)]
struct RdjsonDiagnostic<'a> {
    code: RdjsonCode<'a>,
    #[serde(skip_serializing_if = "Option::is_none")]
    location: Option<RdjsonLocation<'a>>,
    message: &'a str,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    suggestions: Vec<RdjsonSuggestion<'a>>,
}

#[derive(Serialize)]
struct RdjsonLocation<'a> {
    path: &'a str,
    #[serde(skip_serializing_if = "Option::is_none")]
    range: Option<RdjsonRange>,
}

#[derive(Default, Serialize)]
struct RdjsonRange {
    end: LineColumn,
    start: LineColumn,
}

impl RdjsonRange {
    fn new(start: LineColumn, end: LineColumn) -> Self {
        Self { start, end }
    }
}

#[derive(Serialize)]
struct RdjsonCode<'a> {
    #[serde(skip_serializing_if = "Option::is_none")]
    url: Option<String>,
    value: &'a str,
}

#[derive(Serialize)]
struct RdjsonSuggestion<'a> {
    range: RdjsonRange,
    text: &'a str,
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{TestEnvironment, create_diagnostics, create_syntax_error_diagnostics},
    };

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Rdjson);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Rdjson);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    #[test]
    fn missing_file_stable() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Rdjson);
        env.preview(false);

        let diag = env.err().build();

        insta::assert_snapshot!(env.render(&diag));
    }

    #[test]
    fn missing_file_preview() {
        let mut env = TestEnvironment::new();
        env.format(DiagnosticFormat::Rdjson);
        env.preview(true);

        let diag = env.err().build();

        insta::assert_snapshot!(env.render(&diag));
    }
}
@@ -1,6 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/pylint.rs
expression: env.render_diagnostics(&diagnostics)
---
syntax_errors.py:1: [invalid-syntax] SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: [invalid-syntax] SyntaxError: Expected ')', found newline
@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
  "diagnostics": [
    {
      "code": {
        "url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
        "value": "test-diagnostic"
      },
      "message": "main diagnostic message"
    }
  ],
  "severity": "WARNING",
  "source": {
    "name": "ruff",
    "url": "https://docs.astral.sh/ruff"
  }
}
@@ -1,20 +0,0 @@
---
source: crates/ruff_db/src/diagnostic/render/rdjson.rs
expression: env.render(&diag)
---
{
  "diagnostics": [
    {
      "code": {
        "url": "https://docs.astral.sh/ruff/rules/test-diagnostic",
        "value": "test-diagnostic"
      },
      "message": "main diagnostic message"
    }
  ],
  "severity": "WARNING",
  "source": {
    "name": "ruff",
    "url": "https://docs.astral.sh/ruff"
  }
}
@@ -41,8 +41,6 @@ pub struct DiagnosticStylesheet {
    pub(crate) line_no: Style,
    pub(crate) emphasis: Style,
    pub(crate) none: Style,
    pub(crate) separator: Style,
    pub(crate) secondary_code: Style,
}

impl Default for DiagnosticStylesheet {
@@ -64,8 +62,6 @@ impl DiagnosticStylesheet {
            line_no: bright_blue.effects(Effects::BOLD),
            emphasis: Style::new().effects(Effects::BOLD),
            none: Style::new(),
            separator: AnsiColor::Cyan.on_default(),
            secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
        }
    }

@@ -79,8 +75,6 @@ impl DiagnosticStylesheet {
            line_no: Style::new(),
            emphasis: Style::new(),
            none: Style::new(),
            separator: Style::new(),
            secondary_code: Style::new(),
        }
    }
}

@@ -1,6 +1,7 @@
use std::fmt;
use std::sync::Arc;

use countme::Count;
use dashmap::mapref::entry::Entry;
pub use file_root::{FileRoot, FileRootKind};
pub use path::FilePath;
@@ -231,7 +232,7 @@ impl Files {
        let roots = inner.roots.read().unwrap();

        for root in roots.all() {
            if path.starts_with(root.path(db)) {
            if root.path(db).starts_with(&path) {
                root.set_revision(db).to(FileRevision::now());
            }
        }
@@ -262,23 +263,12 @@ impl Files {

impl fmt::Debug for Files {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if f.alternate() {
            let mut map = f.debug_map();
        let mut map = f.debug_map();

            for entry in self.inner.system_by_path.iter() {
                map.entry(entry.key(), entry.value());
            }
            map.finish()
        } else {
            f.debug_struct("Files")
                .field("system_by_path", &self.inner.system_by_path.len())
                .field(
                    "system_virtual_by_path",
                    &self.inner.system_virtual_by_path.len(),
                )
                .field("vendored_by_path", &self.inner.vendored_by_path.len())
                .finish()
        for entry in self.inner.system_by_path.iter() {
            map.entry(entry.key(), entry.value());
        }
        map.finish()
    }
}

@@ -311,10 +301,12 @@ pub struct File {
    /// the file has been deleted is to change the status to `Deleted`.
    #[default]
    status: FileStatus,
}

// The Salsa heap is tracked separately.
impl get_size2::GetSize for File {}
    /// Counter that counts the number of created file instances and active file instances.
    /// Only enabled in debug builds.
    #[default]
    count: Count<File>,
}

impl File {
    /// Reads the content of the file into a [`String`].
@@ -369,25 +361,12 @@ impl File {
    }

    /// Refreshes the file metadata by querying the file system if needed.
    ///
    /// This also "touches" the file root associated with the given path.
    /// This means that any Salsa queries that depend on the corresponding
    /// root's revision will become invalidated.
    pub fn sync_path(db: &mut dyn Db, path: &SystemPath) {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        Files::touch_root(db, &absolute);
        Self::sync_system_path(db, &absolute, None);
    }

    /// Refreshes *only* the file metadata by querying the file system if needed.
    ///
    /// This specifically does not touch any file root associated with the
    /// given file path.
    pub fn sync_path_only(db: &mut dyn Db, path: &SystemPath) {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        Self::sync_system_path(db, &absolute, None);
    }

    /// Increments the revision for the virtual file at `path`.
    pub fn sync_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath) {
        if let Some(virtual_file) = db.files().try_virtual_file(path) {
@@ -493,7 +472,7 @@ impl fmt::Debug for File {
///
/// This is a wrapper around a [`File`] that provides additional methods to interact with a virtual
/// file.
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone)]
pub struct VirtualFile(File);

impl VirtualFile {

@@ -23,7 +23,7 @@ pub struct FileRoot {
    pub path: SystemPathBuf,

    /// The kind of the root at the time of its creation.
    pub kind_at_time_of_creation: FileRootKind,
    kind_at_time_of_creation: FileRootKind,

    /// A revision that changes when the contents of the source root change.
    ///

@@ -5,7 +5,6 @@ use ruff_python_ast::PythonVersion;
use rustc_hash::FxHasher;
use std::hash::BuildHasherDefault;
use std::num::NonZeroUsize;
use ty_static::EnvVars;

pub mod diagnostic;
pub mod display;
@@ -28,21 +27,6 @@ pub use web_time::{Instant, SystemTime, SystemTimeError};
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxDashSet<K> = dashmap::DashSet<K, BuildHasherDefault<FxHasher>>;

static VERSION: std::sync::OnceLock<String> = std::sync::OnceLock::new();

/// Returns the version of the executing program if set.
pub fn program_version() -> Option<&'static str> {
    VERSION.get().map(|version| version.as_str())
}

/// Sets the version of the executing program.
///
/// ## Errors
/// If the version has already been initialized (can only be set once).
pub fn set_program_version(version: String) -> Result<(), String> {
    VERSION.set(version)
}

/// Most basic database that gives access to files, the host system, source code, and parsed AST.
#[salsa::db]
pub trait Db: salsa::Database {
@@ -52,6 +36,12 @@ pub trait Db: salsa::Database {
    fn python_version(&self) -> PythonVersion;
}

/// Trait for upcasting a reference to a base trait object.
pub trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
    fn upcast_mut(&mut self) -> &mut T;
}

/// Returns the maximum number of tasks that ty is allowed
/// to process in parallel.
///
@@ -66,8 +56,8 @@ pub trait Db: salsa::Database {
/// ty can still spawn more threads for other tasks, e.g. to wait for a Ctrl+C signal or
/// watching the files for changes.
pub fn max_parallelism() -> NonZeroUsize {
    std::env::var(EnvVars::TY_MAX_PARALLELISM)
        .or_else(|_| std::env::var(EnvVars::RAYON_NUM_THREADS))
    std::env::var("TY_MAX_PARALLELISM")
        .or_else(|_| std::env::var("RAYON_NUM_THREADS"))
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or_else(|| {
@@ -86,11 +76,11 @@ pub trait RustDoc {
mod tests {
    use std::sync::{Arc, Mutex};

    use crate::Db;
    use crate::files::Files;
    use crate::system::TestSystem;
    use crate::system::{DbWithTestSystem, System};
    use crate::vendored::VendoredFileSystem;
    use crate::{Db, Upcast};

    type Events = Arc<Mutex<Vec<salsa::Event>>>;

@@ -163,6 +153,15 @@ mod tests {
    }
}

impl Upcast<dyn Db> for TestDb {
    fn upcast(&self) -> &(dyn Db + 'static) {
        self
    }
    fn upcast_mut(&mut self) -> &mut (dyn Db + 'static) {
        self
    }
}

impl DbWithTestSystem for TestDb {
    fn test_system(&self) -> &TestSystem {
        &self.system

@@ -2,7 +2,6 @@ use std::fmt::Formatter;
use std::sync::Arc;

use arc_swap::ArcSwapOption;
use get_size2::GetSize;
use ruff_python_ast::{AnyRootNodeRef, ModModule, NodeIndex};
use ruff_python_parser::{ParseOptions, Parsed, parse_unchecked};

@@ -21,7 +20,7 @@ use crate::source::source_text;
/// reflected in the changed AST offsets.
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
/// for determining if a query result is unchanged.
#[salsa::tracked(returns(ref), no_eq, heap_size=get_size2::GetSize::get_heap_size)]
#[salsa::tracked(returns(ref), no_eq)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
    let _span = tracing::trace_span!("parsed_module", ?file).entered();

@@ -45,10 +44,9 @@ pub fn parsed_module_impl(db: &dyn Db, file: File) -> Parsed<ModModule> {
///
/// This type manages instances of the module AST. A particular instance of the AST
/// is represented with the [`ParsedModuleRef`] type.
#[derive(Clone, get_size2::GetSize)]
#[derive(Clone)]
pub struct ParsedModule {
    file: File,
    #[get_size(size_fn = arc_swap_size)]
    inner: Arc<ArcSwapOption<indexed::IndexedModule>>,
}

@@ -144,18 +142,6 @@ impl std::ops::Deref for ParsedModuleRef {
    }
}

/// Returns the heap-size of the currently stored `T` in the `ArcSwap`.
fn arc_swap_size<T>(arc_swap: &Arc<ArcSwapOption<T>>) -> usize
where
    T: GetSize,
{
    if let Some(value) = &*arc_swap.load() {
        T::get_heap_size(value)
    } else {
        0
    }
}

mod indexed {
    use std::sync::Arc;

@@ -164,7 +150,7 @@ mod indexed {
    use ruff_python_parser::Parsed;

    /// A wrapper around the AST that allows access to AST nodes by index.
    #[derive(Debug, get_size2::GetSize)]
    #[derive(Debug)]
    pub struct IndexedModule {
        index: Box<[AnyRootNodeRef<'static>]>,
        pub parsed: Parsed<ModModule>,

@@ -1,6 +1,8 @@
use std::ops::Deref;
use std::sync::Arc;

use countme::Count;

use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
use ruff_source_file::LineIndex;
@@ -9,7 +11,7 @@ use crate::Db;
use crate::files::{File, FilePath};

/// Reads the source text of a python text file (must be valid UTF8) or notebook.
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
#[salsa::tracked]
pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    let path = file.path(db);
    let _span = tracing::trace_span!("source_text", file = %path).entered();
@@ -36,7 +38,11 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    };

    SourceText {
        inner: Arc::new(SourceTextInner { kind, read_error }),
        inner: Arc::new(SourceTextInner {
            kind,
            read_error,
            count: Count::new(),
        }),
    }
}

@@ -59,7 +65,7 @@ fn is_notebook(path: &FilePath) -> bool {
/// The file containing the source text can either be a text file or a notebook.
///
/// Cheap cloneable in `O(1)`.
#[derive(Clone, Eq, PartialEq, get_size2::GetSize)]
#[derive(Clone, Eq, PartialEq)]
pub struct SourceText {
    inner: Arc<SourceTextInner>,
}
@@ -117,8 +123,9 @@ impl std::fmt::Debug for SourceText {
    }
}

#[derive(Eq, PartialEq, get_size2::GetSize)]
#[derive(Eq, PartialEq)]
struct SourceTextInner {
    count: Count<SourceText>,
    kind: SourceTextKind,
    read_error: Option<SourceTextError>,
}
@@ -129,19 +136,6 @@ enum SourceTextKind {
    Notebook(Box<Notebook>),
}

impl get_size2::GetSize for SourceTextKind {
    fn get_heap_size(&self) -> usize {
        match self {
            SourceTextKind::Text(text) => text.get_heap_size(),
            // TODO: The `get-size` derive does not support ignoring enum variants.
            //
            // Jupyter notebooks are not very relevant for memory profiling, and contain
            // arbitrary JSON values that do not implement the `GetSize` trait.
            SourceTextKind::Notebook(_) => 0,
        }
    }
}

impl From<String> for SourceTextKind {
    fn from(value: String) -> Self {
        SourceTextKind::Text(value)
@@ -154,7 +148,7 @@ impl From<Notebook> for SourceTextKind {
    }
}

#[derive(Debug, thiserror::Error, PartialEq, Eq, Clone, get_size2::GetSize)]
#[derive(Debug, thiserror::Error, PartialEq, Eq, Clone)]
pub enum SourceTextError {
    #[error("Failed to read notebook: {0}")]
    FailedToReadNotebook(String),
@@ -163,7 +157,7 @@ pub enum SourceTextError {
}

/// Computes the [`LineIndex`] for `file`.
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
#[salsa::tracked]
pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
    let _span = tracing::trace_span!("line_index", ?file).entered();

@@ -124,11 +124,6 @@ pub trait System: Debug {
    /// Returns `None` if no such convention exists for the system.
    fn user_config_directory(&self) -> Option<SystemPathBuf>;

    /// Returns the directory path where cached files are stored.
    ///
    /// Returns `None` if no such convention exists for the system.
    fn cache_dir(&self) -> Option<SystemPathBuf>;

    /// Iterate over the contents of the directory at `path`.
    ///
    /// The returned iterator must have the following properties:
@@ -191,9 +186,6 @@ pub trait System: Debug {
        Err(std::env::VarError::NotPresent)
    }

    /// Returns a handle to a [`WritableSystem`] if this system is writeable.
    fn as_writable(&self) -> Option<&dyn WritableSystem>;

    fn as_any(&self) -> &dyn std::any::Any;

    fn as_any_mut(&mut self) -> &mut dyn std::any::Any;
@@ -234,52 +226,11 @@ impl fmt::Display for CaseSensitivity {

/// System trait for non-readonly systems.
pub trait WritableSystem: System {
    /// Creates a file at the given path.
    ///
    /// Returns an error if the file already exists.
    fn create_new_file(&self, path: &SystemPath) -> Result<()>;

    /// Writes the given content to the file at the given path.
    fn write_file(&self, path: &SystemPath, content: &str) -> Result<()>;

    /// Creates a directory at `path` as well as any intermediate directories.
    fn create_directory_all(&self, path: &SystemPath) -> Result<()>;

    /// Reads the provided file from the system cache, or creates the file if necessary.
    ///
    /// Returns `Ok(None)` if the system does not expose a suitable cache directory.
    fn get_or_cache(
        &self,
        path: &SystemPath,
        read_contents: &dyn Fn() -> Result<String>,
    ) -> Result<Option<SystemPathBuf>> {
        let Some(cache_dir) = self.cache_dir() else {
            return Ok(None);
        };

        let cache_path = cache_dir.join(path);

        // The file has already been cached.
        if self.is_file(&cache_path) {
            return Ok(Some(cache_path));
        }

        // Read the file contents.
        let contents = read_contents()?;

        // Create the parent directory.
        self.create_directory_all(cache_path.parent().unwrap())?;

        // Create and write to the file on the system.
        //
        // Note that `create_new_file` will fail if the file has already been created. This
        // ensures that only one thread/process ever attempts to write to it to avoid corrupting
        // the cache.
        self.create_new_file(&cache_path)?;
        self.write_file(&cache_path, &contents)?;

        Ok(Some(cache_path))
    }
}
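The default `get_or_cache` method is built entirely from the required primitives plus `cache_dir`: it short-circuits when the cached file already exists and otherwise materializes the contents exactly once. A hedged usage sketch (the `system` value, path, and contents are illustrative):

    // Illustrative only: materialize a lazily computed file under the cache dir.
    let cached = system.get_or_cache(
        SystemPath::new("typeshed/VERSIONS"),
        &|| Ok(String::from("asyncio: 3.8-")),
    )?;
    if let Some(path) = cached {
        // `path` points at `<cache_dir>/typeshed/VERSIONS`. A `None` return means
        // the system exposes no cache directory and the contents were never read.
    }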

#[derive(Clone, Debug, Eq, PartialEq)]

@@ -1,5 +1,4 @@
use std::collections::{BTreeMap, btree_map};
use std::io;
use std::collections::BTreeMap;
use std::iter::FusedIterator;
use std::sync::{Arc, RwLock, RwLockWriteGuard};

@@ -154,26 +153,6 @@ impl MemoryFileSystem {
        virtual_files.contains_key(&path.to_path_buf())
    }

    pub(crate) fn create_new_file(&self, path: &SystemPath) -> Result<()> {
        let normalized = self.normalize_path(path);

        let mut by_path = self.inner.by_path.write().unwrap();
        match by_path.entry(normalized) {
            btree_map::Entry::Vacant(entry) => {
                entry.insert(Entry::File(File {
                    content: String::new(),
                    last_modified: file_time_now(),
                }));

                Ok(())
            }
            btree_map::Entry::Occupied(_) => Err(io::Error::new(
                io::ErrorKind::AlreadyExists,
                "File already exists",
            )),
        }
    }

    /// Stores a new file in the file system.
    ///
    /// The operation overrides the content for an existing file with the same normalized `path`.
@@ -299,14 +278,14 @@ impl MemoryFileSystem {
        let normalized = fs.normalize_path(path);

        match by_path.entry(normalized) {
            btree_map::Entry::Occupied(entry) => match entry.get() {
            std::collections::btree_map::Entry::Occupied(entry) => match entry.get() {
                Entry::File(_) => {
                    entry.remove();
                    Ok(())
                }
                Entry::Directory(_) => Err(is_a_directory()),
            },
            btree_map::Entry::Vacant(_) => Err(not_found()),
            std::collections::btree_map::Entry::Vacant(_) => Err(not_found()),
        }
    }

@@ -366,14 +345,14 @@ impl MemoryFileSystem {
        }

        match by_path.entry(normalized.clone()) {
            btree_map::Entry::Occupied(entry) => match entry.get() {
            std::collections::btree_map::Entry::Occupied(entry) => match entry.get() {
                Entry::Directory(_) => {
                    entry.remove();
                    Ok(())
                }
                Entry::File(_) => Err(not_a_directory()),
            },
            btree_map::Entry::Vacant(_) => Err(not_found()),
            std::collections::btree_map::Entry::Vacant(_) => Err(not_found()),
        }
    }

@@ -160,39 +160,6 @@ impl System for OsSystem {
        None
    }

    /// Returns an absolute cache directory on the system.
    ///
    /// On Linux and macOS, uses `$XDG_CACHE_HOME/ty` or `.cache/ty`.
    /// On Windows, uses `C:\Users\User\AppData\Local\ty\cache`.
    #[cfg(not(target_arch = "wasm32"))]
    fn cache_dir(&self) -> Option<SystemPathBuf> {
        use etcetera::BaseStrategy as _;

        let cache_dir = etcetera::base_strategy::choose_base_strategy()
            .ok()
            .map(|dirs| dirs.cache_dir().join("ty"))
            .map(|cache_dir| {
                if cfg!(windows) {
                    // On Windows, we append `cache` to the LocalAppData directory, i.e., prefer
                    // `C:\Users\User\AppData\Local\ty\cache` over `C:\Users\User\AppData\Local\ty`.
                    cache_dir.join("cache")
                } else {
                    cache_dir
                }
            })
            .and_then(|path| SystemPathBuf::from_path_buf(path).ok())
            .unwrap_or_else(|| SystemPathBuf::from(".ty_cache"));

        Some(cache_dir)
    }

    // TODO: Remove this feature gating once `ruff_wasm` no longer indirectly depends on `ruff_db` with the
    // `os` feature enabled (via `ruff_workspace` -> `ruff_graph` -> `ruff_db`).
    #[cfg(target_arch = "wasm32")]
    fn cache_dir(&self) -> Option<SystemPathBuf> {
        None
    }

    /// Creates a builder to recursively walk `path`.
    ///
    /// The walker ignores files according to [`ignore::WalkBuilder::standard_filters`]
@@ -225,10 +192,6 @@ impl System for OsSystem {
        })
    }

    fn as_writable(&self) -> Option<&dyn WritableSystem> {
        Some(self)
    }

    fn as_any(&self) -> &dyn Any {
        self
    }
@@ -347,10 +310,6 @@ impl OsSystem {
}

impl WritableSystem for OsSystem {
    fn create_new_file(&self, path: &SystemPath) -> Result<()> {
        std::fs::File::create_new(path).map(drop)
    }

    fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
        std::fs::write(path.as_std_path(), content)
    }

@@ -503,12 +503,6 @@ impl ToOwned for SystemPath {
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SystemPathBuf(#[cfg_attr(feature = "schemars", schemars(with = "String"))] Utf8PathBuf);

impl get_size2::GetSize for SystemPathBuf {
    fn get_heap_size(&self) -> usize {
        self.0.capacity()
    }
}

impl SystemPathBuf {
    pub fn new() -> Self {
        Self(Utf8PathBuf::new())

@@ -102,10 +102,6 @@ impl System for TestSystem {
        self.system().user_config_directory()
    }

    fn cache_dir(&self) -> Option<SystemPathBuf> {
        self.system().cache_dir()
    }

    fn read_directory<'a>(
        &'a self,
        path: &SystemPath,
@@ -127,10 +123,6 @@ impl System for TestSystem {
        self.system().glob(pattern)
    }

    fn as_writable(&self) -> Option<&dyn WritableSystem> {
        Some(self)
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
@@ -157,10 +149,6 @@ impl Default for TestSystem {
}

impl WritableSystem for TestSystem {
    fn create_new_file(&self, path: &SystemPath) -> Result<()> {
        self.system().create_new_file(path)
    }

    fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
        self.system().write_file(path, content)
    }
@@ -347,10 +335,6 @@ impl System for InMemorySystem {
        self.user_config_directory.lock().unwrap().clone()
    }

    fn cache_dir(&self) -> Option<SystemPathBuf> {
        None
    }

    fn read_directory<'a>(
        &'a self,
        path: &SystemPath,
@@ -373,10 +357,6 @@ impl System for InMemorySystem {
        Ok(Box::new(iterator))
    }

    fn as_writable(&self) -> Option<&dyn WritableSystem> {
        Some(self)
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
@@ -397,10 +377,6 @@ impl System for InMemorySystem {
}

impl WritableSystem for InMemorySystem {
    fn create_new_file(&self, path: &SystemPath) -> Result<()> {
        self.memory_fs.create_new_file(path)
    }

    fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
        self.memory_fs.write_file(path, content)
    }

@@ -212,7 +212,7 @@ impl Display for Error {
                path: Some(path),
                err,
            } => {
                write!(f, "IO error for operation on {path}: {err}")
                write!(f, "IO error for operation on {}: {}", path, err)
            }
            ErrorKind::Io { path: None, err } => err.fmt(f),
            ErrorKind::NonUtf8Path { path } => {

@@ -4,12 +4,12 @@ use std::fmt::{self, Debug};
use std::io::{self, Read, Write};
use std::sync::{Arc, Mutex, MutexGuard};

use crate::file_revision::FileRevision;
use zip::result::ZipResult;
use zip::write::FileOptions;
use zip::{CompressionMethod, ZipArchive, ZipWriter, read::ZipFile};

pub use self::path::{VendoredPath, VendoredPathBuf};
use crate::file_revision::FileRevision;

mod path;

@@ -21,19 +21,6 @@ type LockedZipArchive<'a> = MutexGuard<'a, VendoredZipArchive>;
///
/// "Files" in the `VendoredFileSystem` are read-only and immutable.
/// Directories are supported, but symlinks and hardlinks cannot exist.
///
/// # Path separators
///
/// At time of writing (2025-07-11), this implementation always uses `/` as a
/// path separator, even in Windows environments where `\` is traditionally
/// used as a file path separator. Namely, this is only currently used with zip
/// files built by `crates/ty_vendored/build.rs`.
///
/// Callers using this may provide paths that use a `\` as a separator. It will
/// be transparently normalized to `/`.
///
/// This is particularly important because the presence of a trailing separator
/// in a zip file is conventionally used to indicate a directory entry.
#[derive(Clone)]
pub struct VendoredFileSystem {
    inner: Arc<Mutex<VendoredZipArchive>>,
@@ -128,68 +115,6 @@ impl VendoredFileSystem {
        read_to_string(self, path.as_ref())
    }

    /// Read the direct children of the directory
    /// identified by `path`.
    ///
    /// If `path` is not a directory, then this will
    /// return an empty `Vec`.
    pub fn read_directory(&self, dir: impl AsRef<VendoredPath>) -> Vec<DirectoryEntry> {
        // N.B. We specifically do not return an iterator here to avoid
        // holding a lock for the lifetime of the iterator returned.
        // That is, it seems like a footgun to keep the zip archive
        // locked during iteration, since the unit of work for each
        // item in the iterator could be arbitrarily long. Allocating
        // up front and stuffing all entries into it is probably the
        // simplest solution and what we do here. If this becomes
        // a problem, there are other strategies we could pursue.
        // (Amortizing allocs, using a different synchronization
        // behavior or even exposing additional APIs.) ---AG

        fn read_directory(fs: &VendoredFileSystem, dir: &VendoredPath) -> Vec<DirectoryEntry> {
            let mut normalized = NormalizedVendoredPath::from(dir);
            if !normalized.as_str().ends_with('/') {
                normalized = normalized.with_trailing_slash();
            }
            let archive = fs.lock_archive();
            let mut entries = vec![];
            for name in archive.0.file_names() {
                // Any entry that doesn't have the `path` (with a
                // trailing slash) as a prefix cannot possibly be in
                // the directory referenced by `path`.
                let Some(without_dir_prefix) = name.strip_prefix(normalized.as_str()) else {
                    continue;
                };
                // Filter out an entry equivalent to the path given
                // since we only want children of the directory.
                if without_dir_prefix.is_empty() {
                    continue;
                }
                // We only want *direct* children. Files that are
                // direct children cannot have any slashes (or else
                // they are not direct children). Directories that
                // are direct children can only have one slash and
                // it must be at the end.
                //
                // (We do this manually ourselves to avoid doing a
                // full file lookup and metadata retrieval via the
                // `zip` crate.)
                let file_type = FileType::from_zip_file_name(without_dir_prefix);
                let slash_count = without_dir_prefix.matches('/').count();
                match file_type {
                    FileType::File if slash_count > 0 => continue,
                    FileType::Directory if slash_count > 1 => continue,
                    _ => {}
                }
                entries.push(DirectoryEntry {
                    path: VendoredPathBuf::from(name),
                    file_type,
                });
            }
            entries
        }
        read_directory(self, dir.as_ref())
    }
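The slash-count filter above is what restricts results to direct children: with `dir = "stdlib/"`, the entry `stdlib/asyncio/tasks.pyi` strips to `asyncio/tasks.pyi` (a file with one slash) and is skipped, while `stdlib/asyncio/` strips to `asyncio/` (a directory whose only slash is the trailing one) and is kept. An illustrative restatement of that check, not part of the crate:

    // Direct children: files carry no separator; directories carry exactly the
    // trailing one used by zip archives to mark directory entries.
    fn is_direct_child(without_dir_prefix: &str) -> bool {
        let slash_count = without_dir_prefix.matches('/').count();
        if without_dir_prefix.ends_with('/') {
            slash_count == 1
        } else {
            slash_count == 0
        }
    }

    #[test]
    fn direct_child_filter() {
        assert!(is_direct_child("functools.pyi"));
        assert!(is_direct_child("asyncio/"));
        assert!(!is_direct_child("asyncio/tasks.pyi"));
    }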

    /// Acquire a lock on the underlying zip archive.
    /// The call will block until it is able to acquire the lock.
    ///
@@ -281,14 +206,6 @@ pub enum FileType {
}

impl FileType {
    fn from_zip_file_name(name: &str) -> FileType {
        if name.ends_with('/') {
            FileType::Directory
        } else {
            FileType::File
        }
    }

    pub const fn is_file(self) -> bool {
        matches!(self, Self::File)
    }
@@ -327,30 +244,6 @@ impl Metadata {
    }
}

#[derive(Debug, PartialEq, Eq)]
pub struct DirectoryEntry {
    path: VendoredPathBuf,
    file_type: FileType,
}

impl DirectoryEntry {
    pub fn new(path: VendoredPathBuf, file_type: FileType) -> Self {
        Self { path, file_type }
    }

    pub fn into_path(self) -> VendoredPathBuf {
        self.path
    }

    pub fn path(&self) -> &VendoredPath {
        &self.path
    }

    pub fn file_type(&self) -> FileType {
        self.file_type
    }
}

/// Newtype wrapper around a ZipArchive.
#[derive(Debug)]
struct VendoredZipArchive(ZipArchive<io::Cursor<Cow<'static, [u8]>>>);
@@ -605,60 +498,6 @@ pub(crate) mod tests {
        test_directory("./stdlib/asyncio/../asyncio/")
    }

    fn readdir_snapshot(fs: &VendoredFileSystem, path: &str) -> String {
        let mut paths = fs
            .read_directory(VendoredPath::new(path))
            .into_iter()
            .map(|entry| entry.path().to_string())
            .collect::<Vec<String>>();
        paths.sort();
        paths.join("\n")
    }

    #[test]
    fn read_directory_stdlib() {
        let mock_typeshed = mock_typeshed();

        assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "stdlib/"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
        assert_snapshot!(readdir_snapshot(&mock_typeshed, "./stdlib/"), @r"
        vendored://stdlib/asyncio/
        vendored://stdlib/functools.pyi
        ");
    }

    #[test]
    fn read_directory_asyncio() {
        let mock_typeshed = mock_typeshed();

        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "stdlib/asyncio"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "./stdlib/asyncio"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "stdlib/asyncio/"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
        assert_snapshot!(
            readdir_snapshot(&mock_typeshed, "./stdlib/asyncio/"),
            @"vendored://stdlib/asyncio/tasks.pyi",
        );
    }

    fn test_nonexistent_path(path: &str) {
        let mock_typeshed = mock_typeshed();
        let path = VendoredPath::new(path);

@@ -17,10 +17,6 @@ impl VendoredPath {
        unsafe { &*(path as *const Utf8Path as *const VendoredPath) }
    }

    pub fn file_name(&self) -> Option<&str> {
        self.0.file_name()
    }

    pub fn to_path_buf(&self) -> VendoredPathBuf {
        VendoredPathBuf(self.0.to_path_buf())
    }
@@ -91,12 +87,6 @@ impl ToOwned for VendoredPath {
#[derive(Debug, Eq, PartialEq, Clone, Hash)]
pub struct VendoredPathBuf(Utf8PathBuf);

impl get_size2::GetSize for VendoredPathBuf {
    fn get_heap_size(&self) -> usize {
        self.0.capacity()
    }
}

impl Default for VendoredPathBuf {
    fn default() -> Self {
        Self::new()

@@ -13,7 +13,6 @@ license = { workspace = true }
[dependencies]
ty = { workspace = true }
ty_project = { workspace = true, features = ["schemars"] }
ty_static = { workspace = true }
ruff = { workspace = true }
ruff_formatter = { workspace = true }
ruff_linter = { workspace = true, features = ["schemars"] }

@@ -4,7 +4,7 @@ use anyhow::Result;

use crate::{
    generate_cli_help, generate_docs, generate_json_schema, generate_ty_cli_reference,
    generate_ty_env_vars_reference, generate_ty_options, generate_ty_rules, generate_ty_schema,
    generate_ty_options, generate_ty_rules, generate_ty_schema,
};

pub(crate) const REGENERATE_ALL_COMMAND: &str = "cargo dev generate-all";
@@ -44,8 +44,5 @@ pub(crate) fn main(args: &Args) -> Result<()> {
    generate_ty_options::main(&generate_ty_options::Args { mode: args.mode })?;
    generate_ty_rules::main(&generate_ty_rules::Args { mode: args.mode })?;
    generate_ty_cli_reference::main(&generate_ty_cli_reference::Args { mode: args.mode })?;
    generate_ty_env_vars_reference::main(&generate_ty_env_vars_reference::Args {
        mode: args.mode,
    })?;
    Ok(())
}

@@ -1,119 +0,0 @@
//! Generate the environment variables reference from `ty_static::EnvVars`.

use std::collections::BTreeSet;
use std::fs;
use std::path::PathBuf;

use anyhow::bail;
use pretty_assertions::StrComparison;

use ty_static::EnvVars;

use crate::generate_all::Mode;

#[derive(clap::Args)]
pub(crate) struct Args {
    #[arg(long, default_value_t, value_enum)]
    pub(crate) mode: Mode,
}

pub(crate) fn main(args: &Args) -> anyhow::Result<()> {
    let reference_string = generate();
    let filename = "environment.md";
    let reference_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .parent()
        .unwrap()
        .join("crates")
        .join("ty")
        .join("docs")
        .join(filename);

    match args.mode {
        Mode::DryRun => {
            println!("{reference_string}");
        }
        Mode::Check => match fs::read_to_string(&reference_path) {
            Ok(current) => {
                if current == reference_string {
                    println!("Up-to-date: {filename}");
                } else {
                    let comparison = StrComparison::new(&current, &reference_string);
                    bail!(
                        "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{comparison}"
                    );
                }
            }
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                bail!(
                    "{filename} not found, please run `cargo dev generate-ty-env-vars-reference`"
                );
            }
            Err(err) => {
                bail!(
                    "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{err}"
                );
            }
        },
        Mode::Write => {
            // Ensure the docs directory exists
            if let Some(parent) = reference_path.parent() {
                fs::create_dir_all(parent)?;
            }

            match fs::read_to_string(&reference_path) {
                Ok(current) => {
                    if current == reference_string {
                        println!("Up-to-date: {filename}");
                    } else {
                        println!("Updating: {filename}");
                        fs::write(&reference_path, reference_string.as_bytes())?;
                    }
                }
                Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                    println!("Updating: {filename}");
                    fs::write(&reference_path, reference_string.as_bytes())?;
                }
                Err(err) => {
                    bail!(
                        "{filename} changed, please run `cargo dev generate-ty-env-vars-reference`:\n{err}"
                    );
                }
            }
        }
    }

    Ok(())
}

fn generate() -> String {
    let mut output = String::new();

    output.push_str("# Environment variables\n\n");

    // Partition and sort environment variables into TY_ and external variables.
    let (ty_vars, external_vars): (BTreeSet<_>, BTreeSet<_>) = EnvVars::metadata()
        .iter()
        .partition(|(var, _)| var.starts_with("TY_"));

    output.push_str("ty defines and respects the following environment variables:\n\n");

    for (var, doc) in ty_vars {
        output.push_str(&render(var, doc));
    }

    output.push_str("## Externally-defined variables\n\n");
    output.push_str("ty also reads the following externally defined environment variables:\n\n");

    for (var, doc) in external_vars {
        output.push_str(&render(var, doc));
    }

    output
}

/// Render an environment variable and its documentation.
fn render(var: &str, doc: &str) -> String {
    format!("### `{var}`\n\n{doc}\n\n")
}
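As an illustration, a hypothetical metadata entry `("TY_LOG", "Controls log verbosity.")` would render to this Markdown fragment:

    ### `TY_LOG`

    Controls log verbosity.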
@@ -114,7 +114,6 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec<Set>) {
    parents.pop();
}

#[derive(Debug)]
enum Set {
    Toplevel(OptionSet),
    Named { name: String, set: OptionSet },
@@ -137,7 +136,7 @@ impl Set {
}

fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[Set]) {
    let header_level = "#".repeat(parents.len() + 1);
    let header_level = if parents.is_empty() { "###" } else { "####" };

    let _ = writeln!(output, "{header_level} `{name}`");

@@ -73,20 +73,12 @@ fn generate_markdown() -> String {
    for lint in lints {
        let _ = writeln!(&mut output, "## `{rule_name}`\n", rule_name = lint.name());

        // Reformat headers as bold text
        let mut in_code_fence = false;
        // Increase the header-level by one
        let documentation = lint
            .documentation_lines()
            .map(|line| {
                // Toggle the code fence state if we encounter a boundary
                if line.starts_with("```") {
                    in_code_fence = !in_code_fence;
                }
                if !in_code_fence && line.starts_with('#') {
                    Cow::Owned(format!(
                        "**{line}**\n",
                        line = line.trim_start_matches('#').trim_start()
                    ))
                if line.starts_with('#') {
                    Cow::Owned(format!("#{line}"))
                } else {
                    Cow::Borrowed(line)
                }
@@ -95,15 +87,21 @@ fn generate_markdown() -> String {

        let _ = writeln!(
            &mut output,
            r#"<small>
Default level: [`{level}`](../rules.md#rule-levels "This lint has a default level of '{level}'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20{encoded_name}) ·
[View source](https://github.com/astral-sh/ruff/blob/main/{file}#L{line})
</small>
            r#"**Default level**: {level}

<details>
<summary>{summary}</summary>

{documentation}

### Links
* [Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20{encoded_name})
* [View source](https://github.com/astral-sh/ruff/blob/main/{file}#L{line})
</details>
"#,
            level = lint.default_level(),
            // GitHub doesn't support markdown in `summary` headers
            summary = replace_inline_code(lint.summary()),
            encoded_name = url::form_urlencoded::byte_serialize(lint.name().as_str().as_bytes())
                .collect::<String>(),
            file = url::form_urlencoded::byte_serialize(lint.file().replace('\\', "/").as_bytes())
@@ -115,6 +113,25 @@ Default level: [`{level}`](../rules.md#rule-levels "This lint has a default leve
    output
}

/// Replaces inline code blocks (`code`) with `<code>code</code>`
fn replace_inline_code(input: &str) -> String {
    let mut output = String::new();
    let mut parts = input.split('`');

    while let Some(before) = parts.next() {
        if let Some(between) = parts.next() {
            output.push_str(before);
            output.push_str("<code>");
            output.push_str(between);
            output.push_str("</code>");
        } else {
            output.push_str(before);
        }
    }

    output
}
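Because `split('`')` alternates between text outside and inside backticks, consuming the parts in pairs maps each backtick-delimited segment to a `<code>` wrapper, and an odd trailing part is appended verbatim. For example (hypothetical input):

    assert_eq!(
        replace_inline_code("use `ruff check` or `ty`"),
        "use <code>ruff check</code> or <code>ty</code>"
    );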
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;

@@ -18,7 +18,6 @@ mod generate_json_schema;
mod generate_options;
mod generate_rules_table;
mod generate_ty_cli_reference;
mod generate_ty_env_vars_reference;
mod generate_ty_options;
mod generate_ty_rules;
mod generate_ty_schema;

@@ -54,8 +53,6 @@ enum Command {
    /// Generate a Markdown-compatible listing of configuration options.
    GenerateOptions,
    GenerateTyOptions(generate_ty_options::Args),
    /// Generate environment variables reference for ty.
    GenerateTyEnvVarsReference(generate_ty_env_vars_reference::Args),
    /// Generate CLI help.
    GenerateCliHelp(generate_cli_help::Args),
    /// Generate Markdown docs.

@@ -101,7 +98,6 @@ fn main() -> Result<ExitCode> {
        Command::GenerateTyRules(args) => generate_ty_rules::main(&args)?,
        Command::GenerateOptions => println!("{}", generate_options::generate()),
        Command::GenerateTyOptions(args) => generate_ty_options::main(&args)?,
        Command::GenerateTyEnvVarsReference(args) => generate_ty_env_vars_reference::main(&args)?,
        Command::GenerateCliHelp(args) => generate_cli_help::main(&args)?,
        Command::GenerateDocs(args) => generate_docs::main(&args)?,
        Command::PrintAST(args) => print_ast::main(&args)?,

@@ -16,6 +16,5 @@ doctest = false
[dependencies]
ruff_text_size = { workspace = true }

get-size2 = { workspace = true }
is-macro = { workspace = true }
serde = { workspace = true, optional = true, features = [] }

@@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};

/// A text edit to be applied to a source file. Inserts, deletes, or replaces
/// content at a given location.
#[derive(Clone, Debug, PartialEq, Eq, Hash, get_size2::GetSize)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Edit {
    /// The start location of the edit.
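For readers unfamiliar with the attribute pattern in these hunks: derives listed in a plain `#[derive(...)]` are unconditional, while `#[cfg_attr(feature = "serde", derive(...))]` applies its derives only when that Cargo feature is enabled, so the optional dependency stays out of default builds. A toy example (the `Span` type is invented; it assumes an optional `serde` dependency behind a `serde` feature):

```rust
// `Span` is a made-up type; the attribute layout mirrors the hunks above.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Span {
    start: u32,
    end: u32,
}
```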

@@ -6,9 +6,7 @@ use ruff_text_size::{Ranged, TextSize};
use crate::edit::Edit;

/// Indicates if a fix can be applied.
#[derive(
    Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is, get_size2::GetSize,
)]
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))]
pub enum Applicability {

@@ -32,7 +30,7 @@ pub enum Applicability {
}

/// Indicates the level of isolation required to apply a fix.
#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, get_size2::GetSize)]
#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum IsolationLevel {
    /// The fix should be applied as long as no other fixes in the same group have been applied.

@@ -43,7 +41,7 @@ pub enum IsolationLevel {
}

/// A collection of [`Edit`] elements to be applied to a source file.
#[derive(Debug, PartialEq, Eq, Clone, get_size2::GetSize)]
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Fix {
    /// The [`Edit`] elements to be applied, sorted by [`Edit::start`] in ascending order.

@@ -20,7 +20,6 @@ ty_python_semantic = { workspace = true }

anyhow = { workspace = true }
clap = { workspace = true, optional = true }
memchr = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }

@@ -1,4 +1,3 @@
use crate::StringImports;
use ruff_python_ast::visitor::source_order::{
    SourceOrderVisitor, walk_expr, walk_module, walk_stmt,
};

@@ -11,13 +10,13 @@ pub(crate) struct Collector<'a> {
    /// The path to the current module.
    module_path: Option<&'a [String]>,
    /// Whether to detect imports from string literals.
    string_imports: StringImports,
    string_imports: bool,
    /// The collected imports from the Python AST.
    imports: Vec<CollectedImport>,
}

impl<'a> Collector<'a> {
    pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
    pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: bool) -> Self {
        Self {
            module_path,
            string_imports,
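The `StringImports` type being swapped against a plain `bool` here is not defined anywhere in this diff; judging only from its two uses below (`.enabled` and `.min_dots`), a plausible shape would be something like:

```rust
// Hypothetical reconstruction, inferred solely from the field accesses in
// this diff; the real definition lives elsewhere in the crate.
#[derive(Debug, Clone, Copy)]
pub struct StringImports {
    /// Whether to collect imports from string literals at all.
    pub enabled: bool,
    /// Minimum number of `.` separators a literal must contain before it is
    /// treated as a candidate module path.
    pub min_dots: usize,
}
```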

@@ -119,7 +118,7 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
            | Stmt::Continue(_)
            | Stmt::IpyEscapeCommand(_) => {
                // Only traverse simple statements when string imports is enabled.
                if self.string_imports.enabled {
                if self.string_imports {
                    walk_stmt(self, stmt);
                }
            }

@@ -127,26 +126,20 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
    }

    fn visit_expr(&mut self, expr: &'ast Expr) {
        if self.string_imports.enabled {
        if self.string_imports {
            if let Expr::StringLiteral(ast::ExprStringLiteral {
                value,
                range: _,
                node_index: _,
            }) = expr
            {
                let value = value.to_str();
                // Determine whether the string literal "looks like" an import statement: contains
                // the requisite number of dots, and consists solely of valid Python identifiers.
                if self.string_imports.min_dots == 0
                    || memchr::memchr_iter(b'.', value.as_bytes()).count()
                        >= self.string_imports.min_dots
                {
                    if let Some(module_name) = ModuleName::new(value) {
                        self.imports.push(CollectedImport::Import(module_name));
                    }
                // a dot, and consists solely of valid Python identifiers.
                let value = value.to_str();
                if let Some(module_name) = ModuleName::new(value) {
                    self.imports.push(CollectedImport::Import(module_name));
                }
            }

            walk_expr(self, expr);
        }
    }
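The dot-threshold test on the `StringImports` side of this hunk is cheap to factor out and verify; a standalone sketch (it assumes the `memchr` crate as a dependency, which is what the Cargo.toml hunk above adds or removes):

```rust
// Count '.' bytes with memchr and compare against the threshold, as in the
// hunk above. With min_dots == 0, every candidate string passes.
fn meets_dot_threshold(value: &str, min_dots: usize) -> bool {
    min_dots == 0 || memchr::memchr_iter(b'.', value.as_bytes()).count() >= min_dots
}

fn main() {
    assert!(meets_dot_threshold("collections.abc", 1));
    assert!(!meets_dot_threshold("os", 1)); // no dot: rejected
    assert!(meets_dot_threshold("os", 0)); // threshold disabled: accepted
}
```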

@@ -1,15 +1,15 @@
use anyhow::{Context, Result};
use anyhow::Result;
use std::sync::Arc;
use zip::CompressionMethod;

use ruff_db::Db as SourceDb;
use ruff_db::files::{File, Files};
use ruff_db::system::{OsSystem, System, SystemPathBuf};
use ruff_db::vendored::{VendoredFileSystem, VendoredFileSystemBuilder};
use ruff_db::{Db as SourceDb, Upcast};
use ruff_python_ast::PythonVersion;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{
    Db, Program, ProgramSettings, PythonEnvironment, PythonPlatform, PythonVersionSource,
    Db, Program, ProgramSettings, PythonPath, PythonPlatform, PythonVersionSource,
    PythonVersionWithSource, SearchPathSettings, SysPrefixPathOrigin, default_lint_registry,
};

@@ -35,37 +35,38 @@ impl ModuleDb {
        python_version: PythonVersion,
        venv_path: Option<SystemPathBuf>,
    ) -> Result<Self> {
        let db = Self::default();
        let mut search_paths = SearchPathSettings::new(src_roots);
        // TODO: Consider calling `PythonEnvironment::discover` if the `venv_path` is not provided.
        if let Some(venv_path) = venv_path {
            let environment =
                PythonEnvironment::new(venv_path, SysPrefixPathOrigin::PythonCliFlag, db.system())?;
            search_paths.site_packages_paths = environment
                .site_packages_paths(db.system())
                .context("Failed to discover the site-packages directory")?
                .into_vec();
            search_paths.python_path =
                PythonPath::sys_prefix(venv_path, SysPrefixPathOrigin::PythonCliFlag);
        }
        let search_paths = search_paths
            .to_search_paths(db.system(), db.vendored())
            .context("Invalid search path settings")?;

        let db = Self::default();
        Program::from_settings(
            &db,
            ProgramSettings {
                python_version: PythonVersionWithSource {
                python_version: Some(PythonVersionWithSource {
                    version: python_version,
                    source: PythonVersionSource::default(),
                },
                }),
                python_platform: PythonPlatform::default(),
                search_paths,
            },
        );
        )?;

        Ok(db)
    }
}

impl Upcast<dyn SourceDb> for ModuleDb {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
    fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
        self
    }
}

#[salsa::db]
impl SourceDb for ModuleDb {
    fn vendored(&self) -> &VendoredFileSystem {

@@ -87,7 +88,7 @@ impl SourceDb for ModuleDb {

#[salsa::db]
impl Db for ModuleDb {
    fn should_check_file(&self, file: File) -> bool {
    fn is_file_open(&self, file: File) -> bool {
        !file.path(self).is_vendored_path()
    }

Some files were not shown because too many files have changed in this diff.