Compare commits
3 Commits
dcreager/s
...
dcreager/a
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
42945dc9dc | ||
|
|
20563db9c0 | ||
|
|
a3b5df8a64 |
@@ -8,7 +8,3 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
|
|||||||
# See: https://github.com/astral-sh/ruff/issues/11503
|
# See: https://github.com/astral-sh/ruff/issues/11503
|
||||||
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
|
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
|
||||||
rustflags = ["-C", "target-feature=+crt-static"]
|
rustflags = ["-C", "target-feature=+crt-static"]
|
||||||
|
|
||||||
[target.'wasm32-unknown-unknown']
|
|
||||||
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
|
|
||||||
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']
|
|
||||||
@@ -6,10 +6,3 @@ failure-output = "immediate-final"
|
|||||||
fail-fast = false
|
fail-fast = false
|
||||||
|
|
||||||
status-level = "skip"
|
status-level = "skip"
|
||||||
|
|
||||||
# Mark tests that take longer than 1s as slow.
|
|
||||||
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
|
|
||||||
slow-timeout = { period = "1s", terminate-after = 60 }
|
|
||||||
|
|
||||||
# Show slow jobs in the final summary
|
|
||||||
final-status-level = "slow"
|
|
||||||
|
|||||||
7
.github/CODEOWNERS
vendored
7
.github/CODEOWNERS
vendored
@@ -18,7 +18,6 @@
|
|||||||
/python/py-fuzzer/ @AlexWaygood
|
/python/py-fuzzer/ @AlexWaygood
|
||||||
|
|
||||||
# red-knot
|
# red-knot
|
||||||
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
|
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
|
||||||
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
|
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
|
||||||
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
|
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp
|
||||||
/crates/red_knot_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
|
|
||||||
|
|||||||
31
.github/ISSUE_TEMPLATE/1_bug_report.yaml
vendored
31
.github/ISSUE_TEMPLATE/1_bug_report.yaml
vendored
@@ -1,31 +0,0 @@
|
|||||||
name: Bug report
|
|
||||||
description: Report an error or unexpected behavior
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
|
|
||||||
|
|
||||||
**Before reporting, please make sure to search through [existing issues](https://github.com/astral-sh/ruff/issues?q=is:issue+is:open+label:bug) (including [closed](https://github.com/astral-sh/ruff/issues?q=is:issue%20state:closed%20label:bug)).**
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Summary
|
|
||||||
description: |
|
|
||||||
A clear and concise description of the bug, including a minimal reproducible example.
|
|
||||||
|
|
||||||
Be sure to include the command you invoked (e.g., `ruff check /path/to/file.py --fix`), ideally including the `--isolated` flag and
|
|
||||||
the current Ruff settings (e.g., relevant sections from your `pyproject.toml`).
|
|
||||||
|
|
||||||
If possible, try to include the [playground](https://play.ruff.rs) link that reproduces this issue.
|
|
||||||
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: Version
|
|
||||||
description: What version of ruff are you using? (see `ruff version`)
|
|
||||||
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
10
.github/ISSUE_TEMPLATE/2_rule_request.yaml
vendored
10
.github/ISSUE_TEMPLATE/2_rule_request.yaml
vendored
@@ -1,10 +0,0 @@
|
|||||||
name: Rule request
|
|
||||||
description: Anything related to lint rules (proposing new rules, changes to existing rules, auto-fixes, etc.)
|
|
||||||
body:
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Summary
|
|
||||||
description: |
|
|
||||||
A clear and concise description of the relevant request. If applicable, please describe the current behavior as well.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
18
.github/ISSUE_TEMPLATE/3_question.yaml
vendored
18
.github/ISSUE_TEMPLATE/3_question.yaml
vendored
@@ -1,18 +0,0 @@
|
|||||||
name: Question
|
|
||||||
description: Ask a question about Ruff
|
|
||||||
labels: ["question"]
|
|
||||||
body:
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Question
|
|
||||||
description: Describe your question in detail.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: Version
|
|
||||||
description: What version of ruff are you using? (see `ruff version`)
|
|
||||||
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
8
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,8 +0,0 @@
|
|||||||
blank_issues_enabled: true
|
|
||||||
contact_links:
|
|
||||||
- name: Documentation
|
|
||||||
url: https://docs.astral.sh/ruff
|
|
||||||
about: Please consult the documentation before creating an issue.
|
|
||||||
- name: Community
|
|
||||||
url: https://discord.com/invite/astral-sh
|
|
||||||
about: Join our Discord community to ask questions and collaborate.
|
|
||||||
3
.github/actionlint.yaml
vendored
3
.github/actionlint.yaml
vendored
@@ -6,5 +6,4 @@ self-hosted-runner:
|
|||||||
labels:
|
labels:
|
||||||
- depot-ubuntu-latest-8
|
- depot-ubuntu-latest-8
|
||||||
- depot-ubuntu-22.04-16
|
- depot-ubuntu-22.04-16
|
||||||
- github-windows-2025-x86_64-8
|
- windows-latest-xlarge
|
||||||
- github-windows-2025-x86_64-16
|
|
||||||
|
|||||||
20
.github/renovate.json5
vendored
20
.github/renovate.json5
vendored
@@ -40,17 +40,6 @@
|
|||||||
enabled: true,
|
enabled: true,
|
||||||
},
|
},
|
||||||
packageRules: [
|
packageRules: [
|
||||||
// Pin GitHub Actions to immutable SHAs.
|
|
||||||
{
|
|
||||||
matchDepTypes: ["action"],
|
|
||||||
pinDigests: true,
|
|
||||||
},
|
|
||||||
// Annotate GitHub Actions SHAs with a SemVer version.
|
|
||||||
{
|
|
||||||
extends: ["helpers:pinGitHubActionDigests"],
|
|
||||||
extractVersion: "^(?<version>v?\\d+\\.\\d+\\.\\d+)$",
|
|
||||||
versioning: "regex:^v?(?<major>\\d+)(\\.(?<minor>\\d+)\\.(?<patch>\\d+))?$",
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
// Group upload/download artifact updates, the versions are dependent
|
// Group upload/download artifact updates, the versions are dependent
|
||||||
groupName: "Artifact GitHub Actions dependencies",
|
groupName: "Artifact GitHub Actions dependencies",
|
||||||
@@ -106,7 +95,14 @@
|
|||||||
matchManagers: ["cargo"],
|
matchManagers: ["cargo"],
|
||||||
matchPackageNames: ["strum"],
|
matchPackageNames: ["strum"],
|
||||||
description: "Weekly update of strum dependencies",
|
description: "Weekly update of strum dependencies",
|
||||||
}
|
},
|
||||||
|
{
|
||||||
|
groupName: "ESLint",
|
||||||
|
matchManagers: ["npm"],
|
||||||
|
matchPackageNames: ["eslint"],
|
||||||
|
allowedVersions: "<9",
|
||||||
|
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
|
||||||
|
},
|
||||||
],
|
],
|
||||||
vulnerabilityAlerts: {
|
vulnerabilityAlerts: {
|
||||||
commitMessageSuffix: "",
|
commitMessageSuffix: "",
|
||||||
|
|||||||
86
.github/workflows/build-binaries.yml
vendored
86
.github/workflows/build-binaries.yml
vendored
@@ -23,8 +23,6 @@ concurrency:
|
|||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
permissions: {}
|
|
||||||
|
|
||||||
env:
|
env:
|
||||||
PACKAGE_NAME: ruff
|
PACKAGE_NAME: ruff
|
||||||
MODULE_NAME: ruff
|
MODULE_NAME: ruff
|
||||||
@@ -39,17 +37,17 @@ jobs:
|
|||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build sdist"
|
- name: "Build sdist"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
command: sdist
|
command: sdist
|
||||||
args: --out dist
|
args: --out dist
|
||||||
@@ -59,7 +57,7 @@ jobs:
|
|||||||
"${MODULE_NAME}" --help
|
"${MODULE_NAME}" --help
|
||||||
python -m "${MODULE_NAME}" --help
|
python -m "${MODULE_NAME}" --help
|
||||||
- name: "Upload sdist"
|
- name: "Upload sdist"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wheels-sdist
|
name: wheels-sdist
|
||||||
path: dist
|
path: dist
|
||||||
@@ -68,23 +66,23 @@ jobs:
|
|||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||||
runs-on: macos-14
|
runs-on: macos-14
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels - x86_64"
|
- name: "Build wheels - x86_64"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: x86_64
|
target: x86_64
|
||||||
args: --release --locked --out dist
|
args: --release --locked --out dist
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wheels-macos-x86_64
|
name: wheels-macos-x86_64
|
||||||
path: dist
|
path: dist
|
||||||
@@ -99,7 +97,7 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-macos-x86_64
|
name: artifacts-macos-x86_64
|
||||||
path: |
|
path: |
|
||||||
@@ -110,18 +108,18 @@ jobs:
|
|||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||||
runs-on: macos-14
|
runs-on: macos-14
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: arm64
|
architecture: arm64
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels - aarch64"
|
- name: "Build wheels - aarch64"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: aarch64
|
target: aarch64
|
||||||
args: --release --locked --out dist
|
args: --release --locked --out dist
|
||||||
@@ -131,7 +129,7 @@ jobs:
|
|||||||
ruff --help
|
ruff --help
|
||||||
python -m ruff --help
|
python -m ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wheels-aarch64-apple-darwin
|
name: wheels-aarch64-apple-darwin
|
||||||
path: dist
|
path: dist
|
||||||
@@ -146,7 +144,7 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-aarch64-apple-darwin
|
name: artifacts-aarch64-apple-darwin
|
||||||
path: |
|
path: |
|
||||||
@@ -166,18 +164,18 @@ jobs:
|
|||||||
- target: aarch64-pc-windows-msvc
|
- target: aarch64-pc-windows-msvc
|
||||||
arch: x64
|
arch: x64
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: ${{ matrix.platform.arch }}
|
architecture: ${{ matrix.platform.arch }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels"
|
- name: "Build wheels"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
args: --release --locked --out dist
|
args: --release --locked --out dist
|
||||||
@@ -192,7 +190,7 @@ jobs:
|
|||||||
"${MODULE_NAME}" --help
|
"${MODULE_NAME}" --help
|
||||||
python -m "${MODULE_NAME}" --help
|
python -m "${MODULE_NAME}" --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels-${{ matrix.platform.target }}
|
||||||
path: dist
|
path: dist
|
||||||
@@ -203,7 +201,7 @@ jobs:
|
|||||||
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
|
||||||
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.platform.target }}
|
name: artifacts-${{ matrix.platform.target }}
|
||||||
path: |
|
path: |
|
||||||
@@ -219,18 +217,18 @@ jobs:
|
|||||||
- x86_64-unknown-linux-gnu
|
- x86_64-unknown-linux-gnu
|
||||||
- i686-unknown-linux-gnu
|
- i686-unknown-linux-gnu
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels"
|
- name: "Build wheels"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
manylinux: auto
|
manylinux: auto
|
||||||
@@ -242,7 +240,7 @@ jobs:
|
|||||||
"${MODULE_NAME}" --help
|
"${MODULE_NAME}" --help
|
||||||
python -m "${MODULE_NAME}" --help
|
python -m "${MODULE_NAME}" --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.target }}
|
name: wheels-${{ matrix.target }}
|
||||||
path: dist
|
path: dist
|
||||||
@@ -260,7 +258,7 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.target }}
|
name: artifacts-${{ matrix.target }}
|
||||||
path: |
|
path: |
|
||||||
@@ -294,24 +292,24 @@ jobs:
|
|||||||
arch: arm
|
arch: arm
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels"
|
- name: "Build wheels"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
manylinux: auto
|
manylinux: auto
|
||||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||||
args: --release --locked --out dist
|
args: --release --locked --out dist
|
||||||
- uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
if: ${{ matrix.platform.arch != 'ppc64' && matrix.platform.arch != 'ppc64le'}}
|
if: matrix.platform.arch != 'ppc64'
|
||||||
name: Test wheel
|
name: Test wheel
|
||||||
with:
|
with:
|
||||||
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
|
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
|
||||||
@@ -325,7 +323,7 @@ jobs:
|
|||||||
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
ruff --help
|
ruff --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels-${{ matrix.platform.target }}
|
||||||
path: dist
|
path: dist
|
||||||
@@ -343,7 +341,7 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.platform.target }}
|
name: artifacts-${{ matrix.platform.target }}
|
||||||
path: |
|
path: |
|
||||||
@@ -359,18 +357,18 @@ jobs:
|
|||||||
- x86_64-unknown-linux-musl
|
- x86_64-unknown-linux-musl
|
||||||
- i686-unknown-linux-musl
|
- i686-unknown-linux-musl
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels"
|
- name: "Build wheels"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
manylinux: musllinux_1_2
|
manylinux: musllinux_1_2
|
||||||
@@ -387,7 +385,7 @@ jobs:
|
|||||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
.venv/bin/${{ env.MODULE_NAME }} --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.target }}
|
name: wheels-${{ matrix.target }}
|
||||||
path: dist
|
path: dist
|
||||||
@@ -405,7 +403,7 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.target }}
|
name: artifacts-${{ matrix.target }}
|
||||||
path: |
|
path: |
|
||||||
@@ -425,23 +423,23 @@ jobs:
|
|||||||
arch: armv7
|
arch: armv7
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels"
|
- name: "Build wheels"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.platform.target }}
|
target: ${{ matrix.platform.target }}
|
||||||
manylinux: musllinux_1_2
|
manylinux: musllinux_1_2
|
||||||
args: --release --locked --out dist
|
args: --release --locked --out dist
|
||||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||||
- uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
|
- uses: uraimo/run-on-arch-action@v2
|
||||||
name: Test wheel
|
name: Test wheel
|
||||||
with:
|
with:
|
||||||
arch: ${{ matrix.platform.arch }}
|
arch: ${{ matrix.platform.arch }}
|
||||||
@@ -454,7 +452,7 @@ jobs:
|
|||||||
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
|
||||||
.venv/bin/${{ env.MODULE_NAME }} --help
|
.venv/bin/${{ env.MODULE_NAME }} --help
|
||||||
- name: "Upload wheels"
|
- name: "Upload wheels"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wheels-${{ matrix.platform.target }}
|
name: wheels-${{ matrix.platform.target }}
|
||||||
path: dist
|
path: dist
|
||||||
@@ -472,7 +470,7 @@ jobs:
|
|||||||
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
|
||||||
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-${{ matrix.platform.target }}
|
name: artifacts-${{ matrix.platform.target }}
|
||||||
path: |
|
path: |
|
||||||
|
|||||||
40
.github/workflows/build-docker.yml
vendored
40
.github/workflows/build-docker.yml
vendored
@@ -33,14 +33,14 @@ jobs:
|
|||||||
- linux/amd64
|
- linux/amd64
|
||||||
- linux/arm64
|
- linux/arm64
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
- uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
- uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.repository_owner }}
|
username: ${{ github.repository_owner }}
|
||||||
@@ -51,7 +51,7 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
|
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
|
||||||
run: |
|
run: |
|
||||||
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||||
if [ "${TAG}" != "${version}" ]; then
|
if [ "${TAG}" != "${version}" ]; then
|
||||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||||
echo "${TAG}" >&2
|
echo "${TAG}" >&2
|
||||||
@@ -63,7 +63,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker
|
- name: Extract metadata (tags, labels) for Docker
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: ${{ env.RUFF_BASE_IMG }}
|
images: ${{ env.RUFF_BASE_IMG }}
|
||||||
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
|
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
|
||||||
@@ -79,7 +79,7 @@ jobs:
|
|||||||
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
|
||||||
- name: Build and push by digest
|
- name: Build and push by digest
|
||||||
id: build
|
id: build
|
||||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: ${{ matrix.platform }}
|
platforms: ${{ matrix.platform }}
|
||||||
@@ -96,7 +96,7 @@ jobs:
|
|||||||
touch "/tmp/digests/${digest#sha256:}"
|
touch "/tmp/digests/${digest#sha256:}"
|
||||||
|
|
||||||
- name: Upload digests
|
- name: Upload digests
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: digests-${{ env.PLATFORM_TUPLE }}
|
name: digests-${{ env.PLATFORM_TUPLE }}
|
||||||
path: /tmp/digests/*
|
path: /tmp/digests/*
|
||||||
@@ -113,17 +113,17 @@ jobs:
|
|||||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||||
steps:
|
steps:
|
||||||
- name: Download digests
|
- name: Download digests
|
||||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
path: /tmp/digests
|
path: /tmp/digests
|
||||||
pattern: digests-*
|
pattern: digests-*
|
||||||
merge-multiple: true
|
merge-multiple: true
|
||||||
|
|
||||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
- uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker
|
- name: Extract metadata (tags, labels) for Docker
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: ${{ env.RUFF_BASE_IMG }}
|
images: ${{ env.RUFF_BASE_IMG }}
|
||||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||||
@@ -131,7 +131,7 @@ jobs:
|
|||||||
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||||
|
|
||||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
- uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.repository_owner }}
|
username: ${{ github.repository_owner }}
|
||||||
@@ -163,13 +163,13 @@ jobs:
|
|||||||
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
|
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
|
||||||
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
|
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
|
||||||
image-mapping:
|
image-mapping:
|
||||||
- alpine:3.21,alpine3.21,alpine
|
- alpine:3.20,alpine3.20,alpine
|
||||||
- debian:bookworm-slim,bookworm-slim,debian-slim
|
- debian:bookworm-slim,bookworm-slim,debian-slim
|
||||||
- buildpack-deps:bookworm,bookworm,debian
|
- buildpack-deps:bookworm,bookworm,debian
|
||||||
steps:
|
steps:
|
||||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
- uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
- uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.repository_owner }}
|
username: ${{ github.repository_owner }}
|
||||||
@@ -219,7 +219,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker
|
- name: Extract metadata (tags, labels) for Docker
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
uses: docker/metadata-action@v5
|
||||||
# ghcr.io prefers index level annotations
|
# ghcr.io prefers index level annotations
|
||||||
env:
|
env:
|
||||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||||
@@ -231,7 +231,7 @@ jobs:
|
|||||||
${{ env.TAG_PATTERNS }}
|
${{ env.TAG_PATTERNS }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64,linux/arm64
|
platforms: linux/amd64,linux/arm64
|
||||||
@@ -256,17 +256,17 @@ jobs:
|
|||||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||||
steps:
|
steps:
|
||||||
- name: Download digests
|
- name: Download digests
|
||||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
path: /tmp/digests
|
path: /tmp/digests
|
||||||
pattern: digests-*
|
pattern: digests-*
|
||||||
merge-multiple: true
|
merge-multiple: true
|
||||||
|
|
||||||
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
- uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker
|
- name: Extract metadata (tags, labels) for Docker
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
uses: docker/metadata-action@v5
|
||||||
env:
|
env:
|
||||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||||
with:
|
with:
|
||||||
@@ -276,7 +276,7 @@ jobs:
|
|||||||
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
|
||||||
|
|
||||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
- uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.repository_owner }}
|
username: ${{ github.repository_owner }}
|
||||||
|
|||||||
378
.github/workflows/ci.yaml
vendored
378
.github/workflows/ci.yaml
vendored
@@ -1,7 +1,5 @@
|
|||||||
name: CI
|
name: CI
|
||||||
|
|
||||||
permissions: {}
|
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [main]
|
branches: [main]
|
||||||
@@ -26,152 +24,82 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
# Flag that is raised when any code that affects parser is changed
|
# Flag that is raised when any code that affects parser is changed
|
||||||
parser: ${{ steps.check_parser.outputs.changed }}
|
parser: ${{ steps.changed.outputs.parser_any_changed }}
|
||||||
# Flag that is raised when any code that affects linter is changed
|
# Flag that is raised when any code that affects linter is changed
|
||||||
linter: ${{ steps.check_linter.outputs.changed }}
|
linter: ${{ steps.changed.outputs.linter_any_changed }}
|
||||||
# Flag that is raised when any code that affects formatter is changed
|
# Flag that is raised when any code that affects formatter is changed
|
||||||
formatter: ${{ steps.check_formatter.outputs.changed }}
|
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
|
||||||
# Flag that is raised when any code is changed
|
# Flag that is raised when any code is changed
|
||||||
# This is superset of the linter and formatter
|
# This is superset of the linter and formatter
|
||||||
code: ${{ steps.check_code.outputs.changed }}
|
code: ${{ steps.changed.outputs.code_any_changed }}
|
||||||
# Flag that is raised when any code that affects the fuzzer is changed
|
# Flag that is raised when any code that affects the fuzzer is changed
|
||||||
fuzz: ${{ steps.check_fuzzer.outputs.changed }}
|
fuzz: ${{ steps.changed.outputs.fuzz_any_changed }}
|
||||||
|
|
||||||
# Flag that is set to "true" when code related to the playground changes.
|
|
||||||
playground: ${{ steps.check_playground.outputs.changed }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Determine merge base
|
- uses: tj-actions/changed-files@v45
|
||||||
id: merge_base
|
id: changed
|
||||||
env:
|
with:
|
||||||
BASE_REF: ${{ github.event.pull_request.base.ref || 'main' }}
|
files_yaml: |
|
||||||
run: |
|
parser:
|
||||||
sha=$(git merge-base HEAD "origin/${BASE_REF}")
|
- Cargo.toml
|
||||||
echo "sha=${sha}" >> "$GITHUB_OUTPUT"
|
- Cargo.lock
|
||||||
|
- crates/ruff_python_trivia/**
|
||||||
|
- crates/ruff_source_file/**
|
||||||
|
- crates/ruff_text_size/**
|
||||||
|
- crates/ruff_python_ast/**
|
||||||
|
- crates/ruff_python_parser/**
|
||||||
|
- python/py-fuzzer/**
|
||||||
|
- .github/workflows/ci.yaml
|
||||||
|
|
||||||
- name: Check if the parser code changed
|
linter:
|
||||||
id: check_parser
|
- Cargo.toml
|
||||||
env:
|
- Cargo.lock
|
||||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
- crates/**
|
||||||
run: |
|
- "!crates/ruff_python_formatter/**"
|
||||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
|
- "!crates/ruff_formatter/**"
|
||||||
':Cargo.toml' \
|
- "!crates/ruff_dev/**"
|
||||||
':Cargo.lock' \
|
- scripts/*
|
||||||
':crates/ruff_python_trivia/**' \
|
- python/**
|
||||||
':crates/ruff_source_file/**' \
|
- .github/workflows/ci.yaml
|
||||||
':crates/ruff_text_size/**' \
|
|
||||||
':crates/ruff_python_ast/**' \
|
|
||||||
':crates/ruff_python_parser/**' \
|
|
||||||
':python/py-fuzzer/**' \
|
|
||||||
':.github/workflows/ci.yaml' \
|
|
||||||
; then
|
|
||||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Check if the linter code changed
|
formatter:
|
||||||
id: check_linter
|
- Cargo.toml
|
||||||
env:
|
- Cargo.lock
|
||||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
- crates/ruff_python_formatter/**
|
||||||
run: |
|
- crates/ruff_formatter/**
|
||||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
|
- crates/ruff_python_trivia/**
|
||||||
':Cargo.lock' \
|
- crates/ruff_python_ast/**
|
||||||
':crates/**' \
|
- crates/ruff_source_file/**
|
||||||
':!crates/red_knot*/**' \
|
- crates/ruff_python_index/**
|
||||||
':!crates/ruff_python_formatter/**' \
|
- crates/ruff_text_size/**
|
||||||
':!crates/ruff_formatter/**' \
|
- crates/ruff_python_parser/**
|
||||||
':!crates/ruff_dev/**' \
|
- crates/ruff_dev/**
|
||||||
':!crates/ruff_db/**' \
|
- scripts/*
|
||||||
':scripts/*' \
|
- python/**
|
||||||
':python/**' \
|
- .github/workflows/ci.yaml
|
||||||
':.github/workflows/ci.yaml' \
|
|
||||||
; then
|
|
||||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Check if the formatter code changed
|
fuzz:
|
||||||
id: check_formatter
|
- fuzz/Cargo.toml
|
||||||
env:
|
- fuzz/Cargo.lock
|
||||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
- fuzz/fuzz_targets/**
|
||||||
run: |
|
|
||||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
|
|
||||||
':Cargo.lock' \
|
|
||||||
':crates/ruff_python_formatter/**' \
|
|
||||||
':crates/ruff_formatter/**' \
|
|
||||||
':crates/ruff_python_trivia/**' \
|
|
||||||
':crates/ruff_python_ast/**' \
|
|
||||||
':crates/ruff_source_file/**' \
|
|
||||||
':crates/ruff_python_index/**' \
|
|
||||||
':crates/ruff_python_index/**' \
|
|
||||||
':crates/ruff_text_size/**' \
|
|
||||||
':crates/ruff_python_parser/**' \
|
|
||||||
':scripts/*' \
|
|
||||||
':python/**' \
|
|
||||||
':.github/workflows/ci.yaml' \
|
|
||||||
; then
|
|
||||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Check if the fuzzer code changed
|
code:
|
||||||
id: check_fuzzer
|
- "**/*"
|
||||||
env:
|
- "!**/*.md"
|
||||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
- "crates/red_knot_python_semantic/resources/mdtest/**/*.md"
|
||||||
run: |
|
- "!docs/**"
|
||||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
|
- "!assets/**"
|
||||||
':Cargo.lock' \
|
|
||||||
':fuzz/fuzz_targets/**' \
|
|
||||||
':.github/workflows/ci.yaml' \
|
|
||||||
; then
|
|
||||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Check if there was any code related change
|
|
||||||
id: check_code
|
|
||||||
env:
|
|
||||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
|
||||||
run: |
|
|
||||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**/*' \
|
|
||||||
':!**/*.md' \
|
|
||||||
':crates/red_knot_python_semantic/resources/mdtest/**/*.md' \
|
|
||||||
':!docs/**' \
|
|
||||||
':!assets/**' \
|
|
||||||
':.github/workflows/ci.yaml' \
|
|
||||||
; then
|
|
||||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Check if there was any playground related change
|
|
||||||
id: check_playground
|
|
||||||
env:
|
|
||||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
|
||||||
run: |
|
|
||||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
|
|
||||||
':playground/**' \
|
|
||||||
; then
|
|
||||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "changed=true" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
cargo-fmt:
|
cargo-fmt:
|
||||||
name: "cargo fmt"
|
name: "cargo fmt"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
@@ -185,14 +113,14 @@ jobs:
|
|||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: |
|
run: |
|
||||||
rustup component add clippy
|
rustup component add clippy
|
||||||
rustup target add wasm32-unknown-unknown
|
rustup target add wasm32-unknown-unknown
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Clippy"
|
- name: "Clippy"
|
||||||
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
||||||
- name: "Clippy (wasm)"
|
- name: "Clippy (wasm)"
|
||||||
@@ -202,25 +130,25 @@ jobs:
|
|||||||
name: "cargo test (linux)"
|
name: "cargo test (linux)"
|
||||||
runs-on: depot-ubuntu-22.04-16
|
runs-on: depot-ubuntu-22.04-16
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: "Install mold"
|
- name: "Install mold"
|
||||||
uses: rui314/setup-mold@v1
|
uses: rui314/setup-mold@v1
|
||||||
- name: "Install cargo nextest"
|
- name: "Install cargo nextest"
|
||||||
uses: taiki-e/install-action@914ac1e29db2d22aef69891f032778d9adc3990d # v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-nextest
|
tool: cargo-nextest
|
||||||
- name: "Install cargo insta"
|
- name: "Install cargo insta"
|
||||||
uses: taiki-e/install-action@914ac1e29db2d22aef69891f032778d9adc3990d # v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-insta
|
tool: cargo-insta
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Run tests"
|
- name: "Run tests"
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
env:
|
||||||
@@ -239,7 +167,7 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||||
RUSTDOCFLAGS: "-D warnings"
|
RUSTDOCFLAGS: "-D warnings"
|
||||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
- uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: ruff
|
name: ruff
|
||||||
path: target/debug/ruff
|
path: target/debug/ruff
|
||||||
@@ -248,25 +176,25 @@ jobs:
|
|||||||
name: "cargo test (linux, release)"
|
name: "cargo test (linux, release)"
|
||||||
runs-on: depot-ubuntu-22.04-16
|
runs-on: depot-ubuntu-22.04-16
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: "Install mold"
|
- name: "Install mold"
|
||||||
uses: rui314/setup-mold@v1
|
uses: rui314/setup-mold@v1
|
||||||
- name: "Install cargo nextest"
|
- name: "Install cargo nextest"
|
||||||
uses: taiki-e/install-action@914ac1e29db2d22aef69891f032778d9adc3990d # v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-nextest
|
tool: cargo-nextest
|
||||||
- name: "Install cargo insta"
|
- name: "Install cargo insta"
|
||||||
uses: taiki-e/install-action@914ac1e29db2d22aef69891f032778d9adc3990d # v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-insta
|
tool: cargo-insta
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Run tests"
|
- name: "Run tests"
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
env:
|
||||||
@@ -275,25 +203,24 @@ jobs:
|
|||||||
|
|
||||||
cargo-test-windows:
|
cargo-test-windows:
|
||||||
name: "cargo test (windows)"
|
name: "cargo test (windows)"
|
||||||
runs-on: github-windows-2025-x86_64-16
|
runs-on: windows-latest-xlarge
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: "Install cargo nextest"
|
- name: "Install cargo nextest"
|
||||||
uses: taiki-e/install-action@914ac1e29db2d22aef69891f032778d9adc3990d # v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-nextest
|
tool: cargo-nextest
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Run tests"
|
- name: "Run tests"
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
env:
|
||||||
NEXTEST_PROFILE: "ci"
|
|
||||||
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
|
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
|
||||||
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
|
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
|
||||||
run: |
|
run: |
|
||||||
@@ -304,23 +231,23 @@ jobs:
|
|||||||
name: "cargo test (wasm)"
|
name: "cargo test (wasm)"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup target add wasm32-unknown-unknown
|
run: rustup target add wasm32-unknown-unknown
|
||||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: 20
|
node-version: 20
|
||||||
cache: "npm"
|
cache: "npm"
|
||||||
cache-dependency-path: playground/package-lock.json
|
cache-dependency-path: playground/package-lock.json
|
||||||
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
|
- uses: jetli/wasm-pack-action@v0.4.0
|
||||||
with:
|
with:
|
||||||
version: v0.13.1
|
version: v0.13.1
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Test ruff_wasm"
|
- name: "Test ruff_wasm"
|
||||||
run: |
|
run: |
|
||||||
cd crates/ruff_wasm
|
cd crates/ruff_wasm
|
||||||
@@ -336,33 +263,32 @@ jobs:
|
|||||||
if: ${{ github.ref == 'refs/heads/main' }}
|
if: ${{ github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: "Install mold"
|
- name: "Install mold"
|
||||||
uses: rui314/setup-mold@v1
|
uses: rui314/setup-mold@v1
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Build"
|
- name: "Build"
|
||||||
run: cargo build --release --locked
|
run: cargo build --release --locked
|
||||||
|
|
||||||
cargo-build-msrv:
|
cargo-build-msrv:
|
||||||
name: "cargo build (msrv)"
|
name: "cargo build (msrv)"
|
||||||
runs-on: depot-ubuntu-latest-8
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: SebRollen/toml-action@b1b3628f55fc3a28208d4203ada8b737e9687876 # v1.2.0
|
- uses: SebRollen/toml-action@v1.2.0
|
||||||
id: msrv
|
id: msrv
|
||||||
with:
|
with:
|
||||||
file: "Cargo.toml"
|
file: "Cargo.toml"
|
||||||
field: "workspace.package.rust-version"
|
field: "workspace.package.rust-version"
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
env:
|
env:
|
||||||
MSRV: ${{ steps.msrv.outputs.value }}
|
MSRV: ${{ steps.msrv.outputs.value }}
|
||||||
@@ -370,13 +296,14 @@ jobs:
|
|||||||
- name: "Install mold"
|
- name: "Install mold"
|
||||||
uses: rui314/setup-mold@v1
|
uses: rui314/setup-mold@v1
|
||||||
- name: "Install cargo nextest"
|
- name: "Install cargo nextest"
|
||||||
uses: taiki-e/install-action@914ac1e29db2d22aef69891f032778d9adc3990d # v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-nextest
|
tool: cargo-nextest
|
||||||
- name: "Install cargo insta"
|
- name: "Install cargo insta"
|
||||||
uses: taiki-e/install-action@914ac1e29db2d22aef69891f032778d9adc3990d # v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-insta
|
tool: cargo-insta
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Run tests"
|
- name: "Run tests"
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
env:
|
||||||
@@ -391,14 +318,14 @@ jobs:
|
|||||||
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
|
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
with:
|
|
||||||
workspaces: "fuzz -> target"
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
with:
|
||||||
|
workspaces: "fuzz -> target"
|
||||||
- name: "Install cargo-binstall"
|
- name: "Install cargo-binstall"
|
||||||
uses: cargo-bins/cargo-binstall@main
|
uses: cargo-bins/cargo-binstall@main
|
||||||
with:
|
with:
|
||||||
@@ -414,16 +341,16 @@ jobs:
|
|||||||
needs:
|
needs:
|
||||||
- cargo-test-linux
|
- cargo-test-linux
|
||||||
- determine_changes
|
- determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
|
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
env:
|
env:
|
||||||
FORCE_COLOR: 1
|
FORCE_COLOR: 1
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: astral-sh/setup-uv@22695119d769bdb6f7032ad67b9bca0ef8c4a174 # v5
|
- uses: astral-sh/setup-uv@v5
|
||||||
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
- uses: actions/download-artifact@v4
|
||||||
name: Download Ruff binary to test
|
name: Download Ruff binary to test
|
||||||
id: download-cached-binary
|
id: download-cached-binary
|
||||||
with:
|
with:
|
||||||
@@ -450,15 +377,15 @@ jobs:
|
|||||||
name: "test scripts"
|
name: "test scripts"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
timeout-minutes: 5
|
timeout-minutes: 5
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup component add rustfmt
|
run: rustup component add rustfmt
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
# Run all code generation scripts, and verify that the current output is
|
# Run all code generation scripts, and verify that the current output is
|
||||||
# already checked into git.
|
# already checked into git.
|
||||||
- run: python crates/ruff_python_ast/generate.py
|
- run: python crates/ruff_python_ast/generate.py
|
||||||
@@ -482,24 +409,24 @@ jobs:
|
|||||||
- determine_changes
|
- determine_changes
|
||||||
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
||||||
# Ecosystem check needs linter and/or formatter changes.
|
# Ecosystem check needs linter and/or formatter changes.
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
|
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
- uses: actions/download-artifact@v4
|
||||||
name: Download comparison Ruff binary
|
name: Download comparison Ruff binary
|
||||||
id: ruff-target
|
id: ruff-target
|
||||||
with:
|
with:
|
||||||
name: ruff
|
name: ruff
|
||||||
path: target/debug
|
path: target/debug
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
- uses: dawidd6/action-download-artifact@v7
|
||||||
name: Download baseline Ruff binary
|
name: Download baseline Ruff binary
|
||||||
with:
|
with:
|
||||||
name: ruff
|
name: ruff
|
||||||
@@ -587,13 +514,13 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo ${{ github.event.number }} > pr-number
|
echo ${{ github.event.number }} > pr-number
|
||||||
|
|
||||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
- uses: actions/upload-artifact@v4
|
||||||
name: Upload PR Number
|
name: Upload PR Number
|
||||||
with:
|
with:
|
||||||
name: pr-number
|
name: pr-number
|
||||||
path: pr-number
|
path: pr-number
|
||||||
|
|
||||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
- uses: actions/upload-artifact@v4
|
||||||
name: Upload Results
|
name: Upload Results
|
||||||
with:
|
with:
|
||||||
name: ecosystem-result
|
name: ecosystem-result
|
||||||
@@ -605,7 +532,7 @@ jobs:
|
|||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: cargo-bins/cargo-binstall@main
|
- uses: cargo-bins/cargo-binstall@main
|
||||||
@@ -616,20 +543,19 @@ jobs:
|
|||||||
name: "python package"
|
name: "python package"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
architecture: x64
|
architecture: x64
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Prep README.md"
|
- name: "Prep README.md"
|
||||||
run: python scripts/transform_readme.py --target pypi
|
run: python scripts/transform_readme.py --target pypi
|
||||||
- name: "Build wheels"
|
- name: "Build wheels"
|
||||||
uses: PyO3/maturin-action@22fe573c6ed0c03ab9b84e631cbfa49bddf6e20e # v1
|
uses: PyO3/maturin-action@v1
|
||||||
with:
|
with:
|
||||||
args: --out dist
|
args: --out dist
|
||||||
- name: "Test wheel"
|
- name: "Test wheel"
|
||||||
@@ -645,19 +571,19 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Install pre-commit"
|
- name: "Install pre-commit"
|
||||||
run: pip install pre-commit
|
run: pip install pre-commit
|
||||||
- name: "Cache pre-commit"
|
- name: "Cache pre-commit"
|
||||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: ~/.cache/pre-commit
|
path: ~/.cache/pre-commit
|
||||||
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
@@ -679,22 +605,22 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.13"
|
python-version: "3.13"
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Add SSH key"
|
- name: "Add SSH key"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
uses: webfactory/ssh-agent@dc588b651fe13675774614f8e6a936a468676387 # v0.9.0
|
uses: webfactory/ssh-agent@v0.9.0
|
||||||
with:
|
with:
|
||||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: Install uv
|
- name: Install uv
|
||||||
uses: astral-sh/setup-uv@22695119d769bdb6f7032ad67b9bca0ef8c4a174 # v5
|
uses: astral-sh/setup-uv@v5
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: "Install Insiders dependencies"
|
- name: "Install Insiders dependencies"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
run: uv pip install -r docs/requirements-insiders.txt --system
|
run: uv pip install -r docs/requirements-insiders.txt --system
|
||||||
@@ -718,15 +644,16 @@ jobs:
|
|||||||
name: "formatter instabilities and black similarity"
|
name: "formatter instabilities and black similarity"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
|
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
|
- name: "Cache rust"
|
||||||
|
uses: Swatinem/rust-cache@v2
|
||||||
- name: "Run checks"
|
- name: "Run checks"
|
||||||
run: scripts/formatter_ecosystem_checks.sh
|
run: scripts/formatter_ecosystem_checks.sh
|
||||||
- name: "Github step summary"
|
- name: "Github step summary"
|
||||||
@@ -741,23 +668,23 @@ jobs:
|
|||||||
needs:
|
needs:
|
||||||
- cargo-test-linux
|
- cargo-test-linux
|
||||||
- determine_changes
|
- determine_changes
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: extractions/setup-just@dd310ad5a97d8e7b41793f8ef055398d51ad4de6 # v2
|
- uses: extractions/setup-just@v2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
name: "Download ruff-lsp source"
|
name: "Download ruff-lsp source"
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
repository: "astral-sh/ruff-lsp"
|
repository: "astral-sh/ruff-lsp"
|
||||||
|
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.PYTHON_VERSION }}
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
- uses: actions/download-artifact@v4
|
||||||
name: Download development ruff binary
|
name: Download development ruff binary
|
||||||
id: ruff-target
|
id: ruff-target
|
||||||
with:
|
with:
|
||||||
@@ -780,65 +707,32 @@ jobs:
|
|||||||
|
|
||||||
just test
|
just test
|
||||||
|
|
||||||
check-playground:
|
|
||||||
name: "check playground"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 5
|
|
||||||
needs:
|
|
||||||
- determine_changes
|
|
||||||
if: ${{ (needs.determine_changes.outputs.playground == 'true') }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
|
||||||
with:
|
|
||||||
persist-credentials: false
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup target add wasm32-unknown-unknown
|
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
|
||||||
with:
|
|
||||||
node-version: 22
|
|
||||||
cache: "npm"
|
|
||||||
cache-dependency-path: playground/package-lock.json
|
|
||||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
|
||||||
- name: "Install Node dependencies"
|
|
||||||
run: npm ci
|
|
||||||
working-directory: playground
|
|
||||||
- name: "Build playgrounds"
|
|
||||||
run: npm run dev:wasm
|
|
||||||
working-directory: playground
|
|
||||||
- name: "Run TypeScript checks"
|
|
||||||
run: npm run check
|
|
||||||
working-directory: playground
|
|
||||||
- name: "Check formatting"
|
|
||||||
run: npm run fmt:check
|
|
||||||
working-directory: playground
|
|
||||||
|
|
||||||
benchmarks:
|
benchmarks:
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-22.04
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout Branch"
|
- name: "Checkout Branch"
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
|
|
||||||
- name: "Install codspeed"
|
- name: "Install codspeed"
|
||||||
uses: taiki-e/install-action@914ac1e29db2d22aef69891f032778d9adc3990d # v2
|
uses: taiki-e/install-action@v2
|
||||||
with:
|
with:
|
||||||
tool: cargo-codspeed
|
tool: cargo-codspeed
|
||||||
|
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: "Build benchmarks"
|
- name: "Build benchmarks"
|
||||||
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
||||||
|
|
||||||
- name: "Run benchmarks"
|
- name: "Run benchmarks"
|
||||||
uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3
|
uses: CodSpeedHQ/action@v3
|
||||||
with:
|
with:
|
||||||
run: cargo codspeed run
|
run: cargo codspeed run
|
||||||
token: ${{ secrets.CODSPEED_TOKEN }}
|
token: ${{ secrets.CODSPEED_TOKEN }}
|
||||||
|
|||||||
8
.github/workflows/daily_fuzz.yaml
vendored
8
.github/workflows/daily_fuzz.yaml
vendored
@@ -31,15 +31,15 @@ jobs:
|
|||||||
# Don't run the cron job on forks:
|
# Don't run the cron job on forks:
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- uses: astral-sh/setup-uv@22695119d769bdb6f7032ad67b9bca0ef8c4a174 # v5
|
- uses: astral-sh/setup-uv@v5
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: "Install mold"
|
- name: "Install mold"
|
||||||
uses: rui314/setup-mold@v1
|
uses: rui314/setup-mold@v1
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: Build ruff
|
- name: Build ruff
|
||||||
# A debug build means the script runs slower once it gets started,
|
# A debug build means the script runs slower once it gets started,
|
||||||
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
|
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
|
||||||
@@ -65,7 +65,7 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
issues: write
|
issues: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
- uses: actions/github-script@v7
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
script: |
|
script: |
|
||||||
|
|||||||
7
.github/workflows/daily_property_tests.yaml
vendored
7
.github/workflows/daily_property_tests.yaml
vendored
@@ -30,14 +30,14 @@ jobs:
|
|||||||
# Don't run the cron job on forks:
|
# Don't run the cron job on forks:
|
||||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
- name: "Install mold"
|
- name: "Install mold"
|
||||||
uses: rui314/setup-mold@v1
|
uses: rui314/setup-mold@v1
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- name: Build Red Knot
|
- name: Build Red Knot
|
||||||
# A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
|
# A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
|
||||||
# mode (1.5 min vs 14 min), so the overall time is shorter with a release build.
|
# mode (1.5 min vs 14 min), so the overall time is shorter with a release build.
|
||||||
@@ -47,7 +47,6 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
export QUICKCHECK_TESTS=100000
|
export QUICKCHECK_TESTS=100000
|
||||||
for _ in {1..5}; do
|
for _ in {1..5}; do
|
||||||
cargo test --locked --release --package red_knot_python_semantic -- --ignored list::property_tests
|
|
||||||
cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
|
cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
|
||||||
done
|
done
|
||||||
|
|
||||||
@@ -59,7 +58,7 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
issues: write
|
issues: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
- uses: actions/github-script@v7
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
script: |
|
script: |
|
||||||
|
|||||||
93
.github/workflows/mypy_primer.yaml
vendored
93
.github/workflows/mypy_primer.yaml
vendored
@@ -1,93 +0,0 @@
|
|||||||
name: Run mypy_primer
|
|
||||||
|
|
||||||
permissions: {}
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "crates/red_knot*/**"
|
|
||||||
- "crates/ruff_db"
|
|
||||||
- "crates/ruff_python_ast"
|
|
||||||
- "crates/ruff_python_parser"
|
|
||||||
- ".github/workflows/mypy_primer.yaml"
|
|
||||||
- ".github/workflows/mypy_primer_comment.yaml"
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
env:
|
|
||||||
CARGO_INCREMENTAL: 0
|
|
||||||
CARGO_NET_RETRY: 10
|
|
||||||
CARGO_TERM_COLOR: always
|
|
||||||
RUSTUP_MAX_RETRIES: 10
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
mypy_primer:
|
|
||||||
name: Run mypy_primer
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
timeout-minutes: 20
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
|
||||||
with:
|
|
||||||
path: ruff
|
|
||||||
fetch-depth: 0
|
|
||||||
persist-credentials: false
|
|
||||||
|
|
||||||
- name: Install the latest version of uv
|
|
||||||
uses: astral-sh/setup-uv@22695119d769bdb6f7032ad67b9bca0ef8c4a174 # v5
|
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
|
||||||
with:
|
|
||||||
workspaces: "ruff"
|
|
||||||
- name: Install Rust toolchain
|
|
||||||
run: rustup show
|
|
||||||
|
|
||||||
- name: Install mypy_primer
|
|
||||||
run: |
|
|
||||||
uv tool install "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support"
|
|
||||||
|
|
||||||
- name: Run mypy_primer
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
cd ruff
|
|
||||||
|
|
||||||
echo "new commit"
|
|
||||||
git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
|
|
||||||
|
|
||||||
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
|
|
||||||
git checkout -b base_commit "$MERGE_BASE"
|
|
||||||
echo "base commit"
|
|
||||||
git rev-list --format=%s --max-count=1 base_commit
|
|
||||||
|
|
||||||
cd ..
|
|
||||||
|
|
||||||
# Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
|
|
||||||
uvx mypy_primer \
|
|
||||||
--repo ruff \
|
|
||||||
--type-checker knot \
|
|
||||||
--old base_commit \
|
|
||||||
--new "$GITHUB_SHA" \
|
|
||||||
--project-selector '/(mypy_primer|black|pyp|git-revise|zipp|arrow|isort|itsdangerous|rich|packaging|pybind11|pyinstrument)$' \
|
|
||||||
--output concise \
|
|
||||||
--debug > mypy_primer.diff || [ $? -eq 1 ]
|
|
||||||
|
|
||||||
# Output diff with ANSI color codes
|
|
||||||
cat mypy_primer.diff
|
|
||||||
|
|
||||||
# Remove ANSI color codes before uploading
|
|
||||||
sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff
|
|
||||||
|
|
||||||
echo ${{ github.event.number }} > pr-number
|
|
||||||
|
|
||||||
- name: Upload diff
|
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
|
||||||
with:
|
|
||||||
name: mypy_primer_diff
|
|
||||||
path: mypy_primer.diff
|
|
||||||
|
|
||||||
- name: Upload pr-number
|
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
|
||||||
with:
|
|
||||||
name: pr-number
|
|
||||||
path: pr-number
|
|
||||||
97
.github/workflows/mypy_primer_comment.yaml
vendored
97
.github/workflows/mypy_primer_comment.yaml
vendored
@@ -1,97 +0,0 @@
|
|||||||
name: PR comment (mypy_primer)
|
|
||||||
|
|
||||||
on: # zizmor: ignore[dangerous-triggers]
|
|
||||||
workflow_run:
|
|
||||||
workflows: [Run mypy_primer]
|
|
||||||
types: [completed]
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
workflow_run_id:
|
|
||||||
description: The mypy_primer workflow that triggers the workflow run
|
|
||||||
required: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
comment:
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
permissions:
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
|
||||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
|
||||||
name: Download PR number
|
|
||||||
with:
|
|
||||||
name: pr-number
|
|
||||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
|
||||||
if_no_artifact_found: ignore
|
|
||||||
allow_forks: true
|
|
||||||
|
|
||||||
- name: Parse pull request number
|
|
||||||
id: pr-number
|
|
||||||
run: |
|
|
||||||
if [[ -f pr-number ]]
|
|
||||||
then
|
|
||||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
|
||||||
name: "Download mypy_primer results"
|
|
||||||
id: download-mypy_primer_diff
|
|
||||||
if: steps.pr-number.outputs.pr-number
|
|
||||||
with:
|
|
||||||
name: mypy_primer_diff
|
|
||||||
workflow: mypy_primer.yaml
|
|
||||||
pr: ${{ steps.pr-number.outputs.pr-number }}
|
|
||||||
path: pr/mypy_primer_diff
|
|
||||||
workflow_conclusion: completed
|
|
||||||
if_no_artifact_found: ignore
|
|
||||||
allow_forks: true
|
|
||||||
|
|
||||||
- name: Generate comment content
|
|
||||||
id: generate-comment
|
|
||||||
if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
|
|
||||||
run: |
|
|
||||||
# Guard against malicious mypy_primer results that symlink to a secret
|
|
||||||
# file on this runner
|
|
||||||
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
|
|
||||||
then
|
|
||||||
echo "Error: mypy_primer.diff cannot be a symlink"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Note this identifier is used to find the comment to update on
|
|
||||||
# subsequent runs
|
|
||||||
echo '<!-- generated-comment mypy_primer -->' >> comment.txt
|
|
||||||
|
|
||||||
echo '## `mypy_primer` results' >> comment.txt
|
|
||||||
if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
|
|
||||||
echo '<details>' >> comment.txt
|
|
||||||
echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
|
|
||||||
echo '' >> comment.txt
|
|
||||||
echo '```diff' >> comment.txt
|
|
||||||
cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
|
|
||||||
echo '```' >> comment.txt
|
|
||||||
echo '</details>' >> comment.txt
|
|
||||||
else
|
|
||||||
echo 'No ecosystem changes detected ✅' >> comment.txt
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
|
||||||
cat comment.txt >> "$GITHUB_OUTPUT"
|
|
||||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Find existing comment
|
|
||||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
|
|
||||||
if: steps.generate-comment.outcome == 'success'
|
|
||||||
id: find-comment
|
|
||||||
with:
|
|
||||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
|
||||||
comment-author: "github-actions[bot]"
|
|
||||||
body-includes: "<!-- generated-comment mypy_primer -->"
|
|
||||||
|
|
||||||
- name: Create or update comment
|
|
||||||
if: steps.find-comment.outcome == 'success'
|
|
||||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
|
||||||
with:
|
|
||||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
|
||||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
|
||||||
body-path: comment.txt
|
|
||||||
edit-mode: replace
|
|
||||||
2
.github/workflows/notify-dependents.yml
vendored
2
.github/workflows/notify-dependents.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: "Update pre-commit mirror"
|
- name: "Update pre-commit mirror"
|
||||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
uses: actions/github-script@v7
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
|
||||||
script: |
|
script: |
|
||||||
|
|||||||
8
.github/workflows/pr-comment.yaml
vendored
8
.github/workflows/pr-comment.yaml
vendored
@@ -16,7 +16,7 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
steps:
|
steps:
|
||||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
- uses: dawidd6/action-download-artifact@v7
|
||||||
name: Download pull request number
|
name: Download pull request number
|
||||||
with:
|
with:
|
||||||
name: pr-number
|
name: pr-number
|
||||||
@@ -32,7 +32,7 @@ jobs:
|
|||||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
|
- uses: dawidd6/action-download-artifact@v7
|
||||||
name: "Download ecosystem results"
|
name: "Download ecosystem results"
|
||||||
id: download-ecosystem-result
|
id: download-ecosystem-result
|
||||||
if: steps.pr-number.outputs.pr-number
|
if: steps.pr-number.outputs.pr-number
|
||||||
@@ -70,7 +70,7 @@ jobs:
|
|||||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
- name: Find existing comment
|
- name: Find existing comment
|
||||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
|
uses: peter-evans/find-comment@v3
|
||||||
if: steps.generate-comment.outcome == 'success'
|
if: steps.generate-comment.outcome == 'success'
|
||||||
id: find-comment
|
id: find-comment
|
||||||
with:
|
with:
|
||||||
@@ -80,7 +80,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Create or update comment
|
- name: Create or update comment
|
||||||
if: steps.find-comment.outcome == 'success'
|
if: steps.find-comment.outcome == 'success'
|
||||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
uses: peter-evans/create-or-update-comment@v4
|
||||||
with:
|
with:
|
||||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||||
|
|||||||
8
.github/workflows/publish-docs.yml
vendored
8
.github/workflows/publish-docs.yml
vendored
@@ -23,12 +23,12 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.ref }}
|
ref: ${{ inputs.ref }}
|
||||||
persist-credentials: true
|
persist-credentials: true
|
||||||
|
|
||||||
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: 3.12
|
python-version: 3.12
|
||||||
|
|
||||||
@@ -61,14 +61,14 @@ jobs:
|
|||||||
|
|
||||||
- name: "Add SSH key"
|
- name: "Add SSH key"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
uses: webfactory/ssh-agent@dc588b651fe13675774614f8e6a936a468676387 # v0.9.0
|
uses: webfactory/ssh-agent@v0.9.0
|
||||||
with:
|
with:
|
||||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||||
|
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup show
|
run: rustup show
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: "Install Insiders dependencies"
|
- name: "Install Insiders dependencies"
|
||||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||||
|
|||||||
58
.github/workflows/publish-knot-playground.yml
vendored
58
.github/workflows/publish-knot-playground.yml
vendored
@@ -1,58 +0,0 @@
|
|||||||
# Publish the Red Knot playground.
|
|
||||||
name: "[Knot Playground] Release"
|
|
||||||
|
|
||||||
permissions: {}
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
paths:
|
|
||||||
- "crates/red_knot*/**"
|
|
||||||
- "crates/ruff_db/**"
|
|
||||||
- "crates/ruff_python_ast/**"
|
|
||||||
- "crates/ruff_python_parser/**"
|
|
||||||
- "playground/**"
|
|
||||||
- ".github/workflows/publish-knot-playground.yml"
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref_name }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
env:
|
|
||||||
CARGO_INCREMENTAL: 0
|
|
||||||
CARGO_NET_RETRY: 10
|
|
||||||
CARGO_TERM_COLOR: always
|
|
||||||
RUSTUP_MAX_RETRIES: 10
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
publish:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
|
||||||
with:
|
|
||||||
persist-credentials: false
|
|
||||||
- name: "Install Rust toolchain"
|
|
||||||
run: rustup target add wasm32-unknown-unknown
|
|
||||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
|
||||||
with:
|
|
||||||
node-version: 22
|
|
||||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
|
||||||
- name: "Install Node dependencies"
|
|
||||||
run: npm ci
|
|
||||||
working-directory: playground
|
|
||||||
- name: "Run TypeScript checks"
|
|
||||||
run: npm run check
|
|
||||||
working-directory: playground
|
|
||||||
- name: "Build Knot playground"
|
|
||||||
run: npm run build --workspace knot-playground
|
|
||||||
working-directory: playground
|
|
||||||
- name: "Deploy to Cloudflare Pages"
|
|
||||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
|
||||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
|
||||||
with:
|
|
||||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
|
||||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
|
||||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
|
||||||
command: pages deploy playground/knot/dist --project-name=knot-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
|
||||||
19
.github/workflows/publish-playground.yml
vendored
19
.github/workflows/publish-playground.yml
vendored
@@ -24,31 +24,34 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup target add wasm32-unknown-unknown
|
run: rustup target add wasm32-unknown-unknown
|
||||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: 22
|
node-version: 20
|
||||||
cache: "npm"
|
cache: "npm"
|
||||||
cache-dependency-path: playground/package-lock.json
|
cache-dependency-path: playground/package-lock.json
|
||||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
- uses: jetli/wasm-pack-action@v0.4.0
|
||||||
|
- uses: jetli/wasm-bindgen-action@v0.2.0
|
||||||
|
- name: "Run wasm-pack"
|
||||||
|
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
|
||||||
- name: "Install Node dependencies"
|
- name: "Install Node dependencies"
|
||||||
run: npm ci
|
run: npm ci
|
||||||
working-directory: playground
|
working-directory: playground
|
||||||
- name: "Run TypeScript checks"
|
- name: "Run TypeScript checks"
|
||||||
run: npm run check
|
run: npm run check
|
||||||
working-directory: playground
|
working-directory: playground
|
||||||
- name: "Build Ruff playground"
|
- name: "Build JavaScript bundle"
|
||||||
run: npm run build --workspace ruff-playground
|
run: npm run build
|
||||||
working-directory: playground
|
working-directory: playground
|
||||||
- name: "Deploy to Cloudflare Pages"
|
- name: "Deploy to Cloudflare Pages"
|
||||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||||
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
|
uses: cloudflare/wrangler-action@v3.13.1
|
||||||
with:
|
with:
|
||||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||||
command: pages deploy playground/ruff/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||||
|
|||||||
4
.github/workflows/publish-pypi.yml
vendored
4
.github/workflows/publish-pypi.yml
vendored
@@ -22,8 +22,8 @@ jobs:
|
|||||||
id-token: write
|
id-token: write
|
||||||
steps:
|
steps:
|
||||||
- name: "Install uv"
|
- name: "Install uv"
|
||||||
uses: astral-sh/setup-uv@22695119d769bdb6f7032ad67b9bca0ef8c4a174 # v5
|
uses: astral-sh/setup-uv@v5
|
||||||
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
- uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
pattern: wheels-*
|
pattern: wheels-*
|
||||||
path: wheels
|
path: wheels
|
||||||
|
|||||||
10
.github/workflows/publish-wasm.yml
vendored
10
.github/workflows/publish-wasm.yml
vendored
@@ -29,15 +29,13 @@ jobs:
|
|||||||
target: [web, bundler, nodejs]
|
target: [web, bundler, nodejs]
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
- name: "Install Rust toolchain"
|
- name: "Install Rust toolchain"
|
||||||
run: rustup target add wasm32-unknown-unknown
|
run: rustup target add wasm32-unknown-unknown
|
||||||
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
|
- uses: jetli/wasm-pack-action@v0.4.0
|
||||||
with:
|
- uses: jetli/wasm-bindgen-action@v0.2.0
|
||||||
version: v0.13.1
|
|
||||||
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
|
|
||||||
- name: "Run wasm-pack build"
|
- name: "Run wasm-pack build"
|
||||||
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
|
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
|
||||||
- name: "Rename generated package"
|
- name: "Rename generated package"
|
||||||
@@ -45,7 +43,7 @@ jobs:
|
|||||||
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
|
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
|
||||||
mv /tmp/package.json crates/ruff_wasm/pkg
|
mv /tmp/package.json crates/ruff_wasm/pkg
|
||||||
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
|
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
|
||||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: 20
|
node-version: 20
|
||||||
registry-url: "https://registry.npmjs.org"
|
registry-url: "https://registry.npmjs.org"
|
||||||
|
|||||||
34
.github/workflows/release.yml
vendored
34
.github/workflows/release.yml
vendored
@@ -50,7 +50,7 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
# Run 'dist plan' (or host) to determine what tasks we need to do
|
# Run 'dist plan' (or host) to determine what tasks we need to do
|
||||||
plan:
|
plan:
|
||||||
runs-on: "depot-ubuntu-latest-4"
|
runs-on: "ubuntu-20.04"
|
||||||
outputs:
|
outputs:
|
||||||
val: ${{ steps.plan.outputs.manifest }}
|
val: ${{ steps.plan.outputs.manifest }}
|
||||||
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
||||||
@@ -59,7 +59,7 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
- name: Install dist
|
- name: Install dist
|
||||||
@@ -68,7 +68,7 @@ jobs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
|
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
|
||||||
- name: Cache dist
|
- name: Cache dist
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: cargo-dist-cache
|
name: cargo-dist-cache
|
||||||
path: ~/.cargo/bin/dist
|
path: ~/.cargo/bin/dist
|
||||||
@@ -84,7 +84,7 @@ jobs:
|
|||||||
cat plan-dist-manifest.json
|
cat plan-dist-manifest.json
|
||||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||||
- name: "Upload dist-manifest.json"
|
- name: "Upload dist-manifest.json"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-plan-dist-manifest
|
name: artifacts-plan-dist-manifest
|
||||||
path: plan-dist-manifest.json
|
path: plan-dist-manifest.json
|
||||||
@@ -116,23 +116,23 @@ jobs:
|
|||||||
- plan
|
- plan
|
||||||
- custom-build-binaries
|
- custom-build-binaries
|
||||||
- custom-build-docker
|
- custom-build-docker
|
||||||
runs-on: "depot-ubuntu-latest-4"
|
runs-on: "ubuntu-20.04"
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
- name: Install cached dist
|
- name: Install cached dist
|
||||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: cargo-dist-cache
|
name: cargo-dist-cache
|
||||||
path: ~/.cargo/bin/
|
path: ~/.cargo/bin/
|
||||||
- run: chmod +x ~/.cargo/bin/dist
|
- run: chmod +x ~/.cargo/bin/dist
|
||||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||||
- name: Fetch local artifacts
|
- name: Fetch local artifacts
|
||||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
pattern: artifacts-*
|
pattern: artifacts-*
|
||||||
path: target/distrib/
|
path: target/distrib/
|
||||||
@@ -150,7 +150,7 @@ jobs:
|
|||||||
|
|
||||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||||
- name: "Upload artifacts"
|
- name: "Upload artifacts"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: artifacts-build-global
|
name: artifacts-build-global
|
||||||
path: |
|
path: |
|
||||||
@@ -167,22 +167,22 @@ jobs:
|
|||||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
runs-on: "depot-ubuntu-latest-4"
|
runs-on: "ubuntu-20.04"
|
||||||
outputs:
|
outputs:
|
||||||
val: ${{ steps.host.outputs.manifest }}
|
val: ${{ steps.host.outputs.manifest }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
- name: Install cached dist
|
- name: Install cached dist
|
||||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: cargo-dist-cache
|
name: cargo-dist-cache
|
||||||
path: ~/.cargo/bin/
|
path: ~/.cargo/bin/
|
||||||
- run: chmod +x ~/.cargo/bin/dist
|
- run: chmod +x ~/.cargo/bin/dist
|
||||||
# Fetch artifacts from scratch-storage
|
# Fetch artifacts from scratch-storage
|
||||||
- name: Fetch artifacts
|
- name: Fetch artifacts
|
||||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
pattern: artifacts-*
|
pattern: artifacts-*
|
||||||
path: target/distrib/
|
path: target/distrib/
|
||||||
@@ -196,7 +196,7 @@ jobs:
|
|||||||
cat dist-manifest.json
|
cat dist-manifest.json
|
||||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||||
- name: "Upload dist-manifest.json"
|
- name: "Upload dist-manifest.json"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
# Overwrite the previous copy
|
# Overwrite the previous copy
|
||||||
name: artifacts-dist-manifest
|
name: artifacts-dist-manifest
|
||||||
@@ -242,16 +242,16 @@ jobs:
|
|||||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
# still allowing individual publish jobs to skip themselves (for prereleases).
|
||||||
# "host" however must run to completion, no skipping allowed!
|
# "host" however must run to completion, no skipping allowed!
|
||||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
|
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
|
||||||
runs-on: "depot-ubuntu-latest-4"
|
runs-on: "ubuntu-20.04"
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
# Create a GitHub Release while uploading all files to it
|
# Create a GitHub Release while uploading all files to it
|
||||||
- name: "Download GitHub Artifacts"
|
- name: "Download GitHub Artifacts"
|
||||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
pattern: artifacts-*
|
pattern: artifacts-*
|
||||||
path: artifacts
|
path: artifacts
|
||||||
|
|||||||
6
.github/workflows/sync_typeshed.yaml
vendored
6
.github/workflows/sync_typeshed.yaml
vendored
@@ -21,12 +21,12 @@ jobs:
|
|||||||
contents: write
|
contents: write
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
name: Checkout Ruff
|
name: Checkout Ruff
|
||||||
with:
|
with:
|
||||||
path: ruff
|
path: ruff
|
||||||
persist-credentials: true
|
persist-credentials: true
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
- uses: actions/checkout@v4
|
||||||
name: Checkout typeshed
|
name: Checkout typeshed
|
||||||
with:
|
with:
|
||||||
repository: python/typeshed
|
repository: python/typeshed
|
||||||
@@ -70,7 +70,7 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
issues: write
|
issues: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
- uses: actions/github-script@v7
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
script: |
|
script: |
|
||||||
|
|||||||
7
.github/zizmor.yml
vendored
7
.github/zizmor.yml
vendored
@@ -10,10 +10,3 @@ rules:
|
|||||||
ignore:
|
ignore:
|
||||||
- build-docker.yml
|
- build-docker.yml
|
||||||
- publish-playground.yml
|
- publish-playground.yml
|
||||||
excessive-permissions:
|
|
||||||
# it's hard to test what the impact of removing these ignores would be
|
|
||||||
# without actually running the release workflow...
|
|
||||||
ignore:
|
|
||||||
- build-docker.yml
|
|
||||||
- publish-playground.yml
|
|
||||||
- publish-docs.yml
|
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -29,10 +29,6 @@ tracing.folded
|
|||||||
tracing-flamechart.svg
|
tracing-flamechart.svg
|
||||||
tracing-flamegraph.svg
|
tracing-flamegraph.svg
|
||||||
|
|
||||||
# insta
|
|
||||||
*.rs.pending-snap
|
|
||||||
|
|
||||||
|
|
||||||
###
|
###
|
||||||
# Rust.gitignore
|
# Rust.gitignore
|
||||||
###
|
###
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ exclude: |
|
|||||||
.github/workflows/release.yml|
|
.github/workflows/release.yml|
|
||||||
crates/red_knot_vendored/vendor/.*|
|
crates/red_knot_vendored/vendor/.*|
|
||||||
crates/red_knot_project/resources/.*|
|
crates/red_knot_project/resources/.*|
|
||||||
crates/ruff_benchmark/resources/.*|
|
|
||||||
crates/ruff_linter/resources/.*|
|
crates/ruff_linter/resources/.*|
|
||||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||||
crates/ruff_notebook/resources/.*|
|
crates/ruff_notebook/resources/.*|
|
||||||
@@ -19,12 +18,12 @@ exclude: |
|
|||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/abravalheri/validate-pyproject
|
- repo: https://github.com/abravalheri/validate-pyproject
|
||||||
rev: v0.24
|
rev: v0.23
|
||||||
hooks:
|
hooks:
|
||||||
- id: validate-pyproject
|
- id: validate-pyproject
|
||||||
|
|
||||||
- repo: https://github.com/executablebooks/mdformat
|
- repo: https://github.com/executablebooks/mdformat
|
||||||
rev: 0.7.22
|
rev: 0.7.21
|
||||||
hooks:
|
hooks:
|
||||||
- id: mdformat
|
- id: mdformat
|
||||||
additional_dependencies:
|
additional_dependencies:
|
||||||
@@ -37,7 +36,7 @@ repos:
|
|||||||
)$
|
)$
|
||||||
|
|
||||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||||
rev: v0.44.0
|
rev: v0.43.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: markdownlint-fix
|
- id: markdownlint-fix
|
||||||
exclude: |
|
exclude: |
|
||||||
@@ -57,10 +56,10 @@ repos:
|
|||||||
.*?invalid(_.+)*_syntax\.md
|
.*?invalid(_.+)*_syntax\.md
|
||||||
)$
|
)$
|
||||||
additional_dependencies:
|
additional_dependencies:
|
||||||
- black==25.1.0
|
- black==24.10.0
|
||||||
|
|
||||||
- repo: https://github.com/crate-ci/typos
|
- repo: https://github.com/crate-ci/typos
|
||||||
rev: v1.30.2
|
rev: v1.29.4
|
||||||
hooks:
|
hooks:
|
||||||
- id: typos
|
- id: typos
|
||||||
|
|
||||||
@@ -74,7 +73,7 @@ repos:
|
|||||||
pass_filenames: false # This makes it a lot faster
|
pass_filenames: false # This makes it a lot faster
|
||||||
|
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.11.0
|
rev: v0.9.2
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
- id: ruff
|
- id: ruff
|
||||||
@@ -84,7 +83,7 @@ repos:
|
|||||||
|
|
||||||
# Prettier
|
# Prettier
|
||||||
- repo: https://github.com/rbubley/mirrors-prettier
|
- repo: https://github.com/rbubley/mirrors-prettier
|
||||||
rev: v3.5.3
|
rev: v3.4.2
|
||||||
hooks:
|
hooks:
|
||||||
- id: prettier
|
- id: prettier
|
||||||
types: [yaml]
|
types: [yaml]
|
||||||
@@ -92,12 +91,12 @@ repos:
|
|||||||
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
||||||
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
||||||
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
||||||
rev: v1.5.1
|
rev: v1.1.1
|
||||||
hooks:
|
hooks:
|
||||||
- id: zizmor
|
- id: zizmor
|
||||||
|
|
||||||
- repo: https://github.com/python-jsonschema/check-jsonschema
|
- repo: https://github.com/python-jsonschema/check-jsonschema
|
||||||
rev: 0.31.3
|
rev: 0.31.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: check-github-workflows
|
- id: check-github-workflows
|
||||||
|
|
||||||
|
|||||||
@@ -1,55 +1,5 @@
|
|||||||
# Breaking Changes
|
# Breaking Changes
|
||||||
|
|
||||||
## 0.11.0
|
|
||||||
|
|
||||||
This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.
|
|
||||||
|
|
||||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
|
||||||
|
|
||||||
In previous versions of Ruff, you could specify your Python version with:
|
|
||||||
|
|
||||||
- The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a pyproject.toml file.
|
|
||||||
- The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.
|
|
||||||
|
|
||||||
These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.
|
|
||||||
|
|
||||||
In v0.10, config discovery has been updated to address this issue:
|
|
||||||
|
|
||||||
- If Ruff finds a `ruff.toml` file without a `target-version`, it will check
|
|
||||||
for a `pyproject.toml` file in the same directory and respect its
|
|
||||||
`requires-python` version, even if it does not contain a `[tool.ruff]`
|
|
||||||
section.
|
|
||||||
- If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
|
|
||||||
- If there is no config file (`ruff.toml`or `pyproject.toml` with a
|
|
||||||
`[tool.ruff]` section) in the directory of the file being checked, Ruff will
|
|
||||||
search for the closest `pyproject.toml` in the parent directories and use its
|
|
||||||
`requires-python` setting.
|
|
||||||
|
|
||||||
## 0.10.0
|
|
||||||
|
|
||||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
|
||||||
|
|
||||||
Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
|
|
||||||
You can find a description of this change in the 0.11.0 section.
|
|
||||||
|
|
||||||
- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))
|
|
||||||
|
|
||||||
Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead.
|
|
||||||
|
|
||||||
- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))
|
|
||||||
|
|
||||||
The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [_Error suppression_](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.
|
|
||||||
|
|
||||||
- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))
|
|
||||||
|
|
||||||
This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.
|
|
||||||
|
|
||||||
- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))
|
|
||||||
|
|
||||||
Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.
|
|
||||||
|
|
||||||
- **\[`unsafe-markup-use`\]: `RUF035` has been recoded to `S704`** ([#15957](https://github.com/astral-sh/ruff/pull/15957))
|
|
||||||
|
|
||||||
## 0.9.0
|
## 0.9.0
|
||||||
|
|
||||||
Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.
|
Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.
|
||||||
@@ -259,8 +209,8 @@ This change only affects those using Ruff under its default rule set. Users that
|
|||||||
|
|
||||||
### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))
|
### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))
|
||||||
|
|
||||||
Previously, Ruff supported non-standards-compliant emoji identifiers such as `📦 = 1`.
|
Previously, Ruff supported the non-standard compliant emoji identifiers e.g. `📦 = 1`.
|
||||||
We decided to remove this non-standard language extension. Ruff now reports syntax errors for invalid emoji identifiers in your code, the same as CPython.
|
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.
|
||||||
|
|
||||||
### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))
|
### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))
|
||||||
|
|
||||||
|
|||||||
510
CHANGELOG.md
510
CHANGELOG.md
@@ -1,505 +1,5 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
## 0.11.2
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- [syntax-errors] Fix false-positive syntax errors emitted for annotations on variadic parameters before Python 3.11 ([#16878](https://github.com/astral-sh/ruff/pull/16878))
|
|
||||||
|
|
||||||
## 0.11.1
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`airflow`\] Add `chain`, `chain_linear` and `cross_downstream` for `AIR302` ([#16647](https://github.com/astral-sh/ruff/pull/16647))
|
|
||||||
- [syntax-errors] Improve error message and range for pre-PEP-614 decorator syntax errors ([#16581](https://github.com/astral-sh/ruff/pull/16581))
|
|
||||||
- [syntax-errors] PEP 701 f-strings before Python 3.12 ([#16543](https://github.com/astral-sh/ruff/pull/16543))
|
|
||||||
- [syntax-errors] Parenthesized context managers before Python 3.9 ([#16523](https://github.com/astral-sh/ruff/pull/16523))
|
|
||||||
- [syntax-errors] Star annotations before Python 3.11 ([#16545](https://github.com/astral-sh/ruff/pull/16545))
|
|
||||||
- [syntax-errors] Star expression in index before Python 3.11 ([#16544](https://github.com/astral-sh/ruff/pull/16544))
|
|
||||||
- [syntax-errors] Unparenthesized assignment expressions in sets and indexes ([#16404](https://github.com/astral-sh/ruff/pull/16404))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Server: Allow `FixAll` action in presence of version-specific syntax errors ([#16848](https://github.com/astral-sh/ruff/pull/16848))
|
|
||||||
- \[`flake8-bandit`\] Allow raw strings in `suspicious-mark-safe-usage` (`S308`) #16702 ([#16770](https://github.com/astral-sh/ruff/pull/16770))
|
|
||||||
- \[`refurb`\] Avoid panicking `unwrap` in `verbose-decimal-constructor` (`FURB157`) ([#16777](https://github.com/astral-sh/ruff/pull/16777))
|
|
||||||
- \[`refurb`\] Fix starred expressions fix (`FURB161`) ([#16550](https://github.com/astral-sh/ruff/pull/16550))
|
|
||||||
- Fix `--statistics` reporting for unsafe fixes ([#16756](https://github.com/astral-sh/ruff/pull/16756))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- \[`flake8-executables`\] Allow `uv run` in shebang line for `shebang-missing-python` (`EXE003`) ([#16849](https://github.com/astral-sh/ruff/pull/16849),[#16855](https://github.com/astral-sh/ruff/pull/16855))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Add `--exit-non-zero-on-format` ([#16009](https://github.com/astral-sh/ruff/pull/16009))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Update Ruff tutorial to avoid non-existent fix in `__init__.py` ([#16818](https://github.com/astral-sh/ruff/pull/16818))
|
|
||||||
- \[`flake8-gettext`\] Swap `format-` and `printf-in-get-text-func-call` examples (`INT002`, `INT003`) ([#16769](https://github.com/astral-sh/ruff/pull/16769))
|
|
||||||
|
|
||||||
## 0.11.0
|
|
||||||
|
|
||||||
This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.
|
|
||||||
|
|
||||||
### Breaking changes
|
|
||||||
|
|
||||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
|
||||||
|
|
||||||
In previous versions of Ruff, you could specify your Python version with:
|
|
||||||
|
|
||||||
- The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a pyproject.toml file.
|
|
||||||
- The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.
|
|
||||||
|
|
||||||
These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.
|
|
||||||
|
|
||||||
In v0.10, config discovery has been updated to address this issue:
|
|
||||||
|
|
||||||
- If Ruff finds a `ruff.toml` file without a `target-version`, it will check
|
|
||||||
for a `pyproject.toml` file in the same directory and respect its
|
|
||||||
`requires-python` version, even if it does not contain a `[tool.ruff]`
|
|
||||||
section.
|
|
||||||
- If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
|
|
||||||
- If there is no config file (`ruff.toml` or `pyproject.toml` with a
|
|
||||||
`[tool.ruff]` section) in the directory of the file being checked, Ruff will
|
|
||||||
search for the closest `pyproject.toml` in the parent directories and use its
|
|
||||||
`requires-python` setting.
|
|
||||||
|
|
||||||
### Stabilization
|
|
||||||
|
|
||||||
The following behaviors have been stabilized:
|
|
||||||
|
|
||||||
- [`blanket-noqa`](https://docs.astral.sh/ruff/rules/blanket-noqa/) (`PGH004`): Also detect blanket file-level noqa comments (and not just line level comments).
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- [syntax-errors] Tuple unpacking in `for` statement iterator clause before Python 3.9 ([#16558](https://github.com/astral-sh/ruff/pull/16558))
|
|
||||||
|
|
||||||
## 0.10.0
|
|
||||||
|
|
||||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.10.0) for a migration guide and overview of the changes!
|
|
||||||
|
|
||||||
### Breaking changes
|
|
||||||
|
|
||||||
See also, the "Remapped rules" section which may result in disabled rules.
|
|
||||||
|
|
||||||
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
|
|
||||||
|
|
||||||
Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
|
|
||||||
You can find a description of this change in the 0.11.0 section.
|
|
||||||
|
|
||||||
- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))
|
|
||||||
|
|
||||||
Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead.
|
|
||||||
|
|
||||||
- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))
|
|
||||||
|
|
||||||
The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [*Error suppression*](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.
|
|
||||||
|
|
||||||
- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))
|
|
||||||
|
|
||||||
This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.
|
|
||||||
|
|
||||||
- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))
|
|
||||||
|
|
||||||
Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.
|
|
||||||
|
|
||||||
### Deprecated Rules
|
|
||||||
|
|
||||||
The following rules have been deprecated:
|
|
||||||
|
|
||||||
- [`non-pep604-isinstance`](https://docs.astral.sh/ruff/rules/non-pep604-isinstance/) (`UP038`)
|
|
||||||
- [`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/) (`S320`)
|
|
||||||
|
|
||||||
### Remapped rules
|
|
||||||
|
|
||||||
The following rules have been remapped to new rule codes:
|
|
||||||
|
|
||||||
- \[`unsafe-markup-use`\]: `RUF035` to `S704`
|
|
||||||
|
|
||||||
### Stabilization
|
|
||||||
|
|
||||||
The following rules have been stabilized and are no longer in preview:
|
|
||||||
|
|
||||||
- [`batched-without-explicit-strict`](https://docs.astral.sh/ruff/rules/batched-without-explicit-strict) (`B911`)
|
|
||||||
- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable) (`C420`)
|
|
||||||
- [`datetime-min-max`](https://docs.astral.sh/ruff/rules/datetime-min-max) (`DTZ901`)
|
|
||||||
- [`fast-api-unused-path-parameter`](https://docs.astral.sh/ruff/rules/fast-api-unused-path-parameter) (`FAST003`)
|
|
||||||
- [`root-logger-call`](https://docs.astral.sh/ruff/rules/root-logger-call) (`LOG015`)
|
|
||||||
- [`len-test`](https://docs.astral.sh/ruff/rules/len-test) (`PLC1802`)
|
|
||||||
- [`shallow-copy-environ`](https://docs.astral.sh/ruff/rules/shallow-copy-environ) (`PLW1507`)
|
|
||||||
- [`os-listdir`](https://docs.astral.sh/ruff/rules/os-listdir) (`PTH208`)
|
|
||||||
- [`invalid-pathlib-with-suffix`](https://docs.astral.sh/ruff/rules/invalid-pathlib-with-suffix) (`PTH210`)
|
|
||||||
- [`invalid-assert-message-literal-argument`](https://docs.astral.sh/ruff/rules/invalid-assert-message-literal-argument) (`RUF040`)
|
|
||||||
- [`unnecessary-nested-literal`](https://docs.astral.sh/ruff/rules/unnecessary-nested-literal) (`RUF041`)
|
|
||||||
- [`unnecessary-cast-to-int`](https://docs.astral.sh/ruff/rules/unnecessary-cast-to-int) (`RUF046`)
|
|
||||||
- [`map-int-version-parsing`](https://docs.astral.sh/ruff/rules/map-int-version-parsing) (`RUF048`)
|
|
||||||
- [`if-key-in-dict-del`](https://docs.astral.sh/ruff/rules/if-key-in-dict-del) (`RUF051`)
|
|
||||||
- [`unsafe-markup-use`](https://docs.astral.sh/ruff/rules/unsafe-markup-use) (`S704`). This rule has also been renamed from `RUF035`.
|
|
||||||
- [`split-static-string`](https://docs.astral.sh/ruff/rules/split-static-string) (`SIM905`)
|
|
||||||
- [`runtime-cast-value`](https://docs.astral.sh/ruff/rules/runtime-cast-value) (`TC006`)
|
|
||||||
- [`unquoted-type-alias`](https://docs.astral.sh/ruff/rules/unquoted-type-alias) (`TC007`)
|
|
||||||
- [`non-pep646-unpack`](https://docs.astral.sh/ruff/rules/non-pep646-unpack) (`UP044`)
|
|
||||||
|
|
||||||
The following behaviors have been stabilized:
|
|
||||||
|
|
||||||
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`) [`invalid-first-argument-name-for-class-method`](https://docs.astral.sh/ruff/rules/invalid-first-argument-name-for-class-method/) (`N804`): `__new__` methods are now no longer flagged by `invalid-first-argument-name-for-class-method` (`N804`) but instead by `bad-staticmethod-argument` (`PLW0211`)
|
|
||||||
- [`bad-str-strip-call`](https://docs.astral.sh/ruff/rules/bad-str-strip-call/) (`PLE1310`): The rule now applies to objects which are known to have type `str` or `bytes`.
|
|
||||||
- [`custom-type-var-for-self`](https://docs.astral.sh/ruff/rules/custom-type-var-for-self/) (`PYI019`): More accurate detection of custom `TypeVars` replaceable by `Self`. The range of the diagnostic is now the full function header rather than just the return annotation.
|
|
||||||
- [`invalid-argument-name`](https://docs.astral.sh/ruff/rules/invalid-argument-name/) (`N803`): Ignore argument names of functions decorated with `typing.override`
|
|
||||||
- [`invalid-envvar-default`](https://docs.astral.sh/ruff/rules/invalid-envvar-default/) (`PLW1508`): Detect default value arguments to `os.environ.get` with invalid type.
|
|
||||||
- [`pytest-raises-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-raises-with-multiple-statements/) (`PT012`) [`pytest-warns-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-warns-with-multiple-statements/) (`PT031`): Allow `for` statements with an empty body in `pytest.raises` and `pytest.warns` `with` statements.
|
|
||||||
- [`redundant-open-modes`](https://docs.astral.sh/ruff/rules/redundant-open-modes/) (`UP015`): The diagnostic range is now the range of the redundant mode argument where it previously was the range of the entire open call. You may have to replace your `noqa` comments when suppressing `UP015`.
|
|
||||||
- [`stdlib-module-shadowing`](https://docs.astral.sh/ruff/rules/stdlib-module-shadowing/) (`A005`): Changes the default value of `lint.flake8-builtins.strict-checking` from `true` to `false`.
|
|
||||||
- [`type-none-comparison`](https://docs.astral.sh/ruff/rules/type-none-comparison/) (`FURB169`): Now also recognizes `type(expr) is type(None)` comparisons where `expr` isn't a name expression.
|
|
||||||
|
|
||||||
The following fixes or improvements to fixes have been stabilized:
|
|
||||||
|
|
||||||
- [`repeated-equality-comparison`](https://docs.astral.sh/ruff/rules/repeated-equality-comparison/) (`PLR1714`) ([#16685](https://github.com/astral-sh/ruff/pull/16685))
|
|
||||||
- [`needless-bool`](https://docs.astral.sh/ruff/rules/needless-bool/) (`SIM103`) ([#16684](https://github.com/astral-sh/ruff/pull/16684))
|
|
||||||
- [`unused-private-type-var`](https://docs.astral.sh/ruff/rules/unused-private-type-var/) (`PYI018`) ([#16682](https://github.com/astral-sh/ruff/pull/16682))
|
|
||||||
|
|
||||||
### Server
|
|
||||||
|
|
||||||
- Remove logging output for `ruff.printDebugInformation` ([#16617](https://github.com/astral-sh/ruff/pull/16617))
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- \[`flake8-builtins`\] Deprecate the `builtins-` prefixed options in favor of the unprefixed options (e.g. `builtins-allowed-modules` is now deprecated in favor of `allowed-modules`) ([#16092](https://github.com/astral-sh/ruff/pull/16092))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- \[`flake8-bandit`\] Fix mixed-case hash algorithm names (`S324`) ([#16552](https://github.com/astral-sh/ruff/pull/16552))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- [ruff] Fix `last_tag`/`commits_since_last_tag` for `version` command ([#16686](https://github.com/astral-sh/ruff/pull/16686))
|
|
||||||
|
|
||||||
## 0.9.10
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`ruff`\] Add new rule `RUF059`: Unused unpacked assignment ([#16449](https://github.com/astral-sh/ruff/pull/16449))
|
|
||||||
- \[`syntax-errors`\] Detect assignment expressions before Python 3.8 ([#16383](https://github.com/astral-sh/ruff/pull/16383))
|
|
||||||
- \[`syntax-errors`\] Named expressions in decorators before Python 3.9 ([#16386](https://github.com/astral-sh/ruff/pull/16386))
|
|
||||||
- \[`syntax-errors`\] Parenthesized keyword argument names after Python 3.8 ([#16482](https://github.com/astral-sh/ruff/pull/16482))
|
|
||||||
- \[`syntax-errors`\] Positional-only parameters before Python 3.8 ([#16481](https://github.com/astral-sh/ruff/pull/16481))
|
|
||||||
- \[`syntax-errors`\] Tuple unpacking in `return` and `yield` before Python 3.8 ([#16485](https://github.com/astral-sh/ruff/pull/16485))
|
|
||||||
- \[`syntax-errors`\] Type parameter defaults before Python 3.13 ([#16447](https://github.com/astral-sh/ruff/pull/16447))
|
|
||||||
- \[`syntax-errors`\] Type parameter lists before Python 3.12 ([#16479](https://github.com/astral-sh/ruff/pull/16479))
|
|
||||||
- \[`syntax-errors`\] `except*` before Python 3.11 ([#16446](https://github.com/astral-sh/ruff/pull/16446))
|
|
||||||
- \[`syntax-errors`\] `type` statements before Python 3.12 ([#16478](https://github.com/astral-sh/ruff/pull/16478))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Escape template filenames in glob patterns in configuration ([#16407](https://github.com/astral-sh/ruff/pull/16407))
|
|
||||||
- \[`flake8-simplify`\] Exempt unittest context methods for `SIM115` rule ([#16439](https://github.com/astral-sh/ruff/pull/16439))
|
|
||||||
- Formatter: Fix syntax error location in notebooks ([#16499](https://github.com/astral-sh/ruff/pull/16499))
|
|
||||||
- \[`pyupgrade`\] Do not offer fix when at least one target is `global`/`nonlocal` (`UP028`) ([#16451](https://github.com/astral-sh/ruff/pull/16451))
|
|
||||||
- \[`flake8-builtins`\] Ignore variables matching module attribute names (`A001`) ([#16454](https://github.com/astral-sh/ruff/pull/16454))
|
|
||||||
- \[`pylint`\] Convert `code` keyword argument to a positional argument in fix for (`PLR1722`) ([#16424](https://github.com/astral-sh/ruff/pull/16424))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Move rule code from `description` to `check_name` in GitLab output serializer ([#16437](https://github.com/astral-sh/ruff/pull/16437))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- \[`pydocstyle`\] Clarify that `D417` only checks docstrings with an arguments section ([#16494](https://github.com/astral-sh/ruff/pull/16494))
|
|
||||||
|
|
||||||
## 0.9.9
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- Fix caching of unsupported-syntax errors ([#16425](https://github.com/astral-sh/ruff/pull/16425))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- Only show unsupported-syntax errors in editors when preview mode is enabled ([#16429](https://github.com/astral-sh/ruff/pull/16429))
|
|
||||||
|
|
||||||
## 0.9.8
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- Start detecting version-related syntax errors in the parser ([#16090](https://github.com/astral-sh/ruff/pull/16090))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- \[`pylint`\] Mark fix unsafe (`PLW1507`) ([#16343](https://github.com/astral-sh/ruff/pull/16343))
|
|
||||||
- \[`pylint`\] Catch `case np.nan`/`case math.nan` in `match` statements (`PLW0177`) ([#16378](https://github.com/astral-sh/ruff/pull/16378))
|
|
||||||
- \[`ruff`\] Add more Pydantic models variants to the list of default copy semantics (`RUF012`) ([#16291](https://github.com/astral-sh/ruff/pull/16291))
|
|
||||||
|
|
||||||
### Server
|
|
||||||
|
|
||||||
- Avoid indexing the project if `configurationPreference` is `editorOnly` ([#16381](https://github.com/astral-sh/ruff/pull/16381))
|
|
||||||
- Avoid unnecessary info at non-trace server log level ([#16389](https://github.com/astral-sh/ruff/pull/16389))
|
|
||||||
- Expand `ruff.configuration` to allow inline config ([#16296](https://github.com/astral-sh/ruff/pull/16296))
|
|
||||||
- Notify users for invalid client settings ([#16361](https://github.com/astral-sh/ruff/pull/16361))
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- Add `per-file-target-version` option ([#16257](https://github.com/astral-sh/ruff/pull/16257))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- \[`refurb`\] Do not consider docstring(s) (`FURB156`) ([#16391](https://github.com/astral-sh/ruff/pull/16391))
|
|
||||||
- \[`flake8-self`\] Ignore attribute accesses on instance-like variables (`SLF001`) ([#16149](https://github.com/astral-sh/ruff/pull/16149))
|
|
||||||
- \[`pylint`\] Fix false positives, add missing methods, and support positional-only parameters (`PLE0302`) ([#16263](https://github.com/astral-sh/ruff/pull/16263))
|
|
||||||
- \[`flake8-pyi`\] Mark `PYI030` fix unsafe when comments are deleted ([#16322](https://github.com/astral-sh/ruff/pull/16322))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Fix example for `S611` ([#16316](https://github.com/astral-sh/ruff/pull/16316))
|
|
||||||
- Normalize inconsistent markdown headings in docstrings ([#16364](https://github.com/astral-sh/ruff/pull/16364))
|
|
||||||
- Document MSRV policy ([#16384](https://github.com/astral-sh/ruff/pull/16384))
|
|
||||||
|
|
||||||
## 0.9.7
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- Consider `__new__` methods as special function type for enforcing class method or static method rules ([#13305](https://github.com/astral-sh/ruff/pull/13305))
|
|
||||||
- \[`airflow`\] Improve the internal logic to differentiate deprecated symbols (`AIR303`) ([#16013](https://github.com/astral-sh/ruff/pull/16013))
|
|
||||||
- \[`refurb`\] Manual timezone monkeypatching (`FURB162`) ([#16113](https://github.com/astral-sh/ruff/pull/16113))
|
|
||||||
- \[`ruff`\] Implicit class variable in dataclass (`RUF045`) ([#14349](https://github.com/astral-sh/ruff/pull/14349))
|
|
||||||
- \[`ruff`\] Skip singleton starred expressions for `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#16083](https://github.com/astral-sh/ruff/pull/16083))
|
|
||||||
- \[`refurb`\] Check for subclasses includes subscript expressions (`FURB189`) ([#16155](https://github.com/astral-sh/ruff/pull/16155))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- \[`flake8-debugger`\] Also flag `sys.breakpointhook` and `sys.__breakpointhook__` (`T100`) ([#16191](https://github.com/astral-sh/ruff/pull/16191))
|
|
||||||
- \[`pycodestyle`\] Exempt `site.addsitedir(...)` calls (`E402`) ([#16251](https://github.com/astral-sh/ruff/pull/16251))
|
|
||||||
|
|
||||||
### Formatter
|
|
||||||
|
|
||||||
- Fix unstable formatting of trailing end-of-line comments of parenthesized attribute values ([#16187](https://github.com/astral-sh/ruff/pull/16187))
|
|
||||||
|
|
||||||
### Server
|
|
||||||
|
|
||||||
- Fix handling of requests received after shutdown message ([#16262](https://github.com/astral-sh/ruff/pull/16262))
|
|
||||||
- Ignore `source.organizeImports.ruff` and `source.fixAll.ruff` code actions for a notebook cell ([#16154](https://github.com/astral-sh/ruff/pull/16154))
|
|
||||||
- Include document specific debug info for `ruff.printDebugInformation` ([#16215](https://github.com/astral-sh/ruff/pull/16215))
|
|
||||||
- Update server to return the debug info as string with `ruff.printDebugInformation` ([#16214](https://github.com/astral-sh/ruff/pull/16214))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Warn on invalid `noqa` even when there are no diagnostics ([#16178](https://github.com/astral-sh/ruff/pull/16178))
|
|
||||||
- Better error messages while loading configuration `extend`s ([#15658](https://github.com/astral-sh/ruff/pull/15658))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- \[`flake8-comprehensions`\] Handle trailing comma in `C403` fix ([#16110](https://github.com/astral-sh/ruff/pull/16110))
|
|
||||||
- \[`flake8-pyi`\] Avoid flagging `custom-typevar-for-self` on metaclass methods (`PYI019`) ([#16141](https://github.com/astral-sh/ruff/pull/16141))
|
|
||||||
- \[`pydocstyle`\] Handle arguments with the same names as sections (`D417`) ([#16011](https://github.com/astral-sh/ruff/pull/16011))
|
|
||||||
- \[`pylint`\] Correct ordering of arguments in fix for `if-stmt-min-max` (`PLR1730`) ([#16080](https://github.com/astral-sh/ruff/pull/16080))
|
|
||||||
- \[`pylint`\] Do not offer fix for raw strings (`PLE251`) ([#16132](https://github.com/astral-sh/ruff/pull/16132))
|
|
||||||
- \[`pyupgrade`\] Do not upgrade functional `TypedDicts` with private field names to the class-based syntax (`UP013`) ([#16219](https://github.com/astral-sh/ruff/pull/16219))
|
|
||||||
- \[`pyupgrade`\] Handle micro version numbers correctly (`UP036`) ([#16091](https://github.com/astral-sh/ruff/pull/16091))
|
|
||||||
- \[`pyupgrade`\] Unwrap unary expressions correctly (`UP018`) ([#15919](https://github.com/astral-sh/ruff/pull/15919))
|
|
||||||
- \[`refurb`\] Correctly handle lengths of literal strings in `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#16237](https://github.com/astral-sh/ruff/pull/16237))
|
|
||||||
- \[`ruff`\] Skip `RUF001` diagnostics when visiting string type definitions ([#16122](https://github.com/astral-sh/ruff/pull/16122))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Add FAQ entry for `source.*` code actions in Notebook ([#16212](https://github.com/astral-sh/ruff/pull/16212))
|
|
||||||
- Add `SECURITY.md` ([#16224](https://github.com/astral-sh/ruff/pull/16224))
|
|
||||||
|
|
||||||
## 0.9.6
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`airflow`\] Add `external_task.{ExternalTaskMarker, ExternalTaskSensor}` for `AIR302` ([#16014](https://github.com/astral-sh/ruff/pull/16014))
|
|
||||||
- \[`flake8-builtins`\] Make strict module name comparison optional (`A005`) ([#15951](https://github.com/astral-sh/ruff/pull/15951))
|
|
||||||
- \[`flake8-pyi`\] Extend fix to Python \<= 3.9 for `redundant-none-literal` (`PYI061`) ([#16044](https://github.com/astral-sh/ruff/pull/16044))
|
|
||||||
- \[`pylint`\] Also report when the object isn't a literal (`PLE1310`) ([#15985](https://github.com/astral-sh/ruff/pull/15985))
|
|
||||||
- \[`ruff`\] Implement `indented-form-feed` (`RUF054`) ([#16049](https://github.com/astral-sh/ruff/pull/16049))
|
|
||||||
- \[`ruff`\] Skip type definitions for `missing-f-string-syntax` (`RUF027`) ([#16054](https://github.com/astral-sh/ruff/pull/16054))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- \[`flake8-annotations`\] Correct syntax for `typing.Union` in suggested return type fixes for `ANN20x` rules ([#16025](https://github.com/astral-sh/ruff/pull/16025))
|
|
||||||
- \[`flake8-builtins`\] Match upstream module name comparison (`A005`) ([#16006](https://github.com/astral-sh/ruff/pull/16006))
|
|
||||||
- \[`flake8-comprehensions`\] Detect overshadowed `list`/`set`/`dict`, ignore variadics and named expressions (`C417`) ([#15955](https://github.com/astral-sh/ruff/pull/15955))
|
|
||||||
- \[`flake8-pie`\] Remove following comma correctly when the unpacked dictionary is empty (`PIE800`) ([#16008](https://github.com/astral-sh/ruff/pull/16008))
|
|
||||||
- \[`flake8-simplify`\] Only trigger `SIM401` on known dictionaries ([#15995](https://github.com/astral-sh/ruff/pull/15995))
|
|
||||||
- \[`pylint`\] Do not report calls when object type and argument type mismatch, remove custom escape handling logic (`PLE1310`) ([#15984](https://github.com/astral-sh/ruff/pull/15984))
|
|
||||||
- \[`pyupgrade`\] Comments within parenthesized value ranges should not affect applicability (`UP040`) ([#16027](https://github.com/astral-sh/ruff/pull/16027))
|
|
||||||
- \[`pyupgrade`\] Don't introduce invalid syntax when upgrading old-style type aliases with parenthesized multiline values (`UP040`) ([#16026](https://github.com/astral-sh/ruff/pull/16026))
|
|
||||||
- \[`pyupgrade`\] Ensure we do not rename two type parameters to the same name (`UP049`) ([#16038](https://github.com/astral-sh/ruff/pull/16038))
|
|
||||||
- \[`pyupgrade`\] \[`ruff`\] Don't apply renamings if the new name is shadowed in a scope of one of the references to the binding (`UP049`, `RUF052`) ([#16032](https://github.com/astral-sh/ruff/pull/16032))
|
|
||||||
- \[`ruff`\] Update `RUF009` to behave similar to `B008` and ignore attributes with immutable types ([#16048](https://github.com/astral-sh/ruff/pull/16048))
|
|
||||||
|
|
||||||
### Server
|
|
||||||
|
|
||||||
- Root exclusions in the server to project root ([#16043](https://github.com/astral-sh/ruff/pull/16043))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- \[`flake8-datetime`\] Ignore `.replace()` calls while looking for `.astimezone` ([#16050](https://github.com/astral-sh/ruff/pull/16050))
|
|
||||||
- \[`flake8-type-checking`\] Avoid `TC004` false positive where the runtime definition is provided by `__getattr__` ([#16052](https://github.com/astral-sh/ruff/pull/16052))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Improve `ruff-lsp` migration document ([#16072](https://github.com/astral-sh/ruff/pull/16072))
|
|
||||||
- Undeprecate `ruff.nativeServer` ([#16039](https://github.com/astral-sh/ruff/pull/16039))
|
|
||||||
|
|
||||||
## 0.9.5
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- Recognize all symbols named `TYPE_CHECKING` for `in_type_checking_block` ([#15719](https://github.com/astral-sh/ruff/pull/15719))
|
|
||||||
- \[`flake8-comprehensions`\] Handle builtins at top of file correctly for `unnecessary-dict-comprehension-for-iterable` (`C420`) ([#15837](https://github.com/astral-sh/ruff/pull/15837))
|
|
||||||
- \[`flake8-logging`\] `.exception()` and `exc_info=` outside exception handlers (`LOG004`, `LOG014`) ([#15799](https://github.com/astral-sh/ruff/pull/15799))
|
|
||||||
- \[`flake8-pyi`\] Fix incorrect behaviour of `custom-typevar-return-type` preview-mode autofix if `typing` was already imported (`PYI019`) ([#15853](https://github.com/astral-sh/ruff/pull/15853))
|
|
||||||
- \[`flake8-pyi`\] Fix more complex cases (`PYI019`) ([#15821](https://github.com/astral-sh/ruff/pull/15821))
|
|
||||||
- \[`flake8-pyi`\] Make `PYI019` autofixable for `.py` files in preview mode as well as stubs ([#15889](https://github.com/astral-sh/ruff/pull/15889))
|
|
||||||
- \[`flake8-pyi`\] Remove type parameter correctly when it is the last (`PYI019`) ([#15854](https://github.com/astral-sh/ruff/pull/15854))
|
|
||||||
- \[`pylint`\] Fix missing parens in unsafe fix for `unnecessary-dunder-call` (`PLC2801`) ([#15762](https://github.com/astral-sh/ruff/pull/15762))
|
|
||||||
- \[`pyupgrade`\] Better messages and diagnostic range (`UP015`) ([#15872](https://github.com/astral-sh/ruff/pull/15872))
|
|
||||||
- \[`pyupgrade`\] Rename private type parameters in PEP 695 generics (`UP049`) ([#15862](https://github.com/astral-sh/ruff/pull/15862))
|
|
||||||
- \[`refurb`\] Also report non-name expressions (`FURB169`) ([#15905](https://github.com/astral-sh/ruff/pull/15905))
|
|
||||||
- \[`refurb`\] Mark fix as unsafe if there are comments (`FURB171`) ([#15832](https://github.com/astral-sh/ruff/pull/15832))
|
|
||||||
- \[`ruff`\] Classes with mixed type variable style (`RUF053`) ([#15841](https://github.com/astral-sh/ruff/pull/15841))
|
|
||||||
- \[`airflow`\] `BashOperator` has been moved to `airflow.providers.standard.operators.bash.BashOperator` (`AIR302`) ([#15922](https://github.com/astral-sh/ruff/pull/15922))
|
|
||||||
- \[`flake8-pyi`\] Add autofix for unused-private-type-var (`PYI018`) ([#15999](https://github.com/astral-sh/ruff/pull/15999))
|
|
||||||
- \[`flake8-pyi`\] Significantly improve accuracy of `PYI019` if preview mode is enabled ([#15888](https://github.com/astral-sh/ruff/pull/15888))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- Preserve triple quotes and prefixes for strings ([#15818](https://github.com/astral-sh/ruff/pull/15818))
|
|
||||||
- \[`flake8-comprehensions`\] Skip when `TypeError` present from too many (kw)args for `C410`,`C411`, and `C418` ([#15838](https://github.com/astral-sh/ruff/pull/15838))
|
|
||||||
- \[`flake8-pyi`\] Rename `PYI019` and improve its diagnostic message ([#15885](https://github.com/astral-sh/ruff/pull/15885))
|
|
||||||
- \[`pep8-naming`\] Ignore `@override` methods (`N803`) ([#15954](https://github.com/astral-sh/ruff/pull/15954))
|
|
||||||
- \[`pyupgrade`\] Reuse replacement logic from `UP046` and `UP047` to preserve more comments (`UP040`) ([#15840](https://github.com/astral-sh/ruff/pull/15840))
|
|
||||||
- \[`ruff`\] Analyze deferred annotations before enforcing `mutable-(data)class-default` and `function-call-in-dataclass-default-argument` (`RUF008`,`RUF009`,`RUF012`) ([#15921](https://github.com/astral-sh/ruff/pull/15921))
|
|
||||||
- \[`pycodestyle`\] Exempt `sys.path += ...` calls (`E402`) ([#15980](https://github.com/astral-sh/ruff/pull/15980))
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
- Config error only when `flake8-import-conventions` alias conflicts with `isort.required-imports` bound name ([#15918](https://github.com/astral-sh/ruff/pull/15918))
|
|
||||||
- Workaround Even Better TOML crash related to `allOf` ([#15992](https://github.com/astral-sh/ruff/pull/15992))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- \[`flake8-comprehensions`\] Unnecessary `list` comprehension (rewrite as a `set` comprehension) (`C403`) - Handle extraneous parentheses around list comprehension ([#15877](https://github.com/astral-sh/ruff/pull/15877))
|
|
||||||
- \[`flake8-comprehensions`\] Handle trailing comma in fixes for `unnecessary-generator-list/set` (`C400`,`C401`) ([#15929](https://github.com/astral-sh/ruff/pull/15929))
|
|
||||||
- \[`flake8-pyi`\] Fix several correctness issues with `custom-type-var-return-type` (`PYI019`) ([#15851](https://github.com/astral-sh/ruff/pull/15851))
|
|
||||||
- \[`pep8-naming`\] Consider any number of leading underscore for `N801` ([#15988](https://github.com/astral-sh/ruff/pull/15988))
|
|
||||||
- \[`pyflakes`\] Visit forward annotations in `TypeAliasType` as types (`F401`) ([#15829](https://github.com/astral-sh/ruff/pull/15829))
|
|
||||||
- \[`pylint`\] Correct min/max auto-fix and suggestion for (`PL1730`) ([#15930](https://github.com/astral-sh/ruff/pull/15930))
|
|
||||||
- \[`refurb`\] Handle unparenthesized tuples correctly (`FURB122`, `FURB142`) ([#15953](https://github.com/astral-sh/ruff/pull/15953))
|
|
||||||
- \[`refurb`\] Avoid `None | None` as well as better detection and fix (`FURB168`) ([#15779](https://github.com/astral-sh/ruff/pull/15779))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Add deprecation warning for `ruff-lsp` related settings ([#15850](https://github.com/astral-sh/ruff/pull/15850))
|
|
||||||
- Docs (`linter.md`): clarify that Python files are always searched for in subdirectories ([#15882](https://github.com/astral-sh/ruff/pull/15882))
|
|
||||||
- Fix a typo in `non_pep695_generic_class.rs` ([#15946](https://github.com/astral-sh/ruff/pull/15946))
|
|
||||||
- Improve Docs: Pylint subcategories' codes ([#15909](https://github.com/astral-sh/ruff/pull/15909))
|
|
||||||
- Remove non-existing `lint.extendIgnore` editor setting ([#15844](https://github.com/astral-sh/ruff/pull/15844))
|
|
||||||
- Update black deviations ([#15928](https://github.com/astral-sh/ruff/pull/15928))
|
|
||||||
- Mention `UP049` in `UP046` and `UP047`, add `See also` section to `UP040` ([#15956](https://github.com/astral-sh/ruff/pull/15956))
|
|
||||||
- Add instance variable examples to `RUF012` ([#15982](https://github.com/astral-sh/ruff/pull/15982))
|
|
||||||
- Explain precedence for `ignore` and `select` config ([#15883](https://github.com/astral-sh/ruff/pull/15883))
|
|
||||||
|
|
||||||
## 0.9.4
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`airflow`\] Extend airflow context parameter check for `BaseOperator.execute` (`AIR302`) ([#15713](https://github.com/astral-sh/ruff/pull/15713))
|
|
||||||
- \[`airflow`\] Update `AIR302` to check for deprecated context keys ([#15144](https://github.com/astral-sh/ruff/pull/15144))
|
|
||||||
- \[`flake8-bandit`\] Permit suspicious imports within stub files (`S4`) ([#15822](https://github.com/astral-sh/ruff/pull/15822))
|
|
||||||
- \[`pylint`\] Do not trigger `PLR6201` on empty collections ([#15732](https://github.com/astral-sh/ruff/pull/15732))
|
|
||||||
- \[`refurb`\] Do not emit diagnostic when loop variables are used outside loop body (`FURB122`) ([#15757](https://github.com/astral-sh/ruff/pull/15757))
|
|
||||||
- \[`ruff`\] Add support for more `re` patterns (`RUF055`) ([#15764](https://github.com/astral-sh/ruff/pull/15764))
|
|
||||||
- \[`ruff`\] Check for shadowed `map` before suggesting fix (`RUF058`) ([#15790](https://github.com/astral-sh/ruff/pull/15790))
|
|
||||||
- \[`ruff`\] Do not emit diagnostic when all arguments to `zip()` are variadic (`RUF058`) ([#15744](https://github.com/astral-sh/ruff/pull/15744))
|
|
||||||
- \[`ruff`\] Parenthesize fix when argument spans multiple lines for `unnecessary-round` (`RUF057`) ([#15703](https://github.com/astral-sh/ruff/pull/15703))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- Preserve quote style in generated code ([#15726](https://github.com/astral-sh/ruff/pull/15726), [#15778](https://github.com/astral-sh/ruff/pull/15778), [#15794](https://github.com/astral-sh/ruff/pull/15794))
|
|
||||||
- \[`flake8-bugbear`\] Exempt `NewType` calls where the original type is immutable (`B008`) ([#15765](https://github.com/astral-sh/ruff/pull/15765))
|
|
||||||
- \[`pylint`\] Honor banned top-level imports by `TID253` in `PLC0415`. ([#15628](https://github.com/astral-sh/ruff/pull/15628))
|
|
||||||
- \[`pyupgrade`\] Ignore `is_typeddict` and `TypedDict` for `deprecated-import` (`UP035`) ([#15800](https://github.com/astral-sh/ruff/pull/15800))
|
|
||||||
|
|
||||||
### CLI
|
|
||||||
|
|
||||||
- Fix formatter warning message for `flake8-quotes` option ([#15788](https://github.com/astral-sh/ruff/pull/15788))
|
|
||||||
- Implement tab autocomplete for `ruff config` ([#15603](https://github.com/astral-sh/ruff/pull/15603))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- \[`flake8-comprehensions`\] Do not emit `unnecessary-map` diagnostic when lambda has different arity (`C417`) ([#15802](https://github.com/astral-sh/ruff/pull/15802))
|
|
||||||
- \[`flake8-comprehensions`\] Parenthesize `sorted` when needed for `unnecessary-call-around-sorted` (`C413`) ([#15825](https://github.com/astral-sh/ruff/pull/15825))
|
|
||||||
- \[`pyupgrade`\] Handle end-of-line comments for `quoted-annotation` (`UP037`) ([#15824](https://github.com/astral-sh/ruff/pull/15824))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Add missing config docstrings ([#15803](https://github.com/astral-sh/ruff/pull/15803))
|
|
||||||
- Add references to `trio.run_process` and `anyio.run_process` ([#15761](https://github.com/astral-sh/ruff/pull/15761))
|
|
||||||
- Use `uv init --lib` in tutorial ([#15718](https://github.com/astral-sh/ruff/pull/15718))
|
|
||||||
|
|
||||||
## 0.9.3
|
|
||||||
|
|
||||||
### Preview features
|
|
||||||
|
|
||||||
- \[`airflow`\] Argument `fail_stop` in DAG has been renamed as `fail_fast` (`AIR302`) ([#15633](https://github.com/astral-sh/ruff/pull/15633))
|
|
||||||
- \[`airflow`\] Extend `AIR303` with more symbols ([#15611](https://github.com/astral-sh/ruff/pull/15611))
|
|
||||||
- \[`flake8-bandit`\] Report all references to suspicious functions (`S3`) ([#15541](https://github.com/astral-sh/ruff/pull/15541))
|
|
||||||
- \[`flake8-pytest-style`\] Do not emit diagnostics for empty `for` loops (`PT012`, `PT031`) ([#15542](https://github.com/astral-sh/ruff/pull/15542))
|
|
||||||
- \[`flake8-simplify`\] Avoid double negations (`SIM103`) ([#15562](https://github.com/astral-sh/ruff/pull/15562))
|
|
||||||
- \[`pyflakes`\] Fix infinite loop with unused local import in `__init__.py` (`F401`) ([#15517](https://github.com/astral-sh/ruff/pull/15517))
|
|
||||||
- \[`pylint`\] Do not report methods with only one `EM101`-compatible `raise` (`PLR6301`) ([#15507](https://github.com/astral-sh/ruff/pull/15507))
|
|
||||||
- \[`pylint`\] Implement `redefined-slots-in-subclass` (`W0244`) ([#9640](https://github.com/astral-sh/ruff/pull/9640))
|
|
||||||
- \[`pyupgrade`\] Add rules to use PEP 695 generics in classes and functions (`UP046`, `UP047`) ([#15565](https://github.com/astral-sh/ruff/pull/15565), [#15659](https://github.com/astral-sh/ruff/pull/15659))
|
|
||||||
- \[`refurb`\] Implement `for-loop-writes` (`FURB122`) ([#10630](https://github.com/astral-sh/ruff/pull/10630))
|
|
||||||
- \[`ruff`\] Implement `needless-else` clause (`RUF047`) ([#15051](https://github.com/astral-sh/ruff/pull/15051))
|
|
||||||
- \[`ruff`\] Implement `starmap-zip` (`RUF058`) ([#15483](https://github.com/astral-sh/ruff/pull/15483))
|
|
||||||
|
|
||||||
### Rule changes
|
|
||||||
|
|
||||||
- \[`flake8-bugbear`\] Do not raise error if keyword argument is present and target-python version is less or equals than 3.9 (`B903`) ([#15549](https://github.com/astral-sh/ruff/pull/15549))
|
|
||||||
- \[`flake8-comprehensions`\] strip parentheses around generators in `unnecessary-generator-set` (`C401`) ([#15553](https://github.com/astral-sh/ruff/pull/15553))
|
|
||||||
- \[`flake8-pytest-style`\] Rewrite references to `.exception` (`PT027`) ([#15680](https://github.com/astral-sh/ruff/pull/15680))
|
|
||||||
- \[`flake8-simplify`\] Mark fixes as unsafe (`SIM201`, `SIM202`) ([#15626](https://github.com/astral-sh/ruff/pull/15626))
|
|
||||||
- \[`flake8-type-checking`\] Fix some safe fixes being labeled unsafe (`TC006`,`TC008`) ([#15638](https://github.com/astral-sh/ruff/pull/15638))
|
|
||||||
- \[`isort`\] Omit trailing whitespace in `unsorted-imports` (`I001`) ([#15518](https://github.com/astral-sh/ruff/pull/15518))
|
|
||||||
- \[`pydoclint`\] Allow ignoring one line docstrings for `DOC` rules ([#13302](https://github.com/astral-sh/ruff/pull/13302))
|
|
||||||
- \[`pyflakes`\] Apply redefinition fixes by source code order (`F811`) ([#15575](https://github.com/astral-sh/ruff/pull/15575))
|
|
||||||
- \[`pyflakes`\] Avoid removing too many imports in `redefined-while-unused` (`F811`) ([#15585](https://github.com/astral-sh/ruff/pull/15585))
|
|
||||||
- \[`pyflakes`\] Group redefinition fixes by source statement (`F811`) ([#15574](https://github.com/astral-sh/ruff/pull/15574))
|
|
||||||
- \[`pylint`\] Include name of base class in message for `redefined-slots-in-subclass` (`W0244`) ([#15559](https://github.com/astral-sh/ruff/pull/15559))
|
|
||||||
- \[`ruff`\] Update fix for `RUF055` to use `var == value` ([#15605](https://github.com/astral-sh/ruff/pull/15605))
|
|
||||||
|
|
||||||
### Formatter
|
|
||||||
|
|
||||||
- Fix bracket spacing for single-element tuples in f-string expressions ([#15537](https://github.com/astral-sh/ruff/pull/15537))
|
|
||||||
- Fix unstable f-string formatting for expressions containing a trailing comma ([#15545](https://github.com/astral-sh/ruff/pull/15545))
|
|
||||||
|
|
||||||
### Performance
|
|
||||||
|
|
||||||
- Avoid quadratic membership check in import fixes ([#15576](https://github.com/astral-sh/ruff/pull/15576))
|
|
||||||
|
|
||||||
### Server
|
|
||||||
|
|
||||||
- Allow `unsafe-fixes` settings for code actions ([#15666](https://github.com/astral-sh/ruff/pull/15666))
|
|
||||||
|
|
||||||
### Bug fixes
|
|
||||||
|
|
||||||
- \[`flake8-bandit`\] Add missing single-line/dotall regex flag (`S608`) ([#15654](https://github.com/astral-sh/ruff/pull/15654))
|
|
||||||
- \[`flake8-import-conventions`\] Fix infinite loop between `ICN001` and `I002` (`ICN001`) ([#15480](https://github.com/astral-sh/ruff/pull/15480))
|
|
||||||
- \[`flake8-simplify`\] Do not emit diagnostics for expressions inside string type annotations (`SIM222`, `SIM223`) ([#15405](https://github.com/astral-sh/ruff/pull/15405))
|
|
||||||
- \[`pyflakes`\] Treat arguments passed to the `default=` parameter of `TypeVar` as type expressions (`F821`) ([#15679](https://github.com/astral-sh/ruff/pull/15679))
|
|
||||||
- \[`pyupgrade`\] Avoid syntax error when the iterable is a non-parenthesized tuple (`UP028`) ([#15543](https://github.com/astral-sh/ruff/pull/15543))
|
|
||||||
- \[`ruff`\] Exempt `NewType` calls where the original type is immutable (`RUF009`) ([#15588](https://github.com/astral-sh/ruff/pull/15588))
|
|
||||||
- Preserve raw string prefix and escapes in all codegen fixes ([#15694](https://github.com/astral-sh/ruff/pull/15694))
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
- Generate documentation redirects for lowercase rule codes ([#15564](https://github.com/astral-sh/ruff/pull/15564))
|
|
||||||
- `TRY300`: Add some extra notes on not catching exceptions you didn't expect ([#15036](https://github.com/astral-sh/ruff/pull/15036))
|
|
||||||
|
|
||||||
## 0.9.2
|
## 0.9.2
|
||||||
|
|
||||||
### Preview features
|
### Preview features
|
||||||
@@ -1421,11 +921,11 @@ The following rules have been stabilized and are no longer in preview:
|
|||||||
|
|
||||||
The following behaviors have been stabilized:
|
The following behaviors have been stabilized:
|
||||||
|
|
||||||
- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
|
- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context mangers.
|
||||||
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
|
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context mangers.
|
||||||
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
|
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context mangers.
|
||||||
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
|
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context mangers.
|
||||||
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.
|
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context mangers.
|
||||||
|
|
||||||
The following fixes have been stabilized:
|
The following fixes have been stabilized:
|
||||||
|
|
||||||
|
|||||||
@@ -526,7 +526,7 @@ cargo benchmark
|
|||||||
#### Benchmark-driven Development
|
#### Benchmark-driven Development
|
||||||
|
|
||||||
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
|
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
|
||||||
`--save-baseline=<name>` to store an initial baseline benchmark (e.g., on `main`) and then use
|
`--save-baseline=<name>` to store an initial baseline benchmark (e.g. on `main`) and then use
|
||||||
`--benchmark=<name>` to compare against that benchmark. Criterion will print a message telling you
|
`--benchmark=<name>` to compare against that benchmark. Criterion will print a message telling you
|
||||||
if the benchmark improved/regressed compared to that baseline.
|
if the benchmark improved/regressed compared to that baseline.
|
||||||
|
|
||||||
@@ -678,9 +678,9 @@ utils with it:
|
|||||||
23 Newline 24
|
23 Newline 24
|
||||||
```
|
```
|
||||||
|
|
||||||
- `cargo dev print-cst <file>`: Print the CST of a Python file using
|
- `cargo dev print-cst <file>`: Print the CST of a python file using
|
||||||
[LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
|
[LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
|
||||||
in Ruff. For example, for `if True: pass # comment`, everything, including the whitespace, is represented:
|
in Ruff. E.g. for `if True: pass # comment` everything including the whitespace is represented:
|
||||||
|
|
||||||
```text
|
```text
|
||||||
Module {
|
Module {
|
||||||
|
|||||||
1461
Cargo.lock
generated
1461
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
41
Cargo.toml
41
Cargo.toml
@@ -4,7 +4,7 @@ resolver = "2"
|
|||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.83"
|
rust-version = "1.80"
|
||||||
homepage = "https://docs.astral.sh/ruff"
|
homepage = "https://docs.astral.sh/ruff"
|
||||||
documentation = "https://docs.astral.sh/ruff"
|
documentation = "https://docs.astral.sh/ruff"
|
||||||
repository = "https://github.com/astral-sh/ruff"
|
repository = "https://github.com/astral-sh/ruff"
|
||||||
@@ -63,7 +63,7 @@ colored = { version = "3.0.0" }
|
|||||||
console_error_panic_hook = { version = "0.1.7" }
|
console_error_panic_hook = { version = "0.1.7" }
|
||||||
console_log = { version = "1.0.0" }
|
console_log = { version = "1.0.0" }
|
||||||
countme = { version = "3.0.1" }
|
countme = { version = "3.0.1" }
|
||||||
compact_str = "0.9.0"
|
compact_str = "0.8.0"
|
||||||
criterion = { version = "0.5.1", default-features = false }
|
criterion = { version = "0.5.1", default-features = false }
|
||||||
crossbeam = { version = "0.8.4" }
|
crossbeam = { version = "0.8.4" }
|
||||||
dashmap = { version = "6.0.1" }
|
dashmap = { version = "6.0.1" }
|
||||||
@@ -71,16 +71,14 @@ dir-test = { version = "0.4.0" }
|
|||||||
dunce = { version = "1.0.5" }
|
dunce = { version = "1.0.5" }
|
||||||
drop_bomb = { version = "0.1.5" }
|
drop_bomb = { version = "0.1.5" }
|
||||||
env_logger = { version = "0.11.0" }
|
env_logger = { version = "0.11.0" }
|
||||||
etcetera = { version = "0.10.0" }
|
etcetera = { version = "0.8.0" }
|
||||||
fern = { version = "0.7.0" }
|
fern = { version = "0.7.0" }
|
||||||
filetime = { version = "0.2.23" }
|
filetime = { version = "0.2.23" }
|
||||||
getrandom = { version = "0.3.1" }
|
|
||||||
glob = { version = "0.3.1" }
|
glob = { version = "0.3.1" }
|
||||||
globset = { version = "0.4.14" }
|
globset = { version = "0.4.14" }
|
||||||
globwalk = { version = "0.9.1" }
|
globwalk = { version = "0.9.1" }
|
||||||
hashbrown = { version = "0.15.0", default-features = false, features = [
|
hashbrown = { version = "0.15.0", default-features = false, features = [
|
||||||
"raw-entry",
|
"raw-entry",
|
||||||
"equivalent",
|
|
||||||
"inline-more",
|
"inline-more",
|
||||||
] }
|
] }
|
||||||
ignore = { version = "0.4.22" }
|
ignore = { version = "0.4.22" }
|
||||||
@@ -118,12 +116,12 @@ proc-macro2 = { version = "1.0.79" }
|
|||||||
pyproject-toml = { version = "0.13.4" }
|
pyproject-toml = { version = "0.13.4" }
|
||||||
quick-junit = { version = "0.5.0" }
|
quick-junit = { version = "0.5.0" }
|
||||||
quote = { version = "1.0.23" }
|
quote = { version = "1.0.23" }
|
||||||
rand = { version = "0.9.0" }
|
rand = { version = "0.8.5" }
|
||||||
rayon = { version = "1.10.0" }
|
rayon = { version = "1.10.0" }
|
||||||
regex = { version = "1.10.2" }
|
regex = { version = "1.10.2" }
|
||||||
rustc-hash = { version = "2.0.0" }
|
rustc-hash = { version = "2.0.0" }
|
||||||
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
|
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
|
||||||
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "d758691ba17ee1a60c5356ea90888d529e1782ad" }
|
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "88a1d7774d78f048fbd77d40abca9ebd729fd1f0" }
|
||||||
schemars = { version = "0.8.16" }
|
schemars = { version = "0.8.16" }
|
||||||
seahash = { version = "4.1.0" }
|
seahash = { version = "4.1.0" }
|
||||||
serde = { version = "1.0.197", features = ["derive"] }
|
serde = { version = "1.0.197", features = ["derive"] }
|
||||||
@@ -136,15 +134,10 @@ serde_with = { version = "3.6.0", default-features = false, features = [
|
|||||||
shellexpand = { version = "3.0.0" }
|
shellexpand = { version = "3.0.0" }
|
||||||
similar = { version = "2.4.0", features = ["inline"] }
|
similar = { version = "2.4.0", features = ["inline"] }
|
||||||
smallvec = { version = "1.13.2" }
|
smallvec = { version = "1.13.2" }
|
||||||
snapbox = { version = "0.6.0", features = [
|
snapbox = { version = "0.6.0", features = ["diff", "term-svg", "cmd", "examples"] }
|
||||||
"diff",
|
|
||||||
"term-svg",
|
|
||||||
"cmd",
|
|
||||||
"examples",
|
|
||||||
] }
|
|
||||||
static_assertions = "1.1.0"
|
static_assertions = "1.1.0"
|
||||||
strum = { version = "0.27.0", features = ["strum_macros"] }
|
strum = { version = "0.26.0", features = ["strum_macros"] }
|
||||||
strum_macros = { version = "0.27.0" }
|
strum_macros = { version = "0.26.0" }
|
||||||
syn = { version = "2.0.55" }
|
syn = { version = "2.0.55" }
|
||||||
tempfile = { version = "3.9.0" }
|
tempfile = { version = "3.9.0" }
|
||||||
test-case = { version = "3.3.1" }
|
test-case = { version = "3.3.1" }
|
||||||
@@ -154,7 +147,6 @@ toml = { version = "0.8.11" }
|
|||||||
tracing = { version = "0.1.40" }
|
tracing = { version = "0.1.40" }
|
||||||
tracing-flame = { version = "0.2.0" }
|
tracing-flame = { version = "0.2.0" }
|
||||||
tracing-indicatif = { version = "0.3.6" }
|
tracing-indicatif = { version = "0.3.6" }
|
||||||
tracing-log = { version = "0.2.0" }
|
|
||||||
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
|
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
|
||||||
"env-filter",
|
"env-filter",
|
||||||
"fmt",
|
"fmt",
|
||||||
@@ -167,6 +159,7 @@ unicode-ident = { version = "1.0.12" }
|
|||||||
unicode-width = { version = "0.2.0" }
|
unicode-width = { version = "0.2.0" }
|
||||||
unicode_names2 = { version = "1.2.2" }
|
unicode_names2 = { version = "1.2.2" }
|
||||||
unicode-normalization = { version = "0.1.23" }
|
unicode-normalization = { version = "0.1.23" }
|
||||||
|
ureq = { version = "2.9.6" }
|
||||||
url = { version = "2.5.0" }
|
url = { version = "2.5.0" }
|
||||||
uuid = { version = "1.6.1", features = [
|
uuid = { version = "1.6.1", features = [
|
||||||
"v4",
|
"v4",
|
||||||
@@ -180,10 +173,6 @@ wasm-bindgen-test = { version = "0.3.42" }
|
|||||||
wild = { version = "2" }
|
wild = { version = "2" }
|
||||||
zip = { version = "0.6.6", default-features = false }
|
zip = { version = "0.6.6", default-features = false }
|
||||||
|
|
||||||
[workspace.metadata.cargo-shear]
|
|
||||||
ignored = ["getrandom"]
|
|
||||||
|
|
||||||
|
|
||||||
[workspace.lints.rust]
|
[workspace.lints.rust]
|
||||||
unsafe_code = "warn"
|
unsafe_code = "warn"
|
||||||
unreachable_pub = "warn"
|
unreachable_pub = "warn"
|
||||||
@@ -316,20 +305,10 @@ local-artifacts-jobs = ["./build-binaries", "./build-docker"]
|
|||||||
# Publish jobs to run in CI
|
# Publish jobs to run in CI
|
||||||
publish-jobs = ["./publish-pypi", "./publish-wasm"]
|
publish-jobs = ["./publish-pypi", "./publish-wasm"]
|
||||||
# Post-announce jobs to run in CI
|
# Post-announce jobs to run in CI
|
||||||
post-announce-jobs = [
|
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
|
||||||
"./notify-dependents",
|
|
||||||
"./publish-docs",
|
|
||||||
"./publish-playground",
|
|
||||||
]
|
|
||||||
# Custom permissions for GitHub Jobs
|
# Custom permissions for GitHub Jobs
|
||||||
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
|
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
|
||||||
# Whether to install an updater program
|
# Whether to install an updater program
|
||||||
install-updater = false
|
install-updater = false
|
||||||
# Path that installers should place binaries in
|
# Path that installers should place binaries in
|
||||||
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]
|
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]
|
||||||
# Temporarily allow changes to the `release` workflow, in which we pin actions
|
|
||||||
# to a SHA instead of a tag (https://github.com/astral-sh/uv/issues/12253)
|
|
||||||
allow-dirty = ["ci"]
|
|
||||||
|
|
||||||
[workspace.metadata.dist.github-custom-runners]
|
|
||||||
global = "depot-ubuntu-latest-4"
|
|
||||||
|
|||||||
@@ -149,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
|
|||||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||||
|
|
||||||
# For a specific version.
|
# For a specific version.
|
||||||
curl -LsSf https://astral.sh/ruff/0.11.2/install.sh | sh
|
curl -LsSf https://astral.sh/ruff/0.9.2/install.sh | sh
|
||||||
powershell -c "irm https://astral.sh/ruff/0.11.2/install.ps1 | iex"
|
powershell -c "irm https://astral.sh/ruff/0.9.2/install.ps1 | iex"
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||||
@@ -183,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
|||||||
```yaml
|
```yaml
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
# Ruff version.
|
# Ruff version.
|
||||||
rev: v0.11.2
|
rev: v0.9.2
|
||||||
hooks:
|
hooks:
|
||||||
# Run the linter.
|
# Run the linter.
|
||||||
- id: ruff
|
- id: ruff
|
||||||
@@ -452,7 +452,6 @@ Ruff is used by a number of major open-source projects and companies, including:
|
|||||||
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
|
||||||
- [Ibis](https://github.com/ibis-project/ibis)
|
- [Ibis](https://github.com/ibis-project/ibis)
|
||||||
- [ivy](https://github.com/unifyai/ivy)
|
- [ivy](https://github.com/unifyai/ivy)
|
||||||
- [JAX](https://github.com/jax-ml/jax)
|
|
||||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||||
- [Kraken Tech](https://kraken.tech/)
|
- [Kraken Tech](https://kraken.tech/)
|
||||||
- [LangChain](https://github.com/hwchase17/langchain)
|
- [LangChain](https://github.com/hwchase17/langchain)
|
||||||
|
|||||||
15
SECURITY.md
15
SECURITY.md
@@ -1,15 +0,0 @@
|
|||||||
# Security policy
|
|
||||||
|
|
||||||
## Reporting a vulnerability
|
|
||||||
|
|
||||||
If you have found a possible vulnerability, please email `security at astral dot sh`.
|
|
||||||
|
|
||||||
## Bug bounties
|
|
||||||
|
|
||||||
While we sincerely appreciate and encourage reports of suspected security problems, please note that
|
|
||||||
Astral does not currently run any bug bounty programs.
|
|
||||||
|
|
||||||
## Vulnerability disclosures
|
|
||||||
|
|
||||||
Critical vulnerabilities will be disclosed via GitHub's
|
|
||||||
[security advisory](https://github.com/astral-sh/ruff/security) system.
|
|
||||||
@@ -23,10 +23,6 @@ extend-ignore-re = [
|
|||||||
# Line ignore with trailing "spellchecker:disable-line"
|
# Line ignore with trailing "spellchecker:disable-line"
|
||||||
"(?Rm)^.*#\\s*spellchecker:disable-line$",
|
"(?Rm)^.*#\\s*spellchecker:disable-line$",
|
||||||
"LICENSEs",
|
"LICENSEs",
|
||||||
# Various third party dependencies uses `typ` as struct field names (e.g., lsp_types::LogMessageParams)
|
|
||||||
"typ",
|
|
||||||
# TODO: Remove this once the `TYP` redirects are removed from `rule_redirects.rs`
|
|
||||||
"TYP",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[default.extend-identifiers]
|
[default.extend-identifiers]
|
||||||
|
|||||||
@@ -16,10 +16,8 @@ red_knot_python_semantic = { workspace = true }
|
|||||||
red_knot_project = { workspace = true, features = ["zstd"] }
|
red_knot_project = { workspace = true, features = ["zstd"] }
|
||||||
red_knot_server = { workspace = true }
|
red_knot_server = { workspace = true }
|
||||||
ruff_db = { workspace = true, features = ["os", "cache"] }
|
ruff_db = { workspace = true, features = ["os", "cache"] }
|
||||||
ruff_python_ast = { workspace = true }
|
|
||||||
|
|
||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
argfile = { workspace = true }
|
|
||||||
chrono = { workspace = true }
|
chrono = { workspace = true }
|
||||||
clap = { workspace = true, features = ["wrap_help"] }
|
clap = { workspace = true, features = ["wrap_help"] }
|
||||||
colored = { workspace = true }
|
colored = { workspace = true }
|
||||||
@@ -32,11 +30,9 @@ tracing = { workspace = true, features = ["release_max_level_debug"] }
|
|||||||
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
|
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
|
||||||
tracing-flame = { workspace = true }
|
tracing-flame = { workspace = true }
|
||||||
tracing-tree = { workspace = true }
|
tracing-tree = { workspace = true }
|
||||||
wild = { workspace = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
ruff_db = { workspace = true, features = ["testing"] }
|
ruff_db = { workspace = true, features = ["testing"] }
|
||||||
ruff_python_trivia = { workspace = true }
|
|
||||||
|
|
||||||
insta = { workspace = true, features = ["filters"] }
|
insta = { workspace = true, features = ["filters"] }
|
||||||
insta-cmd = { workspace = true }
|
insta-cmd = { workspace = true }
|
||||||
|
|||||||
@@ -1,25 +0,0 @@
|
|||||||
# Red Knot
|
|
||||||
|
|
||||||
Red Knot is an extremely fast type checker.
|
|
||||||
Currently, it is a work-in-progress and not ready for user testing.
|
|
||||||
|
|
||||||
Red Knot is designed to prioritize good type inference, even in unannotated code,
|
|
||||||
and aims to avoid false positives.
|
|
||||||
|
|
||||||
While Red Knot will produce similar results to mypy and pyright on many codebases,
|
|
||||||
100% compatibility with these tools is a non-goal.
|
|
||||||
On some codebases, Red Knot's design decisions lead to different outcomes
|
|
||||||
than you would get from running one of these more established tools.
|
|
||||||
|
|
||||||
## Contributing
|
|
||||||
|
|
||||||
Core type checking tests are written as Markdown code blocks.
|
|
||||||
They can be found in [`red_knot_python_semantic/resources/mdtest`][resources-mdtest].
|
|
||||||
See [`red_knot_test/README.md`][mdtest-readme] for more information
|
|
||||||
on the test framework itself.
|
|
||||||
|
|
||||||
The list of open issues can be found [here][open-issues].
|
|
||||||
|
|
||||||
[mdtest-readme]: ../red_knot_test/README.md
|
|
||||||
[open-issues]: https://github.com/astral-sh/ruff/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20label%3Ared-knot
|
|
||||||
[resources-mdtest]: ../red_knot_python_semantic/resources/mdtest
|
|
||||||
@@ -1,104 +0,0 @@
|
|||||||
use std::{
|
|
||||||
fs,
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
process::Command,
|
|
||||||
};
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
// The workspace root directory is not available without walking up the tree
|
|
||||||
// https://github.com/rust-lang/cargo/issues/3946
|
|
||||||
let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
|
|
||||||
.join("..")
|
|
||||||
.join("..");
|
|
||||||
|
|
||||||
commit_info(&workspace_root);
|
|
||||||
|
|
||||||
#[allow(clippy::disallowed_methods)]
|
|
||||||
let target = std::env::var("TARGET").unwrap();
|
|
||||||
println!("cargo::rustc-env=RUST_HOST_TARGET={target}");
|
|
||||||
}
|
|
||||||
|
|
||||||
fn commit_info(workspace_root: &Path) {
|
|
||||||
// If not in a git repository, do not attempt to retrieve commit information
|
|
||||||
let git_dir = workspace_root.join(".git");
|
|
||||||
if !git_dir.exists() {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(git_head_path) = git_head(&git_dir) {
|
|
||||||
println!("cargo:rerun-if-changed={}", git_head_path.display());
|
|
||||||
|
|
||||||
let git_head_contents = fs::read_to_string(git_head_path);
|
|
||||||
if let Ok(git_head_contents) = git_head_contents {
|
|
||||||
// The contents are either a commit or a reference in the following formats
|
|
||||||
// - "<commit>" when the head is detached
|
|
||||||
// - "ref <ref>" when working on a branch
|
|
||||||
// If a commit, checking if the HEAD file has changed is sufficient
|
|
||||||
// If a ref, we need to add the head file for that ref to rebuild on commit
|
|
||||||
let mut git_ref_parts = git_head_contents.split_whitespace();
|
|
||||||
git_ref_parts.next();
|
|
||||||
if let Some(git_ref) = git_ref_parts.next() {
|
|
||||||
let git_ref_path = git_dir.join(git_ref);
|
|
||||||
println!("cargo:rerun-if-changed={}", git_ref_path.display());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let output = match Command::new("git")
|
|
||||||
.arg("log")
|
|
||||||
.arg("-1")
|
|
||||||
.arg("--date=short")
|
|
||||||
.arg("--abbrev=9")
|
|
||||||
.arg("--format=%H %h %cd %(describe)")
|
|
||||||
.output()
|
|
||||||
{
|
|
||||||
Ok(output) if output.status.success() => output,
|
|
||||||
_ => return,
|
|
||||||
};
|
|
||||||
let stdout = String::from_utf8(output.stdout).unwrap();
|
|
||||||
let mut parts = stdout.split_whitespace();
|
|
||||||
let mut next = || parts.next().unwrap();
|
|
||||||
let _commit_hash = next();
|
|
||||||
println!("cargo::rustc-env=RED_KNOT_COMMIT_SHORT_HASH={}", next());
|
|
||||||
println!("cargo::rustc-env=RED_KNOT_COMMIT_DATE={}", next());
|
|
||||||
|
|
||||||
// Describe can fail for some commits
|
|
||||||
// https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
|
|
||||||
if let Some(describe) = parts.next() {
|
|
||||||
let mut describe_parts = describe.split('-');
|
|
||||||
let _last_tag = describe_parts.next().unwrap();
|
|
||||||
|
|
||||||
// If this is the tagged commit, this component will be missing
|
|
||||||
println!(
|
|
||||||
"cargo::rustc-env=RED_KNOT_LAST_TAG_DISTANCE={}",
|
|
||||||
describe_parts.next().unwrap_or("0")
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn git_head(git_dir: &Path) -> Option<PathBuf> {
|
|
||||||
// The typical case is a standard git repository.
|
|
||||||
let git_head_path = git_dir.join("HEAD");
|
|
||||||
if git_head_path.exists() {
|
|
||||||
return Some(git_head_path);
|
|
||||||
}
|
|
||||||
if !git_dir.is_file() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
// If `.git/HEAD` doesn't exist and `.git` is actually a file,
|
|
||||||
// then let's try to attempt to read it as a worktree. If it's
|
|
||||||
// a worktree, then its contents will look like this, e.g.:
|
|
||||||
//
|
|
||||||
// gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
|
|
||||||
//
|
|
||||||
// And the HEAD file we want to watch will be at:
|
|
||||||
//
|
|
||||||
// /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
|
|
||||||
let contents = fs::read_to_string(git_dir).ok()?;
|
|
||||||
let (label, worktree_path) = contents.split_once(':')?;
|
|
||||||
if label != "gitdir" {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
let worktree_path = worktree_path.trim();
|
|
||||||
Some(PathBuf::from(worktree_path))
|
|
||||||
}
|
|
||||||
@@ -1,61 +0,0 @@
|
|||||||
# Running `mypy_primer`
|
|
||||||
|
|
||||||
## Basics
|
|
||||||
|
|
||||||
For now, we use our own [fork of mypy primer]. It can be run using `uvx --from "…" mypy_primer`. For example, to see the help message, run:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
uvx --from "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support" mypy_primer -h
|
|
||||||
```
|
|
||||||
|
|
||||||
Alternatively, you can install the forked version of `mypy_primer` using:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
uv tool install "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support"
|
|
||||||
```
|
|
||||||
|
|
||||||
and then run it using `uvx mypy_primer` or just `mypy_primer`, if your `PATH` is set up accordingly (see: [Tool executables]).
|
|
||||||
|
|
||||||
## Showing the diagnostics diff between two Git revisions
|
|
||||||
|
|
||||||
To show the diagnostics diff between two Git revisions (e.g. your feature branch and `main`), run:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
mypy_primer \
|
|
||||||
--type-checker knot \
|
|
||||||
--old origin/main \
|
|
||||||
--new my/feature \
|
|
||||||
--debug \
|
|
||||||
--output concise \
|
|
||||||
--project-selector '/black$'
|
|
||||||
```
|
|
||||||
|
|
||||||
This will show the diagnostics diff for the `black` project between the `main` branch and your `my/feature` branch. To run the
|
|
||||||
diff for all projects, you currently need to copy the project-selector regex from the CI pipeline in `.github/workflows/mypy_primer.yaml`.
|
|
||||||
|
|
||||||
You can also take a look at the [full list of ecosystem projects]. Note that some of them might still need a `knot_paths` configuration
|
|
||||||
option to work correctly.
|
|
||||||
|
|
||||||
## Avoiding recompilation
|
|
||||||
|
|
||||||
If you want to run `mypy_primer` repeatedly, e.g. for different projects, but for the same combination of `--old` and `--new`, you
|
|
||||||
can use set the `MYPY_PRIMER_NO_REBUILD` environment variable to avoid recompilation of Red Knot:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
MYPY_PRIMER_NO_REBUILD=1 mypy_primer …
|
|
||||||
```
|
|
||||||
|
|
||||||
## Running from a local copy of the repository
|
|
||||||
|
|
||||||
If you are working on a local branch, you can use `mypy_primer`'s `--repo` option to specify the path to your local copy of the `ruff` repository.
|
|
||||||
This allows `mypy_primer` to check out local branches:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
mypy_primer --repo /path/to/ruff --old origin/main --new my/local-branch …
|
|
||||||
```
|
|
||||||
|
|
||||||
Note that you might need to clean up `/tmp/mypy_primer` in order for this to work correctly.
|
|
||||||
|
|
||||||
[fork of mypy primer]: https://github.com/astral-sh/mypy_primer/tree/add-red-knot-support
|
|
||||||
[full list of ecosystem projects]: https://github.com/astral-sh/mypy_primer/blob/add-red-knot-support/mypy_primer/projects.py
|
|
||||||
[tool executables]: https://docs.astral.sh/uv/concepts/tools/#tool-executables
|
|
||||||
@@ -1,269 +0,0 @@
|
|||||||
use crate::logging::Verbosity;
|
|
||||||
use crate::python_version::PythonVersion;
|
|
||||||
use clap::{ArgAction, ArgMatches, Error, Parser};
|
|
||||||
use red_knot_project::metadata::options::{EnvironmentOptions, Options, TerminalOptions};
|
|
||||||
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
|
|
||||||
use red_knot_python_semantic::lint;
|
|
||||||
use ruff_db::system::SystemPathBuf;
|
|
||||||
|
|
||||||
#[derive(Debug, Parser)]
|
|
||||||
#[command(
|
|
||||||
author,
|
|
||||||
name = "red-knot",
|
|
||||||
about = "An extremely fast Python type checker."
|
|
||||||
)]
|
|
||||||
#[command(version)]
|
|
||||||
pub(crate) struct Args {
|
|
||||||
#[command(subcommand)]
|
|
||||||
pub(crate) command: Command,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, clap::Subcommand)]
|
|
||||||
pub(crate) enum Command {
|
|
||||||
/// Check a project for type errors.
|
|
||||||
Check(CheckCommand),
|
|
||||||
|
|
||||||
/// Start the language server
|
|
||||||
Server,
|
|
||||||
|
|
||||||
/// Display Red Knot's version
|
|
||||||
Version,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Parser)]
|
|
||||||
pub(crate) struct CheckCommand {
|
|
||||||
/// List of files or directories to check.
|
|
||||||
#[clap(
|
|
||||||
help = "List of files or directories to check [default: the project root]",
|
|
||||||
value_name = "PATH"
|
|
||||||
)]
|
|
||||||
pub paths: Vec<SystemPathBuf>,
|
|
||||||
|
|
||||||
/// Run the command within the given project directory.
|
|
||||||
///
|
|
||||||
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
|
|
||||||
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
|
|
||||||
///
|
|
||||||
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
|
|
||||||
#[arg(long, value_name = "PROJECT")]
|
|
||||||
pub(crate) project: Option<SystemPathBuf>,
|
|
||||||
|
|
||||||
/// Path to the Python installation from which Red Knot resolves type information and third-party dependencies.
|
|
||||||
///
|
|
||||||
/// If not specified, Red Knot will look at the `VIRTUAL_ENV` environment variable.
|
|
||||||
///
|
|
||||||
/// Red Knot will search in the path's `site-packages` directories for type information and
|
|
||||||
/// third-party imports.
|
|
||||||
///
|
|
||||||
/// This option is commonly used to specify the path to a virtual environment.
|
|
||||||
#[arg(long, value_name = "PATH")]
|
|
||||||
pub(crate) python: Option<SystemPathBuf>,
|
|
||||||
|
|
||||||
/// Custom directory to use for stdlib typeshed stubs.
|
|
||||||
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
|
|
||||||
pub(crate) typeshed: Option<SystemPathBuf>,
|
|
||||||
|
|
||||||
/// Additional path to use as a module-resolution source (can be passed multiple times).
|
|
||||||
#[arg(long, value_name = "PATH")]
|
|
||||||
pub(crate) extra_search_path: Option<Vec<SystemPathBuf>>,
|
|
||||||
|
|
||||||
/// Python version to assume when resolving types.
|
|
||||||
#[arg(long, value_name = "VERSION", alias = "target-version")]
|
|
||||||
pub(crate) python_version: Option<PythonVersion>,
|
|
||||||
|
|
||||||
#[clap(flatten)]
|
|
||||||
pub(crate) verbosity: Verbosity,
|
|
||||||
|
|
||||||
#[clap(flatten)]
|
|
||||||
pub(crate) rules: RulesArg,
|
|
||||||
|
|
||||||
/// The format to use for printing diagnostic messages.
|
|
||||||
#[arg(long)]
|
|
||||||
pub(crate) output_format: Option<OutputFormat>,
|
|
||||||
|
|
||||||
/// Control when colored output is used.
|
|
||||||
#[arg(long, value_name = "WHEN")]
|
|
||||||
pub(crate) color: Option<TerminalColor>,
|
|
||||||
|
|
||||||
/// Use exit code 1 if there are any warning-level diagnostics.
|
|
||||||
#[arg(long, conflicts_with = "exit_zero", default_missing_value = "true", num_args=0..1)]
|
|
||||||
pub(crate) error_on_warning: Option<bool>,
|
|
||||||
|
|
||||||
/// Always use exit code 0, even when there are error-level diagnostics.
|
|
||||||
#[arg(long)]
|
|
||||||
pub(crate) exit_zero: bool,
|
|
||||||
|
|
||||||
/// Watch files for changes and recheck files related to the changed files.
|
|
||||||
#[arg(long, short = 'W')]
|
|
||||||
pub(crate) watch: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CheckCommand {
|
|
||||||
pub(crate) fn into_options(self) -> Options {
|
|
||||||
let rules = if self.rules.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(
|
|
||||||
self.rules
|
|
||||||
.into_iter()
|
|
||||||
.map(|(rule, level)| (RangedValue::cli(rule), RangedValue::cli(level)))
|
|
||||||
.collect(),
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
Options {
|
|
||||||
environment: Some(EnvironmentOptions {
|
|
||||||
python_version: self
|
|
||||||
.python_version
|
|
||||||
.map(|version| RangedValue::cli(version.into())),
|
|
||||||
python: self.python.map(RelativePathBuf::cli),
|
|
||||||
typeshed: self.typeshed.map(RelativePathBuf::cli),
|
|
||||||
extra_paths: self.extra_search_path.map(|extra_search_paths| {
|
|
||||||
extra_search_paths
|
|
||||||
.into_iter()
|
|
||||||
.map(RelativePathBuf::cli)
|
|
||||||
.collect()
|
|
||||||
}),
|
|
||||||
..EnvironmentOptions::default()
|
|
||||||
}),
|
|
||||||
terminal: Some(TerminalOptions {
|
|
||||||
output_format: self
|
|
||||||
.output_format
|
|
||||||
.map(|output_format| RangedValue::cli(output_format.into())),
|
|
||||||
error_on_warning: self.error_on_warning,
|
|
||||||
}),
|
|
||||||
rules,
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A list of rules to enable or disable with a given severity.
|
|
||||||
///
|
|
||||||
/// This type is used to parse the `--error`, `--warn`, and `--ignore` arguments
|
|
||||||
/// while preserving the order in which they were specified (arguments last override previous severities).
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct RulesArg(Vec<(String, lint::Level)>);
|
|
||||||
|
|
||||||
impl RulesArg {
|
|
||||||
fn is_empty(&self) -> bool {
|
|
||||||
self.0.is_empty()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn into_iter(self) -> impl Iterator<Item = (String, lint::Level)> {
|
|
||||||
self.0.into_iter()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl clap::FromArgMatches for RulesArg {
|
|
||||||
fn from_arg_matches(matches: &ArgMatches) -> Result<Self, Error> {
|
|
||||||
let mut rules = Vec::new();
|
|
||||||
|
|
||||||
for (level, arg_id) in [
|
|
||||||
(lint::Level::Ignore, "ignore"),
|
|
||||||
(lint::Level::Warn, "warn"),
|
|
||||||
(lint::Level::Error, "error"),
|
|
||||||
] {
|
|
||||||
let indices = matches.indices_of(arg_id).into_iter().flatten();
|
|
||||||
let levels = matches.get_many::<String>(arg_id).into_iter().flatten();
|
|
||||||
rules.extend(
|
|
||||||
indices
|
|
||||||
.zip(levels)
|
|
||||||
.map(|(index, rule)| (index, rule, level)),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort by their index so that values specified later override earlier ones.
|
|
||||||
rules.sort_by_key(|(index, _, _)| *index);
|
|
||||||
|
|
||||||
Ok(Self(
|
|
||||||
rules
|
|
||||||
.into_iter()
|
|
||||||
.map(|(_, rule, level)| (rule.to_owned(), level))
|
|
||||||
.collect(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), Error> {
|
|
||||||
self.0 = Self::from_arg_matches(matches)?.0;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl clap::Args for RulesArg {
|
|
||||||
fn augment_args(cmd: clap::Command) -> clap::Command {
|
|
||||||
const HELP_HEADING: &str = "Enabling / disabling rules";
|
|
||||||
|
|
||||||
cmd.arg(
|
|
||||||
clap::Arg::new("error")
|
|
||||||
.long("error")
|
|
||||||
.action(ArgAction::Append)
|
|
||||||
.help("Treat the given rule as having severity 'error'. Can be specified multiple times.")
|
|
||||||
.value_name("RULE")
|
|
||||||
.help_heading(HELP_HEADING),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
clap::Arg::new("warn")
|
|
||||||
.long("warn")
|
|
||||||
.action(ArgAction::Append)
|
|
||||||
.help("Treat the given rule as having severity 'warn'. Can be specified multiple times.")
|
|
||||||
.value_name("RULE")
|
|
||||||
.help_heading(HELP_HEADING),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
clap::Arg::new("ignore")
|
|
||||||
.long("ignore")
|
|
||||||
.action(ArgAction::Append)
|
|
||||||
.help("Disables the rule. Can be specified multiple times.")
|
|
||||||
.value_name("RULE")
|
|
||||||
.help_heading(HELP_HEADING),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
|
|
||||||
Self::augment_args(cmd)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The diagnostic output format.
|
|
||||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
|
||||||
pub enum OutputFormat {
|
|
||||||
/// Print diagnostics verbosely, with context and helpful hints.
|
|
||||||
///
|
|
||||||
/// Diagnostic messages may include additional context and
|
|
||||||
/// annotations on the input to help understand the message.
|
|
||||||
#[default]
|
|
||||||
#[value(name = "full")]
|
|
||||||
Full,
|
|
||||||
/// Print diagnostics concisely, one per line.
|
|
||||||
///
|
|
||||||
/// This will guarantee that each diagnostic is printed on
|
|
||||||
/// a single line. Only the most important or primary aspects
|
|
||||||
/// of the diagnostic are included. Contextual information is
|
|
||||||
/// dropped.
|
|
||||||
#[value(name = "concise")]
|
|
||||||
Concise,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<OutputFormat> for ruff_db::diagnostic::DiagnosticFormat {
|
|
||||||
fn from(format: OutputFormat) -> ruff_db::diagnostic::DiagnosticFormat {
|
|
||||||
match format {
|
|
||||||
OutputFormat::Full => Self::Full,
|
|
||||||
OutputFormat::Concise => Self::Concise,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Control when colored output is used.
|
|
||||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
|
||||||
pub(crate) enum TerminalColor {
|
|
||||||
/// Display colors if the output goes to an interactive terminal.
|
|
||||||
#[default]
|
|
||||||
Auto,
|
|
||||||
|
|
||||||
/// Always display colors.
|
|
||||||
Always,
|
|
||||||
|
|
||||||
/// Never display colors.
|
|
||||||
Never,
|
|
||||||
}
|
|
||||||
@@ -1,28 +1,104 @@
|
|||||||
use std::io::{self, stdout, BufWriter, Write};
|
|
||||||
use std::process::{ExitCode, Termination};
|
use std::process::{ExitCode, Termination};
|
||||||
|
|
||||||
use anyhow::Result;
|
|
||||||
use std::sync::Mutex;
|
use std::sync::Mutex;
|
||||||
|
|
||||||
use crate::args::{Args, CheckCommand, Command, TerminalColor};
|
|
||||||
use crate::logging::setup_tracing;
|
|
||||||
use anyhow::{anyhow, Context};
|
use anyhow::{anyhow, Context};
|
||||||
use clap::Parser;
|
use clap::Parser;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
use crossbeam::channel as crossbeam_channel;
|
use crossbeam::channel as crossbeam_channel;
|
||||||
use red_knot_project::metadata::options::Options;
|
use python_version::PythonVersion;
|
||||||
|
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
|
||||||
|
use red_knot_project::watch;
|
||||||
use red_knot_project::watch::ProjectWatcher;
|
use red_knot_project::watch::ProjectWatcher;
|
||||||
use red_knot_project::{watch, Db};
|
|
||||||
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
||||||
use red_knot_server::run_server;
|
use red_knot_server::run_server;
|
||||||
use ruff_db::diagnostic::{DisplayDiagnosticConfig, OldDiagnosticTrait, Severity};
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
|
||||||
use salsa::plumbing::ZalsaDatabase;
|
use salsa::plumbing::ZalsaDatabase;
|
||||||
|
|
||||||
mod args;
|
use crate::logging::{setup_tracing, Verbosity};
|
||||||
|
|
||||||
mod logging;
|
mod logging;
|
||||||
mod python_version;
|
mod python_version;
|
||||||
mod version;
|
mod verbosity;
|
||||||
|
|
||||||
|
#[derive(Debug, Parser)]
|
||||||
|
#[command(
|
||||||
|
author,
|
||||||
|
name = "red-knot",
|
||||||
|
about = "An extremely fast Python type checker."
|
||||||
|
)]
|
||||||
|
#[command(version)]
|
||||||
|
struct Args {
|
||||||
|
#[command(subcommand)]
|
||||||
|
pub(crate) command: Option<Command>,
|
||||||
|
|
||||||
|
/// Run the command within the given project directory.
|
||||||
|
///
|
||||||
|
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
|
||||||
|
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
|
||||||
|
///
|
||||||
|
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
|
||||||
|
#[arg(long, value_name = "PROJECT")]
|
||||||
|
project: Option<SystemPathBuf>,
|
||||||
|
|
||||||
|
/// Path to the virtual environment the project uses.
|
||||||
|
///
|
||||||
|
/// If provided, red-knot will use the `site-packages` directory of this virtual environment
|
||||||
|
/// to resolve type information for the project's third-party dependencies.
|
||||||
|
#[arg(long, value_name = "PATH")]
|
||||||
|
venv_path: Option<SystemPathBuf>,
|
||||||
|
|
||||||
|
/// Custom directory to use for stdlib typeshed stubs.
|
||||||
|
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
|
||||||
|
typeshed: Option<SystemPathBuf>,
|
||||||
|
|
||||||
|
/// Additional path to use as a module-resolution source (can be passed multiple times).
|
||||||
|
#[arg(long, value_name = "PATH")]
|
||||||
|
extra_search_path: Option<Vec<SystemPathBuf>>,
|
||||||
|
|
||||||
|
/// Python version to assume when resolving types.
|
||||||
|
#[arg(long, value_name = "VERSION", alias = "target-version")]
|
||||||
|
python_version: Option<PythonVersion>,
|
||||||
|
|
||||||
|
#[clap(flatten)]
|
||||||
|
verbosity: Verbosity,
|
||||||
|
|
||||||
|
/// Run in watch mode by re-running whenever files change.
|
||||||
|
#[arg(long, short = 'W')]
|
||||||
|
watch: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Args {
|
||||||
|
fn to_options(&self, cli_cwd: &SystemPath) -> Options {
|
||||||
|
Options {
|
||||||
|
environment: Some(EnvironmentOptions {
|
||||||
|
python_version: self.python_version.map(Into::into),
|
||||||
|
venv_path: self
|
||||||
|
.venv_path
|
||||||
|
.as_ref()
|
||||||
|
.map(|venv_path| SystemPath::absolute(venv_path, cli_cwd)),
|
||||||
|
typeshed: self
|
||||||
|
.typeshed
|
||||||
|
.as_ref()
|
||||||
|
.map(|typeshed| SystemPath::absolute(typeshed, cli_cwd)),
|
||||||
|
extra_paths: self.extra_search_path.as_ref().map(|extra_search_paths| {
|
||||||
|
extra_search_paths
|
||||||
|
.iter()
|
||||||
|
.map(|path| SystemPath::absolute(path, cli_cwd))
|
||||||
|
.collect()
|
||||||
|
}),
|
||||||
|
..EnvironmentOptions::default()
|
||||||
|
}),
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, clap::Subcommand)]
|
||||||
|
pub enum Command {
|
||||||
|
/// Start the language server
|
||||||
|
Server,
|
||||||
|
}
|
||||||
|
|
||||||
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
||||||
pub fn main() -> ExitStatus {
|
pub fn main() -> ExitStatus {
|
||||||
@@ -39,15 +115,6 @@ pub fn main() -> ExitStatus {
|
|||||||
// the configuration it is help to chain errors ("resolving configuration failed" ->
|
// the configuration it is help to chain errors ("resolving configuration failed" ->
|
||||||
// "failed to read file: subdir/pyproject.toml")
|
// "failed to read file: subdir/pyproject.toml")
|
||||||
for cause in error.chain() {
|
for cause in error.chain() {
|
||||||
// Exit "gracefully" on broken pipe errors.
|
|
||||||
//
|
|
||||||
// See: https://github.com/BurntSushi/ripgrep/blob/bf63fe8f258afc09bae6caa48f0ae35eaf115005/crates/core/main.rs#L47C1-L61C14
|
|
||||||
if let Some(ioerr) = cause.downcast_ref::<io::Error>() {
|
|
||||||
if ioerr.kind() == io::ErrorKind::BrokenPipe {
|
|
||||||
return ExitStatus::Success;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
|
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -56,36 +123,18 @@ pub fn main() -> ExitStatus {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn run() -> anyhow::Result<ExitStatus> {
|
fn run() -> anyhow::Result<ExitStatus> {
|
||||||
let args = wild::args_os();
|
let args = Args::parse_from(std::env::args());
|
||||||
let args = argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX)
|
|
||||||
.context("Failed to read CLI arguments from file")?;
|
|
||||||
let args = Args::parse_from(args);
|
|
||||||
|
|
||||||
match args.command {
|
if matches!(args.command, Some(Command::Server)) {
|
||||||
Command::Server => run_server().map(|()| ExitStatus::Success),
|
return run_server().map(|()| ExitStatus::Success);
|
||||||
Command::Check(check_args) => run_check(check_args),
|
|
||||||
Command::Version => version().map(|()| ExitStatus::Success),
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn version() -> Result<()> {
|
|
||||||
let mut stdout = BufWriter::new(io::stdout().lock());
|
|
||||||
let version_info = crate::version::version();
|
|
||||||
writeln!(stdout, "red knot {}", &version_info)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
|
||||||
set_colored_override(args.color);
|
|
||||||
|
|
||||||
let verbosity = args.verbosity.level();
|
let verbosity = args.verbosity.level();
|
||||||
countme::enable(verbosity.is_trace());
|
countme::enable(verbosity.is_trace());
|
||||||
let _guard = setup_tracing(verbosity)?;
|
let _guard = setup_tracing(verbosity)?;
|
||||||
|
|
||||||
tracing::debug!("Version: {}", version::version());
|
|
||||||
|
|
||||||
// The base path to which all CLI arguments are relative to.
|
// The base path to which all CLI arguments are relative to.
|
||||||
let cwd = {
|
let cli_base_path = {
|
||||||
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
|
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
|
||||||
SystemPathBuf::from_path_buf(cwd)
|
SystemPathBuf::from_path_buf(cwd)
|
||||||
.map_err(|path| {
|
.map_err(|path| {
|
||||||
@@ -96,41 +145,25 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
|||||||
})?
|
})?
|
||||||
};
|
};
|
||||||
|
|
||||||
let project_path = args
|
let cwd = args
|
||||||
.project
|
.project
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.map(|project| {
|
.map(|cwd| {
|
||||||
if project.as_std_path().is_dir() {
|
if cwd.as_std_path().is_dir() {
|
||||||
Ok(SystemPath::absolute(project, &cwd))
|
Ok(SystemPath::absolute(cwd, &cli_base_path))
|
||||||
} else {
|
} else {
|
||||||
Err(anyhow!(
|
Err(anyhow!("Provided project path `{cwd}` is not a directory"))
|
||||||
"Provided project path `{project}` is not a directory"
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.transpose()?
|
.transpose()?
|
||||||
.unwrap_or_else(|| cwd.clone());
|
.unwrap_or_else(|| cli_base_path.clone());
|
||||||
|
|
||||||
let check_paths: Vec<_> = args
|
let system = OsSystem::new(cwd.clone());
|
||||||
.paths
|
let cli_options = args.to_options(&cwd);
|
||||||
.iter()
|
let mut workspace_metadata = ProjectMetadata::discover(system.current_directory(), &system)?;
|
||||||
.map(|path| SystemPath::absolute(path, &cwd))
|
workspace_metadata.apply_cli_options(cli_options.clone());
|
||||||
.collect();
|
|
||||||
|
|
||||||
let system = OsSystem::new(cwd);
|
let mut db = ProjectDatabase::new(workspace_metadata, system)?;
|
||||||
let watch = args.watch;
|
|
||||||
let exit_zero = args.exit_zero;
|
|
||||||
|
|
||||||
let cli_options = args.into_options();
|
|
||||||
let mut project_metadata = ProjectMetadata::discover(&project_path, &system)?;
|
|
||||||
project_metadata.apply_cli_options(cli_options.clone());
|
|
||||||
project_metadata.apply_configuration_files(&system)?;
|
|
||||||
|
|
||||||
let mut db = ProjectDatabase::new(project_metadata, system)?;
|
|
||||||
|
|
||||||
if !check_paths.is_empty() {
|
|
||||||
db.project().set_included_paths(&mut db, check_paths);
|
|
||||||
}
|
|
||||||
|
|
||||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options);
|
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options);
|
||||||
|
|
||||||
@@ -144,21 +177,17 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
|||||||
}
|
}
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let exit_status = if watch {
|
let exit_status = if args.watch {
|
||||||
main_loop.watch(&mut db)?
|
main_loop.watch(&mut db)?
|
||||||
} else {
|
} else {
|
||||||
main_loop.run(&mut db)?
|
main_loop.run(&mut db)
|
||||||
};
|
};
|
||||||
|
|
||||||
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
|
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
|
||||||
|
|
||||||
std::mem::forget(db);
|
std::mem::forget(db);
|
||||||
|
|
||||||
if exit_zero {
|
Ok(exit_status)
|
||||||
Ok(ExitStatus::Success)
|
|
||||||
} else {
|
|
||||||
Ok(exit_status)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone)]
|
#[derive(Copy, Clone)]
|
||||||
@@ -207,7 +236,7 @@ impl MainLoop {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn watch(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
|
fn watch(mut self, db: &mut ProjectDatabase) -> anyhow::Result<ExitStatus> {
|
||||||
tracing::debug!("Starting watch mode");
|
tracing::debug!("Starting watch mode");
|
||||||
let sender = self.sender.clone();
|
let sender = self.sender.clone();
|
||||||
let watcher = watch::directory_watcher(move |event| {
|
let watcher = watch::directory_watcher(move |event| {
|
||||||
@@ -216,12 +245,12 @@ impl MainLoop {
|
|||||||
|
|
||||||
self.watcher = Some(ProjectWatcher::new(watcher, db));
|
self.watcher = Some(ProjectWatcher::new(watcher, db));
|
||||||
|
|
||||||
self.run(db)?;
|
self.run(db);
|
||||||
|
|
||||||
Ok(ExitStatus::Success)
|
Ok(ExitStatus::Success)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
|
fn run(mut self, db: &mut ProjectDatabase) -> ExitStatus {
|
||||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||||
|
|
||||||
let result = self.main_loop(db);
|
let result = self.main_loop(db);
|
||||||
@@ -231,7 +260,7 @@ impl MainLoop {
|
|||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main_loop(&mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
|
fn main_loop(&mut self, db: &mut ProjectDatabase) -> ExitStatus {
|
||||||
// Schedule the first check.
|
// Schedule the first check.
|
||||||
tracing::debug!("Starting main loop");
|
tracing::debug!("Starting main loop");
|
||||||
|
|
||||||
@@ -259,54 +288,11 @@ impl MainLoop {
|
|||||||
result,
|
result,
|
||||||
revision: check_revision,
|
revision: check_revision,
|
||||||
} => {
|
} => {
|
||||||
let terminal_settings = db.project().settings(db).terminal();
|
let has_diagnostics = !result.is_empty();
|
||||||
let display_config = DisplayDiagnosticConfig::default()
|
|
||||||
.format(terminal_settings.output_format)
|
|
||||||
.color(colored::control::SHOULD_COLORIZE.should_colorize());
|
|
||||||
|
|
||||||
let min_error_severity = if terminal_settings.error_on_warning {
|
|
||||||
Severity::Warning
|
|
||||||
} else {
|
|
||||||
Severity::Error
|
|
||||||
};
|
|
||||||
|
|
||||||
if check_revision == revision {
|
if check_revision == revision {
|
||||||
if db.project().files(db).is_empty() {
|
#[allow(clippy::print_stdout)]
|
||||||
tracing::warn!("No python files found under the given path(s)");
|
for diagnostic in result {
|
||||||
}
|
println!("{}", diagnostic.display(db));
|
||||||
|
|
||||||
let mut stdout = stdout().lock();
|
|
||||||
|
|
||||||
if result.is_empty() {
|
|
||||||
writeln!(stdout, "All checks passed!")?;
|
|
||||||
|
|
||||||
if self.watcher.is_none() {
|
|
||||||
return Ok(ExitStatus::Success);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let mut failed = false;
|
|
||||||
let diagnostics_count = result.len();
|
|
||||||
|
|
||||||
for diagnostic in result {
|
|
||||||
writeln!(stdout, "{}", diagnostic.display(db, &display_config))?;
|
|
||||||
|
|
||||||
failed |= diagnostic.severity() >= min_error_severity;
|
|
||||||
}
|
|
||||||
|
|
||||||
writeln!(
|
|
||||||
stdout,
|
|
||||||
"Found {} diagnostic{}",
|
|
||||||
diagnostics_count,
|
|
||||||
if diagnostics_count > 1 { "s" } else { "" }
|
|
||||||
)?;
|
|
||||||
|
|
||||||
if self.watcher.is_none() {
|
|
||||||
return Ok(if failed {
|
|
||||||
ExitStatus::Failure
|
|
||||||
} else {
|
|
||||||
ExitStatus::Success
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
tracing::debug!(
|
tracing::debug!(
|
||||||
@@ -314,6 +300,14 @@ impl MainLoop {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if self.watcher.is_none() {
|
||||||
|
return if has_diagnostics {
|
||||||
|
ExitStatus::Failure
|
||||||
|
} else {
|
||||||
|
ExitStatus::Success
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
tracing::trace!("Counts after last check:\n{}", countme::get_all());
|
tracing::trace!("Counts after last check:\n{}", countme::get_all());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -331,14 +325,14 @@ impl MainLoop {
|
|||||||
// TODO: Don't use Salsa internal APIs
|
// TODO: Don't use Salsa internal APIs
|
||||||
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
|
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
|
||||||
let _ = db.zalsa_mut();
|
let _ = db.zalsa_mut();
|
||||||
return Ok(ExitStatus::Success);
|
return ExitStatus::Success;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tracing::debug!("Waiting for next main loop message.");
|
tracing::debug!("Waiting for next main loop message.");
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(ExitStatus::Success)
|
ExitStatus::Success
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -358,28 +352,9 @@ impl MainLoopCancellationToken {
|
|||||||
enum MainLoopMessage {
|
enum MainLoopMessage {
|
||||||
CheckWorkspace,
|
CheckWorkspace,
|
||||||
CheckCompleted {
|
CheckCompleted {
|
||||||
/// The diagnostics that were found during the check.
|
result: Vec<Box<dyn Diagnostic>>,
|
||||||
result: Vec<Box<dyn OldDiagnosticTrait>>,
|
|
||||||
revision: u64,
|
revision: u64,
|
||||||
},
|
},
|
||||||
ApplyChanges(Vec<watch::ChangeEvent>),
|
ApplyChanges(Vec<watch::ChangeEvent>),
|
||||||
Exit,
|
Exit,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_colored_override(color: Option<TerminalColor>) {
|
|
||||||
let Some(color) = color else {
|
|
||||||
return;
|
|
||||||
};
|
|
||||||
|
|
||||||
match color {
|
|
||||||
TerminalColor::Auto => {
|
|
||||||
colored::control::unset_override();
|
|
||||||
}
|
|
||||||
TerminalColor::Always => {
|
|
||||||
colored::control::set_override(true);
|
|
||||||
}
|
|
||||||
TerminalColor::Never => {
|
|
||||||
colored::control::set_override(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ impl std::fmt::Display for PythonVersion {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<PythonVersion> for ruff_python_ast::PythonVersion {
|
impl From<PythonVersion> for red_knot_python_semantic::PythonVersion {
|
||||||
fn from(value: PythonVersion) -> Self {
|
fn from(value: PythonVersion) -> Self {
|
||||||
match value {
|
match value {
|
||||||
PythonVersion::Py37 => Self::PY37,
|
PythonVersion::Py37 => Self::PY37,
|
||||||
@@ -61,8 +61,8 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn same_default_as_python_version() {
|
fn same_default_as_python_version() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
ruff_python_ast::PythonVersion::from(PythonVersion::default()),
|
red_knot_python_semantic::PythonVersion::from(PythonVersion::default()),
|
||||||
ruff_python_ast::PythonVersion::default()
|
red_knot_python_semantic::PythonVersion::default()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
1
crates/red_knot/src/verbosity.rs
Normal file
1
crates/red_knot/src/verbosity.rs
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
@@ -1,105 +0,0 @@
|
|||||||
//! Code for representing Red Knot's release version number.
|
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
/// Information about the git repository where Red Knot was built from.
|
|
||||||
pub(crate) struct CommitInfo {
|
|
||||||
short_commit_hash: String,
|
|
||||||
commit_date: String,
|
|
||||||
commits_since_last_tag: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Red Knot's version.
|
|
||||||
pub(crate) struct VersionInfo {
|
|
||||||
/// Red Knot's version, such as "0.5.1"
|
|
||||||
version: String,
|
|
||||||
/// Information about the git commit we may have been built from.
|
|
||||||
///
|
|
||||||
/// `None` if not built from a git repo or if retrieval failed.
|
|
||||||
commit_info: Option<CommitInfo>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for VersionInfo {
|
|
||||||
/// Formatted version information: `<version>[+<commits>] (<commit> <date>)`
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}", self.version)?;
|
|
||||||
|
|
||||||
if let Some(ref ci) = self.commit_info {
|
|
||||||
if ci.commits_since_last_tag > 0 {
|
|
||||||
write!(f, "+{}", ci.commits_since_last_tag)?;
|
|
||||||
}
|
|
||||||
write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns information about Red Knot's version.
|
|
||||||
pub(crate) fn version() -> VersionInfo {
|
|
||||||
// Environment variables are only read at compile-time
|
|
||||||
macro_rules! option_env_str {
|
|
||||||
($name:expr) => {
|
|
||||||
option_env!($name).map(|s| s.to_string())
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// This version is pulled from Cargo.toml and set by Cargo
|
|
||||||
let version = option_env_str!("CARGO_PKG_VERSION").unwrap();
|
|
||||||
|
|
||||||
// Commit info is pulled from git and set by `build.rs`
|
|
||||||
let commit_info =
|
|
||||||
option_env_str!("RED_KNOT_COMMIT_SHORT_HASH").map(|short_commit_hash| CommitInfo {
|
|
||||||
short_commit_hash,
|
|
||||||
commit_date: option_env_str!("RED_KNOT_COMMIT_DATE").unwrap(),
|
|
||||||
commits_since_last_tag: option_env_str!("RED_KNOT_LAST_TAG_DISTANCE")
|
|
||||||
.as_deref()
|
|
||||||
.map_or(0, |value| value.parse::<u32>().unwrap_or(0)),
|
|
||||||
});
|
|
||||||
|
|
||||||
VersionInfo {
|
|
||||||
version,
|
|
||||||
commit_info,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use insta::assert_snapshot;
|
|
||||||
|
|
||||||
use super::{CommitInfo, VersionInfo};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn version_formatting() {
|
|
||||||
let version = VersionInfo {
|
|
||||||
version: "0.0.0".to_string(),
|
|
||||||
commit_info: None,
|
|
||||||
};
|
|
||||||
assert_snapshot!(version, @"0.0.0");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn version_formatting_with_commit_info() {
|
|
||||||
let version = VersionInfo {
|
|
||||||
version: "0.0.0".to_string(),
|
|
||||||
commit_info: Some(CommitInfo {
|
|
||||||
short_commit_hash: "53b0f5d92".to_string(),
|
|
||||||
commit_date: "2023-10-19".to_string(),
|
|
||||||
commits_since_last_tag: 0,
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn version_formatting_with_commits_since_last_tag() {
|
|
||||||
let version = VersionInfo {
|
|
||||||
version: "0.0.0".to_string(),
|
|
||||||
commit_info: Some(CommitInfo {
|
|
||||||
short_commit_hash: "53b0f5d92".to_string(),
|
|
||||||
commit_date: "2023-10-19".to_string(),
|
|
||||||
commits_since_last_tag: 24,
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -13,7 +13,7 @@ license.workspace = true
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ruff_cache = { workspace = true }
|
ruff_cache = { workspace = true }
|
||||||
ruff_db = { workspace = true, features = ["cache", "serde"] }
|
ruff_db = { workspace = true, features = ["os", "cache", "serde"] }
|
||||||
ruff_macros = { workspace = true }
|
ruff_macros = { workspace = true }
|
||||||
ruff_python_ast = { workspace = true, features = ["serde"] }
|
ruff_python_ast = { workspace = true, features = ["serde"] }
|
||||||
ruff_text_size = { workspace = true }
|
ruff_text_size = { workspace = true }
|
||||||
@@ -24,11 +24,10 @@ anyhow = { workspace = true }
|
|||||||
crossbeam = { workspace = true }
|
crossbeam = { workspace = true }
|
||||||
glob = { workspace = true }
|
glob = { workspace = true }
|
||||||
notify = { workspace = true }
|
notify = { workspace = true }
|
||||||
pep440_rs = { workspace = true, features = ["version-ranges"] }
|
pep440_rs = { workspace = true }
|
||||||
rayon = { workspace = true }
|
rayon = { workspace = true }
|
||||||
rustc-hash = { workspace = true }
|
rustc-hash = { workspace = true }
|
||||||
salsa = { workspace = true }
|
salsa = { workspace = true }
|
||||||
schemars = { workspace = true, optional = true }
|
|
||||||
serde = { workspace = true }
|
serde = { workspace = true }
|
||||||
thiserror = { workspace = true }
|
thiserror = { workspace = true }
|
||||||
toml = { workspace = true }
|
toml = { workspace = true }
|
||||||
@@ -41,9 +40,8 @@ insta = { workspace = true, features = ["redactions", "ron"] }
|
|||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["zstd"]
|
default = ["zstd"]
|
||||||
deflate = ["red_knot_vendored/deflate"]
|
|
||||||
schemars = ["dep:schemars", "ruff_db/schemars", "red_knot_python_semantic/schemars"]
|
|
||||||
zstd = ["red_knot_vendored/zstd"]
|
zstd = ["red_knot_vendored/zstd"]
|
||||||
|
deflate = ["red_knot_vendored/deflate"]
|
||||||
|
|
||||||
[lints]
|
[lints]
|
||||||
workspace = true
|
workspace = true
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
use std::{collections::HashMap, hash::BuildHasher};
|
use std::{collections::HashMap, hash::BuildHasher};
|
||||||
|
|
||||||
use red_knot_python_semantic::{PythonPath, PythonPlatform};
|
use red_knot_python_semantic::{PythonPlatform, PythonVersion, SitePackages};
|
||||||
use ruff_db::system::SystemPathBuf;
|
use ruff_db::system::SystemPathBuf;
|
||||||
use ruff_python_ast::PythonVersion;
|
|
||||||
|
|
||||||
/// Combine two values, preferring the values in `self`.
|
/// Combine two values, preferring the values in `self`.
|
||||||
///
|
///
|
||||||
@@ -128,7 +127,7 @@ macro_rules! impl_noop_combine {
|
|||||||
|
|
||||||
impl_noop_combine!(SystemPathBuf);
|
impl_noop_combine!(SystemPathBuf);
|
||||||
impl_noop_combine!(PythonPlatform);
|
impl_noop_combine!(PythonPlatform);
|
||||||
impl_noop_combine!(PythonPath);
|
impl_noop_combine!(SitePackages);
|
||||||
impl_noop_combine!(PythonVersion);
|
impl_noop_combine!(PythonVersion);
|
||||||
|
|
||||||
// std types
|
// std types
|
||||||
|
|||||||
@@ -2,10 +2,10 @@ use std::panic::RefUnwindSafe;
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use crate::DEFAULT_LINT_REGISTRY;
|
use crate::DEFAULT_LINT_REGISTRY;
|
||||||
use crate::{Project, ProjectMetadata};
|
use crate::{check_file, Project, ProjectMetadata};
|
||||||
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
|
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
|
||||||
use red_knot_python_semantic::{Db as SemanticDb, Program};
|
use red_knot_python_semantic::{Db as SemanticDb, Program};
|
||||||
use ruff_db::diagnostic::OldDiagnosticTrait;
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
use ruff_db::files::{File, Files};
|
use ruff_db::files::{File, Files};
|
||||||
use ruff_db::system::System;
|
use ruff_db::system::System;
|
||||||
use ruff_db::vendored::VendoredFileSystem;
|
use ruff_db::vendored::VendoredFileSystem;
|
||||||
@@ -27,6 +27,7 @@ pub struct ProjectDatabase {
|
|||||||
storage: salsa::Storage<ProjectDatabase>,
|
storage: salsa::Storage<ProjectDatabase>,
|
||||||
files: Files,
|
files: Files,
|
||||||
system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
|
system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
|
||||||
|
rule_selection: Arc<RuleSelection>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ProjectDatabase {
|
impl ProjectDatabase {
|
||||||
@@ -34,11 +35,14 @@ impl ProjectDatabase {
|
|||||||
where
|
where
|
||||||
S: System + 'static + Send + Sync + RefUnwindSafe,
|
S: System + 'static + Send + Sync + RefUnwindSafe,
|
||||||
{
|
{
|
||||||
|
let rule_selection = RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY);
|
||||||
|
|
||||||
let mut db = Self {
|
let mut db = Self {
|
||||||
project: None,
|
project: None,
|
||||||
storage: salsa::Storage::default(),
|
storage: salsa::Storage::default(),
|
||||||
files: Files::default(),
|
files: Files::default(),
|
||||||
system: Arc::new(system),
|
system: Arc::new(system),
|
||||||
|
rule_selection: Arc::new(rule_selection),
|
||||||
};
|
};
|
||||||
|
|
||||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||||
@@ -55,13 +59,14 @@ impl ProjectDatabase {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Checks all open files in the project and its dependencies.
|
/// Checks all open files in the project and its dependencies.
|
||||||
pub fn check(&self) -> Result<Vec<Box<dyn OldDiagnosticTrait>>, Cancelled> {
|
pub fn check(&self) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
|
||||||
self.with_db(|db| db.project().check(db))
|
self.with_db(|db| db.project().check(db))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(self))]
|
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
|
||||||
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn OldDiagnosticTrait>>, Cancelled> {
|
let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();
|
||||||
self.with_db(|db| self.project().check_file(db, file))
|
|
||||||
|
self.with_db(|db| check_file(db, file))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a mutable reference to the system.
|
/// Returns a mutable reference to the system.
|
||||||
@@ -113,8 +118,8 @@ impl SemanticDb for ProjectDatabase {
|
|||||||
project.is_file_open(self, file)
|
project.is_file_open(self, file)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn rule_selection(&self) -> Arc<RuleSelection> {
|
fn rule_selection(&self) -> &RuleSelection {
|
||||||
self.project().rules(self)
|
&self.rule_selection
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lint_registry(&self) -> &LintRegistry {
|
fn lint_registry(&self) -> &LintRegistry {
|
||||||
@@ -185,6 +190,7 @@ pub(crate) mod tests {
|
|||||||
files: Files,
|
files: Files,
|
||||||
system: TestSystem,
|
system: TestSystem,
|
||||||
vendored: VendoredFileSystem,
|
vendored: VendoredFileSystem,
|
||||||
|
rule_selection: RuleSelection,
|
||||||
project: Option<Project>,
|
project: Option<Project>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -196,6 +202,7 @@ pub(crate) mod tests {
|
|||||||
vendored: red_knot_vendored::file_system().clone(),
|
vendored: red_knot_vendored::file_system().clone(),
|
||||||
files: Files::default(),
|
files: Files::default(),
|
||||||
events: Arc::default(),
|
events: Arc::default(),
|
||||||
|
rule_selection: RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY),
|
||||||
project: None,
|
project: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -267,8 +274,8 @@ pub(crate) mod tests {
|
|||||||
!file.path(self).is_vendored_path()
|
!file.path(self).is_vendored_path()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn rule_selection(&self) -> Arc<RuleSelection> {
|
fn rule_selection(&self) -> &RuleSelection {
|
||||||
self.project().rules(self)
|
&self.rule_selection
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lint_registry(&self) -> &LintRegistry {
|
fn lint_registry(&self) -> &LintRegistry {
|
||||||
|
|||||||
@@ -2,11 +2,10 @@ use crate::db::{Db, ProjectDatabase};
|
|||||||
use crate::metadata::options::Options;
|
use crate::metadata::options::Options;
|
||||||
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
|
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
|
||||||
use crate::{Project, ProjectMetadata};
|
use crate::{Project, ProjectMetadata};
|
||||||
use std::collections::BTreeSet;
|
|
||||||
|
|
||||||
use crate::walk::ProjectFilesWalker;
|
|
||||||
use red_knot_python_semantic::Program;
|
use red_knot_python_semantic::Program;
|
||||||
use ruff_db::files::{File, Files};
|
use ruff_db::files::{system_path_to_file, File, Files};
|
||||||
|
use ruff_db::system::walk_directory::WalkState;
|
||||||
use ruff_db::system::SystemPath;
|
use ruff_db::system::SystemPath;
|
||||||
use ruff_db::Db as _;
|
use ruff_db::Db as _;
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
@@ -15,7 +14,7 @@ impl ProjectDatabase {
|
|||||||
#[tracing::instrument(level = "debug", skip(self, changes, cli_options))]
|
#[tracing::instrument(level = "debug", skip(self, changes, cli_options))]
|
||||||
pub fn apply_changes(&mut self, changes: Vec<ChangeEvent>, cli_options: Option<&Options>) {
|
pub fn apply_changes(&mut self, changes: Vec<ChangeEvent>, cli_options: Option<&Options>) {
|
||||||
let mut project = self.project();
|
let mut project = self.project();
|
||||||
let project_root = project.root(self).to_path_buf();
|
let project_path = project.root(self).to_path_buf();
|
||||||
let program = Program::get(self);
|
let program = Program::get(self);
|
||||||
let custom_stdlib_versions_path = program
|
let custom_stdlib_versions_path = program
|
||||||
.custom_stdlib_search_path(self)
|
.custom_stdlib_search_path(self)
|
||||||
@@ -30,7 +29,7 @@ impl ProjectDatabase {
|
|||||||
|
|
||||||
// Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
|
// Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
|
||||||
let mut synced_files = FxHashSet::default();
|
let mut synced_files = FxHashSet::default();
|
||||||
let mut sync_recursively = BTreeSet::default();
|
let mut synced_recursively = FxHashSet::default();
|
||||||
|
|
||||||
let mut sync_path = |db: &mut ProjectDatabase, path: &SystemPath| {
|
let mut sync_path = |db: &mut ProjectDatabase, path: &SystemPath| {
|
||||||
if synced_files.insert(path.to_path_buf()) {
|
if synced_files.insert(path.to_path_buf()) {
|
||||||
@@ -38,13 +37,17 @@ impl ProjectDatabase {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
for change in changes {
|
let mut sync_recursively = |db: &mut ProjectDatabase, path: &SystemPath| {
|
||||||
tracing::trace!("Handle change: {:?}", change);
|
if synced_recursively.insert(path.to_path_buf()) {
|
||||||
|
Files::sync_recursively(db, path);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
for change in changes {
|
||||||
if let Some(path) = change.system_path() {
|
if let Some(path) = change.system_path() {
|
||||||
if matches!(
|
if matches!(
|
||||||
path.file_name(),
|
path.file_name(),
|
||||||
Some(".gitignore" | ".ignore" | "knot.toml" | "pyproject.toml")
|
Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
|
||||||
) {
|
) {
|
||||||
// Changes to ignore files or settings can change the project structure or add/remove files.
|
// Changes to ignore files or settings can change the project structure or add/remove files.
|
||||||
project_changed = true;
|
project_changed = true;
|
||||||
@@ -66,27 +69,16 @@ impl ProjectDatabase {
|
|||||||
match kind {
|
match kind {
|
||||||
CreatedKind::File => sync_path(self, &path),
|
CreatedKind::File => sync_path(self, &path),
|
||||||
CreatedKind::Directory | CreatedKind::Any => {
|
CreatedKind::Directory | CreatedKind::Any => {
|
||||||
sync_recursively.insert(path.clone());
|
sync_recursively(self, &path);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Unlike other files, it's not only important to update the status of existing
|
if self.system().is_file(&path) {
|
||||||
// and known `File`s (`sync_recursively`), it's also important to discover new files
|
// Add the parent directory because `walkdir` always visits explicitly passed files
|
||||||
// that were added in the project's root (or any of the paths included for checking).
|
// even if they match an exclude filter.
|
||||||
//
|
added_paths.insert(path.parent().unwrap().to_path_buf());
|
||||||
// This is important because `Project::check` iterates over all included files.
|
} else {
|
||||||
// The code below walks the `added_paths` and adds all files that
|
added_paths.insert(path);
|
||||||
// should be included in the project. We can skip this check for
|
|
||||||
// paths that aren't part of the project or shouldn't be included
|
|
||||||
// when checking the project.
|
|
||||||
if project.is_path_included(self, &path) {
|
|
||||||
if self.system().is_file(&path) {
|
|
||||||
// Add the parent directory because `walkdir` always visits explicitly passed files
|
|
||||||
// even if they match an exclude filter.
|
|
||||||
added_paths.insert(path.parent().unwrap().to_path_buf());
|
|
||||||
} else {
|
|
||||||
added_paths.insert(path);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -110,7 +102,7 @@ impl ProjectDatabase {
|
|||||||
project.remove_file(self, file);
|
project.remove_file(self, file);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
sync_recursively.insert(path.clone());
|
sync_recursively(self, &path);
|
||||||
|
|
||||||
if custom_stdlib_versions_path
|
if custom_stdlib_versions_path
|
||||||
.as_ref()
|
.as_ref()
|
||||||
@@ -119,19 +111,11 @@ impl ProjectDatabase {
|
|||||||
custom_stdlib_change = true;
|
custom_stdlib_change = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if project.is_path_included(self, &path) || path == project_root {
|
// Perform a full-reload in case the deleted directory contained the pyproject.toml.
|
||||||
// TODO: Shouldn't it be enough to simply traverse the project files and remove all
|
// We may want to make this more clever in the future, to e.g. iterate over the
|
||||||
// that start with the given path?
|
// indexed files and remove the once that start with the same path, unless
|
||||||
tracing::debug!(
|
// the deleted path is the project configuration.
|
||||||
"Reload project because of a path that could have been a directory."
|
project_changed = true;
|
||||||
);
|
|
||||||
|
|
||||||
// Perform a full-reload in case the deleted directory contained the pyproject.toml.
|
|
||||||
// We may want to make this more clever in the future, to e.g. iterate over the
|
|
||||||
// indexed files and remove the once that start with the same path, unless
|
|
||||||
// the deleted path is the project configuration.
|
|
||||||
project_changed = true;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -148,40 +132,18 @@ impl ProjectDatabase {
|
|||||||
ChangeEvent::Rescan => {
|
ChangeEvent::Rescan => {
|
||||||
project_changed = true;
|
project_changed = true;
|
||||||
Files::sync_all(self);
|
Files::sync_all(self);
|
||||||
sync_recursively.clear();
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let sync_recursively = sync_recursively.into_iter();
|
|
||||||
let mut last = None;
|
|
||||||
|
|
||||||
for path in sync_recursively {
|
|
||||||
// Avoid re-syncing paths that are sub-paths of each other.
|
|
||||||
if let Some(last) = &last {
|
|
||||||
if path.starts_with(last) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Files::sync_recursively(self, &path);
|
|
||||||
last = Some(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
if project_changed {
|
if project_changed {
|
||||||
match ProjectMetadata::discover(&project_root, self.system()) {
|
match ProjectMetadata::discover(&project_path, self.system()) {
|
||||||
Ok(mut metadata) => {
|
Ok(mut metadata) => {
|
||||||
if let Some(cli_options) = cli_options {
|
if let Some(cli_options) = cli_options {
|
||||||
metadata.apply_cli_options(cli_options.clone());
|
metadata.apply_cli_options(cli_options.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Err(error) = metadata.apply_configuration_files(self.system()) {
|
|
||||||
tracing::error!(
|
|
||||||
"Failed to apply configuration files, continuing without applying them: {error}"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let program_settings = metadata.to_program_settings(self.system());
|
let program_settings = metadata.to_program_settings(self.system());
|
||||||
|
|
||||||
let program = Program::get(self);
|
let program = Program::get(self);
|
||||||
@@ -217,24 +179,43 @@ impl ProjectDatabase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let diagnostics = if let Some(walker) = ProjectFilesWalker::incremental(self, added_paths) {
|
let mut added_paths = added_paths.into_iter();
|
||||||
// Use directory walking to discover newly added files.
|
|
||||||
let (files, diagnostics) = walker.collect_vec(self);
|
|
||||||
|
|
||||||
for file in files {
|
// Use directory walking to discover newly added files.
|
||||||
project.add_file(self, file);
|
if let Some(path) = added_paths.next() {
|
||||||
|
let mut walker = self.system().walk_directory(&path);
|
||||||
|
|
||||||
|
for extra_path in added_paths {
|
||||||
|
walker = walker.add(&extra_path);
|
||||||
}
|
}
|
||||||
|
|
||||||
diagnostics
|
let added_paths = std::sync::Mutex::new(Vec::default());
|
||||||
} else {
|
|
||||||
Vec::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
// Note: We simply replace all IO related diagnostics here. This isn't ideal, because
|
walker.run(|| {
|
||||||
// it removes IO errors that may still be relevant. However, tracking IO errors correctly
|
Box::new(|entry| {
|
||||||
// across revisions doesn't feel essential, considering that they're rare. However, we could
|
let Ok(entry) = entry else {
|
||||||
// implement a `BTreeMap` or similar and only prune the diagnostics from paths that we've
|
return WalkState::Continue;
|
||||||
// re-scanned (or that were removed etc).
|
};
|
||||||
project.replace_index_diagnostics(self, diagnostics);
|
|
||||||
|
if !entry.file_type().is_file() {
|
||||||
|
return WalkState::Continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut paths = added_paths.lock().unwrap();
|
||||||
|
|
||||||
|
paths.push(entry.into_path());
|
||||||
|
|
||||||
|
WalkState::Continue
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
for path in added_paths.into_inner().unwrap() {
|
||||||
|
let file = system_path_to_file(self, &path);
|
||||||
|
|
||||||
|
if let Ok(file) = file {
|
||||||
|
project.add_file(self, file);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,7 +8,10 @@ use salsa::Setter;
|
|||||||
use ruff_db::files::File;
|
use ruff_db::files::File;
|
||||||
|
|
||||||
use crate::db::Db;
|
use crate::db::Db;
|
||||||
use crate::{IOErrorDiagnostic, Project};
|
use crate::Project;
|
||||||
|
|
||||||
|
/// Cheap cloneable hash set of files.
|
||||||
|
type FileSet = Arc<FxHashSet<File>>;
|
||||||
|
|
||||||
/// The indexed files of a project.
|
/// The indexed files of a project.
|
||||||
///
|
///
|
||||||
@@ -32,9 +35,9 @@ impl IndexedFiles {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn indexed(inner: Arc<IndexedInner>) -> Self {
|
fn indexed(files: FileSet) -> Self {
|
||||||
Self {
|
Self {
|
||||||
state: std::sync::Mutex::new(State::Indexed(inner)),
|
state: std::sync::Mutex::new(State::Indexed(files)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -43,8 +46,8 @@ impl IndexedFiles {
|
|||||||
|
|
||||||
match &*state {
|
match &*state {
|
||||||
State::Lazy => Index::Lazy(LazyFiles { files: state }),
|
State::Lazy => Index::Lazy(LazyFiles { files: state }),
|
||||||
State::Indexed(inner) => Index::Indexed(Indexed {
|
State::Indexed(files) => Index::Indexed(Indexed {
|
||||||
inner: Arc::clone(inner),
|
files: Arc::clone(files),
|
||||||
_lifetime: PhantomData,
|
_lifetime: PhantomData,
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
@@ -91,7 +94,7 @@ impl IndexedFiles {
|
|||||||
Some(IndexedMut {
|
Some(IndexedMut {
|
||||||
db: Some(db),
|
db: Some(db),
|
||||||
project,
|
project,
|
||||||
indexed,
|
files: indexed,
|
||||||
did_change: false,
|
did_change: false,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -109,7 +112,7 @@ enum State {
|
|||||||
Lazy,
|
Lazy,
|
||||||
|
|
||||||
/// The files are indexed. Stores the known files of a package.
|
/// The files are indexed. Stores the known files of a package.
|
||||||
Indexed(Arc<IndexedInner>),
|
Indexed(FileSet),
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) enum Index<'db> {
|
pub(super) enum Index<'db> {
|
||||||
@@ -126,48 +129,32 @@ pub(super) struct LazyFiles<'db> {
|
|||||||
|
|
||||||
impl<'db> LazyFiles<'db> {
|
impl<'db> LazyFiles<'db> {
|
||||||
/// Sets the indexed files of a package to `files`.
|
/// Sets the indexed files of a package to `files`.
|
||||||
pub(super) fn set(
|
pub(super) fn set(mut self, files: FxHashSet<File>) -> Indexed<'db> {
|
||||||
mut self,
|
|
||||||
files: FxHashSet<File>,
|
|
||||||
diagnostics: Vec<IOErrorDiagnostic>,
|
|
||||||
) -> Indexed<'db> {
|
|
||||||
let files = Indexed {
|
let files = Indexed {
|
||||||
inner: Arc::new(IndexedInner { files, diagnostics }),
|
files: Arc::new(files),
|
||||||
_lifetime: PhantomData,
|
_lifetime: PhantomData,
|
||||||
};
|
};
|
||||||
*self.files = State::Indexed(Arc::clone(&files.inner));
|
*self.files = State::Indexed(Arc::clone(&files.files));
|
||||||
files
|
files
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The indexed files of the project.
|
/// The indexed files of a package.
|
||||||
///
|
///
|
||||||
/// Note: This type is intentionally non-cloneable. Making it cloneable requires
|
/// Note: This type is intentionally non-cloneable. Making it cloneable requires
|
||||||
/// revisiting the locking behavior in [`IndexedFiles::indexed_mut`].
|
/// revisiting the locking behavior in [`IndexedFiles::indexed_mut`].
|
||||||
#[derive(Debug)]
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
pub struct Indexed<'db> {
|
pub struct Indexed<'db> {
|
||||||
inner: Arc<IndexedInner>,
|
files: FileSet,
|
||||||
// Preserve the lifetime of `PackageFiles`.
|
// Preserve the lifetime of `PackageFiles`.
|
||||||
_lifetime: PhantomData<&'db ()>,
|
_lifetime: PhantomData<&'db ()>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct IndexedInner {
|
|
||||||
files: FxHashSet<File>,
|
|
||||||
diagnostics: Vec<IOErrorDiagnostic>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Indexed<'_> {
|
|
||||||
pub(super) fn diagnostics(&self) -> &[IOErrorDiagnostic] {
|
|
||||||
&self.inner.diagnostics
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for Indexed<'_> {
|
impl Deref for Indexed<'_> {
|
||||||
type Target = FxHashSet<File>;
|
type Target = FxHashSet<File>;
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
fn deref(&self) -> &Self::Target {
|
||||||
&self.inner.files
|
&self.files
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -178,7 +165,7 @@ impl<'a> IntoIterator for &'a Indexed<'_> {
|
|||||||
type IntoIter = IndexedIter<'a>;
|
type IntoIter = IndexedIter<'a>;
|
||||||
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
self.inner.files.iter().copied()
|
self.files.iter().copied()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -189,13 +176,13 @@ impl<'a> IntoIterator for &'a Indexed<'_> {
|
|||||||
pub(super) struct IndexedMut<'db> {
|
pub(super) struct IndexedMut<'db> {
|
||||||
db: Option<&'db mut dyn Db>,
|
db: Option<&'db mut dyn Db>,
|
||||||
project: Project,
|
project: Project,
|
||||||
indexed: Arc<IndexedInner>,
|
files: FileSet,
|
||||||
did_change: bool,
|
did_change: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl IndexedMut<'_> {
|
impl IndexedMut<'_> {
|
||||||
pub(super) fn insert(&mut self, file: File) -> bool {
|
pub(super) fn insert(&mut self, file: File) -> bool {
|
||||||
if self.inner_mut().files.insert(file) {
|
if self.files_mut().insert(file) {
|
||||||
self.did_change = true;
|
self.did_change = true;
|
||||||
true
|
true
|
||||||
} else {
|
} else {
|
||||||
@@ -204,7 +191,7 @@ impl IndexedMut<'_> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn remove(&mut self, file: File) -> bool {
|
pub(super) fn remove(&mut self, file: File) -> bool {
|
||||||
if self.inner_mut().files.remove(&file) {
|
if self.files_mut().remove(&file) {
|
||||||
self.did_change = true;
|
self.did_change = true;
|
||||||
true
|
true
|
||||||
} else {
|
} else {
|
||||||
@@ -212,13 +199,8 @@ impl IndexedMut<'_> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn set_diagnostics(&mut self, diagnostics: Vec<IOErrorDiagnostic>) {
|
fn files_mut(&mut self) -> &mut FxHashSet<File> {
|
||||||
self.inner_mut().diagnostics = diagnostics;
|
Arc::get_mut(&mut self.files).expect("All references to `FilesSet` to have been dropped")
|
||||||
}
|
|
||||||
|
|
||||||
fn inner_mut(&mut self) -> &mut IndexedInner {
|
|
||||||
Arc::get_mut(&mut self.indexed)
|
|
||||||
.expect("All references to `FilesSet` should have been dropped")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_impl(&mut self) {
|
fn set_impl(&mut self) {
|
||||||
@@ -226,16 +208,16 @@ impl IndexedMut<'_> {
|
|||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
|
|
||||||
let indexed = Arc::clone(&self.indexed);
|
let files = Arc::clone(&self.files);
|
||||||
|
|
||||||
if self.did_change {
|
if self.did_change {
|
||||||
// If there are changes, set the new file_set to trigger a salsa revision change.
|
// If there are changes, set the new file_set to trigger a salsa revision change.
|
||||||
self.project
|
self.project
|
||||||
.set_file_set(db)
|
.set_file_set(db)
|
||||||
.to(IndexedFiles::indexed(indexed));
|
.to(IndexedFiles::indexed(files));
|
||||||
} else {
|
} else {
|
||||||
// The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state.
|
// The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state.
|
||||||
*self.project.file_set(db).state.lock().unwrap() = State::Indexed(indexed);
|
*self.project.file_set(db).state.lock().unwrap() = State::Indexed(files);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -255,7 +237,7 @@ mod tests {
|
|||||||
use crate::files::Index;
|
use crate::files::Index;
|
||||||
use crate::ProjectMetadata;
|
use crate::ProjectMetadata;
|
||||||
use ruff_db::files::system_path_to_file;
|
use ruff_db::files::system_path_to_file;
|
||||||
use ruff_db::system::{DbWithWritableSystem as _, SystemPathBuf};
|
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||||
use ruff_python_ast::name::Name;
|
use ruff_python_ast::name::Name;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -270,7 +252,7 @@ mod tests {
|
|||||||
let file = system_path_to_file(&db, "test.py").unwrap();
|
let file = system_path_to_file(&db, "test.py").unwrap();
|
||||||
|
|
||||||
let files = match project.file_set(&db).get() {
|
let files = match project.file_set(&db).get() {
|
||||||
Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file]), Vec::new()),
|
Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file])),
|
||||||
Index::Indexed(files) => files,
|
Index::Indexed(files) => files,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -1,32 +1,31 @@
|
|||||||
#![allow(clippy::ref_option)]
|
#![allow(clippy::ref_option)]
|
||||||
|
|
||||||
use crate::metadata::options::OptionDiagnostic;
|
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder};
|
||||||
use crate::walk::{ProjectFilesFilter, ProjectFilesWalker};
|
|
||||||
pub use db::{Db, ProjectDatabase};
|
|
||||||
use files::{Index, Indexed, IndexedFiles};
|
|
||||||
use metadata::settings::Settings;
|
|
||||||
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
|
|
||||||
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder, RuleSelection};
|
|
||||||
use red_knot_python_semantic::register_lints;
|
use red_knot_python_semantic::register_lints;
|
||||||
use red_knot_python_semantic::types::check_types;
|
use red_knot_python_semantic::types::check_types;
|
||||||
use ruff_db::diagnostic::{DiagnosticId, OldDiagnosticTrait, OldParseDiagnostic, Severity, Span};
|
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
|
||||||
use ruff_db::files::File;
|
use ruff_db::files::{system_path_to_file, File};
|
||||||
use ruff_db::parsed::parsed_module;
|
use ruff_db::parsed::parsed_module;
|
||||||
use ruff_db::source::{source_text, SourceTextError};
|
use ruff_db::source::{source_text, SourceTextError};
|
||||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
use ruff_db::system::walk_directory::WalkState;
|
||||||
use rustc_hash::FxHashSet;
|
use ruff_db::system::{FileType, SystemPath};
|
||||||
|
use ruff_python_ast::PySourceType;
|
||||||
|
use ruff_text_size::TextRange;
|
||||||
|
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||||
use salsa::Durability;
|
use salsa::Durability;
|
||||||
use salsa::Setter;
|
use salsa::Setter;
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use thiserror::Error;
|
|
||||||
|
pub use db::{Db, ProjectDatabase};
|
||||||
|
use files::{Index, Indexed, IndexedFiles};
|
||||||
|
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
|
||||||
|
|
||||||
pub mod combine;
|
pub mod combine;
|
||||||
|
|
||||||
mod db;
|
mod db;
|
||||||
mod files;
|
mod files;
|
||||||
pub mod metadata;
|
pub mod metadata;
|
||||||
mod walk;
|
|
||||||
pub mod watch;
|
pub mod watch;
|
||||||
|
|
||||||
pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> =
|
pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> =
|
||||||
@@ -67,46 +66,11 @@ pub struct Project {
|
|||||||
/// The metadata describing the project, including the unresolved options.
|
/// The metadata describing the project, including the unresolved options.
|
||||||
#[return_ref]
|
#[return_ref]
|
||||||
pub metadata: ProjectMetadata,
|
pub metadata: ProjectMetadata,
|
||||||
|
|
||||||
/// The resolved project settings.
|
|
||||||
#[return_ref]
|
|
||||||
pub settings: Settings,
|
|
||||||
|
|
||||||
/// The paths that should be included when checking this project.
|
|
||||||
///
|
|
||||||
/// The default (when this list is empty) is to include all files in the project root
|
|
||||||
/// (that satisfy the configured include and exclude patterns).
|
|
||||||
/// However, it's sometimes desired to only check a subset of the project, e.g. to see
|
|
||||||
/// the diagnostics for a single file or a folder.
|
|
||||||
///
|
|
||||||
/// This list gets initialized by the paths passed to `knot check <paths>`
|
|
||||||
///
|
|
||||||
/// ## How is this different from `open_files`?
|
|
||||||
///
|
|
||||||
/// The `included_paths` is closely related to `open_files`. The only difference is that
|
|
||||||
/// `open_files` is already a resolved set of files whereas `included_paths` is only a list of paths
|
|
||||||
/// that are resolved to files by indexing them. The other difference is that
|
|
||||||
/// new files added to any directory in `included_paths` will be indexed and added to the project
|
|
||||||
/// whereas `open_files` needs to be updated manually (e.g. by the IDE).
|
|
||||||
///
|
|
||||||
/// In short, `open_files` is cheaper in contexts where the set of files is known, like
|
|
||||||
/// in an IDE when the user only wants to check the open tabs. This could be modeled
|
|
||||||
/// with `included_paths` too but it would require an explicit walk dir step that's simply unnecessary.
|
|
||||||
#[default]
|
|
||||||
#[return_ref]
|
|
||||||
included_paths_list: Vec<SystemPathBuf>,
|
|
||||||
|
|
||||||
/// Diagnostics that were generated when resolving the project settings.
|
|
||||||
#[return_ref]
|
|
||||||
settings_diagnostics: Vec<OptionDiagnostic>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[salsa::tracked]
|
|
||||||
impl Project {
|
impl Project {
|
||||||
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
|
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
|
||||||
let (settings, settings_diagnostics) = metadata.options().to_settings(db);
|
Project::builder(metadata)
|
||||||
|
|
||||||
Project::builder(metadata, settings, settings_diagnostics)
|
|
||||||
.durability(Durability::MEDIUM)
|
.durability(Durability::MEDIUM)
|
||||||
.open_fileset_durability(Durability::LOW)
|
.open_fileset_durability(Durability::LOW)
|
||||||
.file_set_durability(Durability::LOW)
|
.file_set_durability(Durability::LOW)
|
||||||
@@ -121,41 +85,11 @@ impl Project {
|
|||||||
self.metadata(db).name()
|
self.metadata(db).name()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the resolved linter rules for the project.
|
|
||||||
///
|
|
||||||
/// This is a salsa query to prevent re-computing queries if other, unrelated
|
|
||||||
/// settings change. For example, we don't want that changing the terminal settings
|
|
||||||
/// invalidates any type checking queries.
|
|
||||||
#[salsa::tracked]
|
|
||||||
pub fn rules(self, db: &dyn Db) -> Arc<RuleSelection> {
|
|
||||||
self.settings(db).to_rules()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if `path` is both part of the project and included (see `included_paths_list`).
|
|
||||||
///
|
|
||||||
/// Unlike [Self::files], this method does not respect `.gitignore` files. It only checks
|
|
||||||
/// the project's include and exclude settings as well as the paths that were passed to `knot check <paths>`.
|
|
||||||
/// This means, that this method is an over-approximation of `Self::files` and may return `true` for paths
|
|
||||||
/// that won't be included when checking the project because they're ignored in a `.gitignore` file.
|
|
||||||
pub fn is_path_included(self, db: &dyn Db, path: &SystemPath) -> bool {
|
|
||||||
ProjectFilesFilter::from_project(db, self).is_included(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
|
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
|
||||||
tracing::debug!("Reloading project");
|
tracing::debug!("Reloading project");
|
||||||
assert_eq!(self.root(db), metadata.root());
|
assert_eq!(self.root(db), metadata.root());
|
||||||
|
|
||||||
if &metadata != self.metadata(db) {
|
if &metadata != self.metadata(db) {
|
||||||
let (settings, settings_diagnostics) = metadata.options().to_settings(db);
|
|
||||||
|
|
||||||
if self.settings(db) != &settings {
|
|
||||||
self.set_settings(db).to(settings);
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.settings_diagnostics(db) != &settings_diagnostics {
|
|
||||||
self.set_settings_diagnostics(db).to(settings_diagnostics);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.set_metadata(db).to(metadata);
|
self.set_metadata(db).to(metadata);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -163,43 +97,29 @@ impl Project {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Checks all open files in the project and its dependencies.
|
/// Checks all open files in the project and its dependencies.
|
||||||
pub(crate) fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn OldDiagnosticTrait>> {
|
pub fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
|
||||||
let project_span = tracing::debug_span!("Project::check");
|
let project_span = tracing::debug_span!("Project::check");
|
||||||
let _span = project_span.enter();
|
let _span = project_span.enter();
|
||||||
|
|
||||||
tracing::debug!("Checking project '{name}'", name = self.name(db));
|
tracing::debug!("Checking project '{name}'", name = self.name(db));
|
||||||
|
let result = Arc::new(std::sync::Mutex::new(Vec::new()));
|
||||||
let mut diagnostics: Vec<Box<dyn OldDiagnosticTrait>> = Vec::new();
|
|
||||||
diagnostics.extend(self.settings_diagnostics(db).iter().map(|diagnostic| {
|
|
||||||
let diagnostic: Box<dyn OldDiagnosticTrait> = Box::new(diagnostic.clone());
|
|
||||||
diagnostic
|
|
||||||
}));
|
|
||||||
|
|
||||||
let files = ProjectFiles::new(db, self);
|
|
||||||
|
|
||||||
diagnostics.extend(files.diagnostics().iter().cloned().map(|diagnostic| {
|
|
||||||
let diagnostic: Box<dyn OldDiagnosticTrait> = Box::new(diagnostic);
|
|
||||||
diagnostic
|
|
||||||
}));
|
|
||||||
|
|
||||||
let result = Arc::new(std::sync::Mutex::new(diagnostics));
|
|
||||||
let inner_result = Arc::clone(&result);
|
let inner_result = Arc::clone(&result);
|
||||||
|
|
||||||
let db = db.clone();
|
let db = db.clone();
|
||||||
let project_span = project_span.clone();
|
let project_span = project_span.clone();
|
||||||
|
|
||||||
rayon::scope(move |scope| {
|
rayon::scope(move |scope| {
|
||||||
|
let files = ProjectFiles::new(&db, self);
|
||||||
for file in &files {
|
for file in &files {
|
||||||
let result = inner_result.clone();
|
let result = inner_result.clone();
|
||||||
let db = db.clone();
|
let db = db.clone();
|
||||||
let project_span = project_span.clone();
|
let project_span = project_span.clone();
|
||||||
|
|
||||||
scope.spawn(move |_| {
|
scope.spawn(move |_| {
|
||||||
let check_file_span =
|
let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
|
||||||
tracing::debug_span!(parent: &project_span, "check_file", ?file);
|
|
||||||
let _entered = check_file_span.entered();
|
let _entered = check_file_span.entered();
|
||||||
|
|
||||||
let file_diagnostics = check_file_impl(&db, file);
|
let file_diagnostics = check_file(&db, file);
|
||||||
result.lock().unwrap().extend(file_diagnostics);
|
result.lock().unwrap().extend(file_diagnostics);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -208,22 +128,6 @@ impl Project {
|
|||||||
Arc::into_inner(result).unwrap().into_inner().unwrap()
|
Arc::into_inner(result).unwrap().into_inner().unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Box<dyn OldDiagnosticTrait>> {
|
|
||||||
let mut file_diagnostics: Vec<_> = self
|
|
||||||
.settings_diagnostics(db)
|
|
||||||
.iter()
|
|
||||||
.map(|diagnostic| {
|
|
||||||
let diagnostic: Box<dyn OldDiagnosticTrait> = Box::new(diagnostic.clone());
|
|
||||||
diagnostic
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let check_diagnostics = check_file_impl(db, file);
|
|
||||||
file_diagnostics.extend(check_diagnostics);
|
|
||||||
|
|
||||||
file_diagnostics
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Opens a file in the project.
|
/// Opens a file in the project.
|
||||||
///
|
///
|
||||||
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
|
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
|
||||||
@@ -249,30 +153,6 @@ impl Project {
|
|||||||
removed
|
removed
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_included_paths(self, db: &mut dyn Db, paths: Vec<SystemPathBuf>) {
|
|
||||||
tracing::debug!("Setting included paths: {paths}", paths = paths.len());
|
|
||||||
|
|
||||||
self.set_included_paths_list(db).to(paths);
|
|
||||||
self.reload_files(db);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the paths that should be checked.
|
|
||||||
///
|
|
||||||
/// The default is to check the entire project in which case this method returns
|
|
||||||
/// the project root. However, users can specify to only check specific sub-folders or
|
|
||||||
/// even files of a project by using `knot check <paths>`. In that case, this method
|
|
||||||
/// returns the provided absolute paths.
|
|
||||||
///
|
|
||||||
/// Note: The CLI doesn't prohibit users from specifying paths outside the project root.
|
|
||||||
/// This can be useful to check arbitrary files, but it isn't something we recommend.
|
|
||||||
/// We should try to support this use case but it's okay if there are some limitations around it.
|
|
||||||
fn included_paths_or_root(self, db: &dyn Db) -> &[SystemPathBuf] {
|
|
||||||
match &**self.included_paths_list(db) {
|
|
||||||
[] => std::slice::from_ref(&self.metadata(db).root),
|
|
||||||
paths => paths,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the open files in the project or `None` if the entire project should be checked.
|
/// Returns the open files in the project or `None` if the entire project should be checked.
|
||||||
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
|
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
|
||||||
self.open_fileset(db).as_deref()
|
self.open_fileset(db).as_deref()
|
||||||
@@ -326,7 +206,7 @@ impl Project {
|
|||||||
self.files(db).contains(&file)
|
self.files(db).contains(&file)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", skip(self, db))]
|
#[tracing::instrument(level = "debug", skip(db))]
|
||||||
pub fn remove_file(self, db: &mut dyn Db, file: File) {
|
pub fn remove_file(self, db: &mut dyn Db, file: File) {
|
||||||
tracing::debug!(
|
tracing::debug!(
|
||||||
"Removing file `{}` from project `{}`",
|
"Removing file `{}` from project `{}`",
|
||||||
@@ -355,17 +235,6 @@ impl Project {
|
|||||||
index.insert(file);
|
index.insert(file);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Replaces the diagnostics from indexing the project files with `diagnostics`.
|
|
||||||
///
|
|
||||||
/// This is a no-op if the project files haven't been indexed yet.
|
|
||||||
pub fn replace_index_diagnostics(self, db: &mut dyn Db, diagnostics: Vec<IOErrorDiagnostic>) {
|
|
||||||
let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
|
|
||||||
return;
|
|
||||||
};
|
|
||||||
|
|
||||||
index.set_diagnostics(diagnostics);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the files belonging to this project.
|
/// Returns the files belonging to this project.
|
||||||
pub fn files(self, db: &dyn Db) -> Indexed<'_> {
|
pub fn files(self, db: &dyn Db) -> Indexed<'_> {
|
||||||
let files = self.file_set(db);
|
let files = self.file_set(db);
|
||||||
@@ -373,14 +242,12 @@ impl Project {
|
|||||||
let indexed = match files.get() {
|
let indexed = match files.get() {
|
||||||
Index::Lazy(vacant) => {
|
Index::Lazy(vacant) => {
|
||||||
let _entered =
|
let _entered =
|
||||||
tracing::debug_span!("Project::index_files", project = %self.name(db))
|
tracing::debug_span!("Project::index_files", package = %self.name(db))
|
||||||
.entered();
|
.entered();
|
||||||
|
|
||||||
let walker = ProjectFilesWalker::new(db);
|
let files = discover_project_files(db, self);
|
||||||
let (files, diagnostics) = walker.collect_set(db);
|
tracing::info!("Found {} files in project `{}`", files.len(), self.name(db));
|
||||||
|
vacant.set(files)
|
||||||
tracing::info!("Indexed {} file(s)", files.len());
|
|
||||||
vacant.set(files, diagnostics)
|
|
||||||
}
|
}
|
||||||
Index::Indexed(indexed) => indexed,
|
Index::Indexed(indexed) => indexed,
|
||||||
};
|
};
|
||||||
@@ -398,43 +265,82 @@ impl Project {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn OldDiagnosticTrait>> {
|
pub(crate) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
|
||||||
let mut diagnostics: Vec<Box<dyn OldDiagnosticTrait>> = Vec::new();
|
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
|
||||||
|
|
||||||
// Abort checking if there are IO errors.
|
// Abort checking if there are IO errors.
|
||||||
let source = source_text(db.upcast(), file);
|
let source = source_text(db.upcast(), file);
|
||||||
|
|
||||||
if let Some(read_error) = source.read_error() {
|
if let Some(read_error) = source.read_error() {
|
||||||
diagnostics.push(Box::new(IOErrorDiagnostic {
|
diagnostics.push(Box::new(IOErrorDiagnostic {
|
||||||
file: Some(file),
|
file,
|
||||||
error: read_error.clone().into(),
|
error: read_error.clone(),
|
||||||
}));
|
}));
|
||||||
return diagnostics;
|
return diagnostics;
|
||||||
}
|
}
|
||||||
|
|
||||||
let parsed = parsed_module(db.upcast(), file);
|
let parsed = parsed_module(db.upcast(), file);
|
||||||
diagnostics.extend(parsed.errors().iter().map(|error| {
|
diagnostics.extend(parsed.errors().iter().map(|error| {
|
||||||
let diagnostic: Box<dyn OldDiagnosticTrait> =
|
let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
|
||||||
Box::new(OldParseDiagnostic::new(file, error.clone()));
|
|
||||||
diagnostic
|
diagnostic
|
||||||
}));
|
}));
|
||||||
|
|
||||||
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
|
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
|
||||||
let boxed: Box<dyn OldDiagnosticTrait> = Box::new(diagnostic.clone());
|
let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
|
||||||
boxed
|
boxed
|
||||||
}));
|
}));
|
||||||
|
|
||||||
diagnostics.sort_unstable_by_key(|diagnostic| {
|
diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());
|
||||||
diagnostic
|
|
||||||
.span()
|
|
||||||
.and_then(|span| span.range())
|
|
||||||
.unwrap_or_default()
|
|
||||||
.start()
|
|
||||||
});
|
|
||||||
|
|
||||||
diagnostics
|
diagnostics
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn discover_project_files(db: &dyn Db, project: Project) -> FxHashSet<File> {
|
||||||
|
let paths = std::sync::Mutex::new(Vec::new());
|
||||||
|
|
||||||
|
db.system().walk_directory(project.root(db)).run(|| {
|
||||||
|
Box::new(|entry| {
|
||||||
|
match entry {
|
||||||
|
Ok(entry) => {
|
||||||
|
// Skip over any non python files to avoid creating too many entries in `Files`.
|
||||||
|
match entry.file_type() {
|
||||||
|
FileType::File => {
|
||||||
|
if entry
|
||||||
|
.path()
|
||||||
|
.extension()
|
||||||
|
.and_then(PySourceType::try_from_extension)
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
let mut paths = paths.lock().unwrap();
|
||||||
|
paths.push(entry.into_path());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FileType::Directory | FileType::Symlink => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(error) => {
|
||||||
|
// TODO Handle error
|
||||||
|
tracing::error!("Failed to walk path: {error}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
WalkState::Continue
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
let paths = paths.into_inner().unwrap();
|
||||||
|
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
|
||||||
|
|
||||||
|
for path in paths {
|
||||||
|
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
|
||||||
|
// We can ignore this.
|
||||||
|
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
|
||||||
|
files.insert(file);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
files
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
enum ProjectFiles<'a> {
|
enum ProjectFiles<'a> {
|
||||||
OpenFiles(&'a FxHashSet<File>),
|
OpenFiles(&'a FxHashSet<File>),
|
||||||
@@ -449,13 +355,6 @@ impl<'a> ProjectFiles<'a> {
|
|||||||
ProjectFiles::Indexed(project.files(db))
|
ProjectFiles::Indexed(project.files(db))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn diagnostics(&self) -> &[IOErrorDiagnostic] {
|
|
||||||
match self {
|
|
||||||
ProjectFiles::OpenFiles(_) => &[],
|
|
||||||
ProjectFiles::Indexed(indexed) => indexed.diagnostics(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> IntoIterator for &'a ProjectFiles<'a> {
|
impl<'a> IntoIterator for &'a ProjectFiles<'a> {
|
||||||
@@ -488,13 +387,13 @@ impl Iterator for ProjectFilesIter<'_> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug)]
|
||||||
pub struct IOErrorDiagnostic {
|
pub struct IOErrorDiagnostic {
|
||||||
file: Option<File>,
|
file: File,
|
||||||
error: IOErrorKind,
|
error: SourceTextError,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl OldDiagnosticTrait for IOErrorDiagnostic {
|
impl Diagnostic for IOErrorDiagnostic {
|
||||||
fn id(&self) -> DiagnosticId {
|
fn id(&self) -> DiagnosticId {
|
||||||
DiagnosticId::Io
|
DiagnosticId::Io
|
||||||
}
|
}
|
||||||
@@ -503,8 +402,12 @@ impl OldDiagnosticTrait for IOErrorDiagnostic {
|
|||||||
self.error.to_string().into()
|
self.error.to_string().into()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn span(&self) -> Option<Span> {
|
fn file(&self) -> File {
|
||||||
self.file.map(Span::from)
|
self.file
|
||||||
|
}
|
||||||
|
|
||||||
|
fn range(&self) -> Option<TextRange> {
|
||||||
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
fn severity(&self) -> Severity {
|
fn severity(&self) -> Severity {
|
||||||
@@ -512,24 +415,15 @@ impl OldDiagnosticTrait for IOErrorDiagnostic {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Error, Debug, Clone)]
|
|
||||||
enum IOErrorKind {
|
|
||||||
#[error(transparent)]
|
|
||||||
Walk(#[from] walk::WalkError),
|
|
||||||
|
|
||||||
#[error(transparent)]
|
|
||||||
SourceText(#[from] SourceTextError),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use crate::db::tests::TestDb;
|
use crate::db::tests::TestDb;
|
||||||
use crate::{check_file_impl, ProjectMetadata};
|
use crate::{check_file, ProjectMetadata};
|
||||||
use red_knot_python_semantic::types::check_types;
|
use red_knot_python_semantic::types::check_types;
|
||||||
use ruff_db::diagnostic::OldDiagnosticTrait;
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
use ruff_db::files::system_path_to_file;
|
use ruff_db::files::system_path_to_file;
|
||||||
use ruff_db::source::source_text;
|
use ruff_db::source::source_text;
|
||||||
use ruff_db::system::{DbWithTestSystem, DbWithWritableSystem as _, SystemPath, SystemPathBuf};
|
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
|
||||||
use ruff_db::testing::assert_function_query_was_not_run;
|
use ruff_db::testing::assert_function_query_was_not_run;
|
||||||
use ruff_python_ast::name::Name;
|
use ruff_python_ast::name::Name;
|
||||||
|
|
||||||
@@ -548,7 +442,7 @@ mod tests {
|
|||||||
|
|
||||||
assert_eq!(source_text(&db, file).as_str(), "");
|
assert_eq!(source_text(&db, file).as_str(), "");
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
check_file_impl(&db, file)
|
check_file(&db, file)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|diagnostic| diagnostic.message().into_owned())
|
.map(|diagnostic| diagnostic.message().into_owned())
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
@@ -564,7 +458,7 @@ mod tests {
|
|||||||
|
|
||||||
assert_eq!(source_text(&db, file).as_str(), "");
|
assert_eq!(source_text(&db, file).as_str(), "");
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
check_file_impl(&db, file)
|
check_file(&db, file)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|diagnostic| diagnostic.message().into_owned())
|
.map(|diagnostic| diagnostic.message().into_owned())
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
|
|||||||
@@ -1,21 +1,15 @@
|
|||||||
use configuration_file::{ConfigurationFile, ConfigurationFileError};
|
|
||||||
use red_knot_python_semantic::ProgramSettings;
|
use red_knot_python_semantic::ProgramSettings;
|
||||||
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
||||||
use ruff_python_ast::name::Name;
|
use ruff_python_ast::name::Name;
|
||||||
use std::sync::Arc;
|
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
use crate::combine::Combine;
|
use crate::combine::Combine;
|
||||||
use crate::metadata::pyproject::{Project, PyProject, PyProjectError, ResolveRequiresPythonError};
|
use crate::metadata::pyproject::{Project, PyProject, PyProjectError};
|
||||||
use crate::metadata::value::ValueSource;
|
|
||||||
use options::KnotTomlError;
|
use options::KnotTomlError;
|
||||||
use options::Options;
|
use options::Options;
|
||||||
|
|
||||||
mod configuration_file;
|
|
||||||
pub mod options;
|
pub mod options;
|
||||||
pub mod pyproject;
|
pub mod pyproject;
|
||||||
pub mod settings;
|
|
||||||
pub mod value;
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq)]
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
#[cfg_attr(test, derive(serde::Serialize))]
|
#[cfg_attr(test, derive(serde::Serialize))]
|
||||||
@@ -26,15 +20,6 @@ pub struct ProjectMetadata {
|
|||||||
|
|
||||||
/// The raw options
|
/// The raw options
|
||||||
pub(super) options: Options,
|
pub(super) options: Options,
|
||||||
|
|
||||||
/// Paths of configurations other than the project's configuration that were combined into [`Self::options`].
|
|
||||||
///
|
|
||||||
/// This field stores the paths of the configuration files, mainly for
|
|
||||||
/// knowing which files to watch for changes.
|
|
||||||
///
|
|
||||||
/// The path ordering doesn't imply precedence.
|
|
||||||
#[cfg_attr(test, serde(skip_serializing_if = "Vec::is_empty"))]
|
|
||||||
pub(super) extra_configuration_paths: Vec<SystemPathBuf>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ProjectMetadata {
|
impl ProjectMetadata {
|
||||||
@@ -43,16 +28,12 @@ impl ProjectMetadata {
|
|||||||
Self {
|
Self {
|
||||||
name,
|
name,
|
||||||
root,
|
root,
|
||||||
extra_configuration_paths: Vec::default(),
|
|
||||||
options: Options::default(),
|
options: Options::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Loads a project from a `pyproject.toml` file.
|
/// Loads a project from a `pyproject.toml` file.
|
||||||
pub(crate) fn from_pyproject(
|
pub(crate) fn from_pyproject(pyproject: PyProject, root: SystemPathBuf) -> Self {
|
||||||
pyproject: PyProject,
|
|
||||||
root: SystemPathBuf,
|
|
||||||
) -> Result<Self, ResolveRequiresPythonError> {
|
|
||||||
Self::from_options(
|
Self::from_options(
|
||||||
pyproject
|
pyproject
|
||||||
.tool
|
.tool
|
||||||
@@ -64,38 +45,22 @@ impl ProjectMetadata {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Loads a project from a set of options with an optional pyproject-project table.
|
/// Loads a project from a set of options with an optional pyproject-project table.
|
||||||
pub fn from_options(
|
pub(crate) fn from_options(
|
||||||
mut options: Options,
|
options: Options,
|
||||||
root: SystemPathBuf,
|
root: SystemPathBuf,
|
||||||
project: Option<&Project>,
|
project: Option<&Project>,
|
||||||
) -> Result<Self, ResolveRequiresPythonError> {
|
) -> Self {
|
||||||
let name = project
|
let name = project
|
||||||
.and_then(|project| project.name.as_deref())
|
.and_then(|project| project.name.as_ref())
|
||||||
.map(|name| Name::new(&**name))
|
.map(|name| Name::new(&**name))
|
||||||
.unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));
|
.unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));
|
||||||
|
|
||||||
// If the `options` don't specify a python version but the `project.requires-python` field is set,
|
// TODO(https://github.com/astral-sh/ruff/issues/15491): Respect requires-python
|
||||||
// use that as a lower bound instead.
|
Self {
|
||||||
if let Some(project) = project {
|
|
||||||
if options
|
|
||||||
.environment
|
|
||||||
.as_ref()
|
|
||||||
.is_none_or(|env| env.python_version.is_none())
|
|
||||||
{
|
|
||||||
if let Some(requires_python) = project.resolve_requires_python_lower_bound()? {
|
|
||||||
let mut environment = options.environment.unwrap_or_default();
|
|
||||||
environment.python_version = Some(requires_python);
|
|
||||||
options.environment = Some(environment);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
name,
|
name,
|
||||||
root,
|
root,
|
||||||
options,
|
options,
|
||||||
extra_configuration_paths: Vec::new(),
|
}
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Discovers the closest project at `path` and returns its metadata.
|
/// Discovers the closest project at `path` and returns its metadata.
|
||||||
@@ -122,10 +87,7 @@ impl ProjectMetadata {
|
|||||||
let pyproject_path = project_root.join("pyproject.toml");
|
let pyproject_path = project_root.join("pyproject.toml");
|
||||||
|
|
||||||
let pyproject = if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
|
let pyproject = if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
|
||||||
match PyProject::from_toml_str(
|
match PyProject::from_toml_str(&pyproject_str) {
|
||||||
&pyproject_str,
|
|
||||||
ValueSource::File(Arc::new(pyproject_path.clone())),
|
|
||||||
) {
|
|
||||||
Ok(pyproject) => Some(pyproject),
|
Ok(pyproject) => Some(pyproject),
|
||||||
Err(error) => {
|
Err(error) => {
|
||||||
return Err(ProjectDiscoveryError::InvalidPyProject {
|
return Err(ProjectDiscoveryError::InvalidPyProject {
|
||||||
@@ -141,10 +103,7 @@ impl ProjectMetadata {
|
|||||||
// A `knot.toml` takes precedence over a `pyproject.toml`.
|
// A `knot.toml` takes precedence over a `pyproject.toml`.
|
||||||
let knot_toml_path = project_root.join("knot.toml");
|
let knot_toml_path = project_root.join("knot.toml");
|
||||||
if let Ok(knot_str) = system.read_to_string(&knot_toml_path) {
|
if let Ok(knot_str) = system.read_to_string(&knot_toml_path) {
|
||||||
let options = match Options::from_toml_str(
|
let options = match Options::from_toml_str(&knot_str) {
|
||||||
&knot_str,
|
|
||||||
ValueSource::File(Arc::new(knot_toml_path.clone())),
|
|
||||||
) {
|
|
||||||
Ok(options) => options,
|
Ok(options) => options,
|
||||||
Err(error) => {
|
Err(error) => {
|
||||||
return Err(ProjectDiscoveryError::InvalidKnotToml {
|
return Err(ProjectDiscoveryError::InvalidKnotToml {
|
||||||
@@ -163,34 +122,19 @@ impl ProjectMetadata {
|
|||||||
}
|
}
|
||||||
|
|
||||||
tracing::debug!("Found project at '{}'", project_root);
|
tracing::debug!("Found project at '{}'", project_root);
|
||||||
|
return Ok(ProjectMetadata::from_options(
|
||||||
let metadata = ProjectMetadata::from_options(
|
|
||||||
options,
|
options,
|
||||||
project_root.to_path_buf(),
|
project_root.to_path_buf(),
|
||||||
pyproject
|
pyproject
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.and_then(|pyproject| pyproject.project.as_ref()),
|
.and_then(|pyproject| pyproject.project.as_ref()),
|
||||||
)
|
));
|
||||||
.map_err(|err| {
|
|
||||||
ProjectDiscoveryError::InvalidRequiresPythonConstraint {
|
|
||||||
source: err,
|
|
||||||
path: pyproject_path,
|
|
||||||
}
|
|
||||||
})?;
|
|
||||||
|
|
||||||
return Ok(metadata);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(pyproject) = pyproject {
|
if let Some(pyproject) = pyproject {
|
||||||
let has_knot_section = pyproject.knot().is_some();
|
let has_knot_section = pyproject.knot().is_some();
|
||||||
let metadata =
|
let metadata =
|
||||||
ProjectMetadata::from_pyproject(pyproject, project_root.to_path_buf())
|
ProjectMetadata::from_pyproject(pyproject, project_root.to_path_buf());
|
||||||
.map_err(
|
|
||||||
|err| ProjectDiscoveryError::InvalidRequiresPythonConstraint {
|
|
||||||
source: err,
|
|
||||||
path: pyproject_path,
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
if has_knot_section {
|
if has_knot_section {
|
||||||
tracing::debug!("Found project at '{}'", project_root);
|
tracing::debug!("Found project at '{}'", project_root);
|
||||||
@@ -238,10 +182,6 @@ impl ProjectMetadata {
|
|||||||
&self.options
|
&self.options
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn extra_configuration_paths(&self) -> &[SystemPathBuf] {
|
|
||||||
&self.extra_configuration_paths
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn to_program_settings(&self, system: &dyn System) -> ProgramSettings {
|
pub fn to_program_settings(&self, system: &dyn System) -> ProgramSettings {
|
||||||
self.options.to_program_settings(self.root(), system)
|
self.options.to_program_settings(self.root(), system)
|
||||||
}
|
}
|
||||||
@@ -251,31 +191,9 @@ impl ProjectMetadata {
|
|||||||
self.options = options.combine(std::mem::take(&mut self.options));
|
self.options = options.combine(std::mem::take(&mut self.options));
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Applies the options from the configuration files to the project's options.
|
/// Combine the project options with the user options where project options take precedence.
|
||||||
///
|
pub fn apply_user_options(&mut self, options: Options) {
|
||||||
/// This includes:
|
self.options.combine_with(options);
|
||||||
///
|
|
||||||
/// * The user-level configuration
|
|
||||||
pub fn apply_configuration_files(
|
|
||||||
&mut self,
|
|
||||||
system: &dyn System,
|
|
||||||
) -> Result<(), ConfigurationFileError> {
|
|
||||||
if let Some(user) = ConfigurationFile::user(system)? {
|
|
||||||
tracing::debug!(
|
|
||||||
"Applying user-level configuration loaded from `{path}`.",
|
|
||||||
path = user.path()
|
|
||||||
);
|
|
||||||
self.apply_configuration_file(user);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Applies a lower-precedence configuration files to the project's options.
|
|
||||||
fn apply_configuration_file(&mut self, options: ConfigurationFile) {
|
|
||||||
self.extra_configuration_paths
|
|
||||||
.push(options.path().to_owned());
|
|
||||||
self.options.combine_with(options.into_options());
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -295,22 +213,16 @@ pub enum ProjectDiscoveryError {
|
|||||||
source: Box<KnotTomlError>,
|
source: Box<KnotTomlError>,
|
||||||
path: SystemPathBuf,
|
path: SystemPathBuf,
|
||||||
},
|
},
|
||||||
|
|
||||||
#[error("Invalid `requires-python` version specifier (`{path}`): {source}")]
|
|
||||||
InvalidRequiresPythonConstraint {
|
|
||||||
source: ResolveRequiresPythonError,
|
|
||||||
path: SystemPathBuf,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
//! Integration tests for project discovery
|
//! Integration tests for project discovery
|
||||||
|
|
||||||
|
use crate::snapshot_project;
|
||||||
use anyhow::{anyhow, Context};
|
use anyhow::{anyhow, Context};
|
||||||
use insta::assert_ron_snapshot;
|
use insta::assert_ron_snapshot;
|
||||||
use ruff_db::system::{SystemPathBuf, TestSystem};
|
use ruff_db::system::{SystemPathBuf, TestSystem};
|
||||||
use ruff_python_ast::PythonVersion;
|
|
||||||
|
|
||||||
use crate::{ProjectDiscoveryError, ProjectMetadata};
|
use crate::{ProjectDiscoveryError, ProjectMetadata};
|
||||||
|
|
||||||
@@ -321,7 +233,7 @@ mod tests {
|
|||||||
|
|
||||||
system
|
system
|
||||||
.memory_file_system()
|
.memory_file_system()
|
||||||
.write_files_all([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
|
.write_files([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
|
||||||
.context("Failed to write files")?;
|
.context("Failed to write files")?;
|
||||||
|
|
||||||
let project =
|
let project =
|
||||||
@@ -329,15 +241,7 @@ mod tests {
|
|||||||
|
|
||||||
assert_eq!(project.root(), &*root);
|
assert_eq!(project.root(), &*root);
|
||||||
|
|
||||||
with_escaped_paths(|| {
|
snapshot_project!(project);
|
||||||
assert_ron_snapshot!(&project, @r#"
|
|
||||||
ProjectMetadata(
|
|
||||||
name: Name("app"),
|
|
||||||
root: "/app",
|
|
||||||
options: Options(),
|
|
||||||
)
|
|
||||||
"#);
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -349,7 +253,7 @@ mod tests {
|
|||||||
|
|
||||||
system
|
system
|
||||||
.memory_file_system()
|
.memory_file_system()
|
||||||
.write_files_all([
|
.write_files([
|
||||||
(
|
(
|
||||||
root.join("pyproject.toml"),
|
root.join("pyproject.toml"),
|
||||||
r#"
|
r#"
|
||||||
@@ -366,16 +270,7 @@ mod tests {
|
|||||||
ProjectMetadata::discover(&root, &system).context("Failed to discover project")?;
|
ProjectMetadata::discover(&root, &system).context("Failed to discover project")?;
|
||||||
|
|
||||||
assert_eq!(project.root(), &*root);
|
assert_eq!(project.root(), &*root);
|
||||||
|
snapshot_project!(project);
|
||||||
with_escaped_paths(|| {
|
|
||||||
assert_ron_snapshot!(&project, @r#"
|
|
||||||
ProjectMetadata(
|
|
||||||
name: Name("backend"),
|
|
||||||
root: "/app",
|
|
||||||
options: Options(),
|
|
||||||
)
|
|
||||||
"#);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Discovering the same package from a subdirectory should give the same result
|
// Discovering the same package from a subdirectory should give the same result
|
||||||
let from_src = ProjectMetadata::discover(&root.join("db"), &system)
|
let from_src = ProjectMetadata::discover(&root.join("db"), &system)
|
||||||
@@ -393,7 +288,7 @@ mod tests {
|
|||||||
|
|
||||||
system
|
system
|
||||||
.memory_file_system()
|
.memory_file_system()
|
||||||
.write_files_all([
|
.write_files([
|
||||||
(
|
(
|
||||||
root.join("pyproject.toml"),
|
root.join("pyproject.toml"),
|
||||||
r#"
|
r#"
|
||||||
@@ -432,7 +327,7 @@ expected `.`, `]`
|
|||||||
|
|
||||||
system
|
system
|
||||||
.memory_file_system()
|
.memory_file_system()
|
||||||
.write_files_all([
|
.write_files([
|
||||||
(
|
(
|
||||||
root.join("pyproject.toml"),
|
root.join("pyproject.toml"),
|
||||||
r#"
|
r#"
|
||||||
@@ -458,19 +353,7 @@ expected `.`, `]`
|
|||||||
|
|
||||||
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
||||||
|
|
||||||
with_escaped_paths(|| {
|
snapshot_project!(sub_project);
|
||||||
assert_ron_snapshot!(sub_project, @r#"
|
|
||||||
ProjectMetadata(
|
|
||||||
name: Name("nested-project"),
|
|
||||||
root: "/app/packages/a",
|
|
||||||
options: Options(
|
|
||||||
src: Some(SrcOptions(
|
|
||||||
root: Some("src"),
|
|
||||||
)),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
"#);
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -482,7 +365,7 @@ expected `.`, `]`
|
|||||||
|
|
||||||
system
|
system
|
||||||
.memory_file_system()
|
.memory_file_system()
|
||||||
.write_files_all([
|
.write_files([
|
||||||
(
|
(
|
||||||
root.join("pyproject.toml"),
|
root.join("pyproject.toml"),
|
||||||
r#"
|
r#"
|
||||||
@@ -508,19 +391,7 @@ expected `.`, `]`
|
|||||||
|
|
||||||
let root = ProjectMetadata::discover(&root, &system)?;
|
let root = ProjectMetadata::discover(&root, &system)?;
|
||||||
|
|
||||||
with_escaped_paths(|| {
|
snapshot_project!(root);
|
||||||
assert_ron_snapshot!(root, @r#"
|
|
||||||
ProjectMetadata(
|
|
||||||
name: Name("project-root"),
|
|
||||||
root: "/app",
|
|
||||||
options: Options(
|
|
||||||
src: Some(SrcOptions(
|
|
||||||
root: Some("src"),
|
|
||||||
)),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
"#);
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -532,7 +403,7 @@ expected `.`, `]`
|
|||||||
|
|
||||||
system
|
system
|
||||||
.memory_file_system()
|
.memory_file_system()
|
||||||
.write_files_all([
|
.write_files([
|
||||||
(
|
(
|
||||||
root.join("pyproject.toml"),
|
root.join("pyproject.toml"),
|
||||||
r#"
|
r#"
|
||||||
@@ -552,15 +423,7 @@ expected `.`, `]`
|
|||||||
|
|
||||||
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
||||||
|
|
||||||
with_escaped_paths(|| {
|
snapshot_project!(sub_project);
|
||||||
assert_ron_snapshot!(sub_project, @r#"
|
|
||||||
ProjectMetadata(
|
|
||||||
name: Name("nested-project"),
|
|
||||||
root: "/app/packages/a",
|
|
||||||
options: Options(),
|
|
||||||
)
|
|
||||||
"#);
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -572,7 +435,7 @@ expected `.`, `]`
|
|||||||
|
|
||||||
system
|
system
|
||||||
.memory_file_system()
|
.memory_file_system()
|
||||||
.write_files_all([
|
.write_files([
|
||||||
(
|
(
|
||||||
root.join("pyproject.toml"),
|
root.join("pyproject.toml"),
|
||||||
r#"
|
r#"
|
||||||
@@ -595,19 +458,7 @@ expected `.`, `]`
|
|||||||
|
|
||||||
let root = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
let root = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
||||||
|
|
||||||
with_escaped_paths(|| {
|
snapshot_project!(root);
|
||||||
assert_ron_snapshot!(root, @r#"
|
|
||||||
ProjectMetadata(
|
|
||||||
name: Name("project-root"),
|
|
||||||
root: "/app",
|
|
||||||
options: Options(
|
|
||||||
environment: Some(EnvironmentOptions(
|
|
||||||
r#python-version: Some("3.10"),
|
|
||||||
)),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
"#);
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -623,308 +474,31 @@ expected `.`, `]`
|
|||||||
|
|
||||||
system
|
system
|
||||||
.memory_file_system()
|
.memory_file_system()
|
||||||
.write_files_all([
|
.write_files([
|
||||||
(
|
(
|
||||||
root.join("pyproject.toml"),
|
root.join("pyproject.toml"),
|
||||||
r#"
|
r#"
|
||||||
[project]
|
[project]
|
||||||
name = "super-app"
|
name = "super-app"
|
||||||
requires-python = ">=3.12"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[tool.knot.src]
|
[tool.knot.src]
|
||||||
root = "this_option_is_ignored"
|
root = "this_option_is_ignored"
|
||||||
"#,
|
"#,
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
root.join("knot.toml"),
|
root.join("knot.toml"),
|
||||||
r#"
|
r#"
|
||||||
[src]
|
[src]
|
||||||
root = "src"
|
root = "src"
|
||||||
"#,
|
"#,
|
||||||
),
|
),
|
||||||
])
|
])
|
||||||
.context("Failed to write files")?;
|
.context("Failed to write files")?;
|
||||||
|
|
||||||
let root = ProjectMetadata::discover(&root, &system)?;
|
let root = ProjectMetadata::discover(&root, &system)?;
|
||||||
|
|
||||||
with_escaped_paths(|| {
|
snapshot_project!(root);
|
||||||
assert_ron_snapshot!(root, @r#"
|
|
||||||
ProjectMetadata(
|
|
||||||
name: Name("super-app"),
|
|
||||||
root: "/app",
|
|
||||||
options: Options(
|
|
||||||
environment: Some(EnvironmentOptions(
|
|
||||||
r#python-version: Some("3.12"),
|
|
||||||
)),
|
|
||||||
src: Some(SrcOptions(
|
|
||||||
root: Some("src"),
|
|
||||||
)),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
"#);
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
#[test]
|
|
||||||
fn requires_python_major_minor() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
requires-python = ">=3.12"
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let root = ProjectMetadata::discover(&root, &system)?;
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
root.options
|
|
||||||
.environment
|
|
||||||
.unwrap_or_default()
|
|
||||||
.python_version
|
|
||||||
.as_deref(),
|
|
||||||
Some(&PythonVersion::PY312)
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn requires_python_major_only() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
requires-python = ">=3"
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let root = ProjectMetadata::discover(&root, &system)?;
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
root.options
|
|
||||||
.environment
|
|
||||||
.unwrap_or_default()
|
|
||||||
.python_version
|
|
||||||
.as_deref(),
|
|
||||||
Some(&PythonVersion::from((3, 0)))
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A `requires-python` constraint with major, minor and patch can be simplified
|
|
||||||
/// to major and minor (e.g. 3.12.1 -> 3.12).
|
|
||||||
#[test]
|
|
||||||
fn requires_python_major_minor_patch() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
requires-python = ">=3.12.8"
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let root = ProjectMetadata::discover(&root, &system)?;
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
root.options
|
|
||||||
.environment
|
|
||||||
.unwrap_or_default()
|
|
||||||
.python_version
|
|
||||||
.as_deref(),
|
|
||||||
Some(&PythonVersion::PY312)
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn requires_python_beta_version() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
requires-python = ">= 3.13.0b0"
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let root = ProjectMetadata::discover(&root, &system)?;
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
root.options
|
|
||||||
.environment
|
|
||||||
.unwrap_or_default()
|
|
||||||
.python_version
|
|
||||||
.as_deref(),
|
|
||||||
Some(&PythonVersion::PY313)
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn requires_python_greater_than_major_minor() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
# This is somewhat nonsensical because 3.12.1 > 3.12 is true.
|
|
||||||
# That's why simplifying the constraint to >= 3.12 is correct
|
|
||||||
requires-python = ">3.12"
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let root = ProjectMetadata::discover(&root, &system)?;
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
root.options
|
|
||||||
.environment
|
|
||||||
.unwrap_or_default()
|
|
||||||
.python_version
|
|
||||||
.as_deref(),
|
|
||||||
Some(&PythonVersion::PY312)
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// `python-version` takes precedence if both `requires-python` and `python-version` are configured.
|
|
||||||
#[test]
|
|
||||||
fn requires_python_and_python_version() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
requires-python = ">=3.12"
|
|
||||||
|
|
||||||
[tool.knot.environment]
|
|
||||||
python-version = "3.10"
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let root = ProjectMetadata::discover(&root, &system)?;
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
root.options
|
|
||||||
.environment
|
|
||||||
.unwrap_or_default()
|
|
||||||
.python_version
|
|
||||||
.as_deref(),
|
|
||||||
Some(&PythonVersion::PY310)
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn requires_python_less_than() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
requires-python = "<3.12"
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let Err(error) = ProjectMetadata::discover(&root, &system) else {
|
|
||||||
return Err(anyhow!("Expected project discovery to fail because the `requires-python` doesn't specify a lower bound (it only specifies an upper bound)."));
|
|
||||||
};
|
|
||||||
|
|
||||||
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): value `<3.12` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn requires_python_no_specifiers() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
requires-python = ""
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let Err(error) = ProjectMetadata::discover(&root, &system) else {
|
|
||||||
return Err(anyhow!("Expected project discovery to fail because the `requires-python` specifiers are empty and don't define a lower bound."));
|
|
||||||
};
|
|
||||||
|
|
||||||
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): value `` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn requires_python_too_large_major_version() -> anyhow::Result<()> {
|
|
||||||
let system = TestSystem::default();
|
|
||||||
let root = SystemPathBuf::from("/app");
|
|
||||||
|
|
||||||
system
|
|
||||||
.memory_file_system()
|
|
||||||
.write_file_all(
|
|
||||||
root.join("pyproject.toml"),
|
|
||||||
r#"
|
|
||||||
[project]
|
|
||||||
requires-python = ">=999.0"
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.context("Failed to write file")?;
|
|
||||||
|
|
||||||
let Err(error) = ProjectMetadata::discover(&root, &system) else {
|
|
||||||
return Err(anyhow!("Expected project discovery to fail because of the requires-python major version that is larger than 255."));
|
|
||||||
};
|
|
||||||
|
|
||||||
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): The major version `999` is larger than the maximum supported value 255");
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -934,12 +508,15 @@ expected `.`, `]`
|
|||||||
assert_eq!(error.to_string().replace('\\', "/"), message);
|
assert_eq!(error.to_string().replace('\\', "/"), message);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn with_escaped_paths<R>(f: impl FnOnce() -> R) -> R {
|
/// Snapshots a project but with all paths using unix separators.
|
||||||
let mut settings = insta::Settings::clone_current();
|
#[macro_export]
|
||||||
settings.add_dynamic_redaction(".root", |content, _path| {
|
macro_rules! snapshot_project {
|
||||||
content.as_str().unwrap().replace('\\', "/")
|
($project:expr) => {{
|
||||||
|
assert_ron_snapshot!($project,{
|
||||||
|
".root" => insta::dynamic_redaction(|content, _content_path| {
|
||||||
|
content.as_str().unwrap().replace("\\", "/")
|
||||||
|
}),
|
||||||
});
|
});
|
||||||
|
}};
|
||||||
settings.bind(f)
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,69 +0,0 @@
|
|||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
use crate::metadata::value::ValueSource;
|
|
||||||
|
|
||||||
use super::options::{KnotTomlError, Options};
|
|
||||||
|
|
||||||
/// A `knot.toml` configuration file with the options it contains.
|
|
||||||
pub(crate) struct ConfigurationFile {
|
|
||||||
path: SystemPathBuf,
|
|
||||||
options: Options,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ConfigurationFile {
|
|
||||||
/// Loads the user-level configuration file if it exists.
|
|
||||||
///
|
|
||||||
/// Returns `None` if the file does not exist or if the concept of user-level configurations
|
|
||||||
/// doesn't exist on `system`.
|
|
||||||
pub(crate) fn user(system: &dyn System) -> Result<Option<Self>, ConfigurationFileError> {
|
|
||||||
let Some(configuration_directory) = system.user_config_directory() else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
let knot_toml_path = configuration_directory.join("knot").join("knot.toml");
|
|
||||||
|
|
||||||
tracing::debug!(
|
|
||||||
"Searching for a user-level configuration at `{path}`",
|
|
||||||
path = &knot_toml_path
|
|
||||||
);
|
|
||||||
|
|
||||||
let Ok(knot_toml_str) = system.read_to_string(&knot_toml_path) else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
match Options::from_toml_str(
|
|
||||||
&knot_toml_str,
|
|
||||||
ValueSource::File(Arc::new(knot_toml_path.clone())),
|
|
||||||
) {
|
|
||||||
Ok(options) => Ok(Some(Self {
|
|
||||||
path: knot_toml_path,
|
|
||||||
options,
|
|
||||||
})),
|
|
||||||
Err(error) => Err(ConfigurationFileError::InvalidKnotToml {
|
|
||||||
source: Box::new(error),
|
|
||||||
path: knot_toml_path,
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the path to the configuration file.
|
|
||||||
pub(crate) fn path(&self) -> &SystemPath {
|
|
||||||
&self.path
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn into_options(self) -> Options {
|
|
||||||
self.options
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum ConfigurationFileError {
|
|
||||||
#[error("{path} is not a valid `knot.toml`: {source}")]
|
|
||||||
InvalidKnotToml {
|
|
||||||
source: Box<KnotTomlError>,
|
|
||||||
path: SystemPathBuf,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,55 +1,26 @@
|
|||||||
use crate::metadata::value::{RangedValue, RelativePathBuf, ValueSource, ValueSourceGuard};
|
use red_knot_python_semantic::{
|
||||||
use crate::Db;
|
ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
|
||||||
use red_knot_python_semantic::lint::{GetLintError, Level, LintSource, RuleSelection};
|
};
|
||||||
use red_knot_python_semantic::{ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings};
|
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
||||||
use ruff_db::diagnostic::{DiagnosticFormat, DiagnosticId, OldDiagnosticTrait, Severity, Span};
|
|
||||||
use ruff_db::files::system_path_to_file;
|
|
||||||
use ruff_db::system::{System, SystemPath};
|
|
||||||
use ruff_macros::Combine;
|
use ruff_macros::Combine;
|
||||||
use ruff_python_ast::PythonVersion;
|
|
||||||
use rustc_hash::FxHashMap;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::borrow::Cow;
|
|
||||||
use std::fmt::Debug;
|
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
use super::settings::{Settings, TerminalSettings};
|
|
||||||
|
|
||||||
/// The options for the project.
|
/// The options for the project.
|
||||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Combine, Serialize, Deserialize)]
|
#[derive(Debug, Default, Clone, PartialEq, Eq, Combine, Serialize, Deserialize)]
|
||||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
|
||||||
pub struct Options {
|
pub struct Options {
|
||||||
/// Configures the type checking environment.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub environment: Option<EnvironmentOptions>,
|
pub environment: Option<EnvironmentOptions>,
|
||||||
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub src: Option<SrcOptions>,
|
pub src: Option<SrcOptions>,
|
||||||
|
|
||||||
/// Configures the enabled lints and their severity.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub rules: Option<Rules>,
|
|
||||||
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub terminal: Option<TerminalOptions>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Options {
|
impl Options {
|
||||||
pub(crate) fn from_toml_str(content: &str, source: ValueSource) -> Result<Self, KnotTomlError> {
|
pub(crate) fn from_toml_str(content: &str) -> Result<Self, KnotTomlError> {
|
||||||
let _guard = ValueSourceGuard::new(source, true);
|
|
||||||
let options = toml::from_str(content)?;
|
let options = toml::from_str(content)?;
|
||||||
Ok(options)
|
Ok(options)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn deserialize_with<'de, D>(source: ValueSource, deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
let _guard = ValueSourceGuard::new(source, false);
|
|
||||||
Self::deserialize(deserializer)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn to_program_settings(
|
pub(crate) fn to_program_settings(
|
||||||
&self,
|
&self,
|
||||||
project_root: &SystemPath,
|
project_root: &SystemPath,
|
||||||
@@ -58,12 +29,7 @@ impl Options {
|
|||||||
let (python_version, python_platform) = self
|
let (python_version, python_platform) = self
|
||||||
.environment
|
.environment
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.map(|env| {
|
.map(|env| (env.python_version, env.python_platform.as_ref()))
|
||||||
(
|
|
||||||
env.python_version.as_deref().copied(),
|
|
||||||
env.python_platform.as_deref(),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
|
|
||||||
ProgramSettings {
|
ProgramSettings {
|
||||||
@@ -78,19 +44,19 @@ impl Options {
|
|||||||
project_root: &SystemPath,
|
project_root: &SystemPath,
|
||||||
system: &dyn System,
|
system: &dyn System,
|
||||||
) -> SearchPathSettings {
|
) -> SearchPathSettings {
|
||||||
let src_roots = if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_ref())
|
let src_roots =
|
||||||
{
|
if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_deref()) {
|
||||||
vec![src_root.absolute(project_root, system)]
|
vec![src_root.to_path_buf()]
|
||||||
} else {
|
|
||||||
let src = project_root.join("src");
|
|
||||||
|
|
||||||
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
|
|
||||||
if system.is_directory(&src) {
|
|
||||||
vec![project_root.to_path_buf(), src]
|
|
||||||
} else {
|
} else {
|
||||||
vec![project_root.to_path_buf()]
|
let src = project_root.join("src");
|
||||||
}
|
|
||||||
};
|
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
|
||||||
|
if system.is_directory(&src) {
|
||||||
|
vec![project_root.to_path_buf(), src]
|
||||||
|
} else {
|
||||||
|
vec![project_root.to_path_buf()]
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
let (extra_paths, python, typeshed) = self
|
let (extra_paths, python, typeshed) = self
|
||||||
.environment
|
.environment
|
||||||
@@ -98,275 +64,50 @@ impl Options {
|
|||||||
.map(|env| {
|
.map(|env| {
|
||||||
(
|
(
|
||||||
env.extra_paths.clone(),
|
env.extra_paths.clone(),
|
||||||
env.python.clone(),
|
env.venv_path.clone(),
|
||||||
env.typeshed.clone(),
|
env.typeshed.clone(),
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
|
|
||||||
SearchPathSettings {
|
SearchPathSettings {
|
||||||
extra_paths: extra_paths
|
extra_paths: extra_paths.unwrap_or_default(),
|
||||||
.unwrap_or_default()
|
|
||||||
.into_iter()
|
|
||||||
.map(|path| path.absolute(project_root, system))
|
|
||||||
.collect(),
|
|
||||||
src_roots,
|
src_roots,
|
||||||
custom_typeshed: typeshed.map(|path| path.absolute(project_root, system)),
|
typeshed,
|
||||||
python_path: python
|
site_packages: python
|
||||||
.map(|python_path| {
|
.map(|venv_path| SitePackages::Derived { venv_path })
|
||||||
PythonPath::from_cli_flag(python_path.absolute(project_root, system))
|
.unwrap_or(SitePackages::Known(vec![])),
|
||||||
})
|
|
||||||
.or_else(|| {
|
|
||||||
std::env::var("VIRTUAL_ENV")
|
|
||||||
.ok()
|
|
||||||
.map(PythonPath::from_virtual_env_var)
|
|
||||||
})
|
|
||||||
.unwrap_or_else(|| PythonPath::KnownSitePackages(vec![])),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub(crate) fn to_settings(&self, db: &dyn Db) -> (Settings, Vec<OptionDiagnostic>) {
|
|
||||||
let (rules, diagnostics) = self.to_rule_selection(db);
|
|
||||||
|
|
||||||
let mut settings = Settings::new(rules);
|
|
||||||
|
|
||||||
if let Some(terminal) = self.terminal.as_ref() {
|
|
||||||
settings.set_terminal(TerminalSettings {
|
|
||||||
output_format: terminal
|
|
||||||
.output_format
|
|
||||||
.as_deref()
|
|
||||||
.copied()
|
|
||||||
.unwrap_or_default(),
|
|
||||||
error_on_warning: terminal.error_on_warning.unwrap_or_default(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
(settings, diagnostics)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
fn to_rule_selection(&self, db: &dyn Db) -> (RuleSelection, Vec<OptionDiagnostic>) {
|
|
||||||
let registry = db.lint_registry();
|
|
||||||
let mut diagnostics = Vec::new();
|
|
||||||
|
|
||||||
// Initialize the selection with the defaults
|
|
||||||
let mut selection = RuleSelection::from_registry(registry);
|
|
||||||
|
|
||||||
let rules = self
|
|
||||||
.rules
|
|
||||||
.as_ref()
|
|
||||||
.into_iter()
|
|
||||||
.flat_map(|rules| rules.inner.iter());
|
|
||||||
|
|
||||||
for (rule_name, level) in rules {
|
|
||||||
let source = rule_name.source();
|
|
||||||
match registry.get(rule_name) {
|
|
||||||
Ok(lint) => {
|
|
||||||
let lint_source = match source {
|
|
||||||
ValueSource::File(_) => LintSource::File,
|
|
||||||
ValueSource::Cli => LintSource::Cli,
|
|
||||||
};
|
|
||||||
if let Ok(severity) = Severity::try_from(**level) {
|
|
||||||
selection.enable(lint, severity, lint_source);
|
|
||||||
} else {
|
|
||||||
selection.disable(lint);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(error) => {
|
|
||||||
// `system_path_to_file` can return `Err` if the file was deleted since the configuration
|
|
||||||
// was read. This should be rare and it should be okay to default to not showing a configuration
|
|
||||||
// file in that case.
|
|
||||||
let file = source
|
|
||||||
.file()
|
|
||||||
.and_then(|path| system_path_to_file(db.upcast(), path).ok());
|
|
||||||
|
|
||||||
// TODO: Add a note if the value was configured on the CLI
|
|
||||||
let diagnostic = match error {
|
|
||||||
GetLintError::Unknown(_) => OptionDiagnostic::new(
|
|
||||||
DiagnosticId::UnknownRule,
|
|
||||||
format!("Unknown lint rule `{rule_name}`"),
|
|
||||||
Severity::Warning,
|
|
||||||
),
|
|
||||||
GetLintError::PrefixedWithCategory { suggestion, .. } => {
|
|
||||||
OptionDiagnostic::new(
|
|
||||||
DiagnosticId::UnknownRule,
|
|
||||||
format!(
|
|
||||||
"Unknown lint rule `{rule_name}`. Did you mean `{suggestion}`?"
|
|
||||||
),
|
|
||||||
Severity::Warning,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
GetLintError::Removed(_) => OptionDiagnostic::new(
|
|
||||||
DiagnosticId::UnknownRule,
|
|
||||||
format!("Unknown lint rule `{rule_name}`"),
|
|
||||||
Severity::Warning,
|
|
||||||
),
|
|
||||||
};
|
|
||||||
|
|
||||||
let span = file.map(Span::from).map(|span| {
|
|
||||||
if let Some(range) = rule_name.range() {
|
|
||||||
span.with_range(range)
|
|
||||||
} else {
|
|
||||||
span
|
|
||||||
}
|
|
||||||
});
|
|
||||||
diagnostics.push(diagnostic.with_span(span));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(selection, diagnostics)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
|
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
|
||||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
|
||||||
pub struct EnvironmentOptions {
|
pub struct EnvironmentOptions {
|
||||||
/// Specifies the version of Python that will be used to execute the source code.
|
pub python_version: Option<PythonVersion>,
|
||||||
/// The version should be specified as a string in the format `M.m` where `M` is the major version
|
|
||||||
/// and `m` is the minor (e.g. "3.0" or "3.6").
|
|
||||||
/// If a version is provided, knot will generate errors if the source code makes use of language features
|
|
||||||
/// that are not supported in that version.
|
|
||||||
/// It will also tailor its use of type stub files, which conditionalizes type definitions based on the version.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub python_version: Option<RangedValue<PythonVersion>>,
|
|
||||||
|
|
||||||
/// Specifies the target platform that will be used to execute the source code.
|
pub python_platform: Option<PythonPlatform>,
|
||||||
/// If specified, Red Knot will tailor its use of type stub files,
|
|
||||||
/// which conditionalize type definitions based on the platform.
|
|
||||||
///
|
|
||||||
/// If no platform is specified, knot will use `all` or the current platform in the LSP use case.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub python_platform: Option<RangedValue<PythonPlatform>>,
|
|
||||||
|
|
||||||
/// List of user-provided paths that should take first priority in the module resolution.
|
/// List of user-provided paths that should take first priority in the module resolution.
|
||||||
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
|
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
|
||||||
/// or pyright's stubPath configuration setting.
|
/// or pyright's stubPath configuration setting.
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
pub extra_paths: Option<Vec<SystemPathBuf>>,
|
||||||
pub extra_paths: Option<Vec<RelativePathBuf>>,
|
|
||||||
|
|
||||||
/// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
|
/// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
|
||||||
/// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
|
/// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
|
||||||
/// bundled as a zip file in the binary
|
/// bundled as a zip file in the binary
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
pub typeshed: Option<SystemPathBuf>,
|
||||||
pub typeshed: Option<RelativePathBuf>,
|
|
||||||
|
|
||||||
/// Path to the Python installation from which Red Knot resolves type information and third-party dependencies.
|
// TODO: Rename to python, see https://github.com/astral-sh/ruff/issues/15530
|
||||||
///
|
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
|
||||||
/// Red Knot will search in the path's `site-packages` directories for type information and
|
pub venv_path: Option<SystemPathBuf>,
|
||||||
/// third-party imports.
|
|
||||||
///
|
|
||||||
/// This option is commonly used to specify the path to a virtual environment.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub python: Option<RelativePathBuf>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
|
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
|
||||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
|
||||||
pub struct SrcOptions {
|
pub struct SrcOptions {
|
||||||
/// The root of the project, used for finding first-party modules.
|
/// The root of the project, used for finding first-party modules.
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
pub root: Option<SystemPathBuf>,
|
||||||
pub root: Option<RelativePathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
|
|
||||||
#[serde(rename_all = "kebab-case", transparent)]
|
|
||||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
|
||||||
pub struct Rules {
|
|
||||||
#[cfg_attr(feature = "schemars", schemars(with = "schema::Rules"))]
|
|
||||||
inner: FxHashMap<RangedValue<String>, RangedValue<Level>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromIterator<(RangedValue<String>, RangedValue<Level>)> for Rules {
|
|
||||||
fn from_iter<T: IntoIterator<Item = (RangedValue<String>, RangedValue<Level>)>>(
|
|
||||||
iter: T,
|
|
||||||
) -> Self {
|
|
||||||
Self {
|
|
||||||
inner: iter.into_iter().collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
|
|
||||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
|
||||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
|
||||||
pub struct TerminalOptions {
|
|
||||||
/// The format to use for printing diagnostic messages.
|
|
||||||
///
|
|
||||||
/// Defaults to `full`.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub output_format: Option<RangedValue<DiagnosticFormat>>,
|
|
||||||
/// Use exit code 1 if there are any warning-level diagnostics.
|
|
||||||
///
|
|
||||||
/// Defaults to `false`.
|
|
||||||
pub error_on_warning: Option<bool>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
|
|
||||||
mod schema {
|
|
||||||
use crate::DEFAULT_LINT_REGISTRY;
|
|
||||||
use red_knot_python_semantic::lint::Level;
|
|
||||||
use schemars::gen::SchemaGenerator;
|
|
||||||
use schemars::schema::{
|
|
||||||
InstanceType, Metadata, ObjectValidation, Schema, SchemaObject, SubschemaValidation,
|
|
||||||
};
|
|
||||||
use schemars::JsonSchema;
|
|
||||||
|
|
||||||
pub(super) struct Rules;
|
|
||||||
|
|
||||||
impl JsonSchema for Rules {
|
|
||||||
fn schema_name() -> String {
|
|
||||||
"Rules".to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn json_schema(gen: &mut SchemaGenerator) -> Schema {
|
|
||||||
let registry = &*DEFAULT_LINT_REGISTRY;
|
|
||||||
|
|
||||||
let level_schema = gen.subschema_for::<Level>();
|
|
||||||
|
|
||||||
let properties: schemars::Map<String, Schema> = registry
|
|
||||||
.lints()
|
|
||||||
.iter()
|
|
||||||
.map(|lint| {
|
|
||||||
(
|
|
||||||
lint.name().to_string(),
|
|
||||||
Schema::Object(SchemaObject {
|
|
||||||
metadata: Some(Box::new(Metadata {
|
|
||||||
title: Some(lint.summary().to_string()),
|
|
||||||
description: Some(lint.documentation()),
|
|
||||||
deprecated: lint.status.is_deprecated(),
|
|
||||||
default: Some(lint.default_level.to_string().into()),
|
|
||||||
..Metadata::default()
|
|
||||||
})),
|
|
||||||
subschemas: Some(Box::new(SubschemaValidation {
|
|
||||||
one_of: Some(vec![level_schema.clone()]),
|
|
||||||
..Default::default()
|
|
||||||
})),
|
|
||||||
..Default::default()
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Schema::Object(SchemaObject {
|
|
||||||
instance_type: Some(InstanceType::Object.into()),
|
|
||||||
object: Some(Box::new(ObjectValidation {
|
|
||||||
properties,
|
|
||||||
// Allow unknown rules: Red Knot will warn about them.
|
|
||||||
// It gives a better experience when using an older Red Knot version because
|
|
||||||
// the schema will not deny rules that have been removed in newer versions.
|
|
||||||
additional_properties: Some(Box::new(level_schema)),
|
|
||||||
..ObjectValidation::default()
|
|
||||||
})),
|
|
||||||
|
|
||||||
..Default::default()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Error, Debug)]
|
#[derive(Error, Debug)]
|
||||||
@@ -374,45 +115,3 @@ pub enum KnotTomlError {
|
|||||||
#[error(transparent)]
|
#[error(transparent)]
|
||||||
TomlSyntax(#[from] toml::de::Error),
|
TomlSyntax(#[from] toml::de::Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
|
||||||
pub struct OptionDiagnostic {
|
|
||||||
id: DiagnosticId,
|
|
||||||
message: String,
|
|
||||||
severity: Severity,
|
|
||||||
span: Option<Span>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl OptionDiagnostic {
|
|
||||||
pub fn new(id: DiagnosticId, message: String, severity: Severity) -> Self {
|
|
||||||
Self {
|
|
||||||
id,
|
|
||||||
message,
|
|
||||||
severity,
|
|
||||||
span: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
fn with_span(self, span: Option<Span>) -> Self {
|
|
||||||
OptionDiagnostic { span, ..self }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl OldDiagnosticTrait for OptionDiagnostic {
|
|
||||||
fn id(&self) -> DiagnosticId {
|
|
||||||
self.id
|
|
||||||
}
|
|
||||||
|
|
||||||
fn message(&self) -> Cow<str> {
|
|
||||||
Cow::Borrowed(&self.message)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn span(&self) -> Option<Span> {
|
|
||||||
self.span.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn severity(&self) -> Severity {
|
|
||||||
self.severity
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,12 +1,10 @@
|
|||||||
use crate::metadata::options::Options;
|
use pep440_rs::{Version, VersionSpecifiers};
|
||||||
use crate::metadata::value::{RangedValue, ValueSource, ValueSourceGuard};
|
|
||||||
use pep440_rs::{release_specifiers_to_ranges, Version, VersionSpecifiers};
|
|
||||||
use ruff_python_ast::PythonVersion;
|
|
||||||
use serde::{Deserialize, Deserializer, Serialize};
|
use serde::{Deserialize, Deserializer, Serialize};
|
||||||
use std::collections::Bound;
|
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
|
use crate::metadata::options::Options;
|
||||||
|
|
||||||
/// A `pyproject.toml` as specified in PEP 517.
|
/// A `pyproject.toml` as specified in PEP 517.
|
||||||
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
|
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
|
||||||
#[serde(rename_all = "kebab-case")]
|
#[serde(rename_all = "kebab-case")]
|
||||||
@@ -30,11 +28,7 @@ pub enum PyProjectError {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl PyProject {
|
impl PyProject {
|
||||||
pub(crate) fn from_toml_str(
|
pub(crate) fn from_toml_str(content: &str) -> Result<Self, PyProjectError> {
|
||||||
content: &str,
|
|
||||||
source: ValueSource,
|
|
||||||
) -> Result<Self, PyProjectError> {
|
|
||||||
let _guard = ValueSourceGuard::new(source, true);
|
|
||||||
toml::from_str(content).map_err(PyProjectError::TomlSyntax)
|
toml::from_str(content).map_err(PyProjectError::TomlSyntax)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -49,78 +43,11 @@ pub struct Project {
|
|||||||
///
|
///
|
||||||
/// Note: Intentionally option to be more permissive during deserialization.
|
/// Note: Intentionally option to be more permissive during deserialization.
|
||||||
/// `PackageMetadata::from_pyproject` reports missing names.
|
/// `PackageMetadata::from_pyproject` reports missing names.
|
||||||
pub name: Option<RangedValue<PackageName>>,
|
pub name: Option<PackageName>,
|
||||||
/// The version of the project
|
/// The version of the project
|
||||||
pub version: Option<RangedValue<Version>>,
|
pub version: Option<Version>,
|
||||||
/// The Python versions this project is compatible with.
|
/// The Python versions this project is compatible with.
|
||||||
pub requires_python: Option<RangedValue<VersionSpecifiers>>,
|
pub requires_python: Option<VersionSpecifiers>,
|
||||||
}
|
|
||||||
|
|
||||||
impl Project {
|
|
||||||
pub(super) fn resolve_requires_python_lower_bound(
|
|
||||||
&self,
|
|
||||||
) -> Result<Option<RangedValue<PythonVersion>>, ResolveRequiresPythonError> {
|
|
||||||
let Some(requires_python) = self.requires_python.as_ref() else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
tracing::debug!("Resolving requires-python constraint: `{requires_python}`");
|
|
||||||
|
|
||||||
let ranges = release_specifiers_to_ranges((**requires_python).clone());
|
|
||||||
let Some((lower, _)) = ranges.bounding_range() else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
let version = match lower {
|
|
||||||
// Ex) `>=3.10.1` -> `>=3.10`
|
|
||||||
Bound::Included(version) => version,
|
|
||||||
|
|
||||||
// Ex) `>3.10.1` -> `>=3.10` or `>3.10` -> `>=3.10`
|
|
||||||
// The second example looks obscure at first but it is required because
|
|
||||||
// `3.10.1 > 3.10` is true but we only have two digits here. So including 3.10 is the
|
|
||||||
// right move. Overall, using `>` without a patch release is most likely bogus.
|
|
||||||
Bound::Excluded(version) => version,
|
|
||||||
|
|
||||||
// Ex) `<3.10` or ``
|
|
||||||
Bound::Unbounded => {
|
|
||||||
return Err(ResolveRequiresPythonError::NoLowerBound(
|
|
||||||
requires_python.to_string(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Take the major and minor version
|
|
||||||
let mut versions = version.release().iter().take(2);
|
|
||||||
|
|
||||||
let Some(major) = versions.next().copied() else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
let minor = versions.next().copied().unwrap_or_default();
|
|
||||||
|
|
||||||
tracing::debug!("Resolved requires-python constraint to: {major}.{minor}");
|
|
||||||
|
|
||||||
let major =
|
|
||||||
u8::try_from(major).map_err(|_| ResolveRequiresPythonError::TooLargeMajor(major))?;
|
|
||||||
let minor =
|
|
||||||
u8::try_from(minor).map_err(|_| ResolveRequiresPythonError::TooLargeMajor(minor))?;
|
|
||||||
|
|
||||||
Ok(Some(
|
|
||||||
requires_python
|
|
||||||
.clone()
|
|
||||||
.map_value(|_| PythonVersion::from((major, minor))),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum ResolveRequiresPythonError {
|
|
||||||
#[error("The major version `{0}` is larger than the maximum supported value 255")]
|
|
||||||
TooLargeMajor(u64),
|
|
||||||
#[error("The minor version `{0}` is larger than the maximum supported value 255")]
|
|
||||||
TooLargeMinor(u64),
|
|
||||||
#[error("value `{0}` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.")]
|
|
||||||
NoLowerBound(String),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
|
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
|
||||||
|
|||||||
@@ -1,55 +0,0 @@
|
|||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use red_knot_python_semantic::lint::RuleSelection;
|
|
||||||
use ruff_db::diagnostic::DiagnosticFormat;
|
|
||||||
|
|
||||||
/// The resolved [`super::Options`] for the project.
|
|
||||||
///
|
|
||||||
/// Unlike [`super::Options`], the struct has default values filled in and
|
|
||||||
/// uses representations that are optimized for reads (instead of preserving the source representation).
|
|
||||||
/// It's also not required that this structure precisely resembles the TOML schema, although
|
|
||||||
/// it's encouraged to use a similar structure.
|
|
||||||
///
|
|
||||||
/// It's worth considering to adding a salsa query for specific settings to
|
|
||||||
/// limit the blast radius when only some settings change. For example,
|
|
||||||
/// changing the terminal settings shouldn't invalidate any core type-checking queries.
|
|
||||||
/// This can be achieved by adding a salsa query for the type checking specific settings.
|
|
||||||
///
|
|
||||||
/// Settings that are part of [`red_knot_python_semantic::ProgramSettings`] are not included here.
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
|
||||||
pub struct Settings {
|
|
||||||
rules: Arc<RuleSelection>,
|
|
||||||
|
|
||||||
terminal: TerminalSettings,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Settings {
|
|
||||||
pub fn new(rules: RuleSelection) -> Self {
|
|
||||||
Self {
|
|
||||||
rules: Arc::new(rules),
|
|
||||||
terminal: TerminalSettings::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn rules(&self) -> &RuleSelection {
|
|
||||||
&self.rules
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn to_rules(&self) -> Arc<RuleSelection> {
|
|
||||||
self.rules.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn terminal(&self) -> &TerminalSettings {
|
|
||||||
&self.terminal
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_terminal(&mut self, terminal: TerminalSettings) {
|
|
||||||
self.terminal = terminal;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, Default)]
|
|
||||||
pub struct TerminalSettings {
|
|
||||||
pub output_format: DiagnosticFormat,
|
|
||||||
pub error_on_warning: bool,
|
|
||||||
}
|
|
||||||
@@ -1,346 +0,0 @@
|
|||||||
use crate::combine::Combine;
|
|
||||||
use crate::Db;
|
|
||||||
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
|
||||||
use ruff_macros::Combine;
|
|
||||||
use ruff_text_size::{TextRange, TextSize};
|
|
||||||
use serde::{Deserialize, Deserializer};
|
|
||||||
use std::cell::RefCell;
|
|
||||||
use std::cmp::Ordering;
|
|
||||||
use std::fmt;
|
|
||||||
use std::hash::{Hash, Hasher};
|
|
||||||
use std::ops::{Deref, DerefMut};
|
|
||||||
use std::sync::Arc;
|
|
||||||
use toml::Spanned;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum ValueSource {
|
|
||||||
/// Value loaded from a project's configuration file.
|
|
||||||
///
|
|
||||||
/// Ideally, we'd use [`ruff_db::files::File`] but we can't because the database hasn't been
|
|
||||||
/// created when loading the configuration.
|
|
||||||
File(Arc<SystemPathBuf>),
|
|
||||||
|
|
||||||
/// The value comes from a CLI argument, while it's left open if specified using a short argument,
|
|
||||||
/// long argument (`--extra-paths`) or `--config key=value`.
|
|
||||||
Cli,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ValueSource {
|
|
||||||
pub fn file(&self) -> Option<&SystemPath> {
|
|
||||||
match self {
|
|
||||||
ValueSource::File(path) => Some(&**path),
|
|
||||||
ValueSource::Cli => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
thread_local! {
|
|
||||||
/// Serde doesn't provide any easy means to pass a value to a [`Deserialize`] implementation,
|
|
||||||
/// but we want to associate each deserialized [`RelativePath`] with the source from
|
|
||||||
/// which it originated. We use a thread local variable to work around this limitation.
|
|
||||||
///
|
|
||||||
/// Use the [`ValueSourceGuard`] to initialize the thread local before calling into any
|
|
||||||
/// deserialization code. It ensures that the thread local variable gets cleaned up
|
|
||||||
/// once deserialization is done (once the guard gets dropped).
|
|
||||||
static VALUE_SOURCE: RefCell<Option<(ValueSource, bool)>> = const { RefCell::new(None) };
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Guard to safely change the [`VALUE_SOURCE`] for the current thread.
|
|
||||||
#[must_use]
|
|
||||||
pub(super) struct ValueSourceGuard {
|
|
||||||
prev_value: Option<(ValueSource, bool)>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ValueSourceGuard {
|
|
||||||
pub(super) fn new(source: ValueSource, is_toml: bool) -> Self {
|
|
||||||
let prev = VALUE_SOURCE.replace(Some((source, is_toml)));
|
|
||||||
Self { prev_value: prev }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for ValueSourceGuard {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
VALUE_SOURCE.set(self.prev_value.take());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A value that "remembers" where it comes from (source) and its range in source.
|
|
||||||
///
|
|
||||||
/// ## Equality, Hash, and Ordering
|
|
||||||
/// The equality, hash, and ordering are solely based on the value. They disregard the value's range
|
|
||||||
/// or source.
|
|
||||||
///
|
|
||||||
/// This ensures that two resolved configurations are identical even if the position of a value has changed
|
|
||||||
/// or if the values were loaded from different sources.
|
|
||||||
#[derive(Clone, serde::Serialize)]
|
|
||||||
#[serde(transparent)]
|
|
||||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
|
||||||
pub struct RangedValue<T> {
|
|
||||||
value: T,
|
|
||||||
#[serde(skip)]
|
|
||||||
source: ValueSource,
|
|
||||||
|
|
||||||
/// The byte range of `value` in `source`.
|
|
||||||
///
|
|
||||||
/// Can be `None` because not all sources support a range.
|
|
||||||
/// For example, arguments provided on the CLI won't have a range attached.
|
|
||||||
#[serde(skip)]
|
|
||||||
range: Option<TextRange>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> RangedValue<T> {
|
|
||||||
pub fn new(value: T, source: ValueSource) -> Self {
|
|
||||||
Self::with_range(value, source, TextRange::default())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn cli(value: T) -> Self {
|
|
||||||
Self::with_range(value, ValueSource::Cli, TextRange::default())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_range(value: T, source: ValueSource, range: TextRange) -> Self {
|
|
||||||
Self {
|
|
||||||
value,
|
|
||||||
range: Some(range),
|
|
||||||
source,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn range(&self) -> Option<TextRange> {
|
|
||||||
self.range
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn source(&self) -> &ValueSource {
|
|
||||||
&self.source
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub fn with_source(mut self, source: ValueSource) -> Self {
|
|
||||||
self.source = source;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub fn map_value<R>(self, f: impl FnOnce(T) -> R) -> RangedValue<R> {
|
|
||||||
RangedValue {
|
|
||||||
value: f(self.value),
|
|
||||||
source: self.source,
|
|
||||||
range: self.range,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn into_inner(self) -> T {
|
|
||||||
self.value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> Combine for RangedValue<T> {
|
|
||||||
fn combine(self, _other: Self) -> Self
|
|
||||||
where
|
|
||||||
Self: Sized,
|
|
||||||
{
|
|
||||||
self
|
|
||||||
}
|
|
||||||
fn combine_with(&mut self, _other: Self) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> IntoIterator for RangedValue<T>
|
|
||||||
where
|
|
||||||
T: IntoIterator,
|
|
||||||
{
|
|
||||||
type Item = T::Item;
|
|
||||||
type IntoIter = T::IntoIter;
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
|
||||||
self.value.into_iter()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// The type already has an `iter` method thanks to `Deref`.
|
|
||||||
#[allow(clippy::into_iter_without_iter)]
|
|
||||||
impl<'a, T> IntoIterator for &'a RangedValue<T>
|
|
||||||
where
|
|
||||||
&'a T: IntoIterator,
|
|
||||||
{
|
|
||||||
type Item = <&'a T as IntoIterator>::Item;
|
|
||||||
type IntoIter = <&'a T as IntoIterator>::IntoIter;
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
|
||||||
self.value.into_iter()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// The type already has a `into_iter_mut` method thanks to `DerefMut`.
|
|
||||||
#[allow(clippy::into_iter_without_iter)]
|
|
||||||
impl<'a, T> IntoIterator for &'a mut RangedValue<T>
|
|
||||||
where
|
|
||||||
&'a mut T: IntoIterator,
|
|
||||||
{
|
|
||||||
type Item = <&'a mut T as IntoIterator>::Item;
|
|
||||||
type IntoIter = <&'a mut T as IntoIterator>::IntoIter;
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
|
||||||
self.value.into_iter()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> fmt::Debug for RangedValue<T>
|
|
||||||
where
|
|
||||||
T: fmt::Debug,
|
|
||||||
{
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
self.value.fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> fmt::Display for RangedValue<T>
|
|
||||||
where
|
|
||||||
T: fmt::Display,
|
|
||||||
{
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
self.value.fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> Deref for RangedValue<T> {
|
|
||||||
type Target = T;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> DerefMut for RangedValue<T> {
|
|
||||||
fn deref_mut(&mut self) -> &mut T {
|
|
||||||
&mut self.value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T, U: ?Sized> AsRef<U> for RangedValue<T>
|
|
||||||
where
|
|
||||||
T: AsRef<U>,
|
|
||||||
{
|
|
||||||
fn as_ref(&self) -> &U {
|
|
||||||
self.value.as_ref()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: PartialEq> PartialEq for RangedValue<T> {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.value.eq(&other.value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: PartialEq<T>> PartialEq<T> for RangedValue<T> {
|
|
||||||
fn eq(&self, other: &T) -> bool {
|
|
||||||
self.value.eq(other)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Eq> Eq for RangedValue<T> {}
|
|
||||||
|
|
||||||
impl<T: Hash> Hash for RangedValue<T> {
|
|
||||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
|
||||||
self.value.hash(state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: PartialOrd> PartialOrd for RangedValue<T> {
|
|
||||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
|
||||||
self.value.partial_cmp(&other.value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: PartialOrd<T>> PartialOrd<T> for RangedValue<T> {
|
|
||||||
fn partial_cmp(&self, other: &T) -> Option<Ordering> {
|
|
||||||
self.value.partial_cmp(other)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Ord> Ord for RangedValue<T> {
|
|
||||||
fn cmp(&self, other: &Self) -> Ordering {
|
|
||||||
self.value.cmp(&other.value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'de, T> Deserialize<'de> for RangedValue<T>
|
|
||||||
where
|
|
||||||
T: Deserialize<'de>,
|
|
||||||
{
|
|
||||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: Deserializer<'de>,
|
|
||||||
{
|
|
||||||
VALUE_SOURCE.with_borrow(|source| {
|
|
||||||
let (source, has_span) = source.clone().unwrap();
|
|
||||||
|
|
||||||
if has_span {
|
|
||||||
let spanned: Spanned<T> = Spanned::deserialize(deserializer)?;
|
|
||||||
let span = spanned.span();
|
|
||||||
let range = TextRange::new(
|
|
||||||
TextSize::try_from(span.start)
|
|
||||||
.expect("Configuration file to be smaller than 4GB"),
|
|
||||||
TextSize::try_from(span.end)
|
|
||||||
.expect("Configuration file to be smaller than 4GB"),
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(Self::with_range(spanned.into_inner(), source, range))
|
|
||||||
} else {
|
|
||||||
Ok(Self::new(T::deserialize(deserializer)?, source))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A possibly relative path in a configuration file.
|
|
||||||
///
|
|
||||||
/// Relative paths in configuration files or from CLI options
|
|
||||||
/// require different anchoring:
|
|
||||||
///
|
|
||||||
/// * CLI: The path is relative to the current working directory
|
|
||||||
/// * Configuration file: The path is relative to the project's root.
|
|
||||||
#[derive(
|
|
||||||
Debug,
|
|
||||||
Clone,
|
|
||||||
serde::Serialize,
|
|
||||||
serde::Deserialize,
|
|
||||||
PartialEq,
|
|
||||||
Eq,
|
|
||||||
PartialOrd,
|
|
||||||
Ord,
|
|
||||||
Hash,
|
|
||||||
Combine,
|
|
||||||
)]
|
|
||||||
#[serde(transparent)]
|
|
||||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
|
||||||
pub struct RelativePathBuf(RangedValue<SystemPathBuf>);
|
|
||||||
|
|
||||||
impl RelativePathBuf {
|
|
||||||
pub fn new(path: impl AsRef<SystemPath>, source: ValueSource) -> Self {
|
|
||||||
Self(RangedValue::new(path.as_ref().to_path_buf(), source))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn cli(path: impl AsRef<SystemPath>) -> Self {
|
|
||||||
Self::new(path, ValueSource::Cli)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the relative path as specified by the user.
|
|
||||||
pub fn path(&self) -> &SystemPath {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the owned relative path.
|
|
||||||
pub fn into_path_buf(self) -> SystemPathBuf {
|
|
||||||
self.0.into_inner()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Resolves the absolute path for `self` based on its origin.
|
|
||||||
pub fn absolute_with_db(&self, db: &dyn Db) -> SystemPathBuf {
|
|
||||||
self.absolute(db.project().root(db), db.system())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Resolves the absolute path for `self` based on its origin.
|
|
||||||
pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> SystemPathBuf {
|
|
||||||
let relative_to = match &self.0.source {
|
|
||||||
ValueSource::File(_) => project_root,
|
|
||||||
ValueSource::Cli => system.current_directory(),
|
|
||||||
};
|
|
||||||
|
|
||||||
SystemPath::absolute(&self.0, relative_to)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
---
|
||||||
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
|
expression: root
|
||||||
|
---
|
||||||
|
ProjectMetadata(
|
||||||
|
name: Name("project-root"),
|
||||||
|
root: "/app",
|
||||||
|
options: Options(
|
||||||
|
environment: None,
|
||||||
|
src: Some(SrcOptions(
|
||||||
|
root: Some("src"),
|
||||||
|
)),
|
||||||
|
),
|
||||||
|
)
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
---
|
||||||
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
|
expression: sub_project
|
||||||
|
---
|
||||||
|
ProjectMetadata(
|
||||||
|
name: Name("nested-project"),
|
||||||
|
root: "/app/packages/a",
|
||||||
|
options: Options(
|
||||||
|
environment: None,
|
||||||
|
src: Some(SrcOptions(
|
||||||
|
root: Some("src"),
|
||||||
|
)),
|
||||||
|
),
|
||||||
|
)
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
---
|
||||||
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
|
expression: root
|
||||||
|
---
|
||||||
|
ProjectMetadata(
|
||||||
|
name: Name("project-root"),
|
||||||
|
root: "/app",
|
||||||
|
options: Options(
|
||||||
|
environment: Some(EnvironmentOptions(
|
||||||
|
r#python-version: Some("3.10"),
|
||||||
|
r#python-platform: None,
|
||||||
|
r#extra-paths: None,
|
||||||
|
typeshed: None,
|
||||||
|
r#venv-path: None,
|
||||||
|
)),
|
||||||
|
src: None,
|
||||||
|
),
|
||||||
|
)
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
---
|
||||||
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
|
expression: sub_project
|
||||||
|
---
|
||||||
|
ProjectMetadata(
|
||||||
|
name: Name("nested-project"),
|
||||||
|
root: "/app/packages/a",
|
||||||
|
options: Options(
|
||||||
|
environment: None,
|
||||||
|
src: None,
|
||||||
|
),
|
||||||
|
)
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
---
|
||||||
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
|
expression: root
|
||||||
|
---
|
||||||
|
ProjectMetadata(
|
||||||
|
name: Name("super-app"),
|
||||||
|
root: "/app",
|
||||||
|
options: Options(
|
||||||
|
environment: None,
|
||||||
|
src: Some(SrcOptions(
|
||||||
|
root: Some("src"),
|
||||||
|
)),
|
||||||
|
),
|
||||||
|
)
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
---
|
||||||
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
|
expression: project
|
||||||
|
---
|
||||||
|
ProjectMetadata(
|
||||||
|
name: Name("backend"),
|
||||||
|
root: "/app",
|
||||||
|
options: Options(
|
||||||
|
environment: None,
|
||||||
|
src: None,
|
||||||
|
),
|
||||||
|
)
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
---
|
||||||
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
|
expression: project
|
||||||
|
---
|
||||||
|
ProjectMetadata(
|
||||||
|
name: Name("app"),
|
||||||
|
root: "/app",
|
||||||
|
options: Options(
|
||||||
|
environment: None,
|
||||||
|
src: None,
|
||||||
|
),
|
||||||
|
)
|
||||||
@@ -1,256 +0,0 @@
|
|||||||
use crate::{Db, IOErrorDiagnostic, IOErrorKind, Project};
|
|
||||||
use ruff_db::files::{system_path_to_file, File};
|
|
||||||
use ruff_db::system::walk_directory::{ErrorKind, WalkDirectoryBuilder, WalkState};
|
|
||||||
use ruff_db::system::{FileType, SystemPath, SystemPathBuf};
|
|
||||||
use ruff_python_ast::PySourceType;
|
|
||||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
/// Filter that decides which files are included in the project.
|
|
||||||
///
|
|
||||||
/// In the future, this will hold a reference to the `include` and `exclude` pattern.
|
|
||||||
///
|
|
||||||
/// This struct mainly exists because `dyn Db` isn't `Send` or `Sync`, making it impossible
|
|
||||||
/// to access fields from within the walker.
|
|
||||||
#[derive(Default, Debug)]
|
|
||||||
pub(crate) struct ProjectFilesFilter<'a> {
|
|
||||||
/// The same as [`Project::included_paths_or_root`].
|
|
||||||
included_paths: &'a [SystemPathBuf],
|
|
||||||
|
|
||||||
/// The filter skips checking if the path is in `included_paths` if set to `true`.
|
|
||||||
///
|
|
||||||
/// Skipping this check is useful when the walker only walks over `included_paths`.
|
|
||||||
skip_included_paths: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> ProjectFilesFilter<'a> {
|
|
||||||
pub(crate) fn from_project(db: &'a dyn Db, project: Project) -> Self {
|
|
||||||
Self {
|
|
||||||
included_paths: project.included_paths_or_root(db),
|
|
||||||
skip_included_paths: false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if a file is part of the project and included in the paths to check.
|
|
||||||
///
|
|
||||||
/// A file is included in the checked files if it is a sub path of the project's root
|
|
||||||
/// (when no CLI path arguments are specified) or if it is a sub path of any path provided on the CLI (`knot check <paths>`) AND:
|
|
||||||
///
|
|
||||||
/// * It matches a positive `include` pattern and isn't excluded by a later negative `include` pattern.
|
|
||||||
/// * It doesn't match a positive `exclude` pattern or is re-included by a later negative `exclude` pattern.
|
|
||||||
///
|
|
||||||
/// ## Note
|
|
||||||
///
|
|
||||||
/// This method may return `true` for files that don't end up being included when walking the
|
|
||||||
/// project tree because it doesn't consider `.gitignore` and other ignore files when deciding
|
|
||||||
/// if a file's included.
|
|
||||||
pub(crate) fn is_included(&self, path: &SystemPath) -> bool {
|
|
||||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
|
||||||
enum CheckPathMatch {
|
|
||||||
/// The path is a partial match of the checked path (it's a sub path)
|
|
||||||
Partial,
|
|
||||||
|
|
||||||
/// The path matches a check path exactly.
|
|
||||||
Full,
|
|
||||||
}
|
|
||||||
|
|
||||||
let m = if self.skip_included_paths {
|
|
||||||
Some(CheckPathMatch::Partial)
|
|
||||||
} else {
|
|
||||||
self.included_paths
|
|
||||||
.iter()
|
|
||||||
.filter_map(|included_path| {
|
|
||||||
if let Ok(relative_path) = path.strip_prefix(included_path) {
|
|
||||||
// Exact matches are always included
|
|
||||||
if relative_path.as_str().is_empty() {
|
|
||||||
Some(CheckPathMatch::Full)
|
|
||||||
} else {
|
|
||||||
Some(CheckPathMatch::Partial)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.max()
|
|
||||||
};
|
|
||||||
|
|
||||||
match m {
|
|
||||||
None => false,
|
|
||||||
Some(CheckPathMatch::Partial) => {
|
|
||||||
// TODO: For partial matches, only include the file if it is included by the project's include/exclude settings.
|
|
||||||
true
|
|
||||||
}
|
|
||||||
Some(CheckPathMatch::Full) => true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) struct ProjectFilesWalker<'a> {
|
|
||||||
walker: WalkDirectoryBuilder,
|
|
||||||
|
|
||||||
filter: ProjectFilesFilter<'a>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> ProjectFilesWalker<'a> {
|
|
||||||
pub(crate) fn new(db: &'a dyn Db) -> Self {
|
|
||||||
let project = db.project();
|
|
||||||
|
|
||||||
let mut filter = ProjectFilesFilter::from_project(db, project);
|
|
||||||
// It's unnecessary to filter on included paths because it only iterates over those to start with.
|
|
||||||
filter.skip_included_paths = true;
|
|
||||||
|
|
||||||
Self::from_paths(db, project.included_paths_or_root(db), filter)
|
|
||||||
.expect("included_paths_or_root to never return an empty iterator")
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a walker for indexing the project files incrementally.
|
|
||||||
///
|
|
||||||
/// The main difference to a full project walk is that `paths` may contain paths
|
|
||||||
/// that aren't part of the included files.
|
|
||||||
pub(crate) fn incremental<P>(db: &'a dyn Db, paths: impl IntoIterator<Item = P>) -> Option<Self>
|
|
||||||
where
|
|
||||||
P: AsRef<SystemPath>,
|
|
||||||
{
|
|
||||||
let project = db.project();
|
|
||||||
|
|
||||||
let filter = ProjectFilesFilter::from_project(db, project);
|
|
||||||
|
|
||||||
Self::from_paths(db, paths, filter)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_paths<P>(
|
|
||||||
db: &'a dyn Db,
|
|
||||||
paths: impl IntoIterator<Item = P>,
|
|
||||||
filter: ProjectFilesFilter<'a>,
|
|
||||||
) -> Option<Self>
|
|
||||||
where
|
|
||||||
P: AsRef<SystemPath>,
|
|
||||||
{
|
|
||||||
let mut paths = paths.into_iter();
|
|
||||||
|
|
||||||
let mut walker = db.system().walk_directory(paths.next()?.as_ref());
|
|
||||||
|
|
||||||
for path in paths {
|
|
||||||
walker = walker.add(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(Self { walker, filter })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Walks the project paths and collects the paths of all files that
|
|
||||||
/// are included in the project.
|
|
||||||
pub(crate) fn walk_paths(self) -> (Vec<SystemPathBuf>, Vec<IOErrorDiagnostic>) {
|
|
||||||
let paths = std::sync::Mutex::new(Vec::new());
|
|
||||||
let diagnostics = std::sync::Mutex::new(Vec::new());
|
|
||||||
|
|
||||||
self.walker.run(|| {
|
|
||||||
Box::new(|entry| {
|
|
||||||
match entry {
|
|
||||||
Ok(entry) => {
|
|
||||||
if !self.filter.is_included(entry.path()) {
|
|
||||||
tracing::debug!("Ignoring not-included path: {}", entry.path());
|
|
||||||
return WalkState::Skip;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Skip over any non python files to avoid creating too many entries in `Files`.
|
|
||||||
match entry.file_type() {
|
|
||||||
FileType::File => {
|
|
||||||
if entry
|
|
||||||
.path()
|
|
||||||
.extension()
|
|
||||||
.and_then(PySourceType::try_from_extension)
|
|
||||||
.is_some()
|
|
||||||
{
|
|
||||||
let mut paths = paths.lock().unwrap();
|
|
||||||
paths.push(entry.into_path());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
FileType::Directory | FileType::Symlink => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(error) => match error.kind() {
|
|
||||||
ErrorKind::Loop { .. } => {
|
|
||||||
unreachable!("Loops shouldn't be possible without following symlinks.")
|
|
||||||
}
|
|
||||||
ErrorKind::Io { path, err } => {
|
|
||||||
let mut diagnostics = diagnostics.lock().unwrap();
|
|
||||||
let error = if let Some(path) = path {
|
|
||||||
WalkError::IOPathError {
|
|
||||||
path: path.clone(),
|
|
||||||
error: err.to_string(),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
WalkError::IOError {
|
|
||||||
error: err.to_string(),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
diagnostics.push(IOErrorDiagnostic {
|
|
||||||
file: None,
|
|
||||||
error: IOErrorKind::Walk(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
ErrorKind::NonUtf8Path { path } => {
|
|
||||||
diagnostics.lock().unwrap().push(IOErrorDiagnostic {
|
|
||||||
file: None,
|
|
||||||
error: IOErrorKind::Walk(WalkError::NonUtf8Path {
|
|
||||||
path: path.clone(),
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
WalkState::Continue
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
(
|
|
||||||
paths.into_inner().unwrap(),
|
|
||||||
diagnostics.into_inner().unwrap(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn collect_vec(self, db: &dyn Db) -> (Vec<File>, Vec<IOErrorDiagnostic>) {
|
|
||||||
let (paths, diagnostics) = self.walk_paths();
|
|
||||||
|
|
||||||
(
|
|
||||||
paths
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(move |path| {
|
|
||||||
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
|
|
||||||
// We can ignore this.
|
|
||||||
system_path_to_file(db.upcast(), &path).ok()
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
diagnostics,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn collect_set(self, db: &dyn Db) -> (FxHashSet<File>, Vec<IOErrorDiagnostic>) {
|
|
||||||
let (paths, diagnostics) = self.walk_paths();
|
|
||||||
|
|
||||||
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
|
|
||||||
|
|
||||||
for path in paths {
|
|
||||||
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
|
|
||||||
files.insert(file);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(files, diagnostics)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Error, Debug, Clone)]
|
|
||||||
pub(crate) enum WalkError {
|
|
||||||
#[error("`{path}`: {error}")]
|
|
||||||
IOPathError { path: SystemPathBuf, error: String },
|
|
||||||
|
|
||||||
#[error("Failed to walk project directory: {error}")]
|
|
||||||
IOError { error: String },
|
|
||||||
|
|
||||||
#[error("`{path}` is not a valid UTF-8 path")]
|
|
||||||
NonUtf8Path { path: PathBuf },
|
|
||||||
}
|
|
||||||
@@ -6,7 +6,7 @@ use tracing::info;
|
|||||||
use red_knot_python_semantic::system_module_search_paths;
|
use red_knot_python_semantic::system_module_search_paths;
|
||||||
use ruff_cache::{CacheKey, CacheKeyHasher};
|
use ruff_cache::{CacheKey, CacheKeyHasher};
|
||||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
use ruff_db::system::{SystemPath, SystemPathBuf};
|
||||||
use ruff_db::Upcast;
|
use ruff_db::{Db as _, Upcast};
|
||||||
|
|
||||||
use crate::db::{Db, ProjectDatabase};
|
use crate::db::{Db, ProjectDatabase};
|
||||||
use crate::watch::Watcher;
|
use crate::watch::Watcher;
|
||||||
@@ -42,9 +42,9 @@ impl ProjectWatcher {
|
|||||||
|
|
||||||
pub fn update(&mut self, db: &ProjectDatabase) {
|
pub fn update(&mut self, db: &ProjectDatabase) {
|
||||||
let search_paths: Vec<_> = system_module_search_paths(db.upcast()).collect();
|
let search_paths: Vec<_> = system_module_search_paths(db.upcast()).collect();
|
||||||
let project_path = db.project().root(db);
|
let project_path = db.project().root(db).to_path_buf();
|
||||||
|
|
||||||
let new_cache_key = Self::compute_cache_key(project_path, &search_paths);
|
let new_cache_key = Self::compute_cache_key(&project_path, &search_paths);
|
||||||
|
|
||||||
if self.cache_key == Some(new_cache_key) {
|
if self.cache_key == Some(new_cache_key) {
|
||||||
return;
|
return;
|
||||||
@@ -68,47 +68,31 @@ impl ProjectWatcher {
|
|||||||
|
|
||||||
self.has_errored_paths = false;
|
self.has_errored_paths = false;
|
||||||
|
|
||||||
let config_paths = db
|
let project_path = db
|
||||||
.project()
|
.system()
|
||||||
.metadata(db)
|
.canonicalize_path(&project_path)
|
||||||
.extra_configuration_paths()
|
.unwrap_or(project_path);
|
||||||
.iter()
|
|
||||||
.map(SystemPathBuf::as_path);
|
|
||||||
|
|
||||||
// Watch both the project root and any paths provided by the user on the CLI (removing any redundant nested paths).
|
|
||||||
// This is necessary to observe changes to files that are outside the project root.
|
|
||||||
// We always need to watch the project root to observe changes to its configuration.
|
|
||||||
let included_paths = ruff_db::system::deduplicate_nested_paths(
|
|
||||||
std::iter::once(project_path).chain(
|
|
||||||
db.project()
|
|
||||||
.included_paths_list(db)
|
|
||||||
.iter()
|
|
||||||
.map(SystemPathBuf::as_path),
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
// Find the non-overlapping module search paths and filter out paths that are already covered by the project.
|
// Find the non-overlapping module search paths and filter out paths that are already covered by the project.
|
||||||
// Module search paths are already canonicalized.
|
// Module search paths are already canonicalized.
|
||||||
let unique_module_paths = ruff_db::system::deduplicate_nested_paths(
|
let unique_module_paths = ruff_db::system::deduplicate_nested_paths(
|
||||||
search_paths
|
search_paths
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(|path| !path.starts_with(project_path)),
|
.filter(|path| !path.starts_with(&project_path)),
|
||||||
);
|
)
|
||||||
|
.map(SystemPath::to_path_buf);
|
||||||
|
|
||||||
// Now add the new paths, first starting with the project path and then
|
// Now add the new paths, first starting with the project path and then
|
||||||
// adding the library search paths, and finally the paths for configurations.
|
// adding the library search paths.
|
||||||
for path in included_paths
|
for path in std::iter::once(project_path).chain(unique_module_paths) {
|
||||||
.chain(unique_module_paths)
|
|
||||||
.chain(config_paths)
|
|
||||||
{
|
|
||||||
// Log a warning. It's not worth aborting if registering a single folder fails because
|
// Log a warning. It's not worth aborting if registering a single folder fails because
|
||||||
// Ruff otherwise stills works as expected.
|
// Ruff otherwise stills works as expected.
|
||||||
if let Err(error) = self.watcher.watch(path) {
|
if let Err(error) = self.watcher.watch(&path) {
|
||||||
// TODO: Log a user-facing warning.
|
// TODO: Log a user-facing warning.
|
||||||
tracing::warn!("Failed to setup watcher for path `{path}`: {error}. You have to restart Ruff after making changes to files under this path or you might see stale results.");
|
tracing::warn!("Failed to setup watcher for path `{path}`: {error}. You have to restart Ruff after making changes to files under this path or you might see stale results.");
|
||||||
self.has_errored_paths = true;
|
self.has_errored_paths = true;
|
||||||
} else {
|
} else {
|
||||||
self.watched_paths.push(path.to_path_buf());
|
self.watched_paths.push(path);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
use anyhow::{anyhow, Context};
|
use anyhow::{anyhow, Context};
|
||||||
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
||||||
use red_knot_python_semantic::{HasType, SemanticModel};
|
use red_knot_python_semantic::{HasTy, SemanticModel};
|
||||||
use ruff_db::files::{system_path_to_file, File};
|
use ruff_db::files::{system_path_to_file, File};
|
||||||
use ruff_db::parsed::parsed_module;
|
use ruff_db::parsed::parsed_module;
|
||||||
use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
|
use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
|
||||||
@@ -117,7 +117,7 @@ fn run_corpus_tests(pattern: &str) -> anyhow::Result<()> {
|
|||||||
let code = std::fs::read_to_string(source)?;
|
let code = std::fs::read_to_string(source)?;
|
||||||
|
|
||||||
let mut check_with_file_name = |path: &SystemPath| {
|
let mut check_with_file_name = |path: &SystemPath| {
|
||||||
memory_fs.write_file_all(path, &code).unwrap();
|
memory_fs.write_file(path, &code).unwrap();
|
||||||
File::sync_path(&mut db, path);
|
File::sync_path(&mut db, path);
|
||||||
|
|
||||||
// this test is only asserting that we can pull every expression type without a panic
|
// this test is only asserting that we can pull every expression type without a panic
|
||||||
@@ -197,10 +197,10 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
|
|||||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
fn visit_stmt(&mut self, stmt: &Stmt) {
|
||||||
match stmt {
|
match stmt {
|
||||||
Stmt::FunctionDef(function) => {
|
Stmt::FunctionDef(function) => {
|
||||||
let _ty = function.inferred_type(&self.model);
|
let _ty = function.ty(&self.model);
|
||||||
}
|
}
|
||||||
Stmt::ClassDef(class) => {
|
Stmt::ClassDef(class) => {
|
||||||
let _ty = class.inferred_type(&self.model);
|
let _ty = class.ty(&self.model);
|
||||||
}
|
}
|
||||||
Stmt::Assign(assign) => {
|
Stmt::Assign(assign) => {
|
||||||
for target in &assign.targets {
|
for target in &assign.targets {
|
||||||
@@ -216,17 +216,6 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
|
|||||||
self.visit_body(&for_stmt.orelse);
|
self.visit_body(&for_stmt.orelse);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
Stmt::With(with_stmt) => {
|
|
||||||
for item in &with_stmt.items {
|
|
||||||
if let Some(target) = &item.optional_vars {
|
|
||||||
self.visit_target(target);
|
|
||||||
}
|
|
||||||
self.visit_expr(&item.context_expr);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.visit_body(&with_stmt.body);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
Stmt::AnnAssign(_)
|
Stmt::AnnAssign(_)
|
||||||
| Stmt::Return(_)
|
| Stmt::Return(_)
|
||||||
| Stmt::Delete(_)
|
| Stmt::Delete(_)
|
||||||
@@ -234,6 +223,7 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
|
|||||||
| Stmt::TypeAlias(_)
|
| Stmt::TypeAlias(_)
|
||||||
| Stmt::While(_)
|
| Stmt::While(_)
|
||||||
| Stmt::If(_)
|
| Stmt::If(_)
|
||||||
|
| Stmt::With(_)
|
||||||
| Stmt::Match(_)
|
| Stmt::Match(_)
|
||||||
| Stmt::Raise(_)
|
| Stmt::Raise(_)
|
||||||
| Stmt::Try(_)
|
| Stmt::Try(_)
|
||||||
@@ -253,25 +243,25 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn visit_expr(&mut self, expr: &Expr) {
|
fn visit_expr(&mut self, expr: &Expr) {
|
||||||
let _ty = expr.inferred_type(&self.model);
|
let _ty = expr.ty(&self.model);
|
||||||
|
|
||||||
source_order::walk_expr(self, expr);
|
source_order::walk_expr(self, expr);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_parameter(&mut self, parameter: &Parameter) {
|
fn visit_parameter(&mut self, parameter: &Parameter) {
|
||||||
let _ty = parameter.inferred_type(&self.model);
|
let _ty = parameter.ty(&self.model);
|
||||||
|
|
||||||
source_order::walk_parameter(self, parameter);
|
source_order::walk_parameter(self, parameter);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_parameter_with_default(&mut self, parameter_with_default: &ParameterWithDefault) {
|
fn visit_parameter_with_default(&mut self, parameter_with_default: &ParameterWithDefault) {
|
||||||
let _ty = parameter_with_default.inferred_type(&self.model);
|
let _ty = parameter_with_default.ty(&self.model);
|
||||||
|
|
||||||
source_order::walk_parameter_with_default(self, parameter_with_default);
|
source_order::walk_parameter_with_default(self, parameter_with_default);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_alias(&mut self, alias: &Alias) {
|
fn visit_alias(&mut self, alias: &Alias) {
|
||||||
let _ty = alias.inferred_type(&self.model);
|
let _ty = alias.ty(&self.model);
|
||||||
|
|
||||||
source_order::walk_alias(self, alias);
|
source_order::walk_alias(self, alias);
|
||||||
}
|
}
|
||||||
@@ -279,4 +269,16 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
|
|||||||
|
|
||||||
/// Whether or not the .py/.pyi version of this file is expected to fail
|
/// Whether or not the .py/.pyi version of this file is expected to fail
|
||||||
#[rustfmt::skip]
|
#[rustfmt::skip]
|
||||||
const KNOWN_FAILURES: &[(&str, bool, bool)] = &[];
|
const KNOWN_FAILURES: &[(&str, bool, bool)] = &[
|
||||||
|
// related to circular references in class definitions
|
||||||
|
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_26.py", true, true),
|
||||||
|
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_27.py", true, true),
|
||||||
|
("crates/ruff_linter/resources/test/fixtures/pyflakes/F811_19.py", true, false),
|
||||||
|
("crates/ruff_linter/resources/test/fixtures/pyupgrade/UP039.py", true, false),
|
||||||
|
// related to circular references in type aliases (salsa cycle panic):
|
||||||
|
("crates/ruff_python_parser/resources/inline/err/type_alias_invalid_value_expr.py", true, true),
|
||||||
|
("crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TC008.py", true, true),
|
||||||
|
// related to circular references in f-string annotations (invalid syntax)
|
||||||
|
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_15.py", true, true),
|
||||||
|
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_14.py", false, true),
|
||||||
|
];
|
||||||
|
|||||||
@@ -12,9 +12,9 @@ license = { workspace = true }
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ruff_db = { workspace = true }
|
ruff_db = { workspace = true }
|
||||||
ruff_index = { workspace = true, features = ["salsa"] }
|
ruff_index = { workspace = true }
|
||||||
ruff_macros = { workspace = true }
|
ruff_macros = { workspace = true }
|
||||||
ruff_python_ast = { workspace = true, features = ["salsa"] }
|
ruff_python_ast = { workspace = true }
|
||||||
ruff_python_parser = { workspace = true }
|
ruff_python_parser = { workspace = true }
|
||||||
ruff_python_stdlib = { workspace = true }
|
ruff_python_stdlib = { workspace = true }
|
||||||
ruff_source_file = { workspace = true }
|
ruff_source_file = { workspace = true }
|
||||||
@@ -31,22 +31,19 @@ drop_bomb = { workspace = true }
|
|||||||
indexmap = { workspace = true }
|
indexmap = { workspace = true }
|
||||||
itertools = { workspace = true }
|
itertools = { workspace = true }
|
||||||
ordermap = { workspace = true }
|
ordermap = { workspace = true }
|
||||||
salsa = { workspace = true, features = ["compact_str"] }
|
salsa = { workspace = true }
|
||||||
thiserror = { workspace = true }
|
thiserror = { workspace = true }
|
||||||
tracing = { workspace = true }
|
tracing = { workspace = true }
|
||||||
rustc-hash = { workspace = true }
|
rustc-hash = { workspace = true }
|
||||||
hashbrown = { workspace = true }
|
hashbrown = { workspace = true }
|
||||||
schemars = { workspace = true, optional = true }
|
|
||||||
serde = { workspace = true, optional = true }
|
serde = { workspace = true, optional = true }
|
||||||
smallvec = { workspace = true }
|
smallvec = { workspace = true }
|
||||||
static_assertions = { workspace = true }
|
static_assertions = { workspace = true }
|
||||||
test-case = { workspace = true }
|
test-case = { workspace = true }
|
||||||
memchr = { workspace = true }
|
memchr = { workspace = true }
|
||||||
strum = { workspace = true}
|
|
||||||
strum_macros = { workspace = true}
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
ruff_db = { workspace = true, features = ["testing", "os"] }
|
ruff_db = { workspace = true, features = ["os", "testing"] }
|
||||||
ruff_python_parser = { workspace = true }
|
ruff_python_parser = { workspace = true }
|
||||||
red_knot_test = { workspace = true }
|
red_knot_test = { workspace = true }
|
||||||
red_knot_vendored = { workspace = true }
|
red_knot_vendored = { workspace = true }
|
||||||
@@ -59,7 +56,7 @@ quickcheck = { version = "1.0.3", default-features = false }
|
|||||||
quickcheck_macros = { version = "1.0.0" }
|
quickcheck_macros = { version = "1.0.0" }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
serde = ["ruff_db/serde", "dep:serde", "ruff_python_ast/serde"]
|
serde = ["ruff_db/serde", "dep:serde"]
|
||||||
|
|
||||||
[lints]
|
[lints]
|
||||||
workspace = true
|
workspace = true
|
||||||
|
|||||||
@@ -61,13 +61,7 @@ class MDTestRunner:
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
# Run it again with 'json' format to find the mdtest executable:
|
# Run it again with 'json' format to find the mdtest executable:
|
||||||
try:
|
json_output = self._run_cargo_test(message_format="json")
|
||||||
json_output = self._run_cargo_test(message_format="json")
|
|
||||||
except subprocess.CalledProcessError as _:
|
|
||||||
# `cargo test` can still fail if something changed in between the two runs.
|
|
||||||
# Here we don't have a human-readable output, so just show a generic message:
|
|
||||||
self.console.print("[red]Error[/red]: Failed to compile tests")
|
|
||||||
return False
|
|
||||||
|
|
||||||
if json_output:
|
if json_output:
|
||||||
self._get_executable_path_from_json(json_output)
|
self._get_executable_path_from_json(json_output)
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ It is invalid to parameterize `Annotated` with less than two arguments.
|
|||||||
```py
|
```py
|
||||||
from typing_extensions import Annotated
|
from typing_extensions import Annotated
|
||||||
|
|
||||||
# error: [invalid-type-form] "`typing.Annotated` requires at least two arguments when used in a type expression"
|
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
|
||||||
def _(x: Annotated):
|
def _(x: Annotated):
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: Unknown
|
||||||
|
|
||||||
@@ -39,11 +39,11 @@ def _(flag: bool):
|
|||||||
else:
|
else:
|
||||||
X = bool
|
X = bool
|
||||||
|
|
||||||
# error: [invalid-type-form] "`typing.Annotated` requires at least two arguments when used in a type expression"
|
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
|
||||||
def f(y: X):
|
def f(y: X):
|
||||||
reveal_type(y) # revealed: Unknown | bool
|
reveal_type(y) # revealed: Unknown | bool
|
||||||
|
|
||||||
# error: [invalid-type-form] "`typing.Annotated` requires at least two arguments when used in a type expression"
|
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
|
||||||
def _(x: Annotated | bool):
|
def _(x: Annotated | bool):
|
||||||
reveal_type(x) # revealed: Unknown | bool
|
reveal_type(x) # revealed: Unknown | bool
|
||||||
|
|
||||||
@@ -73,10 +73,12 @@ Inheriting from `Annotated[T, ...]` is equivalent to inheriting from `T` itself.
|
|||||||
```py
|
```py
|
||||||
from typing_extensions import Annotated
|
from typing_extensions import Annotated
|
||||||
|
|
||||||
|
# TODO: False positive
|
||||||
|
# error: [invalid-base]
|
||||||
class C(Annotated[int, "foo"]): ...
|
class C(Annotated[int, "foo"]): ...
|
||||||
|
|
||||||
# TODO: Should be `tuple[Literal[C], Literal[int], Literal[object]]`
|
# TODO: Should be `tuple[Literal[C], Literal[int], Literal[object]]`
|
||||||
reveal_type(C.__mro__) # revealed: tuple[Literal[C], @Todo(Inference of subscript on special form), Literal[object]]
|
reveal_type(C.__mro__) # revealed: tuple[Literal[C], Unknown, Literal[object]]
|
||||||
```
|
```
|
||||||
|
|
||||||
### Not parameterized
|
### Not parameterized
|
||||||
|
|||||||
@@ -1,302 +0,0 @@
|
|||||||
# Callable
|
|
||||||
|
|
||||||
References:
|
|
||||||
|
|
||||||
- <https://typing.readthedocs.io/en/latest/spec/callables.html#callable>
|
|
||||||
|
|
||||||
Note that `typing.Callable` is deprecated at runtime, in favour of `collections.abc.Callable` (see:
|
|
||||||
<https://docs.python.org/3/library/typing.html#deprecated-aliases>). However, removal of
|
|
||||||
`typing.Callable` is not currently planned, and the canonical location of the stub for the symbol in
|
|
||||||
typeshed is still `typing.pyi`.
|
|
||||||
|
|
||||||
## Invalid forms
|
|
||||||
|
|
||||||
The `Callable` special form requires _exactly_ two arguments where the first argument is either a
|
|
||||||
parameter type list, parameter specification, `typing.Concatenate`, or `...` and the second argument
|
|
||||||
is the return type. Here, we explore various invalid forms.
|
|
||||||
|
|
||||||
### Empty
|
|
||||||
|
|
||||||
A bare `Callable` without any type arguments:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
def _(c: Callable):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
### Invalid parameter type argument
|
|
||||||
|
|
||||||
When it's not a list:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
# error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
|
|
||||||
def _(c: Callable[int, str]):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
Or, when it's a literal type:
|
|
||||||
|
|
||||||
```py
|
|
||||||
# error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
|
|
||||||
def _(c: Callable[42, str]):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
Or, when one of the parameter type is invalid in the list:
|
|
||||||
|
|
||||||
```py
|
|
||||||
# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
|
|
||||||
# error: [invalid-type-form] "Boolean literals are not allowed in this context in a type expression"
|
|
||||||
def _(c: Callable[[int, 42, str, False], None]):
|
|
||||||
# revealed: (int, Unknown, str, Unknown, /) -> None
|
|
||||||
reveal_type(c)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Missing return type
|
|
||||||
|
|
||||||
Using a parameter list:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
|
||||||
def _(c: Callable[[int, str]]):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
Or, an ellipsis:
|
|
||||||
|
|
||||||
```py
|
|
||||||
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
|
||||||
def _(c: Callable[...]):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
Or something else that's invalid in a type expression generally:
|
|
||||||
|
|
||||||
```py
|
|
||||||
# fmt: off
|
|
||||||
|
|
||||||
def _(c: Callable[ # error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
|
||||||
{1, 2} # error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
|
|
||||||
]
|
|
||||||
):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
### More than two arguments
|
|
||||||
|
|
||||||
We can't reliably infer the callable type if there are more than 2 arguments because we don't know
|
|
||||||
which argument corresponds to either the parameters or the return type.
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
|
||||||
def _(c: Callable[[int], str, str]):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
### List as the second argument
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
# fmt: off
|
|
||||||
|
|
||||||
def _(c: Callable[
|
|
||||||
int, # error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
|
|
||||||
[str] # error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
|
|
||||||
]
|
|
||||||
):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
### List as both arguments
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
# error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
|
|
||||||
def _(c: Callable[[int], [str]]):
|
|
||||||
reveal_type(c) # revealed: (int, /) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
### Three list arguments
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
# fmt: off
|
|
||||||
|
|
||||||
|
|
||||||
def _(c: Callable[ # error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
|
||||||
[int],
|
|
||||||
[str], # error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
|
|
||||||
[bytes] # error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
|
|
||||||
]
|
|
||||||
):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
## Simple
|
|
||||||
|
|
||||||
A simple `Callable` with multiple parameters and a return type:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
def _(c: Callable[[int, str], int]):
|
|
||||||
reveal_type(c) # revealed: (int, str, /) -> int
|
|
||||||
```
|
|
||||||
|
|
||||||
## Union
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable, Union
|
|
||||||
|
|
||||||
def _(
|
|
||||||
c: Callable[[Union[int, str]], int] | None,
|
|
||||||
d: None | Callable[[Union[int, str]], int],
|
|
||||||
e: None | Callable[[Union[int, str]], int] | int,
|
|
||||||
):
|
|
||||||
reveal_type(c) # revealed: ((int | str, /) -> int) | None
|
|
||||||
reveal_type(d) # revealed: None | ((int | str, /) -> int)
|
|
||||||
reveal_type(e) # revealed: None | ((int | str, /) -> int) | int
|
|
||||||
```
|
|
||||||
|
|
||||||
## Intersection
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable, Union
|
|
||||||
from knot_extensions import Intersection, Not
|
|
||||||
|
|
||||||
def _(
|
|
||||||
c: Intersection[Callable[[Union[int, str]], int], int],
|
|
||||||
d: Intersection[int, Callable[[Union[int, str]], int]],
|
|
||||||
e: Intersection[int, Callable[[Union[int, str]], int], str],
|
|
||||||
f: Intersection[Not[Callable[[int, str], Intersection[int, str]]]],
|
|
||||||
):
|
|
||||||
reveal_type(c) # revealed: ((int | str, /) -> int) & int
|
|
||||||
reveal_type(d) # revealed: int & ((int | str, /) -> int)
|
|
||||||
reveal_type(e) # revealed: int & ((int | str, /) -> int) & str
|
|
||||||
reveal_type(f) # revealed: ~((int, str, /) -> int & str)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Nested
|
|
||||||
|
|
||||||
A nested `Callable` as one of the parameter types:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
def _(c: Callable[[Callable[[int], str]], int]):
|
|
||||||
reveal_type(c) # revealed: ((int, /) -> str, /) -> int
|
|
||||||
```
|
|
||||||
|
|
||||||
And, as the return type:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def _(c: Callable[[int, str], Callable[[int], int]]):
|
|
||||||
reveal_type(c) # revealed: (int, str, /) -> (int, /) -> int
|
|
||||||
```
|
|
||||||
|
|
||||||
## Gradual form
|
|
||||||
|
|
||||||
The `Callable` special form supports the use of `...` in place of the list of parameter types. This
|
|
||||||
is a [gradual form] indicating that the type is consistent with any input signature:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
def gradual_form(c: Callable[..., str]):
|
|
||||||
reveal_type(c) # revealed: (...) -> str
|
|
||||||
```
|
|
||||||
|
|
||||||
## Using `typing.Concatenate`
|
|
||||||
|
|
||||||
Using `Concatenate` as the first argument to `Callable`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import Callable, Concatenate
|
|
||||||
|
|
||||||
def _(c: Callable[Concatenate[int, str, ...], int]):
|
|
||||||
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
|
|
||||||
```
|
|
||||||
|
|
||||||
And, as one of the parameter types:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def _(c: Callable[[Concatenate[int, str, ...], int], int]):
|
|
||||||
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
|
|
||||||
```
|
|
||||||
|
|
||||||
## Using `typing.ParamSpec`
|
|
||||||
|
|
||||||
Using a `ParamSpec` in a `Callable` annotation:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import Callable
|
|
||||||
|
|
||||||
# TODO: Not an error; remove once `ParamSpec` is supported
|
|
||||||
# error: [invalid-type-form]
|
|
||||||
def _[**P1](c: Callable[P1, int]):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
And, using the legacy syntax:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import ParamSpec
|
|
||||||
|
|
||||||
P2 = ParamSpec("P2")
|
|
||||||
|
|
||||||
# TODO: Not an error; remove once `ParamSpec` is supported
|
|
||||||
# error: [invalid-type-form]
|
|
||||||
def _(c: Callable[P2, int]):
|
|
||||||
reveal_type(c) # revealed: (...) -> Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
## Using `typing.Unpack`
|
|
||||||
|
|
||||||
Using the unpack operator (`*`):
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import Callable, TypeVarTuple
|
|
||||||
|
|
||||||
Ts = TypeVarTuple("Ts")
|
|
||||||
|
|
||||||
def _(c: Callable[[int, *Ts], int]):
|
|
||||||
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
|
|
||||||
```
|
|
||||||
|
|
||||||
And, using the legacy syntax with `Unpack`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import Unpack
|
|
||||||
|
|
||||||
def _(c: Callable[[int, Unpack[Ts]], int]):
|
|
||||||
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
|
|
||||||
```
|
|
||||||
|
|
||||||
## Member lookup
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
def _(c: Callable[[int], int]):
|
|
||||||
reveal_type(c.__init__) # revealed: Literal[__init__]
|
|
||||||
reveal_type(c.__class__) # revealed: type
|
|
||||||
|
|
||||||
# TODO: The member lookup for `Callable` uses `object` which does not have a `__call__`
|
|
||||||
# attribute. We could special case `__call__` in this context. Refer to
|
|
||||||
# https://github.com/astral-sh/ruff/pull/16493#discussion_r1985098508 for more details.
|
|
||||||
# error: [unresolved-attribute] "Type `(int, /) -> int` has no attribute `__call__`"
|
|
||||||
reveal_type(c.__call__) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
[gradual form]: https://typing.readthedocs.io/en/latest/spec/glossary.html#term-gradual-form
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
# Deferred annotations
|
|
||||||
|
|
||||||
## Deferred annotations in stubs always resolve
|
|
||||||
|
|
||||||
`mod.pyi`:
|
|
||||||
|
|
||||||
```pyi
|
|
||||||
def get_foo() -> Foo: ...
|
|
||||||
class Foo: ...
|
|
||||||
```
|
|
||||||
|
|
||||||
```py
|
|
||||||
from mod import get_foo
|
|
||||||
|
|
||||||
reveal_type(get_foo()) # revealed: Foo
|
|
||||||
```
|
|
||||||
|
|
||||||
## Deferred annotations in regular code fail
|
|
||||||
|
|
||||||
In (regular) source files, annotations are *not* deferred. This also tests that imports from
|
|
||||||
`__future__` that are not `annotations` are ignored.
|
|
||||||
|
|
||||||
```py
|
|
||||||
from __future__ import with_statement as annotations
|
|
||||||
|
|
||||||
# error: [unresolved-reference]
|
|
||||||
def get_foo() -> Foo: ...
|
|
||||||
|
|
||||||
class Foo: ...
|
|
||||||
|
|
||||||
reveal_type(get_foo()) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
## Deferred annotations in regular code with `__future__.annotations`
|
|
||||||
|
|
||||||
If `__future__.annotations` is imported, annotations *are* deferred.
|
|
||||||
|
|
||||||
```py
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
def get_foo() -> Foo:
|
|
||||||
return Foo()
|
|
||||||
|
|
||||||
class Foo: ...
|
|
||||||
|
|
||||||
reveal_type(get_foo()) # revealed: Foo
|
|
||||||
```
|
|
||||||
@@ -1,90 +0,0 @@
|
|||||||
# Special cases for int/float/complex in annotations
|
|
||||||
|
|
||||||
In order to support common use cases, an annotation of `float` actually means `int | float`, and an
|
|
||||||
annotation of `complex` actually means `int | float | complex`. See
|
|
||||||
[the specification](https://typing.readthedocs.io/en/latest/spec/special-types.html#special-cases-for-float-and-complex)
|
|
||||||
|
|
||||||
## float
|
|
||||||
|
|
||||||
An annotation of `float` means `int | float`, so `int` is assignable to it:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def takes_float(x: float):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def passes_int_to_float(x: int):
|
|
||||||
# no error!
|
|
||||||
takes_float(x)
|
|
||||||
```
|
|
||||||
|
|
||||||
It also applies to variable annotations:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def assigns_int_to_float(x: int):
|
|
||||||
# no error!
|
|
||||||
y: float = x
|
|
||||||
```
|
|
||||||
|
|
||||||
It doesn't work the other way around:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def takes_int(x: int):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def passes_float_to_int(x: float):
|
|
||||||
# error: [invalid-argument-type]
|
|
||||||
takes_int(x)
|
|
||||||
|
|
||||||
def assigns_float_to_int(x: float):
|
|
||||||
# error: [invalid-assignment]
|
|
||||||
y: int = x
|
|
||||||
```
|
|
||||||
|
|
||||||
Unlike other type checkers, we choose not to obfuscate this special case by displaying `int | float`
|
|
||||||
as just `float`; we display the actual type:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def f(x: float):
|
|
||||||
reveal_type(x) # revealed: int | float
|
|
||||||
```
|
|
||||||
|
|
||||||
## complex
|
|
||||||
|
|
||||||
An annotation of `complex` means `int | float | complex`, so `int` and `float` are both assignable
|
|
||||||
to it (but not the other way around):
|
|
||||||
|
|
||||||
```py
|
|
||||||
def takes_complex(x: complex):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def passes_to_complex(x: float, y: int):
|
|
||||||
# no errors!
|
|
||||||
takes_complex(x)
|
|
||||||
takes_complex(y)
|
|
||||||
|
|
||||||
def assigns_to_complex(x: float, y: int):
|
|
||||||
# no errors!
|
|
||||||
a: complex = x
|
|
||||||
b: complex = y
|
|
||||||
|
|
||||||
def takes_int(x: int):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def takes_float(x: float):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def passes_complex(x: complex):
|
|
||||||
# error: [invalid-argument-type]
|
|
||||||
takes_int(x)
|
|
||||||
# error: [invalid-argument-type]
|
|
||||||
takes_float(x)
|
|
||||||
|
|
||||||
def assigns_complex(x: complex):
|
|
||||||
# error: [invalid-assignment]
|
|
||||||
y: int = x
|
|
||||||
# error: [invalid-assignment]
|
|
||||||
z: float = x
|
|
||||||
|
|
||||||
def f(x: complex):
|
|
||||||
reveal_type(x) # revealed: int | float | complex
|
|
||||||
```
|
|
||||||
@@ -1,114 +0,0 @@
|
|||||||
# Tests for invalid types in type expressions
|
|
||||||
|
|
||||||
## Invalid types are rejected
|
|
||||||
|
|
||||||
Many types are illegal in the context of a type expression:
|
|
||||||
|
|
||||||
```py
|
|
||||||
import typing
|
|
||||||
from knot_extensions import AlwaysTruthy, AlwaysFalsy
|
|
||||||
from typing_extensions import Literal, Never
|
|
||||||
|
|
||||||
class A: ...
|
|
||||||
|
|
||||||
def _(
|
|
||||||
a: type[int],
|
|
||||||
b: AlwaysTruthy,
|
|
||||||
c: AlwaysFalsy,
|
|
||||||
d: Literal[True],
|
|
||||||
e: Literal["bar"],
|
|
||||||
f: Literal[b"foo"],
|
|
||||||
g: tuple[int, str],
|
|
||||||
h: Never,
|
|
||||||
i: int,
|
|
||||||
j: A,
|
|
||||||
):
|
|
||||||
def foo(): ...
|
|
||||||
def invalid(
|
|
||||||
a_: a, # error: [invalid-type-form] "Variable of type `type[int]` is not allowed in a type expression"
|
|
||||||
b_: b, # error: [invalid-type-form]
|
|
||||||
c_: c, # error: [invalid-type-form]
|
|
||||||
d_: d, # error: [invalid-type-form]
|
|
||||||
e_: e, # error: [invalid-type-form]
|
|
||||||
f_: f, # error: [invalid-type-form]
|
|
||||||
g_: g, # error: [invalid-type-form]
|
|
||||||
h_: h, # error: [invalid-type-form]
|
|
||||||
i_: typing, # error: [invalid-type-form]
|
|
||||||
j_: foo, # error: [invalid-type-form]
|
|
||||||
k_: i, # error: [invalid-type-form] "Variable of type `int` is not allowed in a type expression"
|
|
||||||
l_: j, # error: [invalid-type-form] "Variable of type `A` is not allowed in a type expression"
|
|
||||||
):
|
|
||||||
reveal_type(a_) # revealed: Unknown
|
|
||||||
reveal_type(b_) # revealed: Unknown
|
|
||||||
reveal_type(c_) # revealed: Unknown
|
|
||||||
reveal_type(d_) # revealed: Unknown
|
|
||||||
reveal_type(e_) # revealed: Unknown
|
|
||||||
reveal_type(f_) # revealed: Unknown
|
|
||||||
reveal_type(g_) # revealed: Unknown
|
|
||||||
reveal_type(h_) # revealed: Unknown
|
|
||||||
reveal_type(i_) # revealed: Unknown
|
|
||||||
reveal_type(j_) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
## Invalid AST nodes
|
|
||||||
|
|
||||||
```py
|
|
||||||
def bar() -> None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _(
|
|
||||||
a: 1, # error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
|
|
||||||
b: 2.3, # error: [invalid-type-form] "Float literals are not allowed in type expressions"
|
|
||||||
c: 4j, # error: [invalid-type-form] "Complex literals are not allowed in type expressions"
|
|
||||||
d: True, # error: [invalid-type-form] "Boolean literals are not allowed in this context in a type expression"
|
|
||||||
e: int | b"foo", # error: [invalid-type-form] "Bytes literals are not allowed in this context in a type expression"
|
|
||||||
f: 1 and 2, # error: [invalid-type-form] "Boolean operations are not allowed in type expressions"
|
|
||||||
g: 1 or 2, # error: [invalid-type-form] "Boolean operations are not allowed in type expressions"
|
|
||||||
h: (foo := 1), # error: [invalid-type-form] "Named expressions are not allowed in type expressions"
|
|
||||||
i: not 1, # error: [invalid-type-form] "Unary operations are not allowed in type expressions"
|
|
||||||
j: lambda: 1, # error: [invalid-type-form] "`lambda` expressions are not allowed in type expressions"
|
|
||||||
k: 1 if True else 2, # error: [invalid-type-form] "`if` expressions are not allowed in type expressions"
|
|
||||||
l: await 1, # error: [invalid-type-form] "`await` expressions are not allowed in type expressions"
|
|
||||||
m: (yield 1), # error: [invalid-type-form] "`yield` expressions are not allowed in type expressions"
|
|
||||||
n: (yield from [1]), # error: [invalid-type-form] "`yield from` expressions are not allowed in type expressions"
|
|
||||||
o: 1 < 2, # error: [invalid-type-form] "Comparison expressions are not allowed in type expressions"
|
|
||||||
p: bar(), # error: [invalid-type-form] "Function calls are not allowed in type expressions"
|
|
||||||
q: int | f"foo", # error: [invalid-type-form] "F-strings are not allowed in type expressions"
|
|
||||||
r: [1, 2, 3][1:2], # error: [invalid-type-form] "Slices are not allowed in type expressions"
|
|
||||||
):
|
|
||||||
reveal_type(a) # revealed: Unknown
|
|
||||||
reveal_type(b) # revealed: Unknown
|
|
||||||
reveal_type(c) # revealed: Unknown
|
|
||||||
reveal_type(d) # revealed: Unknown
|
|
||||||
reveal_type(e) # revealed: int | Unknown
|
|
||||||
reveal_type(f) # revealed: Unknown
|
|
||||||
reveal_type(g) # revealed: Unknown
|
|
||||||
reveal_type(h) # revealed: Unknown
|
|
||||||
reveal_type(i) # revealed: Unknown
|
|
||||||
reveal_type(j) # revealed: Unknown
|
|
||||||
reveal_type(k) # revealed: Unknown
|
|
||||||
reveal_type(p) # revealed: Unknown
|
|
||||||
reveal_type(q) # revealed: int | Unknown
|
|
||||||
reveal_type(r) # revealed: @Todo(generics)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Invalid Collection based AST nodes
|
|
||||||
|
|
||||||
```py
|
|
||||||
def _(
|
|
||||||
a: {1: 2}, # error: [invalid-type-form] "Dict literals are not allowed in type expressions"
|
|
||||||
b: {1, 2}, # error: [invalid-type-form] "Set literals are not allowed in type expressions"
|
|
||||||
c: {k: v for k, v in [(1, 2)]}, # error: [invalid-type-form] "Dict comprehensions are not allowed in type expressions"
|
|
||||||
d: [k for k in [1, 2]], # error: [invalid-type-form] "List comprehensions are not allowed in type expressions"
|
|
||||||
e: {k for k in [1, 2]}, # error: [invalid-type-form] "Set comprehensions are not allowed in type expressions"
|
|
||||||
f: (k for k in [1, 2]), # error: [invalid-type-form] "Generator expressions are not allowed in type expressions"
|
|
||||||
g: [int, str], # error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
|
|
||||||
):
|
|
||||||
reveal_type(a) # revealed: Unknown
|
|
||||||
reveal_type(b) # revealed: Unknown
|
|
||||||
reveal_type(c) # revealed: Unknown
|
|
||||||
reveal_type(d) # revealed: Unknown
|
|
||||||
reveal_type(e) # revealed: Unknown
|
|
||||||
reveal_type(f) # revealed: Unknown
|
|
||||||
reveal_type(g) # revealed: Unknown
|
|
||||||
```
|
|
||||||
@@ -36,7 +36,7 @@ def f():
|
|||||||
reveal_type(a7) # revealed: None
|
reveal_type(a7) # revealed: None
|
||||||
reveal_type(a8) # revealed: Literal[1]
|
reveal_type(a8) # revealed: Literal[1]
|
||||||
# TODO: This should be Color.RED
|
# TODO: This should be Color.RED
|
||||||
reveal_type(b1) # revealed: Unknown | Literal[0]
|
reveal_type(b1) # revealed: Literal[0]
|
||||||
|
|
||||||
# error: [invalid-type-form]
|
# error: [invalid-type-form]
|
||||||
invalid1: Literal[3 + 4]
|
invalid1: Literal[3 + 4]
|
||||||
@@ -106,7 +106,7 @@ def union_example(
|
|||||||
Literal["B"],
|
Literal["B"],
|
||||||
Literal[True],
|
Literal[True],
|
||||||
None,
|
None,
|
||||||
],
|
]
|
||||||
):
|
):
|
||||||
reveal_type(x) # revealed: Unknown | Literal[-1, "A", b"A", b"\x00", b"\x07", 0, 1, "B", "foo", "bar", True] | None
|
reveal_type(x) # revealed: Unknown | Literal[-1, "A", b"A", b"\x00", b"\x07", 0, 1, "B", "foo", "bar", True] | None
|
||||||
```
|
```
|
||||||
@@ -116,9 +116,7 @@ def union_example(
|
|||||||
Only Literal that is defined in typing and typing_extension modules is detected as the special
|
Only Literal that is defined in typing and typing_extension modules is detected as the special
|
||||||
Literal.
|
Literal.
|
||||||
|
|
||||||
`other.pyi`:
|
```pyi path=other.pyi
|
||||||
|
|
||||||
```pyi
|
|
||||||
from typing import _SpecialForm
|
from typing import _SpecialForm
|
||||||
|
|
||||||
Literal: _SpecialForm
|
Literal: _SpecialForm
|
||||||
@@ -127,13 +125,6 @@ Literal: _SpecialForm
|
|||||||
```py
|
```py
|
||||||
from other import Literal
|
from other import Literal
|
||||||
|
|
||||||
# TODO: can we add a subdiagnostic here saying something like:
|
|
||||||
#
|
|
||||||
# `other.Literal` and `typing.Literal` have similar names, but are different symbols and don't have the same semantics
|
|
||||||
#
|
|
||||||
# ?
|
|
||||||
#
|
|
||||||
# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
|
|
||||||
a1: Literal[26]
|
a1: Literal[26]
|
||||||
|
|
||||||
def f():
|
def f():
|
||||||
@@ -156,7 +147,7 @@ def f():
|
|||||||
```py
|
```py
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
|
|
||||||
# error: [invalid-type-form] "`typing.Literal` requires at least one argument when used in a type expression"
|
# error: [invalid-type-form] "`Literal` requires at least one argument when used in a type expression"
|
||||||
def _(x: Literal):
|
def _(x: Literal):
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: Unknown
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -73,12 +73,12 @@ qux = (foo, bar)
|
|||||||
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
|
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
|
||||||
|
|
||||||
# TODO: Infer "LiteralString"
|
# TODO: Infer "LiteralString"
|
||||||
reveal_type(foo.join(qux)) # revealed: @Todo(return type of decorated function)
|
reveal_type(foo.join(qux)) # revealed: @Todo(Attribute access on `StringLiteral` types)
|
||||||
|
|
||||||
template: LiteralString = "{}, {}"
|
template: LiteralString = "{}, {}"
|
||||||
reveal_type(template) # revealed: Literal["{}, {}"]
|
reveal_type(template) # revealed: Literal["{}, {}"]
|
||||||
# TODO: Infer `LiteralString`
|
# TODO: Infer `LiteralString`
|
||||||
reveal_type(template.format(foo, bar)) # revealed: @Todo(return type of decorated function)
|
reveal_type(template.format(foo, bar)) # revealed: @Todo(Attribute access on `StringLiteral` types)
|
||||||
```
|
```
|
||||||
|
|
||||||
### Assignability
|
### Assignability
|
||||||
|
|||||||
@@ -1,20 +0,0 @@
|
|||||||
# NewType
|
|
||||||
|
|
||||||
Currently, red-knot doesn't support `typing.NewType` in type annotations.
|
|
||||||
|
|
||||||
## Valid forms
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import NewType
|
|
||||||
from types import GenericAlias
|
|
||||||
|
|
||||||
A = NewType("A", int)
|
|
||||||
B = GenericAlias(A, ())
|
|
||||||
|
|
||||||
def _(
|
|
||||||
a: A,
|
|
||||||
b: B,
|
|
||||||
):
|
|
||||||
reveal_type(a) # revealed: @Todo(Support for `typing.NewType` instances in type expressions)
|
|
||||||
reveal_type(b) # revealed: @Todo(Support for `typing.GenericAlias` instances in type expressions)
|
|
||||||
```
|
|
||||||
@@ -45,13 +45,3 @@ def f():
|
|||||||
# revealed: int | None
|
# revealed: int | None
|
||||||
reveal_type(a)
|
reveal_type(a)
|
||||||
```
|
```
|
||||||
|
|
||||||
## Invalid
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
# error: [invalid-type-form] "`typing.Optional` requires exactly one argument when used in a type expression"
|
|
||||||
def f(x: Optional) -> None:
|
|
||||||
reveal_type(x) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|||||||
@@ -70,7 +70,8 @@ import typing
|
|||||||
|
|
||||||
class ListSubclass(typing.List): ...
|
class ListSubclass(typing.List): ...
|
||||||
|
|
||||||
# revealed: tuple[Literal[ListSubclass], Literal[list], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
# TODO: should have `Generic`, should not have `Unknown`
|
||||||
|
# revealed: tuple[Literal[ListSubclass], Literal[list], Unknown, Literal[object]]
|
||||||
reveal_type(ListSubclass.__mro__)
|
reveal_type(ListSubclass.__mro__)
|
||||||
|
|
||||||
class DictSubclass(typing.Dict): ...
|
class DictSubclass(typing.Dict): ...
|
||||||
@@ -81,7 +82,8 @@ reveal_type(DictSubclass.__mro__)
|
|||||||
|
|
||||||
class SetSubclass(typing.Set): ...
|
class SetSubclass(typing.Set): ...
|
||||||
|
|
||||||
# revealed: tuple[Literal[SetSubclass], Literal[set], Literal[MutableSet], Literal[AbstractSet], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
# TODO: should have `Generic`, should not have `Unknown`
|
||||||
|
# revealed: tuple[Literal[SetSubclass], Literal[set], Unknown, Literal[object]]
|
||||||
reveal_type(SetSubclass.__mro__)
|
reveal_type(SetSubclass.__mro__)
|
||||||
|
|
||||||
class FrozenSetSubclass(typing.FrozenSet): ...
|
class FrozenSetSubclass(typing.FrozenSet): ...
|
||||||
@@ -113,7 +115,8 @@ reveal_type(DefaultDictSubclass.__mro__)
|
|||||||
|
|
||||||
class DequeSubclass(typing.Deque): ...
|
class DequeSubclass(typing.Deque): ...
|
||||||
|
|
||||||
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
# TODO: Should be (DequeSubclass, deque, MutableSequence, Sequence, Reversible, Collection, Sized, Iterable, Container, Generic, object)
|
||||||
|
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Unknown, Literal[object]]
|
||||||
reveal_type(DequeSubclass.__mro__)
|
reveal_type(DequeSubclass.__mro__)
|
||||||
|
|
||||||
class OrderedDictSubclass(typing.OrderedDict): ...
|
class OrderedDictSubclass(typing.OrderedDict): ...
|
||||||
|
|||||||
@@ -116,8 +116,8 @@ MyType = int
|
|||||||
class Aliases:
|
class Aliases:
|
||||||
MyType = str
|
MyType = str
|
||||||
|
|
||||||
forward: "MyType" = "value"
|
forward: "MyType"
|
||||||
not_forward: MyType = "value"
|
not_forward: MyType
|
||||||
|
|
||||||
reveal_type(Aliases.forward) # revealed: str
|
reveal_type(Aliases.forward) # revealed: str
|
||||||
reveal_type(Aliases.not_forward) # revealed: str
|
reveal_type(Aliases.not_forward) # revealed: str
|
||||||
|
|||||||
@@ -9,9 +9,9 @@ from typing import Union
|
|||||||
|
|
||||||
a: Union[int, str]
|
a: Union[int, str]
|
||||||
a1: Union[int, bool]
|
a1: Union[int, bool]
|
||||||
a2: Union[int, Union[bytes, str]]
|
a2: Union[int, Union[float, str]]
|
||||||
a3: Union[int, None]
|
a3: Union[int, None]
|
||||||
a4: Union[Union[bytes, str]]
|
a4: Union[Union[float, str]]
|
||||||
a5: Union[int]
|
a5: Union[int]
|
||||||
a6: Union[()]
|
a6: Union[()]
|
||||||
|
|
||||||
@@ -21,11 +21,11 @@ def f():
|
|||||||
# Since bool is a subtype of int we simplify to int here. But we do allow assigning boolean values (see below).
|
# Since bool is a subtype of int we simplify to int here. But we do allow assigning boolean values (see below).
|
||||||
# revealed: int
|
# revealed: int
|
||||||
reveal_type(a1)
|
reveal_type(a1)
|
||||||
# revealed: int | bytes | str
|
# revealed: int | float | str
|
||||||
reveal_type(a2)
|
reveal_type(a2)
|
||||||
# revealed: int | None
|
# revealed: int | None
|
||||||
reveal_type(a3)
|
reveal_type(a3)
|
||||||
# revealed: bytes | str
|
# revealed: float | str
|
||||||
reveal_type(a4)
|
reveal_type(a4)
|
||||||
# revealed: int
|
# revealed: int
|
||||||
reveal_type(a5)
|
reveal_type(a5)
|
||||||
@@ -59,13 +59,3 @@ def f():
|
|||||||
# revealed: int | str
|
# revealed: int | str
|
||||||
reveal_type(a)
|
reveal_type(a)
|
||||||
```
|
```
|
||||||
|
|
||||||
## Invalid
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Union
|
|
||||||
|
|
||||||
# error: [invalid-type-form] "`typing.Union` requires at least one argument when used in a type expression"
|
|
||||||
def f(x: Union) -> None:
|
|
||||||
reveal_type(x) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ def f(*args: Unpack[Ts]) -> tuple[Unpack[Ts]]:
|
|||||||
# TODO: should understand the annotation
|
# TODO: should understand the annotation
|
||||||
reveal_type(args) # revealed: tuple
|
reveal_type(args) # revealed: tuple
|
||||||
|
|
||||||
reveal_type(Alias) # revealed: @Todo(Support for `typing.TypeAlias`)
|
reveal_type(Alias) # revealed: @Todo(Unsupported or invalid type in a type expression)
|
||||||
|
|
||||||
def g() -> TypeGuard[int]: ...
|
def g() -> TypeGuard[int]: ...
|
||||||
def h() -> TypeIs[int]: ...
|
def h() -> TypeIs[int]: ...
|
||||||
@@ -33,30 +33,7 @@ def i(callback: Callable[Concatenate[int, P], R_co], *args: P.args, **kwargs: P.
|
|||||||
|
|
||||||
class Foo:
|
class Foo:
|
||||||
def method(self, x: Self):
|
def method(self, x: Self):
|
||||||
reveal_type(x) # revealed: @Todo(Support for `typing.Self`)
|
reveal_type(x) # revealed: @Todo(Unsupported or invalid type in a type expression)
|
||||||
```
|
|
||||||
|
|
||||||
## Type expressions
|
|
||||||
|
|
||||||
One thing that is supported is error messages for using special forms in type expressions.
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import Unpack, TypeGuard, TypeIs, Concatenate, ParamSpec
|
|
||||||
|
|
||||||
def _(
|
|
||||||
a: Unpack, # error: [invalid-type-form] "`typing.Unpack` requires exactly one argument when used in a type expression"
|
|
||||||
b: TypeGuard, # error: [invalid-type-form] "`typing.TypeGuard` requires exactly one argument when used in a type expression"
|
|
||||||
c: TypeIs, # error: [invalid-type-form] "`typing.TypeIs` requires exactly one argument when used in a type expression"
|
|
||||||
d: Concatenate, # error: [invalid-type-form] "`typing.Concatenate` requires at least two arguments when used in a type expression"
|
|
||||||
e: ParamSpec,
|
|
||||||
) -> None:
|
|
||||||
reveal_type(a) # revealed: Unknown
|
|
||||||
reveal_type(b) # revealed: Unknown
|
|
||||||
reveal_type(c) # revealed: Unknown
|
|
||||||
reveal_type(d) # revealed: Unknown
|
|
||||||
|
|
||||||
def foo(a_: e) -> None:
|
|
||||||
reveal_type(a_) # revealed: @Todo(Support for `typing.ParamSpec` instances in type expressions)
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Inheritance
|
## Inheritance
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Unsupported type qualifiers
|
# Unsupported type qualifiers
|
||||||
|
|
||||||
## Not yet fully supported
|
## Not yet supported
|
||||||
|
|
||||||
Several type qualifiers are unsupported by red-knot currently. However, we also don't emit
|
Several type qualifiers are unsupported by red-knot currently. However, we also don't emit
|
||||||
false-positive errors if you use one in an annotation:
|
false-positive errors if you use one in an annotation:
|
||||||
@@ -19,33 +19,6 @@ class Bar(TypedDict):
|
|||||||
z: ReadOnly[bytes]
|
z: ReadOnly[bytes]
|
||||||
```
|
```
|
||||||
|
|
||||||
## Type expressions
|
|
||||||
|
|
||||||
One thing that is supported is error messages for using type qualifiers in type expressions.
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import Final, ClassVar, Required, NotRequired, ReadOnly
|
|
||||||
|
|
||||||
def _(
|
|
||||||
a: (
|
|
||||||
Final # error: [invalid-type-form] "Type qualifier `typing.Final` is not allowed in type expressions (only in annotation expressions)"
|
|
||||||
| int
|
|
||||||
),
|
|
||||||
b: (
|
|
||||||
ClassVar # error: [invalid-type-form] "Type qualifier `typing.ClassVar` is not allowed in type expressions (only in annotation expressions)"
|
|
||||||
| int
|
|
||||||
),
|
|
||||||
c: Required, # error: [invalid-type-form] "Type qualifier `typing.Required` is not allowed in type expressions (only in annotation expressions, and only with exactly one argument)"
|
|
||||||
d: NotRequired, # error: [invalid-type-form] "Type qualifier `typing.NotRequired` is not allowed in type expressions (only in annotation expressions, and only with exactly one argument)"
|
|
||||||
e: ReadOnly, # error: [invalid-type-form] "Type qualifier `typing.ReadOnly` is not allowed in type expressions (only in annotation expressions, and only with exactly one argument)"
|
|
||||||
) -> None:
|
|
||||||
reveal_type(a) # revealed: Unknown | int
|
|
||||||
reveal_type(b) # revealed: Unknown | int
|
|
||||||
reveal_type(c) # revealed: Unknown
|
|
||||||
reveal_type(d) # revealed: Unknown
|
|
||||||
reveal_type(e) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
## Inheritance
|
## Inheritance
|
||||||
|
|
||||||
You can't inherit from a type qualifier.
|
You can't inherit from a type qualifier.
|
||||||
|
|||||||
@@ -25,9 +25,7 @@ x = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]` is not
|
|||||||
|
|
||||||
## Tuple annotations are understood
|
## Tuple annotations are understood
|
||||||
|
|
||||||
`module.py`:
|
```py path=module.py
|
||||||
|
|
||||||
```py
|
|
||||||
from typing_extensions import Unpack
|
from typing_extensions import Unpack
|
||||||
|
|
||||||
a: tuple[()] = ()
|
a: tuple[()] = ()
|
||||||
@@ -42,9 +40,7 @@ i: tuple[str | int, str | int] = (42, 42)
|
|||||||
j: tuple[str | int] = (42,)
|
j: tuple[str | int] = (42,)
|
||||||
```
|
```
|
||||||
|
|
||||||
`script.py`:
|
```py path=script.py
|
||||||
|
|
||||||
```py
|
|
||||||
from module import a, b, c, d, e, f, g, h, i, j
|
from module import a, b, c, d, e, f, g, h, i, j
|
||||||
|
|
||||||
reveal_type(a) # revealed: tuple[()]
|
reveal_type(a) # revealed: tuple[()]
|
||||||
@@ -118,7 +114,7 @@ reveal_type(x) # revealed: Foo
|
|||||||
|
|
||||||
## Annotations in stub files are deferred
|
## Annotations in stub files are deferred
|
||||||
|
|
||||||
```pyi
|
```pyi path=main.pyi
|
||||||
x: Foo
|
x: Foo
|
||||||
|
|
||||||
class Foo: ...
|
class Foo: ...
|
||||||
@@ -129,7 +125,7 @@ reveal_type(x) # revealed: Foo
|
|||||||
|
|
||||||
## Annotated assignments in stub files are inferred correctly
|
## Annotated assignments in stub files are inferred correctly
|
||||||
|
|
||||||
```pyi
|
```pyi path=main.pyi
|
||||||
x: int = 1
|
x: int = 1
|
||||||
reveal_type(x) # revealed: Literal[1]
|
reveal_type(x) # revealed: Literal[1]
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -9,11 +9,7 @@ reveal_type(x) # revealed: Literal[2]
|
|||||||
|
|
||||||
x = 1.0
|
x = 1.0
|
||||||
x /= 2
|
x /= 2
|
||||||
reveal_type(x) # revealed: int | float
|
reveal_type(x) # revealed: float
|
||||||
|
|
||||||
x = (1, 2)
|
|
||||||
x += (3, 4)
|
|
||||||
reveal_type(x) # revealed: tuple[Literal[1], Literal[2], Literal[3], Literal[4]]
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Dunder methods
|
## Dunder methods
|
||||||
@@ -28,12 +24,12 @@ x -= 1
|
|||||||
reveal_type(x) # revealed: str
|
reveal_type(x) # revealed: str
|
||||||
|
|
||||||
class C:
|
class C:
|
||||||
def __iadd__(self, other: str) -> int:
|
def __iadd__(self, other: str) -> float:
|
||||||
return 1
|
return 1.0
|
||||||
|
|
||||||
x = C()
|
x = C()
|
||||||
x += "Hello"
|
x += "Hello"
|
||||||
reveal_type(x) # revealed: int
|
reveal_type(x) # revealed: float
|
||||||
```
|
```
|
||||||
|
|
||||||
## Unsupported types
|
## Unsupported types
|
||||||
@@ -44,7 +40,7 @@ class C:
|
|||||||
return 42
|
return 42
|
||||||
|
|
||||||
x = C()
|
x = C()
|
||||||
# error: [unsupported-operator] "Operator `-=` is unsupported between objects of type `C` and `Literal[1]`"
|
# error: [invalid-argument-type]
|
||||||
x -= 1
|
x -= 1
|
||||||
|
|
||||||
reveal_type(x) # revealed: int
|
reveal_type(x) # revealed: int
|
||||||
@@ -79,7 +75,8 @@ def _(flag: bool):
|
|||||||
|
|
||||||
f = Foo()
|
f = Foo()
|
||||||
|
|
||||||
# error: [unsupported-operator] "Operator `+=` is unsupported between objects of type `Foo` and `Literal["Hello, world!"]`"
|
# TODO: We should emit an `unsupported-operator` error here, possibly with the information
|
||||||
|
# that `Foo.__iadd__` may be unbound as additional context.
|
||||||
f += "Hello, world!"
|
f += "Hello, world!"
|
||||||
|
|
||||||
reveal_type(f) # revealed: int | Unknown
|
reveal_type(f) # revealed: int | Unknown
|
||||||
@@ -133,10 +130,10 @@ def _(flag: bool):
|
|||||||
if flag:
|
if flag:
|
||||||
f = Foo()
|
f = Foo()
|
||||||
else:
|
else:
|
||||||
f = 42
|
f = 42.0
|
||||||
f += 12
|
f += 12
|
||||||
|
|
||||||
reveal_type(f) # revealed: str | Literal[54]
|
reveal_type(f) # revealed: str | float
|
||||||
```
|
```
|
||||||
|
|
||||||
## Partially bound target union with `__add__`
|
## Partially bound target union with `__add__`
|
||||||
@@ -165,18 +162,3 @@ def f(flag: bool, flag2: bool):
|
|||||||
|
|
||||||
reveal_type(f) # revealed: int | str | float
|
reveal_type(f) # revealed: int | str | float
|
||||||
```
|
```
|
||||||
|
|
||||||
## Implicit dunder calls on class objects
|
|
||||||
|
|
||||||
```py
|
|
||||||
class Meta(type):
|
|
||||||
def __iadd__(cls, other: int) -> str:
|
|
||||||
return ""
|
|
||||||
|
|
||||||
class C(metaclass=Meta): ...
|
|
||||||
|
|
||||||
cls = C
|
|
||||||
cls += 1
|
|
||||||
|
|
||||||
reveal_type(cls) # revealed: str
|
|
||||||
```
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -50,44 +50,46 @@ reveal_type(b | b) # revealed: Literal[False]
|
|||||||
## Arithmetic with a variable
|
## Arithmetic with a variable
|
||||||
|
|
||||||
```py
|
```py
|
||||||
def _(a: bool):
|
a = True
|
||||||
def lhs_is_int(x: int):
|
b = False
|
||||||
reveal_type(x + a) # revealed: int
|
|
||||||
reveal_type(x - a) # revealed: int
|
|
||||||
reveal_type(x * a) # revealed: int
|
|
||||||
reveal_type(x // a) # revealed: int
|
|
||||||
reveal_type(x / a) # revealed: int | float
|
|
||||||
reveal_type(x % a) # revealed: int
|
|
||||||
|
|
||||||
def rhs_is_int(x: int):
|
def lhs_is_int(x: int):
|
||||||
reveal_type(a + x) # revealed: int
|
reveal_type(x + a) # revealed: int
|
||||||
reveal_type(a - x) # revealed: int
|
reveal_type(x - a) # revealed: int
|
||||||
reveal_type(a * x) # revealed: int
|
reveal_type(x * a) # revealed: int
|
||||||
reveal_type(a // x) # revealed: int
|
reveal_type(x // a) # revealed: int
|
||||||
reveal_type(a / x) # revealed: int | float
|
reveal_type(x / a) # revealed: float
|
||||||
reveal_type(a % x) # revealed: int
|
reveal_type(x % a) # revealed: int
|
||||||
|
|
||||||
def lhs_is_bool(x: bool):
|
def rhs_is_int(x: int):
|
||||||
reveal_type(x + a) # revealed: int
|
reveal_type(a + x) # revealed: int
|
||||||
reveal_type(x - a) # revealed: int
|
reveal_type(a - x) # revealed: int
|
||||||
reveal_type(x * a) # revealed: int
|
reveal_type(a * x) # revealed: int
|
||||||
reveal_type(x // a) # revealed: int
|
reveal_type(a // x) # revealed: int
|
||||||
reveal_type(x / a) # revealed: int | float
|
reveal_type(a / x) # revealed: float
|
||||||
reveal_type(x % a) # revealed: int
|
reveal_type(a % x) # revealed: int
|
||||||
|
|
||||||
def rhs_is_bool(x: bool):
|
def lhs_is_bool(x: bool):
|
||||||
reveal_type(a + x) # revealed: int
|
reveal_type(x + a) # revealed: int
|
||||||
reveal_type(a - x) # revealed: int
|
reveal_type(x - a) # revealed: int
|
||||||
reveal_type(a * x) # revealed: int
|
reveal_type(x * a) # revealed: int
|
||||||
reveal_type(a // x) # revealed: int
|
reveal_type(x // a) # revealed: int
|
||||||
reveal_type(a / x) # revealed: int | float
|
reveal_type(x / a) # revealed: float
|
||||||
reveal_type(a % x) # revealed: int
|
reveal_type(x % a) # revealed: int
|
||||||
|
|
||||||
def both_are_bool(x: bool, y: bool):
|
def rhs_is_bool(x: bool):
|
||||||
reveal_type(x + y) # revealed: int
|
reveal_type(a + x) # revealed: int
|
||||||
reveal_type(x - y) # revealed: int
|
reveal_type(a - x) # revealed: int
|
||||||
reveal_type(x * y) # revealed: int
|
reveal_type(a * x) # revealed: int
|
||||||
reveal_type(x // y) # revealed: int
|
reveal_type(a // x) # revealed: int
|
||||||
reveal_type(x / y) # revealed: int | float
|
reveal_type(a / x) # revealed: float
|
||||||
reveal_type(x % y) # revealed: int
|
reveal_type(a % x) # revealed: int
|
||||||
|
|
||||||
|
def both_are_bool(x: bool, y: bool):
|
||||||
|
reveal_type(x + y) # revealed: int
|
||||||
|
reveal_type(x - y) # revealed: int
|
||||||
|
reveal_type(x * y) # revealed: int
|
||||||
|
reveal_type(x // y) # revealed: int
|
||||||
|
reveal_type(x / y) # revealed: float
|
||||||
|
reveal_type(x % y) # revealed: int
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -3,8 +3,6 @@
|
|||||||
## Class instances
|
## Class instances
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
class Yes:
|
class Yes:
|
||||||
def __add__(self, other) -> Literal["+"]:
|
def __add__(self, other) -> Literal["+"]:
|
||||||
return "+"
|
return "+"
|
||||||
@@ -138,8 +136,6 @@ reveal_type(No() // Yes()) # revealed: Unknown
|
|||||||
## Subclass reflections override superclass dunders
|
## Subclass reflections override superclass dunders
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
class Yes:
|
class Yes:
|
||||||
def __add__(self, other) -> Literal["+"]:
|
def __add__(self, other) -> Literal["+"]:
|
||||||
return "+"
|
return "+"
|
||||||
@@ -298,8 +294,6 @@ itself. (For these operators to work on the class itself, they would have to be
|
|||||||
class's type, i.e. `type`.)
|
class's type, i.e. `type`.)
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
class Yes:
|
class Yes:
|
||||||
def __add__(self, other) -> Literal["+"]:
|
def __add__(self, other) -> Literal["+"]:
|
||||||
return "+"
|
return "+"
|
||||||
@@ -318,8 +312,6 @@ reveal_type(No + No) # revealed: Unknown
|
|||||||
## Subclass
|
## Subclass
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
class Yes:
|
class Yes:
|
||||||
def __add__(self, other) -> Literal["+"]:
|
def __add__(self, other) -> Literal["+"]:
|
||||||
return "+"
|
return "+"
|
||||||
|
|||||||
@@ -244,7 +244,10 @@ class B:
|
|||||||
def __rsub__(self, other: A) -> B:
|
def __rsub__(self, other: A) -> B:
|
||||||
return B()
|
return B()
|
||||||
|
|
||||||
reveal_type(A() - B()) # revealed: B
|
# TODO: this should be `B` (the return annotation of `B.__rsub__`),
|
||||||
|
# because `A.__sub__` is annotated as only accepting `A`,
|
||||||
|
# but `B.__rsub__` will accept `A`.
|
||||||
|
reveal_type(A() - B()) # revealed: A
|
||||||
```
|
```
|
||||||
|
|
||||||
## Callable instances as dunders
|
## Callable instances as dunders
|
||||||
@@ -259,38 +262,31 @@ class A:
|
|||||||
class B:
|
class B:
|
||||||
__add__ = A()
|
__add__ = A()
|
||||||
|
|
||||||
reveal_type(B() + B()) # revealed: Unknown | int
|
reveal_type(B() + B()) # revealed: int
|
||||||
```
|
|
||||||
|
|
||||||
Note that we union with `Unknown` here because `__add__` is not declared. We do infer just `int` if
|
|
||||||
the callable is declared:
|
|
||||||
|
|
||||||
```py
|
|
||||||
class B2:
|
|
||||||
__add__: A = A()
|
|
||||||
|
|
||||||
reveal_type(B2() + B2()) # revealed: int
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Integration test: numbers from typeshed
|
## Integration test: numbers from typeshed
|
||||||
|
|
||||||
We get less precise results from binary operations on float/complex literals due to the special case
|
|
||||||
for annotations of `float` or `complex`, which applies also to return annotations for typeshed
|
|
||||||
dunder methods. Perhaps we could have a special-case on the special-case, to exclude these typeshed
|
|
||||||
return annotations from the widening, and preserve a bit more precision here?
|
|
||||||
|
|
||||||
```py
|
```py
|
||||||
reveal_type(3j + 3.14) # revealed: int | float | complex
|
reveal_type(3j + 3.14) # revealed: complex
|
||||||
reveal_type(4.2 + 42) # revealed: int | float
|
reveal_type(4.2 + 42) # revealed: float
|
||||||
reveal_type(3j + 3) # revealed: int | float | complex
|
reveal_type(3j + 3) # revealed: complex
|
||||||
reveal_type(3.14 + 3j) # revealed: int | float | complex
|
|
||||||
reveal_type(42 + 4.2) # revealed: int | float
|
# TODO should be complex, need to check arg type and fall back to `rhs.__radd__`
|
||||||
reveal_type(3 + 3j) # revealed: int | float | complex
|
reveal_type(3.14 + 3j) # revealed: float
|
||||||
|
|
||||||
|
# TODO should be float, need to check arg type and fall back to `rhs.__radd__`
|
||||||
|
reveal_type(42 + 4.2) # revealed: int
|
||||||
|
|
||||||
|
# TODO should be complex, need to check arg type and fall back to `rhs.__radd__`
|
||||||
|
reveal_type(3 + 3j) # revealed: int
|
||||||
|
|
||||||
def _(x: bool, y: int):
|
def _(x: bool, y: int):
|
||||||
reveal_type(x + y) # revealed: int
|
reveal_type(x + y) # revealed: int
|
||||||
reveal_type(4.2 + x) # revealed: int | float
|
reveal_type(4.2 + x) # revealed: float
|
||||||
reveal_type(y + 4.12) # revealed: int | float
|
|
||||||
|
# TODO should be float, need to check arg type and fall back to `rhs.__radd__`
|
||||||
|
reveal_type(y + 4.12) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
## With literal types
|
## With literal types
|
||||||
@@ -307,12 +303,13 @@ class A:
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
reveal_type(A() + 1) # revealed: A
|
reveal_type(A() + 1) # revealed: A
|
||||||
reveal_type(1 + A()) # revealed: A
|
# TODO should be `A` since `int.__add__` doesn't support `A` instances
|
||||||
|
reveal_type(1 + A()) # revealed: int
|
||||||
|
|
||||||
reveal_type(A() + "foo") # revealed: A
|
reveal_type(A() + "foo") # revealed: A
|
||||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||||
# TODO overloads
|
# TODO overloads
|
||||||
reveal_type("foo" + A()) # revealed: @Todo(return type of decorated function)
|
reveal_type("foo" + A()) # revealed: @Todo(return type)
|
||||||
|
|
||||||
reveal_type(A() + b"foo") # revealed: A
|
reveal_type(A() + b"foo") # revealed: A
|
||||||
# TODO should be `A` since `bytes.__add__` doesn't support `A` instances
|
# TODO should be `A` since `bytes.__add__` doesn't support `A` instances
|
||||||
@@ -320,7 +317,7 @@ reveal_type(b"foo" + A()) # revealed: bytes
|
|||||||
|
|
||||||
reveal_type(A() + ()) # revealed: A
|
reveal_type(A() + ()) # revealed: A
|
||||||
# TODO this should be `A`, since `tuple.__add__` doesn't support `A` instances
|
# TODO this should be `A`, since `tuple.__add__` doesn't support `A` instances
|
||||||
reveal_type(() + A()) # revealed: @Todo(return type of decorated function)
|
reveal_type(() + A()) # revealed: @Todo(return type)
|
||||||
|
|
||||||
literal_string_instance = "foo" * 1_000_000_000
|
literal_string_instance = "foo" * 1_000_000_000
|
||||||
# the test is not testing what it's meant to be testing if this isn't a `LiteralString`:
|
# the test is not testing what it's meant to be testing if this isn't a `LiteralString`:
|
||||||
@@ -329,7 +326,7 @@ reveal_type(literal_string_instance) # revealed: LiteralString
|
|||||||
reveal_type(A() + literal_string_instance) # revealed: A
|
reveal_type(A() + literal_string_instance) # revealed: A
|
||||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||||
# TODO overloads
|
# TODO overloads
|
||||||
reveal_type(literal_string_instance + A()) # revealed: @Todo(return type of decorated function)
|
reveal_type(literal_string_instance + A()) # revealed: @Todo(return type)
|
||||||
```
|
```
|
||||||
|
|
||||||
## Operations involving instances of classes inheriting from `Any`
|
## Operations involving instances of classes inheriting from `Any`
|
||||||
@@ -357,20 +354,6 @@ class Y(Foo): ...
|
|||||||
reveal_type(X() + Y()) # revealed: int
|
reveal_type(X() + Y()) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
## Operations involving types with invalid `__bool__` methods
|
|
||||||
|
|
||||||
<!-- snapshot-diagnostics -->
|
|
||||||
|
|
||||||
```py
|
|
||||||
class NotBoolable:
|
|
||||||
__bool__: int = 3
|
|
||||||
|
|
||||||
a = NotBoolable()
|
|
||||||
|
|
||||||
# error: [unsupported-bool-conversion]
|
|
||||||
10 and a and True
|
|
||||||
```
|
|
||||||
|
|
||||||
## Unsupported
|
## Unsupported
|
||||||
|
|
||||||
### Dunder as instance attribute
|
### Dunder as instance attribute
|
||||||
@@ -406,12 +389,10 @@ A left-hand dunder method doesn't apply for the right-hand operand, or vice vers
|
|||||||
|
|
||||||
```py
|
```py
|
||||||
class A:
|
class A:
|
||||||
def __add__(self, other) -> int:
|
def __add__(self, other) -> int: ...
|
||||||
return 1
|
|
||||||
|
|
||||||
class B:
|
class B:
|
||||||
def __radd__(self, other) -> int:
|
def __radd__(self, other) -> int: ...
|
||||||
return 1
|
|
||||||
|
|
||||||
class C: ...
|
class C: ...
|
||||||
|
|
||||||
|
|||||||
@@ -10,15 +10,16 @@ reveal_type(-3 // 3) # revealed: Literal[-1]
|
|||||||
reveal_type(-3 / 3) # revealed: float
|
reveal_type(-3 / 3) # revealed: float
|
||||||
reveal_type(5 % 3) # revealed: Literal[2]
|
reveal_type(5 % 3) # revealed: Literal[2]
|
||||||
|
|
||||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `Literal[2]` and `Literal["f"]`"
|
# TODO: We don't currently verify that the actual parameter to int.__add__ matches the declared
|
||||||
reveal_type(2 + "f") # revealed: Unknown
|
# formal parameter type.
|
||||||
|
reveal_type(2 + "f") # revealed: int
|
||||||
|
|
||||||
def lhs(x: int):
|
def lhs(x: int):
|
||||||
reveal_type(x + 1) # revealed: int
|
reveal_type(x + 1) # revealed: int
|
||||||
reveal_type(x - 4) # revealed: int
|
reveal_type(x - 4) # revealed: int
|
||||||
reveal_type(x * -1) # revealed: int
|
reveal_type(x * -1) # revealed: int
|
||||||
reveal_type(x // 3) # revealed: int
|
reveal_type(x // 3) # revealed: int
|
||||||
reveal_type(x / 3) # revealed: int | float
|
reveal_type(x / 3) # revealed: float
|
||||||
reveal_type(x % 3) # revealed: int
|
reveal_type(x % 3) # revealed: int
|
||||||
|
|
||||||
def rhs(x: int):
|
def rhs(x: int):
|
||||||
@@ -26,7 +27,7 @@ def rhs(x: int):
|
|||||||
reveal_type(3 - x) # revealed: int
|
reveal_type(3 - x) # revealed: int
|
||||||
reveal_type(3 * x) # revealed: int
|
reveal_type(3 * x) # revealed: int
|
||||||
reveal_type(-3 // x) # revealed: int
|
reveal_type(-3 // x) # revealed: int
|
||||||
reveal_type(-3 / x) # revealed: int | float
|
reveal_type(-3 / x) # revealed: float
|
||||||
reveal_type(5 % x) # revealed: int
|
reveal_type(5 % x) # revealed: int
|
||||||
|
|
||||||
def both(x: int):
|
def both(x: int):
|
||||||
@@ -34,7 +35,7 @@ def both(x: int):
|
|||||||
reveal_type(x - x) # revealed: int
|
reveal_type(x - x) # revealed: int
|
||||||
reveal_type(x * x) # revealed: int
|
reveal_type(x * x) # revealed: int
|
||||||
reveal_type(x // x) # revealed: int
|
reveal_type(x // x) # revealed: int
|
||||||
reveal_type(x / x) # revealed: int | float
|
reveal_type(x / x) # revealed: float
|
||||||
reveal_type(x % x) # revealed: int
|
reveal_type(x % x) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -50,9 +51,9 @@ reveal_type(1 ** (largest_u32 + 1)) # revealed: int
|
|||||||
reveal_type(2**largest_u32) # revealed: int
|
reveal_type(2**largest_u32) # revealed: int
|
||||||
|
|
||||||
def variable(x: int):
|
def variable(x: int):
|
||||||
reveal_type(x**2) # revealed: @Todo(return type of decorated function)
|
reveal_type(x**2) # revealed: @Todo(return type)
|
||||||
reveal_type(2**x) # revealed: @Todo(return type of decorated function)
|
reveal_type(2**x) # revealed: @Todo(return type)
|
||||||
reveal_type(x**x) # revealed: @Todo(return type of decorated function)
|
reveal_type(x**x) # revealed: @Todo(return type)
|
||||||
```
|
```
|
||||||
|
|
||||||
## Division by Zero
|
## Division by Zero
|
||||||
@@ -79,20 +80,24 @@ c = 3 % 0 # error: "Cannot reduce object of type `Literal[3]` modulo zero"
|
|||||||
reveal_type(c) # revealed: int
|
reveal_type(c) # revealed: int
|
||||||
|
|
||||||
# error: "Cannot divide object of type `int` by zero"
|
# error: "Cannot divide object of type `int` by zero"
|
||||||
reveal_type(int() / 0) # revealed: int | float
|
# revealed: float
|
||||||
|
reveal_type(int() / 0)
|
||||||
|
|
||||||
# error: "Cannot divide object of type `Literal[1]` by zero"
|
# error: "Cannot divide object of type `Literal[1]` by zero"
|
||||||
reveal_type(1 / False) # revealed: float
|
# revealed: float
|
||||||
|
reveal_type(1 / False)
|
||||||
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
|
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
|
||||||
True / False
|
True / False
|
||||||
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
|
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
|
||||||
bool(1) / False
|
bool(1) / False
|
||||||
|
|
||||||
# error: "Cannot divide object of type `float` by zero"
|
# error: "Cannot divide object of type `float` by zero"
|
||||||
reveal_type(1.0 / 0) # revealed: int | float
|
# revealed: float
|
||||||
|
reveal_type(1.0 / 0)
|
||||||
|
|
||||||
class MyInt(int): ...
|
class MyInt(int): ...
|
||||||
|
|
||||||
# No error for a subclass of int
|
# No error for a subclass of int
|
||||||
reveal_type(MyInt(3) / 0) # revealed: int | float
|
# revealed: float
|
||||||
|
reveal_type(MyInt(3) / 0)
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -1,22 +0,0 @@
|
|||||||
# Binary operations on tuples
|
|
||||||
|
|
||||||
## Concatenation for heterogeneous tuples
|
|
||||||
|
|
||||||
```py
|
|
||||||
reveal_type((1, 2) + (3, 4)) # revealed: tuple[Literal[1], Literal[2], Literal[3], Literal[4]]
|
|
||||||
reveal_type(() + (1, 2)) # revealed: tuple[Literal[1], Literal[2]]
|
|
||||||
reveal_type((1, 2) + ()) # revealed: tuple[Literal[1], Literal[2]]
|
|
||||||
reveal_type(() + ()) # revealed: tuple[()]
|
|
||||||
|
|
||||||
def _(x: tuple[int, str], y: tuple[None, tuple[int]]):
|
|
||||||
reveal_type(x + y) # revealed: tuple[int, str, None, tuple[int]]
|
|
||||||
reveal_type(y + x) # revealed: tuple[None, tuple[int], int, str]
|
|
||||||
```
|
|
||||||
|
|
||||||
## Concatenation for homogeneous tuples
|
|
||||||
|
|
||||||
```py
|
|
||||||
def _(x: tuple[int, ...], y: tuple[str, ...]):
|
|
||||||
reveal_type(x + y) # revealed: @Todo(full tuple[...] support)
|
|
||||||
reveal_type(x + (1, 2)) # revealed: @Todo(full tuple[...] support)
|
|
||||||
```
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
# Binary operations on union types
|
|
||||||
|
|
||||||
Binary operations on union types are only available if they are supported for all possible
|
|
||||||
combinations of types:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def f1(i: int, u: int | None):
|
|
||||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int` and `int | None`"
|
|
||||||
reveal_type(i + u) # revealed: Unknown
|
|
||||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int | None` and `int`"
|
|
||||||
reveal_type(u + i) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
`int` can be added to `int`, and `str` can be added to `str`, but expressions of type `int | str`
|
|
||||||
cannot be added, because that would require addition of `int` and `str` or vice versa:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def f2(i: int, s: str, int_or_str: int | str):
|
|
||||||
i + i
|
|
||||||
s + s
|
|
||||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int | str` and `int | str`"
|
|
||||||
reveal_type(int_or_str + int_or_str) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
However, if an operation is supported for all possible combinations, the result will be a union of
|
|
||||||
the possible outcomes:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
def f3(two_or_three: Literal[2, 3], a_or_b: Literal["a", "b"]):
|
|
||||||
reveal_type(two_or_three + two_or_three) # revealed: Literal[4, 5, 6]
|
|
||||||
reveal_type(two_or_three**two_or_three) # revealed: Literal[4, 8, 9, 27]
|
|
||||||
|
|
||||||
reveal_type(a_or_b + a_or_b) # revealed: Literal["aa", "ab", "ba", "bb"]
|
|
||||||
|
|
||||||
reveal_type(two_or_three * a_or_b) # revealed: Literal["aa", "bb", "aaa", "bbb"]
|
|
||||||
```
|
|
||||||
|
|
||||||
We treat a type annotation of `float` as a union of `int` and `float`, so union handling is relevant
|
|
||||||
here:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def f4(x: float, y: float):
|
|
||||||
reveal_type(x + y) # revealed: int | float
|
|
||||||
reveal_type(x - y) # revealed: int | float
|
|
||||||
reveal_type(x * y) # revealed: int | float
|
|
||||||
reveal_type(x / y) # revealed: int | float
|
|
||||||
reveal_type(x // y) # revealed: int | float
|
|
||||||
reveal_type(x % y) # revealed: int | float
|
|
||||||
```
|
|
||||||
@@ -1,15 +1,10 @@
|
|||||||
# Boundness and declaredness: public uses
|
# Boundness and declaredness: public uses
|
||||||
|
|
||||||
This document demonstrates how type-inference and diagnostics work for *public* uses of a symbol,
|
This document demonstrates how type-inference and diagnostics works for *public* uses of a symbol,
|
||||||
that is, a use of a symbol from another scope. If a symbol has a declared type in its local scope
|
that is, a use of a symbol from another scope. If a symbol has a declared type in its local scope
|
||||||
(e.g. `int`), we use that as the symbol's "public type" (the type of the symbol from the perspective
|
(e.g. `int`), we use that as the symbol's "public type" (the type of the symbol from the perspective
|
||||||
of other scopes) even if there is a more precise local inferred type for the symbol (`Literal[1]`).
|
of other scopes) even if there is a more precise local inferred type for the symbol (`Literal[1]`).
|
||||||
|
|
||||||
If a symbol has no declared type, we use the union of `Unknown` with the inferred type as the public
|
|
||||||
type. If there is no declaration, then the symbol can be reassigned to any type from another scope;
|
|
||||||
the union with `Unknown` reflects that its type must at least be as large as the type of the
|
|
||||||
assigned value, but could be arbitrarily larger.
|
|
||||||
|
|
||||||
We test the whole matrix of possible boundness and declaredness states. The current behavior is
|
We test the whole matrix of possible boundness and declaredness states. The current behavior is
|
||||||
summarized in the following table, while the tests below demonstrate each case. Note that some of
|
summarized in the following table, while the tests below demonstrate each case. Note that some of
|
||||||
this behavior is questionable and might change in the future. See the TODOs in `symbol_by_id`
|
this behavior is questionable and might change in the future. See the TODOs in `symbol_by_id`
|
||||||
@@ -17,11 +12,11 @@ this behavior is questionable and might change in the future. See the TODOs in `
|
|||||||
In particular, we should raise errors in the "possibly-undeclared-and-unbound" as well as the
|
In particular, we should raise errors in the "possibly-undeclared-and-unbound" as well as the
|
||||||
"undeclared-and-possibly-unbound" cases (marked with a "?").
|
"undeclared-and-possibly-unbound" cases (marked with a "?").
|
||||||
|
|
||||||
| **Public type** | declared | possibly-undeclared | undeclared |
|
| **Public type** | declared | possibly-undeclared | undeclared |
|
||||||
| ---------------- | ------------ | -------------------------- | ----------------------- |
|
| ---------------- | ------------ | -------------------------- | ------------ |
|
||||||
| bound | `T_declared` | `T_declared \| T_inferred` | `Unknown \| T_inferred` |
|
| bound | `T_declared` | `T_declared \| T_inferred` | `T_inferred` |
|
||||||
| possibly-unbound | `T_declared` | `T_declared \| T_inferred` | `Unknown \| T_inferred` |
|
| possibly-unbound | `T_declared` | `T_declared \| T_inferred` | `T_inferred` |
|
||||||
| unbound | `T_declared` | `T_declared` | `Unknown` |
|
| unbound | `T_declared` | `T_declared` | `Unknown` |
|
||||||
|
|
||||||
| **Diagnostic** | declared | possibly-undeclared | undeclared |
|
| **Diagnostic** | declared | possibly-undeclared | undeclared |
|
||||||
| ---------------- | -------- | ------------------------- | ------------------- |
|
| ---------------- | -------- | ------------------------- | ------------------- |
|
||||||
@@ -34,28 +29,20 @@ In particular, we should raise errors in the "possibly-undeclared-and-unbound" a
|
|||||||
### Declared and bound
|
### Declared and bound
|
||||||
|
|
||||||
If a symbol has a declared type (`int`), we use that even if there is a more precise inferred type
|
If a symbol has a declared type (`int`), we use that even if there is a more precise inferred type
|
||||||
(`Literal[1]`), or a conflicting inferred type (`str` vs. `Literal[2]` below):
|
(`Literal[1]`), or a conflicting inferred type (`Literal[2]`):
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
x: int = 1
|
||||||
|
|
||||||
```py
|
# error: [invalid-assignment]
|
||||||
from typing import Any
|
y: str = 2
|
||||||
|
|
||||||
def any() -> Any: ...
|
|
||||||
|
|
||||||
a: int = 1
|
|
||||||
b: str = 2 # error: [invalid-assignment]
|
|
||||||
c: Any = 3
|
|
||||||
d: int = any()
|
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from mod import a, b, c, d
|
from mod import x, y
|
||||||
|
|
||||||
reveal_type(a) # revealed: int
|
reveal_type(x) # revealed: int
|
||||||
reveal_type(b) # revealed: str
|
reveal_type(y) # revealed: str
|
||||||
reveal_type(c) # revealed: Any
|
|
||||||
reveal_type(d) # revealed: int
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Declared and possibly unbound
|
### Declared and possibly unbound
|
||||||
@@ -63,34 +50,22 @@ reveal_type(d) # revealed: int
|
|||||||
If a symbol is declared and *possibly* unbound, we trust that other module and use the declared type
|
If a symbol is declared and *possibly* unbound, we trust that other module and use the declared type
|
||||||
without raising an error.
|
without raising an error.
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
def flag() -> bool: ...
|
||||||
```py
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
def any() -> Any: ...
|
|
||||||
def flag() -> bool:
|
|
||||||
return True
|
|
||||||
|
|
||||||
a: int
|
|
||||||
b: str
|
|
||||||
c: Any
|
|
||||||
d: int
|
|
||||||
|
|
||||||
|
x: int
|
||||||
|
y: str
|
||||||
if flag:
|
if flag:
|
||||||
a = 1
|
x = 1
|
||||||
b = 2 # error: [invalid-assignment]
|
# error: [invalid-assignment]
|
||||||
c = 3
|
y = 2
|
||||||
d = any()
|
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from mod import a, b, c, d
|
from mod import x, y
|
||||||
|
|
||||||
reveal_type(a) # revealed: int
|
reveal_type(x) # revealed: int
|
||||||
reveal_type(b) # revealed: str
|
reveal_type(y) # revealed: str
|
||||||
reveal_type(c) # revealed: Any
|
|
||||||
reveal_type(d) # revealed: int
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Declared and unbound
|
### Declared and unbound
|
||||||
@@ -98,20 +73,14 @@ reveal_type(d) # revealed: int
|
|||||||
Similarly, if a symbol is declared but unbound, we do not raise an error. We trust that this symbol
|
Similarly, if a symbol is declared but unbound, we do not raise an error. We trust that this symbol
|
||||||
is available somehow and simply use the declared type.
|
is available somehow and simply use the declared type.
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
x: int
|
||||||
```py
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
a: int
|
|
||||||
b: Any
|
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from mod import a, b
|
from mod import x
|
||||||
|
|
||||||
reveal_type(a) # revealed: int
|
reveal_type(x) # revealed: int
|
||||||
reveal_type(b) # revealed: Any
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Possibly undeclared
|
## Possibly undeclared
|
||||||
@@ -121,72 +90,50 @@ reveal_type(b) # revealed: Any
|
|||||||
If a symbol is possibly undeclared but definitely bound, we use the union of the declared and
|
If a symbol is possibly undeclared but definitely bound, we use the union of the declared and
|
||||||
inferred types:
|
inferred types:
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
def any() -> Any: ...
|
def flag() -> bool: ...
|
||||||
def flag() -> bool:
|
|
||||||
return True
|
|
||||||
|
|
||||||
a = 1
|
x = 1
|
||||||
b = 2
|
y = 2
|
||||||
c = 3
|
|
||||||
d = any()
|
|
||||||
if flag():
|
if flag():
|
||||||
a: int
|
x: Any
|
||||||
b: Any
|
# error: [invalid-declaration]
|
||||||
c: str # error: [invalid-declaration]
|
y: str
|
||||||
d: int
|
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from mod import a, b, c, d
|
from mod import x, y
|
||||||
|
|
||||||
reveal_type(a) # revealed: int
|
reveal_type(x) # revealed: Literal[1] | Any
|
||||||
reveal_type(b) # revealed: Literal[2] | Any
|
reveal_type(y) # revealed: Literal[2] | Unknown
|
||||||
reveal_type(c) # revealed: Literal[3] | Unknown
|
|
||||||
reveal_type(d) # revealed: Any | int
|
|
||||||
|
|
||||||
# External modifications of `a` that violate the declared type are not allowed:
|
|
||||||
# error: [invalid-assignment]
|
|
||||||
a = None
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Possibly undeclared and possibly unbound
|
### Possibly undeclared and possibly unbound
|
||||||
|
|
||||||
If a symbol is possibly undeclared and possibly unbound, we also use the union of the declared and
|
If a symbol is possibly undeclared and possibly unbound, we also use the union of the declared and
|
||||||
inferred types. This case is interesting because the "possibly declared" definition might not be the
|
inferred types. This case is interesting because the "possibly declared" definition might not be the
|
||||||
same as the "possibly bound" definition (symbol `b`). Note that we raise a `possibly-unbound-import`
|
same as the "possibly bound" definition (symbol `y`). Note that we raise a `possibly-unbound-import`
|
||||||
error for both `a` and `b`:
|
error for both `x` and `y`:
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
def flag() -> bool: ...
|
||||||
```py
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
def flag() -> bool:
|
|
||||||
return True
|
|
||||||
|
|
||||||
if flag():
|
if flag():
|
||||||
a: Any = 1
|
x: Any = 1
|
||||||
b = 2
|
y = 2
|
||||||
else:
|
else:
|
||||||
b: str
|
y: str
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# error: [possibly-unbound-import]
|
# error: [possibly-unbound-import]
|
||||||
# error: [possibly-unbound-import]
|
# error: [possibly-unbound-import]
|
||||||
from mod import a, b
|
from mod import x, y
|
||||||
|
|
||||||
reveal_type(a) # revealed: Literal[1] | Any
|
reveal_type(x) # revealed: Literal[1] | Any
|
||||||
reveal_type(b) # revealed: Literal[2] | str
|
reveal_type(y) # revealed: Literal[2] | str
|
||||||
|
|
||||||
# External modifications of `b` that violate the declared type are not allowed:
|
|
||||||
# error: [invalid-assignment]
|
|
||||||
b = None
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Possibly undeclared and unbound
|
### Possibly undeclared and unbound
|
||||||
@@ -194,54 +141,35 @@ b = None
|
|||||||
If a symbol is possibly undeclared and definitely unbound, we currently do not raise an error. This
|
If a symbol is possibly undeclared and definitely unbound, we currently do not raise an error. This
|
||||||
seems inconsistent when compared to the case just above.
|
seems inconsistent when compared to the case just above.
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
def flag() -> bool: ...
|
||||||
```py
|
|
||||||
def flag() -> bool:
|
|
||||||
return True
|
|
||||||
|
|
||||||
if flag():
|
if flag():
|
||||||
a: int
|
x: int
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: this should raise an error. Once we fix this, update the section description and the table
|
# TODO: this should raise an error. Once we fix this, update the section description and the table
|
||||||
# on top of this document.
|
# on top of this document.
|
||||||
from mod import a
|
from mod import x
|
||||||
|
|
||||||
reveal_type(a) # revealed: int
|
reveal_type(x) # revealed: int
|
||||||
|
|
||||||
# External modifications to `a` that violate the declared type are not allowed:
|
|
||||||
# error: [invalid-assignment]
|
|
||||||
a = None
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Undeclared
|
## Undeclared
|
||||||
|
|
||||||
### Undeclared but bound
|
### Undeclared but bound
|
||||||
|
|
||||||
If a symbol is *undeclared*, we use the union of `Unknown` with the inferred type. Note that we
|
We use the inferred type as the public type, if a symbol has no declared type.
|
||||||
treat this case differently from the case where a symbol is implicitly declared with `Unknown`,
|
|
||||||
possibly due to the usage of an unknown name in the annotation:
|
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
x = 1
|
||||||
```py
|
|
||||||
# Undeclared:
|
|
||||||
a = 1
|
|
||||||
|
|
||||||
# Implicitly declared with `Unknown`, due to the usage of an unknown name in the annotation:
|
|
||||||
b: SomeUnknownName = 1 # error: [unresolved-reference]
|
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from mod import a, b
|
from mod import x
|
||||||
|
|
||||||
reveal_type(a) # revealed: Unknown | Literal[1]
|
reveal_type(x) # revealed: Literal[1]
|
||||||
reveal_type(b) # revealed: Unknown
|
|
||||||
|
|
||||||
# All external modifications of `a` are allowed:
|
|
||||||
a = None
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Undeclared and possibly unbound
|
### Undeclared and possibly unbound
|
||||||
@@ -249,46 +177,33 @@ a = None
|
|||||||
If a symbol is undeclared and *possibly* unbound, we currently do not raise an error. This seems
|
If a symbol is undeclared and *possibly* unbound, we currently do not raise an error. This seems
|
||||||
inconsistent when compared to the "possibly-undeclared-and-possibly-unbound" case.
|
inconsistent when compared to the "possibly-undeclared-and-possibly-unbound" case.
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
def flag() -> bool: ...
|
||||||
```py
|
|
||||||
def flag() -> bool:
|
|
||||||
return True
|
|
||||||
|
|
||||||
if flag:
|
if flag:
|
||||||
a = 1
|
x = 1
|
||||||
b: SomeUnknownName = 1 # error: [unresolved-reference]
|
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: this should raise an error. Once we fix this, update the section description and the table
|
# TODO: this should raise an error. Once we fix this, update the section description and the table
|
||||||
# on top of this document.
|
# on top of this document.
|
||||||
from mod import a, b
|
from mod import x
|
||||||
|
|
||||||
reveal_type(a) # revealed: Unknown | Literal[1]
|
reveal_type(x) # revealed: Literal[1]
|
||||||
reveal_type(b) # revealed: Unknown
|
|
||||||
|
|
||||||
# All external modifications of `a` are allowed:
|
|
||||||
a = None
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Undeclared and unbound
|
### Undeclared and unbound
|
||||||
|
|
||||||
If a symbol is undeclared *and* unbound, we infer `Unknown` and raise an error.
|
If a symbol is undeclared *and* unbound, we infer `Unknown` and raise an error.
|
||||||
|
|
||||||
`mod.py`:
|
```py path=mod.py
|
||||||
|
|
||||||
```py
|
|
||||||
if False:
|
if False:
|
||||||
a: int = 1
|
x: int = 1
|
||||||
```
|
```
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# error: [unresolved-import]
|
# error: [unresolved-import]
|
||||||
from mod import a
|
from mod import x
|
||||||
|
|
||||||
reveal_type(a) # revealed: Unknown
|
reveal_type(x) # revealed: Unknown
|
||||||
|
|
||||||
# Modifications allowed in this case:
|
|
||||||
a = None
|
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -1,43 +0,0 @@
|
|||||||
# `typing.Callable`
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
def _(c: Callable[[], int]):
|
|
||||||
reveal_type(c()) # revealed: int
|
|
||||||
|
|
||||||
def _(c: Callable[[int, str], int]):
|
|
||||||
reveal_type(c(1, "a")) # revealed: int
|
|
||||||
|
|
||||||
# error: [invalid-argument-type] "Object of type `Literal["a"]` cannot be assigned to parameter 1; expected type `int`"
|
|
||||||
# error: [invalid-argument-type] "Object of type `Literal[1]` cannot be assigned to parameter 2; expected type `str`"
|
|
||||||
reveal_type(c("a", 1)) # revealed: int
|
|
||||||
```
|
|
||||||
|
|
||||||
The `Callable` annotation can only be used to describe positional-only parameters.
|
|
||||||
|
|
||||||
```py
|
|
||||||
def _(c: Callable[[int, str], None]):
|
|
||||||
# error: [unknown-argument] "Argument `a` does not match any known parameter"
|
|
||||||
# error: [unknown-argument] "Argument `b` does not match any known parameter"
|
|
||||||
# error: [missing-argument] "No arguments provided for required parameters 1, 2"
|
|
||||||
reveal_type(c(a=1, b="b")) # revealed: None
|
|
||||||
```
|
|
||||||
|
|
||||||
If the annotation uses a gradual form (`...`) for the parameter list, then it can accept any kind of
|
|
||||||
parameter with any type.
|
|
||||||
|
|
||||||
```py
|
|
||||||
def _(c: Callable[..., int]):
|
|
||||||
reveal_type(c()) # revealed: int
|
|
||||||
reveal_type(c(1)) # revealed: int
|
|
||||||
reveal_type(c(1, "str", False, a=[1, 2], b=(3, 4))) # revealed: int
|
|
||||||
```
|
|
||||||
|
|
||||||
An invalid `Callable` form can accept any parameters and will return `Unknown`.
|
|
||||||
|
|
||||||
```py
|
|
||||||
# error: [invalid-type-form]
|
|
||||||
def _(c: Callable[42, str]):
|
|
||||||
reveal_type(c()) # revealed: Unknown
|
|
||||||
```
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
# Calling builtins
|
|
||||||
|
|
||||||
## `bool` with incorrect arguments
|
|
||||||
|
|
||||||
```py
|
|
||||||
class NotBool:
|
|
||||||
__bool__ = None
|
|
||||||
|
|
||||||
# error: [too-many-positional-arguments] "Too many positional arguments to class `bool`: expected 1, got 2"
|
|
||||||
bool(1, 2)
|
|
||||||
|
|
||||||
# TODO: We should emit an `unsupported-bool-conversion` error here because the argument doesn't implement `__bool__` correctly.
|
|
||||||
bool(NotBool())
|
|
||||||
```
|
|
||||||
|
|
||||||
## Calls to `type()`
|
|
||||||
|
|
||||||
A single-argument call to `type()` returns an object that has the argument's meta-type. (This is
|
|
||||||
tested more extensively in `crates/red_knot_python_semantic/resources/mdtest/attributes.md`,
|
|
||||||
alongside the tests for the `__class__` attribute.)
|
|
||||||
|
|
||||||
```py
|
|
||||||
reveal_type(type(1)) # revealed: Literal[int]
|
|
||||||
```
|
|
||||||
|
|
||||||
But a three-argument call to type creates a dynamic instance of the `type` class:
|
|
||||||
|
|
||||||
```py
|
|
||||||
reveal_type(type("Foo", (), {})) # revealed: type
|
|
||||||
```
|
|
||||||
|
|
||||||
Other numbers of arguments are invalid
|
|
||||||
|
|
||||||
```py
|
|
||||||
# error: [no-matching-overload] "No overload of class `type` matches arguments"
|
|
||||||
type("Foo", ())
|
|
||||||
|
|
||||||
# error: [no-matching-overload] "No overload of class `type` matches arguments"
|
|
||||||
type("Foo", (), {}, weird_other_arg=42)
|
|
||||||
```
|
|
||||||
@@ -4,14 +4,14 @@
|
|||||||
|
|
||||||
```py
|
```py
|
||||||
class Multiplier:
|
class Multiplier:
|
||||||
def __init__(self, factor: int):
|
def __init__(self, factor: float):
|
||||||
self.factor = factor
|
self.factor = factor
|
||||||
|
|
||||||
def __call__(self, number: int) -> int:
|
def __call__(self, number: float) -> float:
|
||||||
return number * self.factor
|
return number * self.factor
|
||||||
|
|
||||||
a = Multiplier(2)(3)
|
a = Multiplier(2.0)(3.0)
|
||||||
reveal_type(a) # revealed: int
|
reveal_type(a) # revealed: float
|
||||||
|
|
||||||
class Unit: ...
|
class Unit: ...
|
||||||
|
|
||||||
@@ -25,8 +25,7 @@ reveal_type(b) # revealed: Unknown
|
|||||||
def _(flag: bool):
|
def _(flag: bool):
|
||||||
class PossiblyNotCallable:
|
class PossiblyNotCallable:
|
||||||
if flag:
|
if flag:
|
||||||
def __call__(self) -> int:
|
def __call__(self) -> int: ...
|
||||||
return 1
|
|
||||||
|
|
||||||
a = PossiblyNotCallable()
|
a = PossiblyNotCallable()
|
||||||
result = a() # error: "Object of type `PossiblyNotCallable` is not callable (possibly unbound `__call__` method)"
|
result = a() # error: "Object of type `PossiblyNotCallable` is not callable (possibly unbound `__call__` method)"
|
||||||
@@ -39,8 +38,7 @@ def _(flag: bool):
|
|||||||
def _(flag: bool):
|
def _(flag: bool):
|
||||||
if flag:
|
if flag:
|
||||||
class PossiblyUnbound:
|
class PossiblyUnbound:
|
||||||
def __call__(self) -> int:
|
def __call__(self) -> int: ...
|
||||||
return 1
|
|
||||||
|
|
||||||
# error: [possibly-unresolved-reference]
|
# error: [possibly-unresolved-reference]
|
||||||
a = PossiblyUnbound()
|
a = PossiblyUnbound()
|
||||||
@@ -54,7 +52,7 @@ class NonCallable:
|
|||||||
__call__ = 1
|
__call__ = 1
|
||||||
|
|
||||||
a = NonCallable()
|
a = NonCallable()
|
||||||
# error: [call-non-callable] "Object of type `Literal[1]` is not callable"
|
# error: "Object of type `NonCallable` is not callable"
|
||||||
reveal_type(a()) # revealed: Unknown
|
reveal_type(a()) # revealed: Unknown
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -66,11 +64,10 @@ def _(flag: bool):
|
|||||||
if flag:
|
if flag:
|
||||||
__call__ = 1
|
__call__ = 1
|
||||||
else:
|
else:
|
||||||
def __call__(self) -> int:
|
def __call__(self) -> int: ...
|
||||||
return 1
|
|
||||||
|
|
||||||
a = NonCallable()
|
a = NonCallable()
|
||||||
# error: [call-non-callable] "Object of type `Literal[1]` is not callable"
|
# error: "Object of type `Literal[1] | Literal[__call__]` is not callable (due to union element `Literal[1]`)"
|
||||||
reveal_type(a()) # revealed: Unknown | int
|
reveal_type(a()) # revealed: Unknown | int
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -85,7 +82,7 @@ class C:
|
|||||||
|
|
||||||
c = C()
|
c = C()
|
||||||
|
|
||||||
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter 2 (`x`) of bound method `__call__`; expected type `int`"
|
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter 2 (`x`) of function `__call__`; expected type `int`"
|
||||||
reveal_type(c("foo")) # revealed: int
|
reveal_type(c("foo")) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -99,29 +96,6 @@ class C:
|
|||||||
|
|
||||||
c = C()
|
c = C()
|
||||||
|
|
||||||
# error: 13 [invalid-argument-type] "Object of type `C` cannot be assigned to parameter 1 (`self`) of bound method `__call__`; expected type `int`"
|
# error: 13 [invalid-argument-type] "Object of type `C` cannot be assigned to parameter 1 (`self`) of function `__call__`; expected type `int`"
|
||||||
reveal_type(c()) # revealed: int
|
reveal_type(c()) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
## Union over callables
|
|
||||||
|
|
||||||
### Possibly unbound `__call__`
|
|
||||||
|
|
||||||
```py
|
|
||||||
def outer(cond1: bool):
|
|
||||||
class Test:
|
|
||||||
if cond1:
|
|
||||||
def __call__(self): ...
|
|
||||||
|
|
||||||
class Other:
|
|
||||||
def __call__(self): ...
|
|
||||||
|
|
||||||
def inner(cond2: bool):
|
|
||||||
if cond2:
|
|
||||||
a = Test()
|
|
||||||
else:
|
|
||||||
a = Other()
|
|
||||||
|
|
||||||
# error: [call-non-callable] "Object of type `Test` is not callable (possibly unbound `__call__` method)"
|
|
||||||
a()
|
|
||||||
```
|
|
||||||
|
|||||||
@@ -1,219 +0,0 @@
|
|||||||
# Dunder calls
|
|
||||||
|
|
||||||
## Introduction
|
|
||||||
|
|
||||||
This test suite explains and documents how dunder methods are looked up and called. Throughout the
|
|
||||||
document, we use `__getitem__` as an example, but the same principles apply to other dunder methods.
|
|
||||||
|
|
||||||
Dunder methods are implicitly called when using certain syntax. For example, the index operator
|
|
||||||
`obj[key]` calls the `__getitem__` method under the hood. Exactly *how* a dunder method is looked up
|
|
||||||
and called works slightly different from regular methods. Dunder methods are not looked up on `obj`
|
|
||||||
directly, but rather on `type(obj)`. But in many ways, they still *act* as if they were called on
|
|
||||||
`obj` directly. If the `__getitem__` member of `type(obj)` is a descriptor, it is called with `obj`
|
|
||||||
as the `instance` argument to `__get__`. A desugared version of `obj[key]` is roughly equivalent to
|
|
||||||
`getitem_desugared(obj, key)` as defined below:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
def find_name_in_mro(typ: type, name: str) -> Any:
|
|
||||||
# See implementation in https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
|
|
||||||
pass
|
|
||||||
|
|
||||||
def getitem_desugared(obj: object, key: object) -> object:
|
|
||||||
getitem_callable = find_name_in_mro(type(obj), "__getitem__")
|
|
||||||
if hasattr(getitem_callable, "__get__"):
|
|
||||||
getitem_callable = getitem_callable.__get__(obj, type(obj))
|
|
||||||
|
|
||||||
return getitem_callable(key)
|
|
||||||
```
|
|
||||||
|
|
||||||
In the following tests, we demonstrate that we implement this behavior correctly.
|
|
||||||
|
|
||||||
## Operating on class objects
|
|
||||||
|
|
||||||
If we invoke a dunder method on a class, it is looked up on the *meta* class, since any class is an
|
|
||||||
instance of its metaclass:
|
|
||||||
|
|
||||||
```py
|
|
||||||
class Meta(type):
|
|
||||||
def __getitem__(cls, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
class DunderOnMetaclass(metaclass=Meta):
|
|
||||||
pass
|
|
||||||
|
|
||||||
reveal_type(DunderOnMetaclass[0]) # revealed: str
|
|
||||||
```
|
|
||||||
|
|
||||||
If the dunder method is only present on the class itself, it will not be called:
|
|
||||||
|
|
||||||
```py
|
|
||||||
class ClassWithNormalDunder:
|
|
||||||
def __getitem__(self, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
# error: [non-subscriptable]
|
|
||||||
ClassWithNormalDunder[0]
|
|
||||||
```
|
|
||||||
|
|
||||||
## Operating on instances
|
|
||||||
|
|
||||||
When invoking a dunder method on an instance of a class, it is looked up on the class:
|
|
||||||
|
|
||||||
```py
|
|
||||||
class ClassWithNormalDunder:
|
|
||||||
def __getitem__(self, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
class_with_normal_dunder = ClassWithNormalDunder()
|
|
||||||
|
|
||||||
reveal_type(class_with_normal_dunder[0]) # revealed: str
|
|
||||||
```
|
|
||||||
|
|
||||||
Which can be demonstrated by trying to attach a dunder method to an instance, which will not work:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def external_getitem(instance, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
class ThisFails:
|
|
||||||
def __init__(self):
|
|
||||||
self.__getitem__ = external_getitem
|
|
||||||
|
|
||||||
this_fails = ThisFails()
|
|
||||||
|
|
||||||
# error: [non-subscriptable] "Cannot subscript object of type `ThisFails` with no `__getitem__` method"
|
|
||||||
reveal_type(this_fails[0]) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
However, the attached dunder method *can* be called if accessed directly:
|
|
||||||
|
|
||||||
```py
|
|
||||||
reveal_type(this_fails.__getitem__(this_fails, 0)) # revealed: Unknown | str
|
|
||||||
```
|
|
||||||
|
|
||||||
The instance-level method is also not called when the class-level method is present:
|
|
||||||
|
|
||||||
```py
|
|
||||||
def external_getitem1(instance, key) -> str:
|
|
||||||
return "a"
|
|
||||||
|
|
||||||
def external_getitem2(key) -> int:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
def _(flag: bool):
|
|
||||||
class ThisFails:
|
|
||||||
if flag:
|
|
||||||
__getitem__ = external_getitem1
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.__getitem__ = external_getitem2
|
|
||||||
|
|
||||||
this_fails = ThisFails()
|
|
||||||
|
|
||||||
# error: [call-possibly-unbound-method]
|
|
||||||
reveal_type(this_fails[0]) # revealed: Unknown | str
|
|
||||||
```
|
|
||||||
|
|
||||||
## When the dunder is not a method
|
|
||||||
|
|
||||||
A dunder can also be a non-method callable:
|
|
||||||
|
|
||||||
```py
|
|
||||||
class SomeCallable:
|
|
||||||
def __call__(self, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
class ClassWithNonMethodDunder:
|
|
||||||
__getitem__: SomeCallable = SomeCallable()
|
|
||||||
|
|
||||||
class_with_callable_dunder = ClassWithNonMethodDunder()
|
|
||||||
|
|
||||||
reveal_type(class_with_callable_dunder[0]) # revealed: str
|
|
||||||
```
|
|
||||||
|
|
||||||
## Dunders are looked up using the descriptor protocol
|
|
||||||
|
|
||||||
Here, we demonstrate that the descriptor protocol is invoked when looking up a dunder method. Note
|
|
||||||
that the `instance` argument is on object of type `ClassWithDescriptorDunder`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
class SomeCallable:
|
|
||||||
def __call__(self, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
class Descriptor:
|
|
||||||
def __get__(self, instance: ClassWithDescriptorDunder, owner: type[ClassWithDescriptorDunder]) -> SomeCallable:
|
|
||||||
return SomeCallable()
|
|
||||||
|
|
||||||
class ClassWithDescriptorDunder:
|
|
||||||
__getitem__: Descriptor = Descriptor()
|
|
||||||
|
|
||||||
class_with_descriptor_dunder = ClassWithDescriptorDunder()
|
|
||||||
|
|
||||||
reveal_type(class_with_descriptor_dunder[0]) # revealed: str
|
|
||||||
```
|
|
||||||
|
|
||||||
## Dunders can not be overwritten on instances
|
|
||||||
|
|
||||||
If we attempt to overwrite a dunder method on an instance, it does not affect the behavior of
|
|
||||||
implicit dunder calls:
|
|
||||||
|
|
||||||
```py
|
|
||||||
class C:
|
|
||||||
def __getitem__(self, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
def f(self):
|
|
||||||
# TODO: This should emit an `invalid-assignment` diagnostic once we understand the type of `self`
|
|
||||||
self.__getitem__ = None
|
|
||||||
|
|
||||||
# This is still fine, and simply calls the `__getitem__` method on the class
|
|
||||||
reveal_type(C()[0]) # revealed: str
|
|
||||||
```
|
|
||||||
|
|
||||||
## Calling a union of dunder methods
|
|
||||||
|
|
||||||
```py
|
|
||||||
def _(flag: bool):
|
|
||||||
class C:
|
|
||||||
if flag:
|
|
||||||
def __getitem__(self, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
else:
|
|
||||||
def __getitem__(self, key: int) -> bytes:
|
|
||||||
return bytes()
|
|
||||||
|
|
||||||
c = C()
|
|
||||||
reveal_type(c[0]) # revealed: str | bytes
|
|
||||||
|
|
||||||
if flag:
|
|
||||||
class D:
|
|
||||||
def __getitem__(self, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
else:
|
|
||||||
class D:
|
|
||||||
def __getitem__(self, key: int) -> bytes:
|
|
||||||
return bytes()
|
|
||||||
|
|
||||||
d = D()
|
|
||||||
reveal_type(d[0]) # revealed: str | bytes
|
|
||||||
```
|
|
||||||
|
|
||||||
## Calling a possibly-unbound dunder method
|
|
||||||
|
|
||||||
```py
|
|
||||||
def _(flag: bool):
|
|
||||||
class C:
|
|
||||||
if flag:
|
|
||||||
def __getitem__(self, key: int) -> str:
|
|
||||||
return str(key)
|
|
||||||
|
|
||||||
c = C()
|
|
||||||
# error: [call-possibly-unbound-method]
|
|
||||||
reveal_type(c[0]) # revealed: str
|
|
||||||
```
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user