Compare commits


12 Commits

SHA1        Date                        Author            Message
c76e15a45d  2025-01-23 14:13:29 +05:30  Dhruv Manilawala  Check context parameters directly from function definition
d0aff2bbff  2025-01-23 12:19:31 +05:45  Ankit Chaurasia   Add find_parameter
65db31f0e1  2025-01-22 12:59:39 +05:45  Ankit Chaurasia   highlights the parameter itself
c2c37b8052  2025-01-22 10:06:15 +08:00  Wei Lee           test: update test fixture
51613d9107  2025-01-22 09:57:26 +08:00  Ankit Chaurasia   Add lint error for removed context variables for get_current_context
f20e70cd62  2025-01-22 09:57:25 +08:00  Ankit Chaurasia   remove use of vectors
4737824345  2025-01-22 09:57:25 +08:00  Ankit Chaurasia   fix the logic for lint error message
1961b76d03  2025-01-22 09:57:25 +08:00  Ankit Chaurasia   Refactor functions to use ExprSubscript
62a1e55705  2025-01-22 09:57:25 +08:00  Ankit Chaurasia   refactor rename functions
8a7ec4c0a3  2025-01-22 09:57:25 +08:00  Ankit Chaurasia   Fix PR comments
9b9540c3cd  2025-01-22 09:57:25 +08:00  Ankit Chaurasia   Check arguments and function decorated with @task
ccafaf8e30  2025-01-22 09:57:25 +08:00  Ankit Chaurasia   Add more checks for removed context variables
                                                          (body: add lint rule to show error for removed context variables in airflow)
2363 changed files with 50072 additions and 161270 deletions


@@ -8,7 +8,3 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]
[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']


@@ -6,10 +6,3 @@ failure-output = "immediate-final"
fail-fast = false
status-level = "skip"
# Mark tests that take longer than 1s as slow.
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "1s", terminate-after = 60 }
# Show slow jobs in the final summary
final-status-level = "slow"

.github/CODEOWNERS

@@ -18,7 +18,6 @@
/python/py-fuzzer/ @AlexWaygood
# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/red_knot_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp


@@ -1,31 +0,0 @@
name: Bug report
description: Report an error or unexpected behavior
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
**Before reporting, please make sure to search through [existing issues](https://github.com/astral-sh/ruff/issues?q=is:issue+is:open+label:bug) (including [closed](https://github.com/astral-sh/ruff/issues?q=is:issue%20state:closed%20label:bug)).**
- type: textarea
attributes:
label: Summary
description: |
A clear and concise description of the bug, including a minimal reproducible example.
Be sure to include the command you invoked (e.g., `ruff check /path/to/file.py --fix`), ideally including the `--isolated` flag and
the current Ruff settings (e.g., relevant sections from your `pyproject.toml`).
If possible, try to include the [playground](https://play.ruff.rs) link that reproduces this issue.
validations:
required: true
- type: input
attributes:
label: Version
description: What version of ruff are you using? (see `ruff version`)
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
validations:
required: false


@@ -1,10 +0,0 @@
name: Rule request
description: Anything related to lint rules (proposing new rules, changes to existing rules, auto-fixes, etc.)
body:
- type: textarea
attributes:
label: Summary
description: |
A clear and concise description of the relevant request. If applicable, please describe the current behavior as well.
validations:
required: true


@@ -1,18 +0,0 @@
name: Question
description: Ask a question about Ruff
labels: ["question"]
body:
- type: textarea
attributes:
label: Question
description: Describe your question in detail.
validations:
required: true
- type: input
attributes:
label: Version
description: What version of ruff are you using? (see `ruff version`)
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
validations:
required: false


@@ -1,8 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: Documentation
url: https://docs.astral.sh/ruff
about: Please consult the documentation before creating an issue.
- name: Community
url: https://discord.com/invite/astral-sh
about: Join our Discord community to ask questions and collaborate.


@@ -40,17 +40,6 @@
enabled: true,
},
packageRules: [
// Pin GitHub Actions to immutable SHAs.
{
matchDepTypes: ["action"],
pinDigests: true,
},
// Annotate GitHub Actions SHAs with a SemVer version.
{
extends: ["helpers:pinGitHubActionDigests"],
extractVersion: "^(?<version>v?\\d+\\.\\d+\\.\\d+)$",
versioning: "regex:^v?(?<major>\\d+)(\\.(?<minor>\\d+)\\.(?<patch>\\d+))?$",
},
{
// Group upload/download artifact updates, the versions are dependent
groupName: "Artifact GitHub Actions dependencies",
@@ -106,7 +95,14 @@
matchManagers: ["cargo"],
matchPackageNames: ["strum"],
description: "Weekly update of strum dependencies",
}
},
{
groupName: "ESLint",
matchManagers: ["npm"],
matchPackageNames: ["eslint"],
allowedVersions: "<9",
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
},
],
vulnerabilityAlerts: {
commitMessageSuffix: "",
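For context on the digest-pinning rules shown above, here is a minimal sketch of the step shape they produce. The step is hypothetical (the SHA reuses the actions/checkout pin that appears elsewhere in this diff): Renovate rewrites each workflow `uses:` reference to an immutable commit SHA and keeps the matching SemVer tag in a trailing comment, which the `extractVersion`/`versioning` regexes parse when proposing updates.

steps:
  # Pinned to an immutable commit; the trailing comment carries the SemVer
  # tag that the regex versioning above extracts on update.
  - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2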


@@ -23,12 +23,10 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
env:
PACKAGE_NAME: ruff
MODULE_NAME: ruff
PYTHON_VERSION: "3.13"
PYTHON_VERSION: "3.11"
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
@@ -39,17 +37,17 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build sdist"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
command: sdist
args: --out dist
@@ -59,7 +57,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload sdist"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-sdist
path: dist
@@ -68,23 +66,23 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --locked --out dist
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-macos-x86_64
path: dist
@@ -99,7 +97,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-macos-x86_64
path: |
@@ -110,18 +108,18 @@ jobs:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: arm64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - aarch64"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
target: aarch64
args: --release --locked --out dist
@@ -131,7 +129,7 @@ jobs:
ruff --help
python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-aarch64-apple-darwin
path: dist
@@ -146,7 +144,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-aarch64-apple-darwin
path: |
@@ -166,18 +164,18 @@ jobs:
- target: aarch64-pc-windows-msvc
arch: x64
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
@@ -192,7 +190,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -203,7 +201,7 @@ jobs:
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |
@@ -219,18 +217,18 @@ jobs:
- x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
@@ -242,7 +240,7 @@ jobs:
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.target }}
path: dist
@@ -260,7 +258,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.target }}
path: |
@@ -294,24 +292,24 @@ jobs:
arch: arm
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist
- uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
if: ${{ matrix.platform.arch != 'ppc64' && matrix.platform.arch != 'ppc64le'}}
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
with:
arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
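GitHub Actions expressions have no ternary operator; the `cond && a || b` form above emulates one, which is safe here because 'armv6' is truthy. A hypothetical step illustrating the idiom:

- name: Show the resolved test architecture
  env:
    # Evaluates to 'armv6' when the matrix arch is 'arm', otherwise to the
    # matrix arch itself.
    RESOLVED_ARCH: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }}
  run: echo "Testing wheels on $RESOLVED_ARCH"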
@@ -325,7 +323,7 @@ jobs:
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -343,7 +341,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |
@@ -359,25 +357,25 @@ jobs:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
- name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
@@ -387,7 +385,7 @@ jobs:
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.target }}
path: dist
@@ -405,7 +403,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.target }}
path: |
@@ -425,23 +423,23 @@ jobs:
arch: armv7
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
- uses: uraimo/run-on-arch-action@v2
name: Test wheel
with:
arch: ${{ matrix.platform.arch }}
@@ -454,7 +452,7 @@ jobs:
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.platform.target }}
path: dist
@@ -472,7 +470,7 @@ jobs:
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: artifacts-${{ matrix.platform.target }}
path: |


@@ -33,14 +33,14 @@ jobs:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -51,7 +51,7 @@ jobs:
env:
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
run: |
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${TAG}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${TAG}" >&2
@@ -63,7 +63,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
@@ -79,7 +79,7 @@ jobs:
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Build and push by digest
id: build
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6
uses: docker/build-push-action@v6
with:
context: .
platforms: ${{ matrix.platform }}
@@ -96,7 +96,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digests
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_TUPLE }}
path: /tmp/digests/*
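Downstream, the per-platform digests uploaded above are merged into a single multi-arch manifest list. A minimal sketch of that merge step, following the multi-platform pattern from the Docker documentation cited above; the image name is illustrative, and the real workflow derives its tags from the metadata action:

- name: Merge digests into a multi-arch manifest list
  working-directory: /tmp/digests
  run: |
    # Each file in /tmp/digests is named after one platform image digest.
    docker buildx imagetools create \
      --tag ghcr.io/OWNER/IMAGE:latest \
      $(printf 'ghcr.io/OWNER/IMAGE@sha256:%s ' *)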
@@ -113,17 +113,17 @@ jobs:
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
- uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
@@ -131,7 +131,7 @@ jobs:
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -163,13 +163,13 @@ jobs:
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
image-mapping:
- alpine:3.21,alpine3.21,alpine
- alpine:3.20,alpine3.20,alpine
- debian:bookworm-slim,bookworm-slim,debian-slim
- buildpack-deps:bookworm,bookworm,debian
steps:
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -219,7 +219,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
uses: docker/metadata-action@v5
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
@@ -231,7 +231,7 @@ jobs:
${{ env.TAG_PATTERNS }}
- name: Build and push
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64
@@ -256,17 +256,17 @@ jobs:
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
- uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
uses: docker/metadata-action@v5
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
@@ -276,7 +276,7 @@ jobs:
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}


@@ -1,7 +1,5 @@
name: CI
permissions: {}
on:
push:
branches: [main]
@@ -18,7 +16,7 @@ env:
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.13"
PYTHON_VERSION: "3.12"
jobs:
determine_changes:
@@ -26,177 +24,82 @@ jobs:
runs-on: ubuntu-latest
outputs:
# Flag that is raised when any code that affects parser is changed
parser: ${{ steps.check_parser.outputs.changed }}
parser: ${{ steps.changed.outputs.parser_any_changed }}
# Flag that is raised when any code that affects linter is changed
linter: ${{ steps.check_linter.outputs.changed }}
linter: ${{ steps.changed.outputs.linter_any_changed }}
# Flag that is raised when any code that affects formatter is changed
formatter: ${{ steps.check_formatter.outputs.changed }}
formatter: ${{ steps.changed.outputs.formatter_any_changed }}
# Flag that is raised when any code is changed
# This is a superset of the linter and formatter
code: ${{ steps.check_code.outputs.changed }}
code: ${{ steps.changed.outputs.code_any_changed }}
# Flag that is raised when any code that affects the fuzzer is changed
fuzz: ${{ steps.check_fuzzer.outputs.changed }}
# Flag that is set to "true" when code related to red-knot changes.
red_knot: ${{ steps.check_red_knot.outputs.changed }}
# Flag that is set to "true" when code related to the playground changes.
playground: ${{ steps.check_playground.outputs.changed }}
fuzz: ${{ steps.changed.outputs.fuzz_any_changed }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Determine merge base
id: merge_base
env:
BASE_REF: ${{ github.event.pull_request.base.ref || 'main' }}
run: |
sha=$(git merge-base HEAD "origin/${BASE_REF}")
echo "sha=${sha}" >> "$GITHUB_OUTPUT"
- uses: tj-actions/changed-files@v45
id: changed
with:
files_yaml: |
parser:
- Cargo.toml
- Cargo.lock
- crates/ruff_python_trivia/**
- crates/ruff_source_file/**
- crates/ruff_text_size/**
- crates/ruff_python_ast/**
- crates/ruff_python_parser/**
- python/py-fuzzer/**
- .github/workflows/ci.yaml
- name: Check if the parser code changed
id: check_parser
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
':Cargo.toml' \
':Cargo.lock' \
':crates/ruff_python_trivia/**' \
':crates/ruff_source_file/**' \
':crates/ruff_text_size/**' \
':crates/ruff_python_ast/**' \
':crates/ruff_python_parser/**' \
':python/py-fuzzer/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
linter:
- Cargo.toml
- Cargo.lock
- crates/**
- "!crates/ruff_python_formatter/**"
- "!crates/ruff_formatter/**"
- "!crates/ruff_dev/**"
- scripts/*
- python/**
- .github/workflows/ci.yaml
- name: Check if the linter code changed
id: check_linter
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
':Cargo.lock' \
':crates/**' \
':!crates/red_knot*/**' \
':!crates/ruff_python_formatter/**' \
':!crates/ruff_formatter/**' \
':!crates/ruff_dev/**' \
':!crates/ruff_db/**' \
':scripts/*' \
':python/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
formatter:
- Cargo.toml
- Cargo.lock
- crates/ruff_python_formatter/**
- crates/ruff_formatter/**
- crates/ruff_python_trivia/**
- crates/ruff_python_ast/**
- crates/ruff_source_file/**
- crates/ruff_python_index/**
- crates/ruff_text_size/**
- crates/ruff_python_parser/**
- crates/ruff_dev/**
- scripts/*
- python/**
- .github/workflows/ci.yaml
- name: Check if the formatter code changed
id: check_formatter
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
':Cargo.lock' \
':crates/ruff_python_formatter/**' \
':crates/ruff_formatter/**' \
':crates/ruff_python_trivia/**' \
':crates/ruff_python_ast/**' \
':crates/ruff_source_file/**' \
':crates/ruff_python_index/**' \
':crates/ruff_text_size/**' \
':crates/ruff_python_parser/**' \
':scripts/*' \
':python/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
fuzz:
- fuzz/Cargo.toml
- fuzz/Cargo.lock
- fuzz/fuzz_targets/**
- name: Check if the fuzzer code changed
id: check_fuzzer
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
':Cargo.lock' \
':fuzz/fuzz_targets/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
- name: Check if there was any code related change
id: check_code
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
':!**/*.md' \
':crates/red_knot_python_semantic/resources/mdtest/**/*.md' \
':!docs/**' \
':!assets/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
- name: Check if there was any playground related change
id: check_playground
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
':playground/**' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
- name: Check if the red-knot code changed
id: check_red_knot
env:
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
run: |
if git diff --quiet "${MERGE_BASE}...HEAD" -- \
':Cargo.toml' \
':Cargo.lock' \
':crates/red_knot*/**' \
':crates/ruff_db/**' \
':crates/ruff_annotate_snippets/**' \
':crates/ruff_python_ast/**' \
':crates/ruff_python_parser/**' \
':crates/ruff_python_trivia/**' \
':crates/ruff_source_file/**' \
':crates/ruff_text_size/**' \
':.github/workflows/ci.yaml' \
; then
echo "changed=false" >> "$GITHUB_OUTPUT"
else
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
code:
- "**/*"
- "!**/*.md"
- "crates/red_knot_python_semantic/resources/mdtest/**/*.md"
- "!docs/**"
- "!assets/**"
cargo-fmt:
name: "cargo fmt"
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
@@ -210,14 +113,14 @@ jobs:
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: |
rustup component add clippy
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
- name: "Clippy (wasm)"
@@ -227,33 +130,25 @@ jobs:
name: "cargo test (linux)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- name: Red-knot mdtests (GitHub annotations)
if: ${{ needs.determine_changes.outputs.red_knot == 'true' }}
env:
NO_COLOR: 1
MDTEST_GITHUB_ANNOTATIONS_FORMAT: 1
# Ignore errors if this step fails; we want to continue to later steps in the workflow anyway.
# This step is just to get nice GitHub annotations on the PR diff in the files-changed tab.
run: cargo test -p red_knot_python_semantic --test mdtest || true
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -272,7 +167,7 @@ jobs:
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- uses: actions/upload-artifact@v4
with:
name: ruff
path: target/debug/ruff
@@ -281,25 +176,25 @@ jobs:
name: "cargo test (linux, release)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -310,23 +205,22 @@ jobs:
name: "cargo test (windows)"
runs-on: github-windows-2025-x86_64-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
run: |
@@ -337,23 +231,23 @@ jobs:
name: "cargo test (wasm)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- uses: Swatinem/rust-cache@v2
- name: "Test ruff_wasm"
run: |
cd crates/ruff_wasm
@@ -369,47 +263,47 @@ jobs:
if: ${{ github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: "Build"
run: cargo build --release --locked
cargo-build-msrv:
name: "cargo build (msrv)"
runs-on: depot-ubuntu-latest-8
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: SebRollen/toml-action@b1b3628f55fc3a28208d4203ada8b737e9687876 # v1.2.0
- uses: SebRollen/toml-action@v1.2.0
id: msrv
with:
file: "Cargo.toml"
field: "workspace.package.rust-version"
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
env:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -424,16 +318,16 @@ jobs:
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: "fuzz -> target"
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@63aaa5c1932cebabc34eceda9d92a70215dcead6 # v1.12.3
uses: cargo-bins/cargo-binstall@main
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"
@@ -447,16 +341,16 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
timeout-minutes: 20
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
- uses: astral-sh/setup-uv@v5
- uses: actions/download-artifact@v4
name: Download Ruff binary to test
id: download-cached-binary
with:
@@ -483,15 +377,15 @@ jobs:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 5
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- uses: Swatinem/rust-cache@v2
# Run all code generation scripts, and verify that the current output is
# already checked into git.
- run: python crates/ruff_python_ast/generate.py
@@ -515,24 +409,24 @@ jobs:
- determine_changes
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
- uses: actions/download-artifact@v4
name: Download comparison Ruff binary
id: ruff-target
with:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
- uses: dawidd6/action-download-artifact@v7
name: Download baseline Ruff binary
with:
name: ruff
@@ -620,13 +514,13 @@ jobs:
run: |
echo ${{ github.event.number }} > pr-number
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- uses: actions/upload-artifact@v4
name: Upload PR Number
with:
name: pr-number
path: pr-number
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- uses: actions/upload-artifact@v4
name: Upload Results
with:
name: ecosystem-result
@@ -638,10 +532,10 @@ jobs:
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@63aaa5c1932cebabc34eceda9d92a70215dcead6 # v1.12.3
- uses: cargo-bins/cargo-binstall@main
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -649,20 +543,19 @@ jobs:
name: "python package"
runs-on: ubuntu-latest
timeout-minutes: 20
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@v2
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
uses: PyO3/maturin-action@v1
with:
args: --out dist
- name: "Test wheel"
@@ -675,15 +568,22 @@ jobs:
pre-commit:
name: "pre-commit"
runs-on: depot-ubuntu-22.04-16
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install pre-commit"
run: pip install pre-commit
- name: "Cache pre-commit"
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
uses: actions/cache@v4
with:
path: ~/.cache/pre-commit
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
@@ -692,7 +592,7 @@ jobs:
echo '```console' > "$GITHUB_STEP_SUMMARY"
# Enable color output for pre-commit and remove it for the summary
# Use --hook-stage=manual to enable slower pre-commit hooks that are skipped by default
SKIP=cargo-fmt,clippy,dev-generate-all uvx --python="${PYTHON_VERSION}" pre-commit run --all-files --show-diff-on-failure --color=always --hook-stage=manual | \
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always --hook-stage=manual | \
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> "$GITHUB_STEP_SUMMARY") >&1
exit_code="${PIPESTATUS[0]}"
echo '```' >> "$GITHUB_STEP_SUMMARY"
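Two bash idioms in the run block above are easy to miss: `tee >(...)` mirrors the colorized pre-commit output into a process substitution that strips ANSI escapes for the step summary, and `PIPESTATUS[0]` recovers pre-commit's own exit status, which the pipe into `tee` would otherwise mask. A stripped-down sketch of the same pattern, with the linter command illustrative:

- name: "Run a linter with a plain-text step summary"
  shell: bash
  run: |
    some_linter --color=always | \
      tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> "$GITHUB_STEP_SUMMARY")
    # $? here would be tee's status; PIPESTATUS[0] is the linter's.
    exit "${PIPESTATUS[0]}"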
@@ -705,22 +605,22 @@ jobs:
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
uses: astral-sh/setup-uv@v5
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@@ -744,15 +644,16 @@ jobs:
name: "formatter instabilities and black similarity"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
timeout-minutes: 10
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Cache rust"
uses: Swatinem/rust-cache@v2
- name: "Run checks"
run: scripts/formatter_ecosystem_checks.sh
- name: "Github step summary"
@@ -767,24 +668,23 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@dd310ad5a97d8e7b41793f8ef055398d51ad4de6 # v2
- uses: extractions/setup-just@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
name: "Download ruff-lsp source"
with:
persist-credentials: false
repository: "astral-sh/ruff-lsp"
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
# installation fails on 3.13 and newer
python-version: "3.12"
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
- uses: actions/download-artifact@v4
name: Download development ruff binary
id: ruff-target
with:
@@ -807,65 +707,32 @@ jobs:
just test
check-playground:
name: "check playground"
runs-on: ubuntu-latest
timeout-minutes: 5
needs:
- determine_changes
if: ${{ (needs.determine_changes.outputs.playground == 'true') }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
with:
node-version: 22
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Build playgrounds"
run: npm run dev:wasm
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Check formatting"
run: npm run fmt:check
working-directory: playground
benchmarks:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- name: "Checkout Branch"
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: "Install Rust toolchain"
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
uses: taiki-e/install-action@v2
with:
tool: cargo-codspeed
- uses: Swatinem/rust-cache@v2
- name: "Build benchmarks"
run: cargo codspeed build --features codspeed -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
uses: CodSpeedHQ/action@v3
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}


@@ -31,15 +31,15 @@ jobs:
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: astral-sh/setup-uv@v5
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: Build ruff
# A debug build means the script runs slower once it gets started,
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
@@ -65,7 +65,7 @@ jobs:
permissions:
issues: write
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |


@@ -30,14 +30,14 @@ jobs:
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: Build Red Knot
# A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
# mode (1.5 min vs 14 min), so the overall time is shorter with a release build.
@@ -47,7 +47,6 @@ jobs:
run: |
export QUICKCHECK_TESTS=100000
for _ in {1..5}; do
cargo test --locked --release --package red_knot_python_semantic -- --ignored list::property_tests
cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
done
@@ -59,7 +58,7 @@ jobs:
permissions:
issues: write
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |


@@ -1,96 +0,0 @@
name: Run mypy_primer
permissions: {}
on:
pull_request:
paths:
- "crates/red_knot*/**"
- "crates/ruff_db"
- "crates/ruff_python_ast"
- "crates/ruff_python_parser"
- ".github/workflows/mypy_primer.yaml"
- ".github/workflows/mypy_primer_comment.yaml"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
mypy_primer:
name: Run mypy_primer
runs-on: depot-ubuntu-22.04-16
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
workspaces: "ruff"
- name: Install Rust toolchain
run: rustup show
- name: Install mypy_primer
run: |
uv tool install "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support-v5"
- name: Run mypy_primer
shell: bash
run: |
cd ruff
PRIMER_SELECTOR="$(paste -s -d'|' crates/red_knot_python_semantic/resources/primer/good.txt)"
echo "new commit"
git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
git checkout -b base_commit "$MERGE_BASE"
echo "base commit"
git rev-list --format=%s --max-count=1 base_commit
cd ..
echo "Project selector: $PRIMER_SELECTOR"
# Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
uvx mypy_primer \
--repo ruff \
--type-checker knot \
--old base_commit \
--new "$GITHUB_SHA" \
--project-selector "/($PRIMER_SELECTOR)\$" \
--output concise \
--debug > mypy_primer.diff || [ $? -eq 1 ]
# Output diff with ANSI color codes
cat mypy_primer.diff
# Remove ANSI color codes before uploading
sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff
echo ${{ github.event.number }} > pr-number
- name: Upload diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: mypy_primer_diff
path: mypy_primer.diff
- name: Upload pr-number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr-number
path: pr-number


@@ -1,97 +0,0 @@
name: PR comment (mypy_primer)
on: # zizmor: ignore[dangerous-triggers]
workflow_run:
workflows: [Run mypy_primer]
types: [completed]
workflow_dispatch:
inputs:
workflow_run_id:
description: The mypy_primer workflow that triggers the workflow run
required: true
jobs:
comment:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: Download PR number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true
- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
name: "Download mypy_primer results"
id: download-mypy_primer_diff
if: steps.pr-number.outputs.pr-number
with:
name: mypy_primer_diff
workflow: mypy_primer.yaml
pr: ${{ steps.pr-number.outputs.pr-number }}
path: pr/mypy_primer_diff
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true
- name: Generate comment content
id: generate-comment
if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
run: |
# Guard against malicious mypy_primer results that symlink to a secret
# file on this runner
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
then
echo "Error: mypy_primer.diff cannot be a symlink"
exit 1
fi
# Note this identifier is used to find the comment to update on
# subsequent runs
echo '<!-- generated-comment mypy_primer -->' >> comment.txt
echo '## `mypy_primer` results' >> comment.txt
if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
echo '<details>' >> comment.txt
echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
echo '' >> comment.txt
echo '```diff' >> comment.txt
cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
echo '```' >> comment.txt
echo '</details>' >> comment.txt
else
echo 'No ecosystem changes detected ✅' >> comment.txt
fi
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
issue-number: ${{ steps.pr-number.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "<!-- generated-comment mypy_primer -->"
- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}
body-path: comment.txt
edit-mode: replace


@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: "Update pre-commit mirror"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
uses: actions/github-script@v7
with:
github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
script: |


@@ -16,7 +16,7 @@ jobs:
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
- uses: dawidd6/action-download-artifact@v7
name: Download pull request number
with:
name: pr-number
@@ -32,7 +32,7 @@ jobs:
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi
- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
- uses: dawidd6/action-download-artifact@v7
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
@@ -70,7 +70,7 @@ jobs:
echo 'EOF' >> "$GITHUB_OUTPUT"
- name: Find existing comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
uses: peter-evans/find-comment@v3
if: steps.generate-comment.outcome == 'success'
id: find-comment
with:
@@ -80,7 +80,7 @@ jobs:
- name: Create or update comment
if: steps.find-comment.outcome == 'success'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
uses: peter-evans/create-or-update-comment@v4
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.pr-number.outputs.pr-number }}

View File

@@ -23,12 +23,12 @@ jobs:
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
persist-credentials: true
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: 3.12
@@ -61,14 +61,14 @@ jobs:
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}

View File

@@ -1,58 +0,0 @@
# Publish the Red Knot playground.
name: "[Knot Playground] Release"
permissions: {}
on:
push:
branches: [main]
paths:
- "crates/red_knot*/**"
- "crates/ruff_db/**"
- "crates/ruff_python_ast/**"
- "crates/ruff_python_parser/**"
- "playground/**"
- ".github/workflows/publish-knot-playground.yml"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: true
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
publish:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
with:
node-version: 22
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Build Knot playground"
run: npm run build --workspace knot-playground
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy playground/knot/dist --project-name=knot-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}

View File

@@ -24,31 +24,34 @@ jobs:
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
- uses: actions/setup-node@v4
with:
node-version: 22
node-version: 20
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack"
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
- name: "Install Node dependencies"
run: npm ci
working-directory: playground
- name: "Run TypeScript checks"
run: npm run check
working-directory: playground
- name: "Build Ruff playground"
run: npm run build --workspace ruff-playground
- name: "Build JavaScript bundle"
run: npm run build
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3.14.1
uses: cloudflare/wrangler-action@v3.13.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy playground/ruff/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
command: pages deploy playground/dist --project-name=ruff-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}

View File

@@ -22,8 +22,8 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
uses: astral-sh/setup-uv@v5
- uses: actions/download-artifact@v4
with:
pattern: wheels-*
path: wheels

View File

@@ -29,15 +29,13 @@ jobs:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa # v0.4.0
with:
version: v0.13.1
- uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
- name: "Rename generated package"
@@ -45,7 +43,7 @@ jobs:
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
- uses: actions/setup-node@v4
with:
node-version: 20
registry-url: "https://registry.npmjs.org"

View File

@@ -1,7 +1,6 @@
# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# Copyright 2025 Astral Software Inc.
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
@@ -51,7 +50,7 @@ on:
jobs:
# Run 'dist plan' (or host) to determine what tasks we need to do
plan:
runs-on: "depot-ubuntu-latest-4"
runs-on: "ubuntu-20.04"
outputs:
val: ${{ steps.plan.outputs.manifest }}
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
@@ -60,17 +59,16 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install dist
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.4-prerelease.1/cargo-dist-installer.sh | sh"
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
uses: actions/upload-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/dist
@@ -86,7 +84,7 @@ jobs:
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
uses: actions/upload-artifact@v4
with:
name: artifacts-plan-dist-manifest
path: plan-dist-manifest.json
@@ -118,24 +116,23 @@ jobs:
- plan
- custom-build-binaries
- custom-build-docker
runs-on: "depot-ubuntu-latest-4"
runs-on: "ubuntu-20.04"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
@@ -153,7 +150,7 @@ jobs:
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
uses: actions/upload-artifact@v4
with:
name: artifacts-build-global
path: |
@@ -170,23 +167,22 @@ jobs:
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
runs-on: "depot-ubuntu-latest-4"
runs-on: "ubuntu-20.04"
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
@@ -200,7 +196,7 @@ jobs:
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
uses: actions/upload-artifact@v4
with:
# Overwrite the previous copy
name: artifacts-dist-manifest
@@ -246,17 +242,16 @@ jobs:
# still allowing individual publish jobs to skip themselves (for prereleases).
# "host" however must run to completion, no skipping allowed!
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
runs-on: "depot-ubuntu-latest-4"
runs-on: "ubuntu-20.04"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
# Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts"
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: artifacts

View File

@@ -21,12 +21,12 @@ jobs:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
name: Checkout Ruff
with:
path: ruff
persist-credentials: true
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
name: Checkout typeshed
with:
repository: python/typeshed
@@ -70,7 +70,7 @@ jobs:
permissions:
issues: write
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

.github/zizmor.yml
View File

@@ -10,10 +10,3 @@ rules:
ignore:
- build-docker.yml
- publish-playground.yml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...
ignore:
- build-docker.yml
- publish-playground.yml
- publish-docs.yml

.gitignore
View File

@@ -29,10 +29,6 @@ tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg
# insta
*.rs.pending-snap
###
# Rust.gitignore
###

View File

@@ -1 +0,0 @@
!/.github/

View File

@@ -5,7 +5,6 @@ exclude: |
.github/workflows/release.yml|
crates/red_knot_vendored/vendor/.*|
crates/red_knot_project/resources/.*|
crates/ruff_benchmark/resources/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff_notebook/resources/.*|
@@ -18,18 +17,13 @@ exclude: |
)$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-merge-conflict
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.24.1
rev: v0.23
hooks:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.22
rev: 0.7.21
hooks:
- id: mdformat
additional_dependencies:
@@ -42,7 +36,7 @@ repos:
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.44.0
rev: v0.43.0
hooks:
- id: markdownlint-fix
exclude: |
@@ -62,10 +56,10 @@ repos:
.*?invalid(_.+)*_syntax\.md
)$
additional_dependencies:
- black==25.1.0
- black==24.10.0
- repo: https://github.com/crate-ci/typos
rev: v1.31.1
rev: v1.29.4
hooks:
- id: typos
@@ -79,7 +73,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.6
rev: v0.9.2
hooks:
- id: ruff-format
- id: ruff
@@ -89,7 +83,7 @@ repos:
# Prettier
- repo: https://github.com/rbubley/mirrors-prettier
rev: v3.5.3
rev: v3.4.2
hooks:
- id: prettier
types: [yaml]
@@ -97,12 +91,12 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.6.0
rev: v1.1.1
hooks:
- id: zizmor
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.0
rev: 0.31.0
hooks:
- id: check-github-workflows

View File

@@ -1,55 +1,5 @@
# Breaking Changes
## 0.11.0
This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
In previous versions of Ruff, you could specify your Python version with:
- The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a pyproject.toml file.
- The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.
These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.
In v0.10, config discovery has been updated to address this issue:
- If Ruff finds a `ruff.toml` file without a `target-version`, it will check
for a `pyproject.toml` file in the same directory and respect its
`requires-python` version, even if it does not contain a `[tool.ruff]`
section.
- If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
- If there is no config file (`ruff.toml` or `pyproject.toml` with a
`[tool.ruff]` section) in the directory of the file being checked, Ruff will
search for the closest `pyproject.toml` in the parent directories and use its
`requires-python` setting.
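
To illustrate why the inferred version matters, consider a hypothetical file checked without an explicit `target-version` (the names are illustrative):

```python
from typing import Optional


# Under the default version (3.9 as of this writing) Ruff leaves this
# annotation alone; once a nearby pyproject.toml's
# `requires-python = ">=3.10"` is discovered as described above, rules
# such as UP007 can offer `str | None` instead.
def lookup(key: str) -> Optional[str]:
    return None
```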
## 0.10.0
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
You can find a description of this change in the 0.11.0 section.
- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))
Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead. A short sketch follows this list.
- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))
The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [_Error suppression_](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.
- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))
This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.
- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))
Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.
- **\[`unsafe-markup-use`\]: `RUF035` has been recoded to `S704`** ([#15957](https://github.com/astral-sh/ruff/pull/15957))
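
As a small sketch of the updated `TYPE_CHECKING` behavior described above (the imported name is illustrative):

```python
# Any local variable named TYPE_CHECKING now marks the block; the legacy
# `if 0:` / `if False:` forms are no longer recognized.
TYPE_CHECKING = False

if TYPE_CHECKING:
    from collections.abc import Sequence  # evaluated only by type checkers


def first(items: "Sequence[int]") -> int:
    return items[0]
```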
## 0.9.0
Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.
@@ -259,8 +209,8 @@ This change only affects those using Ruff under its default rule set. Users that
### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))
Previously, Ruff supported non-standards-compliant emoji identifiers such as `📦 = 1`.
We decided to remove this non-standard language extension. Ruff now reports syntax errors for invalid emoji identifiers in your code, the same as CPython.
Previously, Ruff supported the non-standard compliant emoji identifiers e.g. `📦 = 1`.
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.
### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))

View File

@@ -1,589 +1,5 @@
# Changelog
## 0.11.6
### Preview features
- Avoid adding whitespace to the end of a docstring after an escaped quote ([#17216](https://github.com/astral-sh/ruff/pull/17216))
- \[`airflow`\] Extract `AIR311` from `AIR301` rules (`AIR301`, `AIR311`) ([#17310](https://github.com/astral-sh/ruff/pull/17310), [#17422](https://github.com/astral-sh/ruff/pull/17422))
### Bug fixes
- Raise syntax error when `\` is at end of file ([#17409](https://github.com/astral-sh/ruff/pull/17409))
## 0.11.5
### Preview features
- \[`airflow`\] Add missing `AIR302` attribute check ([#17115](https://github.com/astral-sh/ruff/pull/17115))
- \[`airflow`\] Expand module path check to individual symbols (`AIR302`) ([#17278](https://github.com/astral-sh/ruff/pull/17278))
- \[`airflow`\] Extract `AIR312` from `AIR302` rules (`AIR302`, `AIR312`) ([#17152](https://github.com/astral-sh/ruff/pull/17152))
- \[`airflow`\] Update oudated `AIR301`, `AIR302` rules ([#17123](https://github.com/astral-sh/ruff/pull/17123))
- [syntax-errors] Async comprehension in sync comprehension ([#17177](https://github.com/astral-sh/ruff/pull/17177)) (sketch after this list)
- [syntax-errors] Check annotations in annotated assignments ([#17283](https://github.com/astral-sh/ruff/pull/17283))
- [syntax-errors] Extend annotation checks to `await` ([#17282](https://github.com/astral-sh/ruff/pull/17282))
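
A sketch of the async-comprehension check mentioned above (names are illustrative; the construct is only valid on Python 3.11+):

```python
# Flagged when the target version is below 3.11: an async comprehension
# nested inside a synchronous comprehension, even within an async function.
async def collect(rows):
    return [[x async for x in row] for row in rows]
```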
### Bug fixes
- \[`flake8-pie`\] Avoid false positive for multiple assignment with `auto()` (`PIE796`) ([#17274](https://github.com/astral-sh/ruff/pull/17274))
### Rule changes
- \[`ruff`\] Fix `RUF100` to detect unused file-level `noqa` directives with specific codes (#17042) ([#17061](https://github.com/astral-sh/ruff/pull/17061))
- \[`flake8-pytest-style`\] Avoid false positive for legacy form of `pytest.raises` (`PT011`) ([#17231](https://github.com/astral-sh/ruff/pull/17231))
### Documentation
- Fix formatting of "See Style Guide" link ([#17272](https://github.com/astral-sh/ruff/pull/17272))
## 0.11.4
### Preview features
- \[`ruff`\] Implement `invalid-rule-code` as `RUF102` ([#17138](https://github.com/astral-sh/ruff/pull/17138))
- [syntax-errors] Detect duplicate keys in `match` mapping patterns ([#17129](https://github.com/astral-sh/ruff/pull/17129)) (sketch after this list)
- [syntax-errors] Detect duplicate attributes in `match` class patterns ([#17186](https://github.com/astral-sh/ruff/pull/17186))
- [syntax-errors] Detect invalid syntax in annotations ([#17101](https://github.com/astral-sh/ruff/pull/17101))
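
A sketch of the duplicate-key detection in `match` mapping patterns mentioned above (names are illustrative; the first `case` is deliberately invalid):

```python
config = {"host": "localhost"}

match config:
    # Deliberately invalid: the repeated "host" key is now reported as a
    # syntax error instead of being silently accepted.
    case {"host": first, "host": second}:
        ...
    case _:
        ...
```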
### Bug fixes
- [syntax-errors] Fix multiple assignment error for class fields in `match` patterns ([#17184](https://github.com/astral-sh/ruff/pull/17184))
- Don't skip visiting non-tuple slice in `typing.Annotated` subscripts ([#17201](https://github.com/astral-sh/ruff/pull/17201))
## 0.11.3
### Preview features
- \[`airflow`\] Add more autofixes for `AIR302` ([#16876](https://github.com/astral-sh/ruff/pull/16876), [#16977](https://github.com/astral-sh/ruff/pull/16977), [#16976](https://github.com/astral-sh/ruff/pull/16976), [#16965](https://github.com/astral-sh/ruff/pull/16965))
- \[`airflow`\] Move `AIR301` to `AIR002` ([#16978](https://github.com/astral-sh/ruff/pull/16978))
- \[`airflow`\] Move `AIR302` to `AIR301` and `AIR303` to `AIR302` ([#17151](https://github.com/astral-sh/ruff/pull/17151))
- \[`flake8-bandit`\] Mark `str` and `list[str]` literals as trusted input (`S603`) ([#17136](https://github.com/astral-sh/ruff/pull/17136))
- \[`ruff`\] Support slices in `RUF005` ([#17078](https://github.com/astral-sh/ruff/pull/17078))
- [syntax-errors] Start detecting compile-time syntax errors ([#16106](https://github.com/astral-sh/ruff/pull/16106))
- [syntax-errors] Duplicate type parameter names ([#16858](https://github.com/astral-sh/ruff/pull/16858))
- [syntax-errors] Irrefutable `case` pattern before final case ([#16905](https://github.com/astral-sh/ruff/pull/16905))
- [syntax-errors] Multiple assignments in `case` pattern ([#16957](https://github.com/astral-sh/ruff/pull/16957))
- [syntax-errors] Single starred assignment target ([#17024](https://github.com/astral-sh/ruff/pull/17024)) (sketch after this list)
- [syntax-errors] Starred expressions in `return`, `yield`, and `for` ([#17134](https://github.com/astral-sh/ruff/pull/17134))
- [syntax-errors] Store to or delete `__debug__` ([#16984](https://github.com/astral-sh/ruff/pull/16984))
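
A sketch of the single starred assignment target error noted above:

```python
# Deliberately invalid: a lone starred target is rejected.
*rest = [1, 2, 3]

# Valid: the starred name has a companion target.
first, *rest = [1, 2, 3]
```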
### Bug fixes
- Error instead of `panic!` when running Ruff from a deleted directory (#16903) ([#17054](https://github.com/astral-sh/ruff/pull/17054))
- [syntax-errors] Fix false positive for parenthesized tuple index ([#16948](https://github.com/astral-sh/ruff/pull/16948))
### CLI
- Check `pyproject.toml` correctly when it is passed via stdin ([#16971](https://github.com/astral-sh/ruff/pull/16971))
### Configuration
- \[`flake8-import-conventions`\] Add import `numpy.typing as npt` to default `flake8-import-conventions.aliases` ([#17133](https://github.com/astral-sh/ruff/pull/17133))
### Documentation
- \[`refurb`\] Document why `UserDict`, `UserList`, and `UserString` are preferred over `dict`, `list`, and `str` (`FURB189`) ([#16927](https://github.com/astral-sh/ruff/pull/16927))
## 0.11.2
### Preview features
- [syntax-errors] Fix false-positive syntax errors emitted for annotations on variadic parameters before Python 3.11 ([#16878](https://github.com/astral-sh/ruff/pull/16878))
## 0.11.1
### Preview features
- \[`airflow`\] Add `chain`, `chain_linear` and `cross_downstream` for `AIR302` ([#16647](https://github.com/astral-sh/ruff/pull/16647))
- [syntax-errors] Improve error message and range for pre-PEP-614 decorator syntax errors ([#16581](https://github.com/astral-sh/ruff/pull/16581))
- [syntax-errors] PEP 701 f-strings before Python 3.12 ([#16543](https://github.com/astral-sh/ruff/pull/16543))
- [syntax-errors] Parenthesized context managers before Python 3.9 ([#16523](https://github.com/astral-sh/ruff/pull/16523))
- [syntax-errors] Star annotations before Python 3.11 ([#16545](https://github.com/astral-sh/ruff/pull/16545))
- [syntax-errors] Star expression in index before Python 3.11 ([#16544](https://github.com/astral-sh/ruff/pull/16544))
- [syntax-errors] Unparenthesized assignment expressions in sets and indexes ([#16404](https://github.com/astral-sh/ruff/pull/16404))
### Bug fixes
- Server: Allow `FixAll` action in presence of version-specific syntax errors ([#16848](https://github.com/astral-sh/ruff/pull/16848))
- \[`flake8-bandit`\] Allow raw strings in `suspicious-mark-safe-usage` (`S308`) #16702 ([#16770](https://github.com/astral-sh/ruff/pull/16770))
- \[`refurb`\] Avoid panicking `unwrap` in `verbose-decimal-constructor` (`FURB157`) ([#16777](https://github.com/astral-sh/ruff/pull/16777))
- \[`refurb`\] Fix starred expressions fix (`FURB161`) ([#16550](https://github.com/astral-sh/ruff/pull/16550))
- Fix `--statistics` reporting for unsafe fixes ([#16756](https://github.com/astral-sh/ruff/pull/16756))
### Rule changes
- \[`flake8-executables`\] Allow `uv run` in shebang line for `shebang-missing-python` (`EXE003`) ([#16849](https://github.com/astral-sh/ruff/pull/16849),[#16855](https://github.com/astral-sh/ruff/pull/16855))
### CLI
- Add `--exit-non-zero-on-format` ([#16009](https://github.com/astral-sh/ruff/pull/16009))
### Documentation
- Update Ruff tutorial to avoid non-existent fix in `__init__.py` ([#16818](https://github.com/astral-sh/ruff/pull/16818))
- \[`flake8-gettext`\] Swap `format-` and `printf-in-get-text-func-call` examples (`INT002`, `INT003`) ([#16769](https://github.com/astral-sh/ruff/pull/16769))
## 0.11.0
This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.
### Breaking changes
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
In previous versions of Ruff, you could specify your Python version with:
- The `target-version` option in a `ruff.toml` file or the `[tool.ruff]` section of a pyproject.toml file.
- The `project.requires-python` field in a `pyproject.toml` file with a `[tool.ruff]` section.
These options worked well in most cases, and are still recommended for fine control of the Python version. However, because of the way Ruff discovers config files, `pyproject.toml` files without a `[tool.ruff]` section would be ignored, including the `requires-python` setting. Ruff would then use the default Python version (3.9 as of this writing) instead, which is surprising when you've attempted to request another version.
In v0.10, config discovery has been updated to address this issue:
- If Ruff finds a `ruff.toml` file without a `target-version`, it will check
for a `pyproject.toml` file in the same directory and respect its
`requires-python` version, even if it does not contain a `[tool.ruff]`
section.
- If Ruff finds a user-level configuration, the `requires-python` field of the closest `pyproject.toml` in a parent directory will take precedence.
- If there is no config file (`ruff.toml` or `pyproject.toml` with a
`[tool.ruff]` section) in the directory of the file being checked, Ruff will
search for the closest `pyproject.toml` in the parent directories and use its
`requires-python` setting.
### Stabilization
The following behaviors have been stabilized:
- [`blanket-noqa`](https://docs.astral.sh/ruff/rules/blanket-noqa/) (`PGH004`): Also detect blanket file-level noqa comments (and not just line-level comments).
### Preview features
- [syntax-errors] Tuple unpacking in `for` statement iterator clause before Python 3.9 ([#16558](https://github.com/astral-sh/ruff/pull/16558))
## 0.10.0
Check out the [blog post](https://astral.sh/blog/ruff-v0.10.0) for a migration guide and overview of the changes!
### Breaking changes
See also, the "Remapped rules" section which may result in disabled rules.
- **Changes to how the Python version is inferred when a `target-version` is not specified** ([#16319](https://github.com/astral-sh/ruff/pull/16319))
Because of a mistake in the release process, the `requires-python` inference changes are not included in this release and instead shipped as part of 0.11.0.
You can find a description of this change in the 0.11.0 section.
- **Updated `TYPE_CHECKING` behavior** ([#16669](https://github.com/astral-sh/ruff/pull/16669))
Previously, Ruff only recognized typechecking blocks that tested the `typing.TYPE_CHECKING` symbol. Now, Ruff recognizes any local variable named `TYPE_CHECKING`. This release also removes support for the legacy `if 0:` and `if False:` typechecking checks. Use a local `TYPE_CHECKING` variable instead.
- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))
The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [*Error suppression*](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification. A small example follows this list.
- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))
This change fixes a bug in the formatter where it introduced unnecessary parentheses around with statements with a single context manager and a trailing comment. This change may result in a change in formatting for some users.
- **Bump alpine default tag to 3.21 for derived Docker images** ([#16456](https://github.com/astral-sh/ruff/pull/16456))
Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Now the ruff:alpine image will use 3.21 instead of 3.20 and ruff:alpine3.20 will no longer be updated.
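
A minimal illustration of the two suppression forms covered by the unified `noqa` syntax described above (rule codes chosen arbitrarily):

```python
# ruff: noqa: E501
# ^ file-level suppression directive, scoped here to a single rule
import os  # noqa: F401
# ^ in-line suppression for one diagnostic on this line
```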
### Deprecated Rules
The following rules have been deprecated:
- [`non-pep604-isinstance`](https://docs.astral.sh/ruff/rules/non-pep604-isinstance/) (`UP038`)
- [`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/) (`S320`)
### Remapped rules
The following rules have been remapped to new rule codes:
- \[`unsafe-markup-use`\]: `RUF035` to `S704`
### Stabilization
The following rules have been stabilized and are no longer in preview:
- [`batched-without-explicit-strict`](https://docs.astral.sh/ruff/rules/batched-without-explicit-strict) (`B911`)
- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable) (`C420`)
- [`datetime-min-max`](https://docs.astral.sh/ruff/rules/datetime-min-max) (`DTZ901`)
- [`fast-api-unused-path-parameter`](https://docs.astral.sh/ruff/rules/fast-api-unused-path-parameter) (`FAST003`)
- [`root-logger-call`](https://docs.astral.sh/ruff/rules/root-logger-call) (`LOG015`)
- [`len-test`](https://docs.astral.sh/ruff/rules/len-test) (`PLC1802`)
- [`shallow-copy-environ`](https://docs.astral.sh/ruff/rules/shallow-copy-environ) (`PLW1507`)
- [`os-listdir`](https://docs.astral.sh/ruff/rules/os-listdir) (`PTH208`)
- [`invalid-pathlib-with-suffix`](https://docs.astral.sh/ruff/rules/invalid-pathlib-with-suffix) (`PTH210`)
- [`invalid-assert-message-literal-argument`](https://docs.astral.sh/ruff/rules/invalid-assert-message-literal-argument) (`RUF040`)
- [`unnecessary-nested-literal`](https://docs.astral.sh/ruff/rules/unnecessary-nested-literal) (`RUF041`)
- [`unnecessary-cast-to-int`](https://docs.astral.sh/ruff/rules/unnecessary-cast-to-int) (`RUF046`)
- [`map-int-version-parsing`](https://docs.astral.sh/ruff/rules/map-int-version-parsing) (`RUF048`)
- [`if-key-in-dict-del`](https://docs.astral.sh/ruff/rules/if-key-in-dict-del) (`RUF051`) (sketch after this list)
- [`unsafe-markup-use`](https://docs.astral.sh/ruff/rules/unsafe-markup-use) (`S704`). This rule has also been renamed from `RUF035`.
- [`split-static-string`](https://docs.astral.sh/ruff/rules/split-static-string) (`SIM905`)
- [`runtime-cast-value`](https://docs.astral.sh/ruff/rules/runtime-cast-value) (`TC006`)
- [`unquoted-type-alias`](https://docs.astral.sh/ruff/rules/unquoted-type-alias) (`TC007`)
- [`non-pep646-unpack`](https://docs.astral.sh/ruff/rules/non-pep646-unpack) (`UP044`)
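
For example, the stabilized `if-key-in-dict-del` (`RUF051`) rewrites the membership-test-then-delete pattern (dictionary contents are illustrative):

```python
settings = {"debug": True, "verbose": False}

# Flagged pattern:
if "debug" in settings:
    del settings["debug"]

# Suggested replacement:
settings.pop("debug", None)
```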
The following behaviors have been stabilized:
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`) [`invalid-first-argument-name-for-class-method`](https://docs.astral.sh/ruff/rules/invalid-first-argument-name-for-class-method/) (`N804`): `__new__` methods are now no longer flagged by `invalid-first-argument-name-for-class-method` (`N804`) but instead by `bad-staticmethod-argument` (`PLW0211`)
- [`bad-str-strip-call`](https://docs.astral.sh/ruff/rules/bad-str-strip-call/) (`PLE1310`): The rule now applies to objects which are known to have type `str` or `bytes`.
- [`custom-type-var-for-self`](https://docs.astral.sh/ruff/rules/custom-type-var-for-self/) (`PYI019`): More accurate detection of custom `TypeVars` replaceable by `Self`. The range of the diagnostic is now the full function header rather than just the return annotation.
- [`invalid-argument-name`](https://docs.astral.sh/ruff/rules/invalid-argument-name/) (`N803`): Ignore argument names of functions decorated with `typing.override`
- [`invalid-envvar-default`](https://docs.astral.sh/ruff/rules/invalid-envvar-default/) (`PLW1508`): Detect default value arguments to `os.environ.get` with invalid type.
- [`pytest-raises-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-raises-with-multiple-statements/) (`PT012`) [`pytest-warns-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-warns-with-multiple-statements/) (`PT031`): Allow `for` statements with an empty body in `pytest.raises` and `pytest.warns` `with` statements.
- [`redundant-open-modes`](https://docs.astral.sh/ruff/rules/redundant-open-modes/) (`UP015`): The diagnostic range is now the range of the redundant mode argument where it previously was the range of the entire open call. You may have to replace your `noqa` comments when suppressing `UP015`.
- [`stdlib-module-shadowing`](https://docs.astral.sh/ruff/rules/stdlib-module-shadowing/) (`A005`): Changes the default value of `lint.flake8-builtins.strict-checking` from `true` to `false`.
- [`type-none-comparison`](https://docs.astral.sh/ruff/rules/type-none-comparison/) (`FURB169`): Now also recognizes `type(expr) is type(None)` comparisons where `expr` isn't a name expression.
The following fixes or improvements to fixes have been stabilized:
- [`repeated-equality-comparison`](https://docs.astral.sh/ruff/rules/repeated-equality-comparison/) (`PLR1714`) ([#16685](https://github.com/astral-sh/ruff/pull/16685))
- [`needless-bool`](https://docs.astral.sh/ruff/rules/needless-bool/) (`SIM103`) ([#16684](https://github.com/astral-sh/ruff/pull/16684))
- [`unused-private-type-var`](https://docs.astral.sh/ruff/rules/unused-private-type-var/) (`PYI018`) ([#16682](https://github.com/astral-sh/ruff/pull/16682))
### Server
- Remove logging output for `ruff.printDebugInformation` ([#16617](https://github.com/astral-sh/ruff/pull/16617))
### Configuration
- \[`flake8-builtins`\] Deprecate the `builtins-` prefixed options in favor of the unprefixed options (e.g. `builtins-allowed-modules` is now deprecated in favor of `allowed-modules`) ([#16092](https://github.com/astral-sh/ruff/pull/16092))
### Bug fixes
- \[`flake8-bandit`\] Fix mixed-case hash algorithm names (`S324`) ([#16552](https://github.com/astral-sh/ruff/pull/16552))
### CLI
- \[`ruff`\] Fix `last_tag`/`commits_since_last_tag` for `version` command ([#16686](https://github.com/astral-sh/ruff/pull/16686))
## 0.9.10
### Preview features
- \[`ruff`\] Add new rule `RUF059`: Unused unpacked assignment ([#16449](https://github.com/astral-sh/ruff/pull/16449))
- \[`syntax-errors`\] Detect assignment expressions before Python 3.8 ([#16383](https://github.com/astral-sh/ruff/pull/16383))
- \[`syntax-errors`\] Named expressions in decorators before Python 3.9 ([#16386](https://github.com/astral-sh/ruff/pull/16386))
- \[`syntax-errors`\] Parenthesized keyword argument names after Python 3.8 ([#16482](https://github.com/astral-sh/ruff/pull/16482))
- \[`syntax-errors`\] Positional-only parameters before Python 3.8 ([#16481](https://github.com/astral-sh/ruff/pull/16481))
- \[`syntax-errors`\] Tuple unpacking in `return` and `yield` before Python 3.8 ([#16485](https://github.com/astral-sh/ruff/pull/16485))
- \[`syntax-errors`\] Type parameter defaults before Python 3.13 ([#16447](https://github.com/astral-sh/ruff/pull/16447))
- \[`syntax-errors`\] Type parameter lists before Python 3.12 ([#16479](https://github.com/astral-sh/ruff/pull/16479))
- \[`syntax-errors`\] `except*` before Python 3.11 ([#16446](https://github.com/astral-sh/ruff/pull/16446)) (sketch after this list)
- \[`syntax-errors`\] `type` statements before Python 3.12 ([#16478](https://github.com/astral-sh/ruff/pull/16478))
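
For instance, the `except*` check flags code like the following when the target version is below 3.11 (the handler body is illustrative):

```python
def run() -> None:
    try:
        ...
    # Reported as a syntax error when targeting Python < 3.11:
    except* ValueError:
        ...
```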
### Bug fixes
- Escape template filenames in glob patterns in configuration ([#16407](https://github.com/astral-sh/ruff/pull/16407))
- \[`flake8-simplify`\] Exempt unittest context methods for `SIM115` rule ([#16439](https://github.com/astral-sh/ruff/pull/16439))
- Formatter: Fix syntax error location in notebooks ([#16499](https://github.com/astral-sh/ruff/pull/16499))
- \[`pyupgrade`\] Do not offer fix when at least one target is `global`/`nonlocal` (`UP028`) ([#16451](https://github.com/astral-sh/ruff/pull/16451))
- \[`flake8-builtins`\] Ignore variables matching module attribute names (`A001`) ([#16454](https://github.com/astral-sh/ruff/pull/16454))
- \[`pylint`\] Convert `code` keyword argument to a positional argument in fix for (`PLR1722`) ([#16424](https://github.com/astral-sh/ruff/pull/16424))
### CLI
- Move rule code from `description` to `check_name` in GitLab output serializer ([#16437](https://github.com/astral-sh/ruff/pull/16437))
### Documentation
- \[`pydocstyle`\] Clarify that `D417` only checks docstrings with an arguments section ([#16494](https://github.com/astral-sh/ruff/pull/16494))
## 0.9.9
### Preview features
- Fix caching of unsupported-syntax errors ([#16425](https://github.com/astral-sh/ruff/pull/16425))
### Bug fixes
- Only show unsupported-syntax errors in editors when preview mode is enabled ([#16429](https://github.com/astral-sh/ruff/pull/16429))
## 0.9.8
### Preview features
- Start detecting version-related syntax errors in the parser ([#16090](https://github.com/astral-sh/ruff/pull/16090))
### Rule changes
- \[`pylint`\] Mark fix unsafe (`PLW1507`) ([#16343](https://github.com/astral-sh/ruff/pull/16343))
- \[`pylint`\] Catch `case np.nan`/`case math.nan` in `match` statements (`PLW0177`) ([#16378](https://github.com/astral-sh/ruff/pull/16378))
- \[`ruff`\] Add more Pydantic models variants to the list of default copy semantics (`RUF012`) ([#16291](https://github.com/astral-sh/ruff/pull/16291))
### Server
- Avoid indexing the project if `configurationPreference` is `editorOnly` ([#16381](https://github.com/astral-sh/ruff/pull/16381))
- Avoid unnecessary info at non-trace server log level ([#16389](https://github.com/astral-sh/ruff/pull/16389))
- Expand `ruff.configuration` to allow inline config ([#16296](https://github.com/astral-sh/ruff/pull/16296))
- Notify users for invalid client settings ([#16361](https://github.com/astral-sh/ruff/pull/16361))
### Configuration
- Add `per-file-target-version` option ([#16257](https://github.com/astral-sh/ruff/pull/16257))
### Bug fixes
- \[`refurb`\] Do not consider docstring(s) (`FURB156`) ([#16391](https://github.com/astral-sh/ruff/pull/16391))
- \[`flake8-self`\] Ignore attribute accesses on instance-like variables (`SLF001`) ([#16149](https://github.com/astral-sh/ruff/pull/16149))
- \[`pylint`\] Fix false positives, add missing methods, and support positional-only parameters (`PLE0302`) ([#16263](https://github.com/astral-sh/ruff/pull/16263))
- \[`flake8-pyi`\] Mark `PYI030` fix unsafe when comments are deleted ([#16322](https://github.com/astral-sh/ruff/pull/16322))
### Documentation
- Fix example for `S611` ([#16316](https://github.com/astral-sh/ruff/pull/16316))
- Normalize inconsistent markdown headings in docstrings ([#16364](https://github.com/astral-sh/ruff/pull/16364))
- Document MSRV policy ([#16384](https://github.com/astral-sh/ruff/pull/16384))
## 0.9.7
### Preview features
- Consider `__new__` methods as special function type for enforcing class method or static method rules ([#13305](https://github.com/astral-sh/ruff/pull/13305))
- \[`airflow`\] Improve the internal logic to differentiate deprecated symbols (`AIR303`) ([#16013](https://github.com/astral-sh/ruff/pull/16013))
- \[`refurb`\] Manual timezone monkeypatching (`FURB162`) ([#16113](https://github.com/astral-sh/ruff/pull/16113))
- \[`ruff`\] Implicit class variable in dataclass (`RUF045`) ([#14349](https://github.com/astral-sh/ruff/pull/14349))
- \[`ruff`\] Skip singleton starred expressions for `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#16083](https://github.com/astral-sh/ruff/pull/16083))
- \[`refurb`\] Check for subclasses includes subscript expressions (`FURB189`) ([#16155](https://github.com/astral-sh/ruff/pull/16155))
### Rule changes
- \[`flake8-debugger`\] Also flag `sys.breakpointhook` and `sys.__breakpointhook__` (`T100`) ([#16191](https://github.com/astral-sh/ruff/pull/16191))
- \[`pycodestyle`\] Exempt `site.addsitedir(...)` calls (`E402`) ([#16251](https://github.com/astral-sh/ruff/pull/16251))
### Formatter
- Fix unstable formatting of trailing end-of-line comments of parenthesized attribute values ([#16187](https://github.com/astral-sh/ruff/pull/16187))
### Server
- Fix handling of requests received after shutdown message ([#16262](https://github.com/astral-sh/ruff/pull/16262))
- Ignore `source.organizeImports.ruff` and `source.fixAll.ruff` code actions for a notebook cell ([#16154](https://github.com/astral-sh/ruff/pull/16154))
- Include document specific debug info for `ruff.printDebugInformation` ([#16215](https://github.com/astral-sh/ruff/pull/16215))
- Update server to return the debug info as string with `ruff.printDebugInformation` ([#16214](https://github.com/astral-sh/ruff/pull/16214))
### CLI
- Warn on invalid `noqa` even when there are no diagnostics ([#16178](https://github.com/astral-sh/ruff/pull/16178))
- Better error messages while loading configuration `extend`s ([#15658](https://github.com/astral-sh/ruff/pull/15658))
### Bug fixes
- \[`flake8-comprehensions`\] Handle trailing comma in `C403` fix ([#16110](https://github.com/astral-sh/ruff/pull/16110))
- \[`flake8-pyi`\] Avoid flagging `custom-typevar-for-self` on metaclass methods (`PYI019`) ([#16141](https://github.com/astral-sh/ruff/pull/16141))
- \[`pydocstyle`\] Handle arguments with the same names as sections (`D417`) ([#16011](https://github.com/astral-sh/ruff/pull/16011))
- \[`pylint`\] Correct ordering of arguments in fix for `if-stmt-min-max` (`PLR1730`) ([#16080](https://github.com/astral-sh/ruff/pull/16080))
- \[`pylint`\] Do not offer fix for raw strings (`PLE251`) ([#16132](https://github.com/astral-sh/ruff/pull/16132))
- \[`pyupgrade`\] Do not upgrade functional `TypedDicts` with private field names to the class-based syntax (`UP013`) ([#16219](https://github.com/astral-sh/ruff/pull/16219))
- \[`pyupgrade`\] Handle micro version numbers correctly (`UP036`) ([#16091](https://github.com/astral-sh/ruff/pull/16091))
- \[`pyupgrade`\] Unwrap unary expressions correctly (`UP018`) ([#15919](https://github.com/astral-sh/ruff/pull/15919))
- \[`refurb`\] Correctly handle lengths of literal strings in `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#16237](https://github.com/astral-sh/ruff/pull/16237))
- \[`ruff`\] Skip `RUF001` diagnostics when visiting string type definitions ([#16122](https://github.com/astral-sh/ruff/pull/16122))
### Documentation
- Add FAQ entry for `source.*` code actions in Notebook ([#16212](https://github.com/astral-sh/ruff/pull/16212))
- Add `SECURITY.md` ([#16224](https://github.com/astral-sh/ruff/pull/16224))
## 0.9.6
### Preview features
- \[`airflow`\] Add `external_task.{ExternalTaskMarker, ExternalTaskSensor}` for `AIR302` ([#16014](https://github.com/astral-sh/ruff/pull/16014))
- \[`flake8-builtins`\] Make strict module name comparison optional (`A005`) ([#15951](https://github.com/astral-sh/ruff/pull/15951))
- \[`flake8-pyi`\] Extend fix to Python \<= 3.9 for `redundant-none-literal` (`PYI061`) ([#16044](https://github.com/astral-sh/ruff/pull/16044)) (sketch after this list)
- \[`pylint`\] Also report when the object isn't a literal (`PLE1310`) ([#15985](https://github.com/astral-sh/ruff/pull/15985))
- \[`ruff`\] Implement `indented-form-feed` (`RUF054`) ([#16049](https://github.com/astral-sh/ruff/pull/16049))
- \[`ruff`\] Skip type definitions for `missing-f-string-syntax` (`RUF027`) ([#16054](https://github.com/astral-sh/ruff/pull/16054))
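
A sketch of the `redundant-none-literal` (`PYI061`) pattern whose fix is extended above (function names are illustrative):

```python
from typing import Literal


# PYI061: `Literal[None]` is redundant...
def reset() -> Literal[None]: ...


# ...prefer plain `None`:
def reset_fixed() -> None: ...
```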
### Rule changes
- \[`flake8-annotations`\] Correct syntax for `typing.Union` in suggested return type fixes for `ANN20x` rules ([#16025](https://github.com/astral-sh/ruff/pull/16025))
- \[`flake8-builtins`\] Match upstream module name comparison (`A005`) ([#16006](https://github.com/astral-sh/ruff/pull/16006))
- \[`flake8-comprehensions`\] Detect overshadowed `list`/`set`/`dict`, ignore variadics and named expressions (`C417`) ([#15955](https://github.com/astral-sh/ruff/pull/15955))
- \[`flake8-pie`\] Remove following comma correctly when the unpacked dictionary is empty (`PIE800`) ([#16008](https://github.com/astral-sh/ruff/pull/16008))
- \[`flake8-simplify`\] Only trigger `SIM401` on known dictionaries ([#15995](https://github.com/astral-sh/ruff/pull/15995))
- \[`pylint`\] Do not report calls when object type and argument type mismatch, remove custom escape handling logic (`PLE1310`) ([#15984](https://github.com/astral-sh/ruff/pull/15984))
- \[`pyupgrade`\] Comments within parenthesized value ranges should not affect applicability (`UP040`) ([#16027](https://github.com/astral-sh/ruff/pull/16027))
- \[`pyupgrade`\] Don't introduce invalid syntax when upgrading old-style type aliases with parenthesized multiline values (`UP040`) ([#16026](https://github.com/astral-sh/ruff/pull/16026))
- \[`pyupgrade`\] Ensure we do not rename two type parameters to the same name (`UP049`) ([#16038](https://github.com/astral-sh/ruff/pull/16038))
- \[`pyupgrade`\] \[`ruff`\] Don't apply renamings if the new name is shadowed in a scope of one of the references to the binding (`UP049`, `RUF052`) ([#16032](https://github.com/astral-sh/ruff/pull/16032))
- \[`ruff`\] Update `RUF009` to behave similar to `B008` and ignore attributes with immutable types ([#16048](https://github.com/astral-sh/ruff/pull/16048))
### Server
- Root exclusions in the server to project root ([#16043](https://github.com/astral-sh/ruff/pull/16043))
### Bug fixes
- \[`flake8-datetime`\] Ignore `.replace()` calls while looking for `.astimezone` ([#16050](https://github.com/astral-sh/ruff/pull/16050))
- \[`flake8-type-checking`\] Avoid `TC004` false positive where the runtime definition is provided by `__getattr__` ([#16052](https://github.com/astral-sh/ruff/pull/16052))
### Documentation
- Improve `ruff-lsp` migration document ([#16072](https://github.com/astral-sh/ruff/pull/16072))
- Undeprecate `ruff.nativeServer` ([#16039](https://github.com/astral-sh/ruff/pull/16039))
## 0.9.5
### Preview features
- Recognize all symbols named `TYPE_CHECKING` for `in_type_checking_block` ([#15719](https://github.com/astral-sh/ruff/pull/15719))
- \[`flake8-comprehensions`\] Handle builtins at top of file correctly for `unnecessary-dict-comprehension-for-iterable` (`C420`) ([#15837](https://github.com/astral-sh/ruff/pull/15837))
- \[`flake8-logging`\] `.exception()` and `exc_info=` outside exception handlers (`LOG004`, `LOG014`) ([#15799](https://github.com/astral-sh/ruff/pull/15799))
- \[`flake8-pyi`\] Fix incorrect behaviour of `custom-typevar-return-type` preview-mode autofix if `typing` was already imported (`PYI019`) ([#15853](https://github.com/astral-sh/ruff/pull/15853))
- \[`flake8-pyi`\] Fix more complex cases (`PYI019`) ([#15821](https://github.com/astral-sh/ruff/pull/15821))
- \[`flake8-pyi`\] Make `PYI019` autofixable for `.py` files in preview mode as well as stubs ([#15889](https://github.com/astral-sh/ruff/pull/15889))
- \[`flake8-pyi`\] Remove type parameter correctly when it is the last (`PYI019`) ([#15854](https://github.com/astral-sh/ruff/pull/15854))
- \[`pylint`\] Fix missing parens in unsafe fix for `unnecessary-dunder-call` (`PLC2801`) ([#15762](https://github.com/astral-sh/ruff/pull/15762))
- \[`pyupgrade`\] Better messages and diagnostic range (`UP015`) ([#15872](https://github.com/astral-sh/ruff/pull/15872))
- \[`pyupgrade`\] Rename private type parameters in PEP 695 generics (`UP049`) ([#15862](https://github.com/astral-sh/ruff/pull/15862)) (sketch after this list)
- \[`refurb`\] Also report non-name expressions (`FURB169`) ([#15905](https://github.com/astral-sh/ruff/pull/15905))
- \[`refurb`\] Mark fix as unsafe if there are comments (`FURB171`) ([#15832](https://github.com/astral-sh/ruff/pull/15832))
- \[`ruff`\] Classes with mixed type variable style (`RUF053`) ([#15841](https://github.com/astral-sh/ruff/pull/15841))
- \[`airflow`\] `BashOperator` has been moved to `airflow.providers.standard.operators.bash.BashOperator` (`AIR302`) ([#15922](https://github.com/astral-sh/ruff/pull/15922))
- \[`flake8-pyi`\] Add autofix for unused-private-type-var (`PYI018`) ([#15999](https://github.com/astral-sh/ruff/pull/15999))
- \[`flake8-pyi`\] Significantly improve accuracy of `PYI019` if preview mode is enabled ([#15888](https://github.com/astral-sh/ruff/pull/15888))
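
A sketch of the `UP049` rename described above (the class is illustrative; PEP 695 syntax requires Python 3.12+):

```python
class Box[_T]:  # UP049 flags the leading-underscore type parameter name
    value: _T


# After the fix, the parameter and its references are renamed:
class BoxFixed[T]:
    value: T
```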
### Rule changes
- Preserve triple quotes and prefixes for strings ([#15818](https://github.com/astral-sh/ruff/pull/15818))
- \[`flake8-comprehensions`\] Skip when `TypeError` present from too many (kw)args for `C410`,`C411`, and `C418` ([#15838](https://github.com/astral-sh/ruff/pull/15838))
- \[`flake8-pyi`\] Rename `PYI019` and improve its diagnostic message ([#15885](https://github.com/astral-sh/ruff/pull/15885))
- \[`pep8-naming`\] Ignore `@override` methods (`N803`) ([#15954](https://github.com/astral-sh/ruff/pull/15954))
- \[`pyupgrade`\] Reuse replacement logic from `UP046` and `UP047` to preserve more comments (`UP040`) ([#15840](https://github.com/astral-sh/ruff/pull/15840))
- \[`ruff`\] Analyze deferred annotations before enforcing `mutable-(data)class-default` and `function-call-in-dataclass-default-argument` (`RUF008`,`RUF009`,`RUF012`) ([#15921](https://github.com/astral-sh/ruff/pull/15921))
- \[`pycodestyle`\] Exempt `sys.path += ...` calls (`E402`) ([#15980](https://github.com/astral-sh/ruff/pull/15980))
### Configuration
- Config error only when `flake8-import-conventions` alias conflicts with `isort.required-imports` bound name ([#15918](https://github.com/astral-sh/ruff/pull/15918))
- Workaround Even Better TOML crash related to `allOf` ([#15992](https://github.com/astral-sh/ruff/pull/15992))
### Bug fixes
- \[`flake8-comprehensions`\] Unnecessary `list` comprehension (rewrite as a `set` comprehension) (`C403`) - Handle extraneous parentheses around list comprehension ([#15877](https://github.com/astral-sh/ruff/pull/15877))
- \[`flake8-comprehensions`\] Handle trailing comma in fixes for `unnecessary-generator-list/set` (`C400`,`C401`) ([#15929](https://github.com/astral-sh/ruff/pull/15929))
- \[`flake8-pyi`\] Fix several correctness issues with `custom-type-var-return-type` (`PYI019`) ([#15851](https://github.com/astral-sh/ruff/pull/15851))
- \[`pep8-naming`\] Consider any number of leading underscore for `N801` ([#15988](https://github.com/astral-sh/ruff/pull/15988))
- \[`pyflakes`\] Visit forward annotations in `TypeAliasType` as types (`F401`) ([#15829](https://github.com/astral-sh/ruff/pull/15829))
- \[`pylint`\] Correct min/max auto-fix and suggestion (`PLR1730`) ([#15930](https://github.com/astral-sh/ruff/pull/15930))
- \[`refurb`\] Handle unparenthesized tuples correctly (`FURB122`, `FURB142`) ([#15953](https://github.com/astral-sh/ruff/pull/15953))
- \[`refurb`\] Avoid `None | None` as well as better detection and fix (`FURB168`) ([#15779](https://github.com/astral-sh/ruff/pull/15779))
### Documentation
- Add deprecation warning for `ruff-lsp` related settings ([#15850](https://github.com/astral-sh/ruff/pull/15850))
- Docs (`linter.md`): clarify that Python files are always searched for in subdirectories ([#15882](https://github.com/astral-sh/ruff/pull/15882))
- Fix a typo in `non_pep695_generic_class.rs` ([#15946](https://github.com/astral-sh/ruff/pull/15946))
- Improve Docs: Pylint subcategories' codes ([#15909](https://github.com/astral-sh/ruff/pull/15909))
- Remove non-existing `lint.extendIgnore` editor setting ([#15844](https://github.com/astral-sh/ruff/pull/15844))
- Update black deviations ([#15928](https://github.com/astral-sh/ruff/pull/15928))
- Mention `UP049` in `UP046` and `UP047`, add `See also` section to `UP040` ([#15956](https://github.com/astral-sh/ruff/pull/15956))
- Add instance variable examples to `RUF012` ([#15982](https://github.com/astral-sh/ruff/pull/15982))
- Explain precedence for `ignore` and `select` config ([#15883](https://github.com/astral-sh/ruff/pull/15883))
## 0.9.4
### Preview features
- \[`airflow`\] Extend airflow context parameter check for `BaseOperator.execute` (`AIR302`) ([#15713](https://github.com/astral-sh/ruff/pull/15713))
- \[`airflow`\] Update `AIR302` to check for deprecated context keys ([#15144](https://github.com/astral-sh/ruff/pull/15144))
- \[`flake8-bandit`\] Permit suspicious imports within stub files (`S4`) ([#15822](https://github.com/astral-sh/ruff/pull/15822))
- \[`pylint`\] Do not trigger `PLR6201` on empty collections ([#15732](https://github.com/astral-sh/ruff/pull/15732))
- \[`refurb`\] Do not emit diagnostic when loop variables are used outside loop body (`FURB122`) ([#15757](https://github.com/astral-sh/ruff/pull/15757))
- \[`ruff`\] Add support for more `re` patterns (`RUF055`) ([#15764](https://github.com/astral-sh/ruff/pull/15764))
- \[`ruff`\] Check for shadowed `map` before suggesting fix (`RUF058`) ([#15790](https://github.com/astral-sh/ruff/pull/15790))
- \[`ruff`\] Do not emit diagnostic when all arguments to `zip()` are variadic (`RUF058`) ([#15744](https://github.com/astral-sh/ruff/pull/15744))
- \[`ruff`\] Parenthesize fix when argument spans multiple lines for `unnecessary-round` (`RUF057`) ([#15703](https://github.com/astral-sh/ruff/pull/15703))
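
A sketch of the parenthesized `unnecessary-round` (`RUF057`) fix for a multi-line argument, as described in the last item above (values are illustrative):

```python
a: int = 1
b: int = 2

# Flagged: rounding an integer expression is a no-op.
total = round(
    a
    + b
)
# The fix now wraps the multi-line argument in parentheses rather than
# producing invalid code:
total = (
    a
    + b
)
```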
### Rule changes
- Preserve quote style in generated code ([#15726](https://github.com/astral-sh/ruff/pull/15726), [#15778](https://github.com/astral-sh/ruff/pull/15778), [#15794](https://github.com/astral-sh/ruff/pull/15794))
- \[`flake8-bugbear`\] Exempt `NewType` calls where the original type is immutable (`B008`) ([#15765](https://github.com/astral-sh/ruff/pull/15765))
- \[`pylint`\] Honor banned top-level imports by `TID253` in `PLC0415`. ([#15628](https://github.com/astral-sh/ruff/pull/15628))
- \[`pyupgrade`\] Ignore `is_typeddict` and `TypedDict` for `deprecated-import` (`UP035`) ([#15800](https://github.com/astral-sh/ruff/pull/15800))
### CLI
- Fix formatter warning message for `flake8-quotes` option ([#15788](https://github.com/astral-sh/ruff/pull/15788))
- Implement tab autocomplete for `ruff config` ([#15603](https://github.com/astral-sh/ruff/pull/15603))
### Bug fixes
- \[`flake8-comprehensions`\] Do not emit `unnecessary-map` diagnostic when lambda has different arity (`C417`) ([#15802](https://github.com/astral-sh/ruff/pull/15802))
- \[`flake8-comprehensions`\] Parenthesize `sorted` when needed for `unnecessary-call-around-sorted` (`C413`) ([#15825](https://github.com/astral-sh/ruff/pull/15825))
- \[`pyupgrade`\] Handle end-of-line comments for `quoted-annotation` (`UP037`) ([#15824](https://github.com/astral-sh/ruff/pull/15824))
### Documentation
- Add missing config docstrings ([#15803](https://github.com/astral-sh/ruff/pull/15803))
- Add references to `trio.run_process` and `anyio.run_process` ([#15761](https://github.com/astral-sh/ruff/pull/15761))
- Use `uv init --lib` in tutorial ([#15718](https://github.com/astral-sh/ruff/pull/15718))
## 0.9.3
### Preview features
- \[`airflow`\] Argument `fail_stop` in DAG has been renamed as `fail_fast` (`AIR302`) ([#15633](https://github.com/astral-sh/ruff/pull/15633))
- \[`airflow`\] Extend `AIR303` with more symbols ([#15611](https://github.com/astral-sh/ruff/pull/15611))
- \[`flake8-bandit`\] Report all references to suspicious functions (`S3`) ([#15541](https://github.com/astral-sh/ruff/pull/15541))
- \[`flake8-pytest-style`\] Do not emit diagnostics for empty `for` loops (`PT012`, `PT031`) ([#15542](https://github.com/astral-sh/ruff/pull/15542))
- \[`flake8-simplify`\] Avoid double negations (`SIM103`) ([#15562](https://github.com/astral-sh/ruff/pull/15562))
- \[`pyflakes`\] Fix infinite loop with unused local import in `__init__.py` (`F401`) ([#15517](https://github.com/astral-sh/ruff/pull/15517))
- \[`pylint`\] Do not report methods with only one `EM101`-compatible `raise` (`PLR6301`) ([#15507](https://github.com/astral-sh/ruff/pull/15507))
- \[`pylint`\] Implement `redefined-slots-in-subclass` (`W0244`) ([#9640](https://github.com/astral-sh/ruff/pull/9640))
- \[`pyupgrade`\] Add rules to use PEP 695 generics in classes and functions (`UP046`, `UP047`) ([#15565](https://github.com/astral-sh/ruff/pull/15565), [#15659](https://github.com/astral-sh/ruff/pull/15659))
- \[`refurb`\] Implement `for-loop-writes` (`FURB122`) ([#10630](https://github.com/astral-sh/ruff/pull/10630))
- \[`ruff`\] Implement `needless-else` clause (`RUF047`) ([#15051](https://github.com/astral-sh/ruff/pull/15051))
- \[`ruff`\] Implement `starmap-zip` (`RUF058`) ([#15483](https://github.com/astral-sh/ruff/pull/15483))
### Rule changes
- \[`flake8-bugbear`\] Do not raise error if keyword argument is present and target-python version is less or equals than 3.9 (`B903`) ([#15549](https://github.com/astral-sh/ruff/pull/15549))
- \[`flake8-comprehensions`\] Strip parentheses around generators in `unnecessary-generator-set` (`C401`) ([#15553](https://github.com/astral-sh/ruff/pull/15553))
- \[`flake8-pytest-style`\] Rewrite references to `.exception` (`PT027`) ([#15680](https://github.com/astral-sh/ruff/pull/15680))
- \[`flake8-simplify`\] Mark fixes as unsafe (`SIM201`, `SIM202`) ([#15626](https://github.com/astral-sh/ruff/pull/15626))
- \[`flake8-type-checking`\] Fix some safe fixes being labeled unsafe (`TC006`,`TC008`) ([#15638](https://github.com/astral-sh/ruff/pull/15638))
- \[`isort`\] Omit trailing whitespace in `unsorted-imports` (`I001`) ([#15518](https://github.com/astral-sh/ruff/pull/15518))
- \[`pydoclint`\] Allow ignoring one-line docstrings for `DOC` rules ([#13302](https://github.com/astral-sh/ruff/pull/13302))
- \[`pyflakes`\] Apply redefinition fixes by source code order (`F811`) ([#15575](https://github.com/astral-sh/ruff/pull/15575))
- \[`pyflakes`\] Avoid removing too many imports in `redefined-while-unused` (`F811`) ([#15585](https://github.com/astral-sh/ruff/pull/15585))
- \[`pyflakes`\] Group redefinition fixes by source statement (`F811`) ([#15574](https://github.com/astral-sh/ruff/pull/15574))
- \[`pylint`\] Include name of base class in message for `redefined-slots-in-subclass` (`W0244`) ([#15559](https://github.com/astral-sh/ruff/pull/15559))
- \[`ruff`\] Update fix for `RUF055` to use `var == value` ([#15605](https://github.com/astral-sh/ruff/pull/15605))
### Formatter
- Fix bracket spacing for single-element tuples in f-string expressions ([#15537](https://github.com/astral-sh/ruff/pull/15537))
- Fix unstable f-string formatting for expressions containing a trailing comma ([#15545](https://github.com/astral-sh/ruff/pull/15545))
### Performance
- Avoid quadratic membership check in import fixes ([#15576](https://github.com/astral-sh/ruff/pull/15576))
### Server
- Allow `unsafe-fixes` settings for code actions ([#15666](https://github.com/astral-sh/ruff/pull/15666))
### Bug fixes
- \[`flake8-bandit`\] Add missing single-line/dotall regex flag (`S608`) ([#15654](https://github.com/astral-sh/ruff/pull/15654))
- \[`flake8-import-conventions`\] Fix infinite loop between `ICN001` and `I002` (`ICN001`) ([#15480](https://github.com/astral-sh/ruff/pull/15480))
- \[`flake8-simplify`\] Do not emit diagnostics for expressions inside string type annotations (`SIM222`, `SIM223`) ([#15405](https://github.com/astral-sh/ruff/pull/15405))
- \[`pyflakes`\] Treat arguments passed to the `default=` parameter of `TypeVar` as type expressions (`F821`) ([#15679](https://github.com/astral-sh/ruff/pull/15679))
- \[`pyupgrade`\] Avoid syntax error when the iterable is a non-parenthesized tuple (`UP028`) ([#15543](https://github.com/astral-sh/ruff/pull/15543))
- \[`ruff`\] Exempt `NewType` calls where the original type is immutable (`RUF009`) ([#15588](https://github.com/astral-sh/ruff/pull/15588))
- Preserve raw string prefix and escapes in all codegen fixes ([#15694](https://github.com/astral-sh/ruff/pull/15694))
### Documentation
- Generate documentation redirects for lowercase rule codes ([#15564](https://github.com/astral-sh/ruff/pull/15564))
- `TRY300`: Add some extra notes on not catching exceptions you didn't expect ([#15036](https://github.com/astral-sh/ruff/pull/15036))
## 0.9.2
### Preview features
@@ -1505,11 +921,11 @@ The following rules have been stabilized and are no longer in preview:
The following behaviors have been stabilized:
- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.
- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context mangers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context mangers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context mangers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context mangers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context mangers.
The following fixes have been stabilized:

View File

@@ -526,7 +526,7 @@ cargo benchmark
#### Benchmark-driven Development
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
`--save-baseline=<name>` to store an initial baseline benchmark (e.g., on `main`) and then use
`--save-baseline=<name>` to store an initial baseline benchmark (e.g. on `main`) and then use
`--benchmark=<name>` to compare against that benchmark. Criterion will print a message telling you
if the benchmark improved/regressed compared to that baseline.
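A minimal sketch of that workflow, using the flags documented above (the baseline name `main` is illustrative):
```sh
# On the main branch: record a baseline to compare against later.
cargo benchmark --save-baseline=main

# On a feature branch: compare the same benchmarks against the stored baseline.
cargo benchmark --benchmark=main
```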
@@ -678,9 +678,9 @@ utils with it:
23 Newline 24
```
- `cargo dev print-cst <file>`: Print the CST of a Python file using
- `cargo dev print-cst <file>`: Print the CST of a python file using
[LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
in Ruff. For example, for `if True: pass # comment`, everything, including the whitespace, is represented:
in Ruff. E.g. for `if True: pass # comment` everything including the whitespace is represented:
```text
Module {

Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -4,7 +4,7 @@ resolver = "2"
[workspace.package]
edition = "2021"
rust-version = "1.84"
rust-version = "1.80"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -38,11 +38,10 @@ ruff_text_size = { path = "crates/ruff_text_size" }
red_knot_vendored = { path = "crates/red_knot_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }
red_knot_ide = { path = "crates/red_knot_ide" }
red_knot_project = { path = "crates/red_knot_project", default-features = false }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_test = { path = "crates/red_knot_test" }
red_knot_project = { path = "crates/red_knot_project", default-features = false }
aho-corasick = { version = "1.1.3" }
anstream = { version = "0.6.18" }
@@ -55,6 +54,7 @@ bitflags = { version = "2.5.0" }
bstr = { version = "1.9.1" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "4.0.0" }
@@ -63,7 +63,7 @@ colored = { version = "3.0.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.9.0"
compact_str = "0.8.0"
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
@@ -71,16 +71,14 @@ dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.10.0" }
etcetera = { version = "0.8.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = { version = "0.15.0", default-features = false, features = [
"raw-entry",
"equivalent",
"inline-more",
] }
ignore = { version = "0.4.22" }
@@ -94,7 +92,6 @@ insta-cmd = { version = "0.6.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.14.0" }
jiff = { version = "0.2.0" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
libc = { version = "0.2.153" }
@@ -119,12 +116,12 @@ proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.13.4" }
quick-junit = { version = "0.5.0" }
quote = { version = "1.0.23" }
rand = { version = "0.9.0" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "87bf6b6c2d5f6479741271da73bd9d30c2580c26" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "88a1d7774d78f048fbd77d40abca9ebd729fd1f0" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -137,15 +134,10 @@ serde_with = { version = "3.6.0", default-features = false, features = [
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
"diff",
"term-svg",
"cmd",
"examples",
] }
snapbox = { version = "0.6.0", features = ["diff", "term-svg", "cmd", "examples"] }
static_assertions = "1.1.0"
strum = { version = "0.27.0", features = ["strum_macros"] }
strum_macros = { version = "0.27.0" }
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
@@ -155,7 +147,6 @@ toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-log = { version = "0.2.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
"env-filter",
"fmt",
@@ -168,6 +159,7 @@ unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
"v4",
@@ -181,10 +173,6 @@ wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
[workspace.metadata.cargo-shear]
ignored = ["getrandom"]
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
@@ -209,7 +197,6 @@ must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
@@ -272,7 +259,7 @@ inherits = "release"
# Config for 'dist'
[workspace.metadata.dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.28.4-prerelease.1"
cargo-dist-version = "0.25.2-prerelease.3"
# CI backends to support
ci = "github"
# The installers to generate for each app
@@ -318,23 +305,10 @@ local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
post-announce-jobs = [
"./notify-dependents",
"./publish-docs",
"./publish-playground",
]
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]
[workspace.metadata.dist.github-custom-runners]
global = "depot-ubuntu-latest-4"
[workspace.metadata.dist.github-action-commits]
"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4
"actions/upload-artifact" = "ea165f8d65b6e75b540449e92b4886f43607fa02" # v4.6.2
"actions/download-artifact" = "95815c38cf2ff2164869cbab79da8d1f422bc89e" # v4.2.1
"actions/attest-build-provenance" = "c074443f1aee8d4aeeae555aebba3282517141b2" #v2.2.3

View File

@@ -149,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.11.6/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.11.6/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.9.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.9.2/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -183,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.11.6
rev: v0.9.2
hooks:
# Run the linter.
- id: ruff
@@ -452,7 +452,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [JAX](https://github.com/jax-ml/jax)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)

View File

@@ -1,15 +0,0 @@
# Security policy
## Reporting a vulnerability
If you have found a possible vulnerability, please email `security at astral dot sh`.
## Bug bounties
While we sincerely appreciate and encourage reports of suspected security problems, please note that
Astral does not currently run any bug bounty programs.
## Vulnerability disclosures
Critical vulnerabilities will be disclosed via GitHub's
[security advisory](https://github.com/astral-sh/ruff/security) system.

View File

@@ -23,10 +23,6 @@ extend-ignore-re = [
# Line ignore with trailing "spellchecker:disable-line"
"(?Rm)^.*#\\s*spellchecker:disable-line$",
"LICENSEs",
# Various third-party dependencies use `typ` as a struct field name (e.g., lsp_types::LogMessageParams)
"typ",
# TODO: Remove this once the `TYP` redirects are removed from `rule_redirects.rs`
"TYP",
]
[default.extend-identifiers]

View File

@@ -16,27 +16,23 @@ red_knot_python_semantic = { workspace = true }
red_knot_project = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }
anyhow = { workspace = true }
argfile = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
jiff = { workspace = true }
rayon = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing-tree = { workspace = true }
wild = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_trivia = { workspace = true }
insta = { workspace = true, features = ["filters"] }
insta-cmd = { workspace = true }

View File

@@ -1,25 +0,0 @@
# Red Knot
Red Knot is an extremely fast type checker.
Currently, it is a work-in-progress and not ready for user testing.
Red Knot is designed to prioritize good type inference, even in unannotated code,
and aims to avoid false positives.
While Red Knot will produce similar results to mypy and pyright on many codebases,
100% compatibility with these tools is a non-goal.
On some codebases, Red Knot's design decisions lead to different outcomes
than you would get from running one of these more established tools.
## Contributing
Core type checking tests are written as Markdown code blocks.
They can be found in [`red_knot_python_semantic/resources/mdtest`][resources-mdtest].
See [`red_knot_test/README.md`][mdtest-readme] for more information
on the test framework itself.
The list of open issues can be found [here][open-issues].
[mdtest-readme]: ../red_knot_test/README.md
[open-issues]: https://github.com/astral-sh/ruff/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20label%3Ared-knot
[resources-mdtest]: ../red_knot_python_semantic/resources/mdtest

View File

@@ -1,104 +0,0 @@
use std::{
fs,
path::{Path, PathBuf},
process::Command,
};
fn main() {
// The workspace root directory is not available without walking up the tree
// https://github.com/rust-lang/cargo/issues/3946
let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
.join("..")
.join("..");
commit_info(&workspace_root);
#[allow(clippy::disallowed_methods)]
let target = std::env::var("TARGET").unwrap();
println!("cargo::rustc-env=RUST_HOST_TARGET={target}");
}
fn commit_info(workspace_root: &Path) {
// If not in a git repository, do not attempt to retrieve commit information
let git_dir = workspace_root.join(".git");
if !git_dir.exists() {
return;
}
if let Some(git_head_path) = git_head(&git_dir) {
println!("cargo:rerun-if-changed={}", git_head_path.display());
let git_head_contents = fs::read_to_string(git_head_path);
if let Ok(git_head_contents) = git_head_contents {
// The contents are either a commit or a reference in the following formats
// - "<commit>" when the head is detached
// - "ref <ref>" when working on a branch
// If a commit, checking if the HEAD file has changed is sufficient
// If a ref, we need to add the head file for that ref to rebuild on commit
let mut git_ref_parts = git_head_contents.split_whitespace();
git_ref_parts.next();
if let Some(git_ref) = git_ref_parts.next() {
let git_ref_path = git_dir.join(git_ref);
println!("cargo:rerun-if-changed={}", git_ref_path.display());
}
}
}
let output = match Command::new("git")
.arg("log")
.arg("-1")
.arg("--date=short")
.arg("--abbrev=9")
.arg("--format=%H %h %cd %(describe)")
.output()
{
Ok(output) if output.status.success() => output,
_ => return,
};
let stdout = String::from_utf8(output.stdout).unwrap();
let mut parts = stdout.split_whitespace();
let mut next = || parts.next().unwrap();
let _commit_hash = next();
println!("cargo::rustc-env=RED_KNOT_COMMIT_SHORT_HASH={}", next());
println!("cargo::rustc-env=RED_KNOT_COMMIT_DATE={}", next());
// Describe can fail for some commits
// https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
if let Some(describe) = parts.next() {
let mut describe_parts = describe.split('-');
let _last_tag = describe_parts.next().unwrap();
// If this is the tagged commit, this component will be missing
println!(
"cargo::rustc-env=RED_KNOT_LAST_TAG_DISTANCE={}",
describe_parts.next().unwrap_or("0")
);
}
}
fn git_head(git_dir: &Path) -> Option<PathBuf> {
// The typical case is a standard git repository.
let git_head_path = git_dir.join("HEAD");
if git_head_path.exists() {
return Some(git_head_path);
}
if !git_dir.is_file() {
return None;
}
// If `.git/HEAD` doesn't exist and `.git` is actually a file,
// then let's attempt to read it as a worktree. If it's
// a worktree, then its contents will look like this, e.g.:
//
// gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
//
// And the HEAD file we want to watch will be at:
//
// /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
let contents = fs::read_to_string(git_dir).ok()?;
let (label, worktree_path) = contents.split_once(':')?;
if label != "gitdir" {
return None;
}
let worktree_path = worktree_path.trim();
Some(PathBuf::from(worktree_path))
}

View File

@@ -1,61 +0,0 @@
# Running `mypy_primer`
## Basics
For now, we use our own [fork of mypy primer]. It can be run using `uvx --from "…" mypy_primer`. For example, to see the help message, run:
```sh
uvx --from "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support" mypy_primer -h
```
Alternatively, you can install the forked version of `mypy_primer` using:
```sh
uv tool install "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support"
```
and then run it using `uvx mypy_primer` or just `mypy_primer`, if your `PATH` is set up accordingly (see: [Tool executables]).
## Showing the diagnostics diff between two Git revisions
To show the diagnostics diff between two Git revisions (e.g. your feature branch and `main`), run:
```sh
mypy_primer \
--type-checker knot \
--old origin/main \
--new my/feature \
--debug \
--output concise \
--project-selector '/black$'
```
This will show the diagnostics diff for the `black` project between the `main` branch and your `my/feature` branch. To run the
diff for all projects we currently enable in CI, use `--project-selector "/($(paste -s -d'|' crates/red_knot_python_semantic/resources/primer/good.txt))\$"`.
You can also take a look at the [full list of ecosystem projects]. Note that some of them might still need a `knot_paths` configuration
option to work correctly.
## Avoiding recompilation
If you want to run `mypy_primer` repeatedly, e.g. for different projects, but for the same combination of `--old` and `--new`, you
can set the `MYPY_PRIMER_NO_REBUILD` environment variable to avoid recompilation of Red Knot:
```sh
MYPY_PRIMER_NO_REBUILD=1 mypy_primer …
```
## Running from a local copy of the repository
If you are working on a local branch, you can use `mypy_primer`'s `--repo` option to specify the path to your local copy of the `ruff` repository.
This allows `mypy_primer` to check out local branches:
```sh
mypy_primer --repo /path/to/ruff --old origin/main --new my/local-branch …
```
Note that you might need to clean up `/tmp/mypy_primer` in order for this to work correctly.
[fork of mypy primer]: https://github.com/astral-sh/mypy_primer/tree/add-red-knot-support
[full list of ecosystem projects]: https://github.com/astral-sh/mypy_primer/blob/add-red-knot-support/mypy_primer/projects.py
[tool executables]: https://docs.astral.sh/uv/concepts/tools/#tool-executables

View File

@@ -1,280 +0,0 @@
use crate::logging::Verbosity;
use crate::python_version::PythonVersion;
use clap::{ArgAction, ArgMatches, Error, Parser};
use red_knot_project::metadata::options::{EnvironmentOptions, Options, TerminalOptions};
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
use red_knot_python_semantic::lint;
use ruff_db::system::SystemPathBuf;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
)]
#[command(version)]
pub(crate) struct Args {
#[command(subcommand)]
pub(crate) command: Command,
}
#[derive(Debug, clap::Subcommand)]
pub(crate) enum Command {
/// Check a project for type errors.
Check(CheckCommand),
/// Start the language server
Server,
/// Display Red Knot's version
Version,
}
#[derive(Debug, Parser)]
pub(crate) struct CheckCommand {
/// List of files or directories to check.
#[clap(
help = "List of files or directories to check [default: the project root]",
value_name = "PATH"
)]
pub paths: Vec<SystemPathBuf>,
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
///
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
#[arg(long, value_name = "PROJECT")]
pub(crate) project: Option<SystemPathBuf>,
/// Path to the Python installation from which Red Knot resolves type information and third-party dependencies.
///
/// If not specified, Red Knot will look at the `VIRTUAL_ENV` environment variable.
///
/// Red Knot will search in the path's `site-packages` directories for type information and
/// third-party imports.
///
/// This option is commonly used to specify the path to a virtual environment.
#[arg(long, value_name = "PATH")]
pub(crate) python: Option<SystemPathBuf>,
/// Custom directory to use for stdlib typeshed stubs.
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
pub(crate) typeshed: Option<SystemPathBuf>,
/// Additional path to use as a module-resolution source (can be passed multiple times).
#[arg(long, value_name = "PATH")]
pub(crate) extra_search_path: Option<Vec<SystemPathBuf>>,
/// Python version to assume when resolving types.
#[arg(long, value_name = "VERSION", alias = "target-version")]
pub(crate) python_version: Option<PythonVersion>,
/// Target platform to assume when resolving types.
///
/// This is used to specialize the type of `sys.platform` and will affect the visibility
/// of platform-specific functions and attributes. If the value is set to `all`, no
/// assumptions are made about the target platform. If unspecified, the current system's
/// platform will be used.
#[arg(long, value_name = "PLATFORM", alias = "platform")]
pub(crate) python_platform: Option<String>,
#[clap(flatten)]
pub(crate) verbosity: Verbosity,
#[clap(flatten)]
pub(crate) rules: RulesArg,
/// The format to use for printing diagnostic messages.
#[arg(long)]
pub(crate) output_format: Option<OutputFormat>,
/// Control when colored output is used.
#[arg(long, value_name = "WHEN")]
pub(crate) color: Option<TerminalColor>,
/// Use exit code 1 if there are any warning-level diagnostics.
#[arg(long, conflicts_with = "exit_zero", default_missing_value = "true", num_args=0..1)]
pub(crate) error_on_warning: Option<bool>,
/// Always use exit code 0, even when there are error-level diagnostics.
#[arg(long)]
pub(crate) exit_zero: bool,
/// Watch files for changes and recheck files related to the changed files.
#[arg(long, short = 'W')]
pub(crate) watch: bool,
}
impl CheckCommand {
pub(crate) fn into_options(self) -> Options {
let rules = if self.rules.is_empty() {
None
} else {
Some(
self.rules
.into_iter()
.map(|(rule, level)| (RangedValue::cli(rule), RangedValue::cli(level)))
.collect(),
)
};
Options {
environment: Some(EnvironmentOptions {
python_version: self
.python_version
.map(|version| RangedValue::cli(version.into())),
python_platform: self
.python_platform
.map(|platform| RangedValue::cli(platform.into())),
python: self.python.map(RelativePathBuf::cli),
typeshed: self.typeshed.map(RelativePathBuf::cli),
extra_paths: self.extra_search_path.map(|extra_search_paths| {
extra_search_paths
.into_iter()
.map(RelativePathBuf::cli)
.collect()
}),
}),
terminal: Some(TerminalOptions {
output_format: self
.output_format
.map(|output_format| RangedValue::cli(output_format.into())),
error_on_warning: self.error_on_warning,
}),
rules,
..Default::default()
}
}
}
/// A list of rules to enable or disable with a given severity.
///
/// This type is used to parse the `--error`, `--warn`, and `--ignore` arguments
/// while preserving the order in which they were specified (arguments last override previous severities).
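/// For example, a hypothetical invocation along the lines of `--warn some-rule --error some-rule`
/// ends up treating `some-rule` as an error, because the later `--error` overrides
/// the earlier `--warn`.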
#[derive(Debug)]
pub(crate) struct RulesArg(Vec<(String, lint::Level)>);
impl RulesArg {
fn is_empty(&self) -> bool {
self.0.is_empty()
}
fn into_iter(self) -> impl Iterator<Item = (String, lint::Level)> {
self.0.into_iter()
}
}
impl clap::FromArgMatches for RulesArg {
fn from_arg_matches(matches: &ArgMatches) -> Result<Self, Error> {
let mut rules = Vec::new();
for (level, arg_id) in [
(lint::Level::Ignore, "ignore"),
(lint::Level::Warn, "warn"),
(lint::Level::Error, "error"),
] {
let indices = matches.indices_of(arg_id).into_iter().flatten();
let levels = matches.get_many::<String>(arg_id).into_iter().flatten();
rules.extend(
indices
.zip(levels)
.map(|(index, rule)| (index, rule, level)),
);
}
// Sort by their index so that values specified later override earlier ones.
rules.sort_by_key(|(index, _, _)| *index);
Ok(Self(
rules
.into_iter()
.map(|(_, rule, level)| (rule.to_owned(), level))
.collect(),
))
}
fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), Error> {
self.0 = Self::from_arg_matches(matches)?.0;
Ok(())
}
}
impl clap::Args for RulesArg {
fn augment_args(cmd: clap::Command) -> clap::Command {
const HELP_HEADING: &str = "Enabling / disabling rules";
cmd.arg(
clap::Arg::new("error")
.long("error")
.action(ArgAction::Append)
.help("Treat the given rule as having severity 'error'. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
.arg(
clap::Arg::new("warn")
.long("warn")
.action(ArgAction::Append)
.help("Treat the given rule as having severity 'warn'. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
.arg(
clap::Arg::new("ignore")
.long("ignore")
.action(ArgAction::Append)
.help("Disables the rule. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
}
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
Self::augment_args(cmd)
}
}
/// The diagnostic output format.
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum OutputFormat {
/// Print diagnostics verbosely, with context and helpful hints.
///
/// Diagnostic messages may include additional context and
/// annotations on the input to help understand the message.
#[default]
#[value(name = "full")]
Full,
/// Print diagnostics concisely, one per line.
///
/// This will guarantee that each diagnostic is printed on
/// a single line. Only the most important or primary aspects
/// of the diagnostic are included. Contextual information is
/// dropped.
#[value(name = "concise")]
Concise,
}
impl From<OutputFormat> for ruff_db::diagnostic::DiagnosticFormat {
fn from(format: OutputFormat) -> ruff_db::diagnostic::DiagnosticFormat {
match format {
OutputFormat::Full => Self::Full,
OutputFormat::Concise => Self::Concise,
}
}
}
/// Control when colored output is used.
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub(crate) enum TerminalColor {
/// Display colors if the output goes to an interactive terminal.
#[default]
Auto,
/// Always display colors.
Always,
/// Never display colors.
Never,
}

View File

@@ -190,8 +190,8 @@ where
let ansi = writer.has_ansi_escapes();
if self.display_timestamp {
let timestamp = jiff::Zoned::now()
.strftime("%Y-%m-%d %H:%M:%S.%f")
let timestamp = chrono::Local::now()
.format("%Y-%m-%d %H:%M:%S.%f")
.to_string();
if ansi {
write!(writer, "{} ", timestamp.dimmed())?;
@@ -199,7 +199,7 @@ where
write!(
writer,
"{} ",
jiff::Zoned::now().strftime("%Y-%m-%d %H:%M:%S.%f")
chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
)?;
}
}

View File

@@ -1,28 +1,104 @@
use std::io::{self, stdout, BufWriter, Write};
use std::process::{ExitCode, Termination};
use anyhow::Result;
use std::sync::Mutex;
use crate::args::{Args, CheckCommand, Command, TerminalColor};
use crate::logging::setup_tracing;
use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use red_knot_project::metadata::options::Options;
use python_version::PythonVersion;
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::watch;
use red_knot_project::watch::ProjectWatcher;
use red_knot_project::{watch, Db};
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_server::run_server;
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, Severity};
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::diagnostic::Diagnostic;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;
mod args;
use crate::logging::{setup_tracing, Verbosity};
mod logging;
mod python_version;
mod version;
mod verbosity;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
)]
#[command(version)]
struct Args {
#[command(subcommand)]
pub(crate) command: Option<Command>,
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
///
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
#[arg(long, value_name = "PROJECT")]
project: Option<SystemPathBuf>,
/// Path to the virtual environment the project uses.
///
/// If provided, red-knot will use the `site-packages` directory of this virtual environment
/// to resolve type information for the project's third-party dependencies.
#[arg(long, value_name = "PATH")]
venv_path: Option<SystemPathBuf>,
/// Custom directory to use for stdlib typeshed stubs.
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
typeshed: Option<SystemPathBuf>,
/// Additional path to use as a module-resolution source (can be passed multiple times).
#[arg(long, value_name = "PATH")]
extra_search_path: Option<Vec<SystemPathBuf>>,
/// Python version to assume when resolving types.
#[arg(long, value_name = "VERSION", alias = "target-version")]
python_version: Option<PythonVersion>,
#[clap(flatten)]
verbosity: Verbosity,
/// Run in watch mode by re-running whenever files change.
#[arg(long, short = 'W')]
watch: bool,
}
impl Args {
fn to_options(&self, cli_cwd: &SystemPath) -> Options {
Options {
environment: Some(EnvironmentOptions {
python_version: self.python_version.map(Into::into),
venv_path: self
.venv_path
.as_ref()
.map(|venv_path| SystemPath::absolute(venv_path, cli_cwd)),
typeshed: self
.typeshed
.as_ref()
.map(|typeshed| SystemPath::absolute(typeshed, cli_cwd)),
extra_paths: self.extra_search_path.as_ref().map(|extra_search_paths| {
extra_search_paths
.iter()
.map(|path| SystemPath::absolute(path, cli_cwd))
.collect()
}),
..EnvironmentOptions::default()
}),
..Default::default()
}
}
}
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Start the language server
Server,
}
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
@@ -39,15 +115,6 @@ pub fn main() -> ExitStatus {
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
// "failed to read file: subdir/pyproject.toml")
for cause in error.chain() {
// Exit "gracefully" on broken pipe errors.
//
// See: https://github.com/BurntSushi/ripgrep/blob/bf63fe8f258afc09bae6caa48f0ae35eaf115005/crates/core/main.rs#L47C1-L61C14
if let Some(ioerr) = cause.downcast_ref::<io::Error>() {
if ioerr.kind() == io::ErrorKind::BrokenPipe {
return ExitStatus::Success;
}
}
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
}
@@ -56,36 +123,18 @@ pub fn main() -> ExitStatus {
}
fn run() -> anyhow::Result<ExitStatus> {
let args = wild::args_os();
let args = argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX)
.context("Failed to read CLI arguments from file")?;
let args = Args::parse_from(args);
let args = Args::parse_from(std::env::args());
match args.command {
Command::Server => run_server().map(|()| ExitStatus::Success),
Command::Check(check_args) => run_check(check_args),
Command::Version => version().map(|()| ExitStatus::Success),
if matches!(args.command, Some(Command::Server)) {
return run_server().map(|()| ExitStatus::Success);
}
}
pub(crate) fn version() -> Result<()> {
let mut stdout = BufWriter::new(io::stdout().lock());
let version_info = crate::version::version();
writeln!(stdout, "red knot {}", &version_info)?;
Ok(())
}
fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
set_colored_override(args.color);
let verbosity = args.verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
tracing::debug!("Version: {}", version::version());
// The base path that all CLI arguments are resolved relative to.
let cwd = {
let cli_base_path = {
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
SystemPathBuf::from_path_buf(cwd)
.map_err(|path| {
@@ -96,41 +145,25 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
})?
};
let project_path = args
let cwd = args
.project
.as_ref()
.map(|project| {
if project.as_std_path().is_dir() {
Ok(SystemPath::absolute(project, &cwd))
.map(|cwd| {
if cwd.as_std_path().is_dir() {
Ok(SystemPath::absolute(cwd, &cli_base_path))
} else {
Err(anyhow!(
"Provided project path `{project}` is not a directory"
))
Err(anyhow!("Provided project path `{cwd}` is not a directory"))
}
})
.transpose()?
.unwrap_or_else(|| cwd.clone());
.unwrap_or_else(|| cli_base_path.clone());
let check_paths: Vec<_> = args
.paths
.iter()
.map(|path| SystemPath::absolute(path, &cwd))
.collect();
let system = OsSystem::new(cwd.clone());
let cli_options = args.to_options(&cwd);
let mut workspace_metadata = ProjectMetadata::discover(system.current_directory(), &system)?;
workspace_metadata.apply_cli_options(cli_options.clone());
let system = OsSystem::new(cwd);
let watch = args.watch;
let exit_zero = args.exit_zero;
let cli_options = args.into_options();
let mut project_metadata = ProjectMetadata::discover(&project_path, &system)?;
project_metadata.apply_cli_options(cli_options.clone());
project_metadata.apply_configuration_files(&system)?;
let mut db = ProjectDatabase::new(project_metadata, system)?;
if !check_paths.is_empty() {
db.project().set_included_paths(&mut db, check_paths);
}
let mut db = ProjectDatabase::new(workspace_metadata, system)?;
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options);
@@ -144,21 +177,17 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
}
})?;
let exit_status = if watch {
let exit_status = if args.watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)?
main_loop.run(&mut db)
};
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
std::mem::forget(db);
if exit_zero {
Ok(ExitStatus::Success)
} else {
Ok(exit_status)
}
Ok(exit_status)
}
#[derive(Copy, Clone)]
@@ -207,7 +236,7 @@ impl MainLoop {
)
}
fn watch(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
fn watch(mut self, db: &mut ProjectDatabase) -> anyhow::Result<ExitStatus> {
tracing::debug!("Starting watch mode");
let sender = self.sender.clone();
let watcher = watch::directory_watcher(move |event| {
@@ -216,12 +245,12 @@ impl MainLoop {
self.watcher = Some(ProjectWatcher::new(watcher, db));
self.run(db)?;
self.run(db);
Ok(ExitStatus::Success)
}
fn run(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
fn run(mut self, db: &mut ProjectDatabase) -> ExitStatus {
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
let result = self.main_loop(db);
@@ -231,7 +260,7 @@ impl MainLoop {
result
}
fn main_loop(&mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
fn main_loop(&mut self, db: &mut ProjectDatabase) -> ExitStatus {
// Schedule the first check.
tracing::debug!("Starting main loop");
@@ -259,54 +288,11 @@ impl MainLoop {
result,
revision: check_revision,
} => {
let terminal_settings = db.project().settings(db).terminal();
let display_config = DisplayDiagnosticConfig::default()
.format(terminal_settings.output_format)
.color(colored::control::SHOULD_COLORIZE.should_colorize());
let min_error_severity = if terminal_settings.error_on_warning {
Severity::Warning
} else {
Severity::Error
};
let has_diagnostics = !result.is_empty();
if check_revision == revision {
if db.project().files(db).is_empty() {
tracing::warn!("No python files found under the given path(s)");
}
let mut stdout = stdout().lock();
if result.is_empty() {
writeln!(stdout, "All checks passed!")?;
if self.watcher.is_none() {
return Ok(ExitStatus::Success);
}
} else {
let mut failed = false;
let diagnostics_count = result.len();
for diagnostic in result {
write!(stdout, "{}", diagnostic.display(db, &display_config))?;
failed |= diagnostic.severity() >= min_error_severity;
}
writeln!(
stdout,
"Found {} diagnostic{}",
diagnostics_count,
if diagnostics_count > 1 { "s" } else { "" }
)?;
if self.watcher.is_none() {
return Ok(if failed {
ExitStatus::Failure
} else {
ExitStatus::Success
});
}
#[allow(clippy::print_stdout)]
for diagnostic in result {
println!("{}", diagnostic.display(db));
}
} else {
tracing::debug!(
@@ -314,6 +300,14 @@ impl MainLoop {
);
}
if self.watcher.is_none() {
return if has_diagnostics {
ExitStatus::Failure
} else {
ExitStatus::Success
};
}
tracing::trace!("Counts after last check:\n{}", countme::get_all());
}
@@ -331,14 +325,14 @@ impl MainLoop {
// TODO: Don't use Salsa internal APIs
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
let _ = db.zalsa_mut();
return Ok(ExitStatus::Success);
return ExitStatus::Success;
}
}
tracing::debug!("Waiting for next main loop message.");
}
Ok(ExitStatus::Success)
ExitStatus::Success
}
}
@@ -358,28 +352,9 @@ impl MainLoopCancellationToken {
enum MainLoopMessage {
CheckWorkspace,
CheckCompleted {
/// The diagnostics that were found during the check.
result: Vec<Diagnostic>,
result: Vec<Box<dyn Diagnostic>>,
revision: u64,
},
ApplyChanges(Vec<watch::ChangeEvent>),
Exit,
}
fn set_colored_override(color: Option<TerminalColor>) {
let Some(color) = color else {
return;
};
match color {
TerminalColor::Auto => {
colored::control::unset_override();
}
TerminalColor::Always => {
colored::control::set_override(true);
}
TerminalColor::Never => {
colored::control::set_override(false);
}
}
}

View File

@@ -40,7 +40,7 @@ impl std::fmt::Display for PythonVersion {
}
}
impl From<PythonVersion> for ruff_python_ast::PythonVersion {
impl From<PythonVersion> for red_knot_python_semantic::PythonVersion {
fn from(value: PythonVersion) -> Self {
match value {
PythonVersion::Py37 => Self::PY37,
@@ -61,8 +61,8 @@ mod tests {
#[test]
fn same_default_as_python_version() {
assert_eq!(
ruff_python_ast::PythonVersion::from(PythonVersion::default()),
ruff_python_ast::PythonVersion::default()
red_knot_python_semantic::PythonVersion::from(PythonVersion::default()),
red_knot_python_semantic::PythonVersion::default()
);
}
}

View File

@@ -0,0 +1 @@

View File

@@ -1,105 +0,0 @@
//! Code for representing Red Knot's release version number.
use std::fmt;
/// Information about the git repository where Red Knot was built from.
pub(crate) struct CommitInfo {
short_commit_hash: String,
commit_date: String,
commits_since_last_tag: u32,
}
/// Red Knot's version.
pub(crate) struct VersionInfo {
/// Red Knot's version, such as "0.5.1"
version: String,
/// Information about the git commit we may have been built from.
///
/// `None` if not built from a git repo or if retrieval failed.
commit_info: Option<CommitInfo>,
}
impl fmt::Display for VersionInfo {
/// Formatted version information: `<version>[+<commits>] (<commit> <date>)`
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.version)?;
if let Some(ref ci) = self.commit_info {
if ci.commits_since_last_tag > 0 {
write!(f, "+{}", ci.commits_since_last_tag)?;
}
write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
}
Ok(())
}
}
/// Returns information about Red Knot's version.
pub(crate) fn version() -> VersionInfo {
// Environment variables are only read at compile-time
macro_rules! option_env_str {
($name:expr) => {
option_env!($name).map(|s| s.to_string())
};
}
// This version is pulled from Cargo.toml and set by Cargo
let version = option_env_str!("CARGO_PKG_VERSION").unwrap();
// Commit info is pulled from git and set by `build.rs`
let commit_info =
option_env_str!("RED_KNOT_COMMIT_SHORT_HASH").map(|short_commit_hash| CommitInfo {
short_commit_hash,
commit_date: option_env_str!("RED_KNOT_COMMIT_DATE").unwrap(),
commits_since_last_tag: option_env_str!("RED_KNOT_LAST_TAG_DISTANCE")
.as_deref()
.map_or(0, |value| value.parse::<u32>().unwrap_or(0)),
});
VersionInfo {
version,
commit_info,
}
}
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use super::{CommitInfo, VersionInfo};
#[test]
fn version_formatting() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: None,
};
assert_snapshot!(version, @"0.0.0");
}
#[test]
fn version_formatting_with_commit_info() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 0,
}),
};
assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
}
#[test]
fn version_formatting_with_commits_since_last_tag() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 24,
}),
};
assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,31 +0,0 @@
[package]
name = "red_knot_ide"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_text_size = { workspace = true }
red_knot_python_semantic = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
smallvec = { workspace = true }
tracing = { workspace = true }
[dev-dependencies]
red_knot_vendored = { workspace = true }
insta = { workspace = true, features = ["filters"] }
[lints]
workspace = true

View File

@@ -1,138 +0,0 @@
use red_knot_python_semantic::Db as SemanticDb;
use ruff_db::{Db as SourceDb, Upcast};
#[salsa::db]
pub trait Db: SemanticDb + Upcast<dyn SemanticDb> + Upcast<dyn SourceDb> {}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::Arc;
use super::Db;
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
use red_knot_python_semantic::{default_lint_registry, Db as SemanticDb, Program};
use ruff_db::files::{File, Files};
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
#[salsa::db]
#[derive(Clone)]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
files: Files,
system: TestSystem,
vendored: VendoredFileSystem,
events: Arc<std::sync::Mutex<Vec<salsa::Event>>>,
rule_selection: Arc<RuleSelection>,
}
#[allow(dead_code)]
impl TestDb {
pub(crate) fn new() -> Self {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: red_knot_vendored::file_system().clone(),
events: Arc::default(),
files: Files::default(),
rule_selection: Arc::new(RuleSelection::from_registry(default_lint_registry())),
}
}
/// Takes the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
let inner = Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");
let events = inner.get_mut().unwrap();
std::mem::take(&mut *events)
}
/// Clears the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
pub(crate) fn clear_salsa_events(&mut self) {
self.take_salsa_events();
}
}
impl DbWithTestSystem for TestDb {
fn test_system(&self) -> &TestSystem {
&self.system
}
fn test_system_mut(&mut self) -> &mut TestSystem {
&mut self.system
}
}
#[salsa::db]
impl SourceDb for TestDb {
fn vendored(&self) -> &VendoredFileSystem {
&self.vendored
}
fn system(&self) -> &dyn System {
&self.system
}
fn files(&self) -> &Files {
&self.files
}
fn python_version(&self) -> ruff_python_ast::PythonVersion {
Program::get(self).python_version(self)
}
}
impl Upcast<dyn SourceDb> for TestDb {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
impl Upcast<dyn SemanticDb> for TestDb {
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut dyn SemanticDb {
self
}
}
#[salsa::db]
impl SemanticDb for TestDb {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}
fn rule_selection(&self) -> Arc<RuleSelection> {
self.rule_selection.clone()
}
fn lint_registry(&self) -> &LintRegistry {
default_lint_registry()
}
}
#[salsa::db]
impl Db for TestDb {}
#[salsa::db]
impl salsa::Database for TestDb {
fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
let event = event();
tracing::trace!("event: {event:?}");
let mut events = self.events.lock().unwrap();
events.push(event);
}
}
}

View File

@@ -1,106 +0,0 @@
use ruff_python_ast::visitor::source_order::{SourceOrderVisitor, TraversalSignal};
use ruff_python_ast::AnyNodeRef;
use ruff_text_size::{Ranged, TextRange};
use std::fmt;
use std::fmt::Formatter;
/// Returns the node with a minimal range that fully contains `range`.
///
/// If `range` is empty and falls within a parser *synthesized* node generated during error recovery,
/// then the first node with the given range is returned.
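/// For example (an illustrative sketch): in `x = a + b`, a search range covering only
/// `a` resolves to the name expression `a`, while a range spanning from `a` through `b`
/// resolves to the enclosing binary expression.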
///
/// ## Panics
/// Panics if `range` is not contained within `root`.
pub(crate) fn covering_node(root: AnyNodeRef, range: TextRange) -> CoveringNode {
struct Visitor<'a> {
range: TextRange,
found: bool,
ancestors: Vec<AnyNodeRef<'a>>,
}
impl<'a> SourceOrderVisitor<'a> for Visitor<'a> {
fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
// If the node fully contains the range, then it is a possible match, but traverse into its children
// to see if there's a node with a narrower range.
if !self.found && node.range().contains_range(self.range) {
self.ancestors.push(node);
TraversalSignal::Traverse
} else {
TraversalSignal::Skip
}
}
fn leave_node(&mut self, node: AnyNodeRef<'a>) {
if !self.found && self.ancestors.last() == Some(&node) {
self.found = true;
}
}
}
assert!(
root.range().contains_range(range),
"Range is not contained within root"
);
let mut visitor = Visitor {
range,
found: false,
ancestors: Vec::new(),
};
root.visit_source_order(&mut visitor);
let minimal = visitor.ancestors.pop().unwrap_or(root);
CoveringNode {
node: minimal,
ancestors: visitor.ancestors,
}
}
/// The node with a minimal range that fully contains the search range.
pub(crate) struct CoveringNode<'a> {
/// The node with a minimal range that fully contains the search range.
node: AnyNodeRef<'a>,
/// The node's ancestors (the spine up to the root).
ancestors: Vec<AnyNodeRef<'a>>,
}
impl<'a> CoveringNode<'a> {
pub(crate) fn node(&self) -> AnyNodeRef<'a> {
self.node
}
/// Returns the node's parent.
pub(crate) fn parent(&self) -> Option<AnyNodeRef<'a>> {
self.ancestors.last().copied()
}
/// Finds the minimal node that fully covers the range and fulfills the given predicate.
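/// For example (hypothetical usage): calling `find` with a predicate that matches only
/// statement nodes walks outward through the ancestors to the nearest enclosing
/// statement; `Err(self)` is returned if neither the node nor any ancestor matches.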
pub(crate) fn find(mut self, f: impl Fn(AnyNodeRef<'a>) -> bool) -> Result<Self, Self> {
if f(self.node) {
return Ok(self);
}
match self.ancestors.iter().rposition(|node| f(*node)) {
Some(index) => {
let node = self.ancestors[index];
self.ancestors.truncate(index);
Ok(Self {
node,
ancestors: self.ancestors,
})
}
None => Err(self),
}
}
}
impl fmt::Debug for CoveringNode<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_tuple("NodeWithAncestors")
.field(&self.node)
.finish()
}
}

View File

@@ -1,851 +0,0 @@
use crate::find_node::covering_node;
use crate::{Db, HasNavigationTargets, NavigationTargets, RangedValue};
use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasType, SemanticModel};
use ruff_db::files::{File, FileRange};
use ruff_db::parsed::{parsed_module, ParsedModule};
use ruff_python_ast::{self as ast, AnyNodeRef};
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange, TextSize};
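/// Resolves the "go to type definition" targets for the position `offset` in `file`
/// (a descriptive summary added for clarity): find the goto target under the cursor,
/// infer its type, and map that type to its navigation targets.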
pub fn goto_type_definition(
db: &dyn Db,
file: File,
offset: TextSize,
) -> Option<RangedValue<NavigationTargets>> {
let parsed = parsed_module(db.upcast(), file);
let goto_target = find_goto_target(parsed, offset)?;
let model = SemanticModel::new(db.upcast(), file);
let ty = goto_target.inferred_type(&model)?;
tracing::debug!(
"Inferred type of covering node is {}",
ty.display(db.upcast())
);
let navigation_targets = ty.navigation_targets(db);
Some(RangedValue {
range: FileRange::new(file, goto_target.range()),
value: navigation_targets,
})
}
#[derive(Clone, Copy, Debug)]
pub(crate) enum GotoTarget<'a> {
Expression(ast::ExprRef<'a>),
FunctionDef(&'a ast::StmtFunctionDef),
ClassDef(&'a ast::StmtClassDef),
Parameter(&'a ast::Parameter),
Alias(&'a ast::Alias),
/// Go to on the module name of an import from
/// ```py
/// from foo import bar
/// ^^^
/// ```
ImportedModule(&'a ast::StmtImportFrom),
/// Go to on the exception handler variable
/// ```py
/// try: ...
/// except Exception as e: ...
/// ^
/// ```
ExceptVariable(&'a ast::ExceptHandlerExceptHandler),
/// Go to on a keyword argument
/// ```py
/// test(a = 1)
/// ^
/// ```
KeywordArgument(&'a ast::Keyword),
/// Go to on the rest parameter of a pattern match
///
/// ```py
/// match x:
/// case {"a": a, "b": b, **rest}: ...
/// ^^^^
/// ```
PatternMatchRest(&'a ast::PatternMatchMapping),
/// Go to on a keyword argument of a class pattern
///
/// ```py
/// match Point3D(0, 0, 0):
/// case Point3D(x=0, y=0, z=0): ...
/// ^ ^ ^
/// ```
PatternKeywordArgument(&'a ast::PatternKeyword),
/// Go to on a pattern star argument
///
/// ```py
/// match array:
/// case [*args]: ...
/// ^^^^
PatternMatchStarName(&'a ast::PatternMatchStar),
/// Go to on the name of a pattern match as pattern
///
/// ```py
/// match x:
/// case [x] as y: ...
/// ^
PatternMatchAsName(&'a ast::PatternMatchAs),
/// Go to on the name of a type variable
///
/// ```py
/// type Alias[T: int = bool] = list[T]
/// ^
/// ```
TypeParamTypeVarName(&'a ast::TypeParamTypeVar),
/// Go to on the name of a type param spec
///
/// ```py
/// type Alias[**P = [int, str]] = Callable[P, int]
/// ^
/// ```
TypeParamParamSpecName(&'a ast::TypeParamParamSpec),
/// Go to on the name of a type var tuple
///
/// ```py
/// type Alias[*Ts = ()] = tuple[*Ts]
/// ^^
/// ```
TypeParamTypeVarTupleName(&'a ast::TypeParamTypeVarTuple),
NonLocal {
identifier: &'a ast::Identifier,
},
Globals {
identifier: &'a ast::Identifier,
},
}
impl<'db> GotoTarget<'db> {
pub(crate) fn inferred_type(self, model: &SemanticModel<'db>) -> Option<Type<'db>> {
let ty = match self {
GotoTarget::Expression(expression) => expression.inferred_type(model),
GotoTarget::FunctionDef(function) => function.inferred_type(model),
GotoTarget::ClassDef(class) => class.inferred_type(model),
GotoTarget::Parameter(parameter) => parameter.inferred_type(model),
GotoTarget::Alias(alias) => alias.inferred_type(model),
GotoTarget::ExceptVariable(except) => except.inferred_type(model),
GotoTarget::KeywordArgument(argument) => {
// TODO: Pyright resolves the declared type of the matching parameter. This seems more accurate
// than using the inferred value.
argument.value.inferred_type(model)
}
// TODO: Support identifier targets
GotoTarget::PatternMatchRest(_)
| GotoTarget::PatternKeywordArgument(_)
| GotoTarget::PatternMatchStarName(_)
| GotoTarget::PatternMatchAsName(_)
| GotoTarget::ImportedModule(_)
| GotoTarget::TypeParamTypeVarName(_)
| GotoTarget::TypeParamParamSpecName(_)
| GotoTarget::TypeParamTypeVarTupleName(_)
| GotoTarget::NonLocal { .. }
| GotoTarget::Globals { .. } => return None,
};
Some(ty)
}
}
impl Ranged for GotoTarget<'_> {
fn range(&self) -> TextRange {
match self {
GotoTarget::Expression(expression) => expression.range(),
GotoTarget::FunctionDef(function) => function.name.range,
GotoTarget::ClassDef(class) => class.name.range,
GotoTarget::Parameter(parameter) => parameter.name.range,
GotoTarget::Alias(alias) => alias.name.range,
GotoTarget::ImportedModule(module) => module.module.as_ref().unwrap().range,
GotoTarget::ExceptVariable(except) => except.name.as_ref().unwrap().range,
GotoTarget::KeywordArgument(keyword) => keyword.arg.as_ref().unwrap().range,
GotoTarget::PatternMatchRest(rest) => rest.rest.as_ref().unwrap().range,
GotoTarget::PatternKeywordArgument(keyword) => keyword.attr.range,
GotoTarget::PatternMatchStarName(star) => star.name.as_ref().unwrap().range,
GotoTarget::PatternMatchAsName(as_name) => as_name.name.as_ref().unwrap().range,
GotoTarget::TypeParamTypeVarName(type_var) => type_var.name.range,
GotoTarget::TypeParamParamSpecName(spec) => spec.name.range,
GotoTarget::TypeParamTypeVarTupleName(tuple) => tuple.name.range,
GotoTarget::NonLocal { identifier, .. } => identifier.range,
GotoTarget::Globals { identifier, .. } => identifier.range,
}
}
}
pub(crate) fn find_goto_target(parsed: &ParsedModule, offset: TextSize) -> Option<GotoTarget> {
let token = parsed
.tokens()
.at_offset(offset)
.max_by_key(|token| match token.kind() {
TokenKind::Name
| TokenKind::String
| TokenKind::Complex
| TokenKind::Float
| TokenKind::Int => 1,
_ => 0,
})?;
let covering_node = covering_node(parsed.syntax().into(), token.range())
.find(|node| node.is_identifier() || node.is_expression())
.ok()?;
tracing::trace!("Covering node is of kind {:?}", covering_node.node().kind());
match covering_node.node() {
AnyNodeRef::Identifier(identifier) => match covering_node.parent() {
Some(AnyNodeRef::StmtFunctionDef(function)) => Some(GotoTarget::FunctionDef(function)),
Some(AnyNodeRef::StmtClassDef(class)) => Some(GotoTarget::ClassDef(class)),
Some(AnyNodeRef::Parameter(parameter)) => Some(GotoTarget::Parameter(parameter)),
Some(AnyNodeRef::Alias(alias)) => Some(GotoTarget::Alias(alias)),
Some(AnyNodeRef::StmtImportFrom(from)) => Some(GotoTarget::ImportedModule(from)),
Some(AnyNodeRef::ExceptHandlerExceptHandler(handler)) => {
Some(GotoTarget::ExceptVariable(handler))
}
Some(AnyNodeRef::Keyword(keyword)) => Some(GotoTarget::KeywordArgument(keyword)),
Some(AnyNodeRef::PatternMatchMapping(mapping)) => {
Some(GotoTarget::PatternMatchRest(mapping))
}
Some(AnyNodeRef::PatternKeyword(keyword)) => {
Some(GotoTarget::PatternKeywordArgument(keyword))
}
Some(AnyNodeRef::PatternMatchStar(star)) => {
Some(GotoTarget::PatternMatchStarName(star))
}
Some(AnyNodeRef::PatternMatchAs(as_pattern)) => {
Some(GotoTarget::PatternMatchAsName(as_pattern))
}
Some(AnyNodeRef::TypeParamTypeVar(var)) => Some(GotoTarget::TypeParamTypeVarName(var)),
Some(AnyNodeRef::TypeParamParamSpec(bound)) => {
Some(GotoTarget::TypeParamParamSpecName(bound))
}
Some(AnyNodeRef::TypeParamTypeVarTuple(var_tuple)) => {
Some(GotoTarget::TypeParamTypeVarTupleName(var_tuple))
}
Some(AnyNodeRef::ExprAttribute(attribute)) => {
Some(GotoTarget::Expression(attribute.into()))
}
Some(AnyNodeRef::StmtNonlocal(_)) => Some(GotoTarget::NonLocal { identifier }),
Some(AnyNodeRef::StmtGlobal(_)) => Some(GotoTarget::Globals { identifier }),
None => None,
Some(parent) => {
tracing::debug!(
"Missing `GoToTarget` for identifier with parent {:?}",
parent.kind()
);
None
}
},
node => node.as_expr_ref().map(GotoTarget::Expression),
}
}
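
The `max_by_key` call above settles what happens when the cursor sits exactly on a token boundary (as in `x<CURSOR>.foo()` from the tests below): both the token ending at the offset and the one starting at it are returned by `at_offset`, and ranking name and literal tokens higher prefers the identifier over adjacent punctuation. A self-contained sketch of the same tie-breaking idea, with hypothetical data:

```rust
// Hypothetical candidates touching the cursor between `x` and `.` in `x.foo`:
// rank name-like tokens (1) above punctuation (0), as the real code does.
fn main() {
    let candidates = [(".", 0u8), ("x", 1u8)];
    let picked = candidates
        .iter()
        .max_by_key(|(_, rank)| *rank)
        .map(|(text, _)| *text);
    assert_eq!(picked, Some("x"));
}
```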
#[cfg(test)]
mod tests {
use crate::tests::{cursor_test, CursorTest, IntoDiagnostic};
use crate::{goto_type_definition, NavigationTarget};
use insta::assert_snapshot;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticId, LintName, Severity, Span, SubDiagnostic,
};
use ruff_db::files::FileRange;
use ruff_text_size::Ranged;
#[test]
fn goto_type_of_expression_with_class_type() {
let test = cursor_test(
r#"
class Test: ...
a<CURSOR>b = Test()
"#,
);
assert_snapshot!(test.goto_type_definition(), @r###"
info: lint:goto-type-definition: Type definition
--> /main.py:2:19
|
2 | class Test: ...
| ^^^^
3 |
4 | ab = Test()
|
info: Source
--> /main.py:4:13
|
2 | class Test: ...
3 |
4 | ab = Test()
| ^^
|
"###);
}
#[test]
fn goto_type_of_expression_with_function_type() {
let test = cursor_test(
r#"
def foo(a, b): ...
ab = foo
a<CURSOR>b
"#,
);
assert_snapshot!(test.goto_type_definition(), @r###"
info: lint:goto-type-definition: Type definition
--> /main.py:2:17
|
2 | def foo(a, b): ...
| ^^^
3 |
4 | ab = foo
|
info: Source
--> /main.py:6:13
|
4 | ab = foo
5 |
6 | ab
| ^^
|
"###);
}
#[test]
fn goto_type_of_expression_with_union_type() {
let test = cursor_test(
r#"
def foo(a, b): ...
def bar(a, b): ...
if random.choice():
a = foo
else:
a = bar
a<CURSOR>
"#,
);
assert_snapshot!(test.goto_type_definition(), @r"
info: lint:goto-type-definition: Type definition
--> /main.py:3:17
|
3 | def foo(a, b): ...
| ^^^
4 |
5 | def bar(a, b): ...
|
info: Source
--> /main.py:12:13
|
10 | a = bar
11 |
12 | a
| ^
|
info: lint:goto-type-definition: Type definition
--> /main.py:5:17
|
3 | def foo(a, b): ...
4 |
5 | def bar(a, b): ...
| ^^^
6 |
7 | if random.choice():
|
info: Source
--> /main.py:12:13
|
10 | a = bar
11 |
12 | a
| ^
|
");
}
#[test]
fn goto_type_of_expression_with_module() {
let mut test = cursor_test(
r#"
import lib
lib<CURSOR>
"#,
);
test.write_file("lib.py", "a = 10").unwrap();
assert_snapshot!(test.goto_type_definition(), @r"
info: lint:goto-type-definition: Type definition
--> /lib.py:1:1
|
1 | a = 10
| ^^^^^^
|
info: Source
--> /main.py:4:13
|
2 | import lib
3 |
4 | lib
| ^^^
|
");
}
#[test]
fn goto_type_of_expression_with_literal_type() {
let test = cursor_test(
r#"
a: str = "test"
a<CURSOR>
"#,
);
assert_snapshot!(test.goto_type_definition(), @r#"
info: lint:goto-type-definition: Type definition
--> stdlib/builtins.pyi:438:7
|
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
437 |
438 | class str(Sequence[str]):
| ^^^
439 | @overload
440 | def __new__(cls, object: object = ...) -> Self: ...
|
info: Source
--> /main.py:4:13
|
2 | a: str = "test"
3 |
4 | a
| ^
|
"#);
}
#[test]
fn goto_type_of_expression_with_literal_node() {
let test = cursor_test(
r#"
a: str = "te<CURSOR>st"
"#,
);
assert_snapshot!(test.goto_type_definition(), @r#"
info: lint:goto-type-definition: Type definition
--> stdlib/builtins.pyi:438:7
|
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
437 |
438 | class str(Sequence[str]):
| ^^^
439 | @overload
440 | def __new__(cls, object: object = ...) -> Self: ...
|
info: Source
--> /main.py:2:22
|
2 | a: str = "test"
| ^^^^^^
|
"#);
}
#[test]
fn goto_type_of_expression_with_type_var_type() {
let test = cursor_test(
r#"
type Alias[T: int = bool] = list[T<CURSOR>]
"#,
);
assert_snapshot!(test.goto_type_definition(), @r###"
info: lint:goto-type-definition: Type definition
--> /main.py:2:24
|
2 | type Alias[T: int = bool] = list[T]
| ^
|
info: Source
--> /main.py:2:46
|
2 | type Alias[T: int = bool] = list[T]
| ^
|
"###);
}
#[test]
fn goto_type_of_expression_with_type_param_spec() {
let test = cursor_test(
r#"
type Alias[**P = [int, str]] = Callable[P<CURSOR>, int]
"#,
);
// TODO: Goto type definition currently doesn't work for type param specs
// because the inference doesn't support them yet.
// This snapshot should show a single target pointing to `P`
assert_snapshot!(test.goto_type_definition(), @"No type definitions found");
}
#[test]
fn goto_type_of_expression_with_type_var_tuple() {
let test = cursor_test(
r#"
type Alias[*Ts = ()] = tuple[*Ts<CURSOR>]
"#,
);
// TODO: Goto type definition currently doesn't work for type var tuples
// because the inference doesn't support them yet.
// This snapshot should show a single target pointing to `Ts`
assert_snapshot!(test.goto_type_definition(), @"No type definitions found");
}
#[test]
fn goto_type_on_keyword_argument() {
let test = cursor_test(
r#"
def test(a: str): ...
test(a<CURSOR>= "123")
"#,
);
assert_snapshot!(test.goto_type_definition(), @r#"
info: lint:goto-type-definition: Type definition
--> stdlib/builtins.pyi:438:7
|
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
437 |
438 | class str(Sequence[str]):
| ^^^
439 | @overload
440 | def __new__(cls, object: object = ...) -> Self: ...
|
info: Source
--> /main.py:4:18
|
2 | def test(a: str): ...
3 |
4 | test(a= "123")
| ^
|
"#);
}
#[test]
fn goto_type_on_incorrectly_typed_keyword_argument() {
let test = cursor_test(
r#"
def test(a: str): ...
test(a<CURSOR>= 123)
"#,
);
// TODO: This should jump to `str` and not `int` because
// the keyword is typed as a string. It's only the passed argument that
// is an int. Navigating to `str` would match pyright's behavior.
assert_snapshot!(test.goto_type_definition(), @r"
info: lint:goto-type-definition: Type definition
--> stdlib/builtins.pyi:231:7
|
229 | _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed
230 |
231 | class int:
| ^^^
232 | @overload
233 | def __new__(cls, x: ConvertibleToInt = ..., /) -> Self: ...
|
info: Source
--> /main.py:4:18
|
2 | def test(a: str): ...
3 |
4 | test(a= 123)
| ^
|
");
}
#[test]
fn goto_type_on_kwargs() {
let test = cursor_test(
r#"
def f(name: str): ...
kwargs = { "name": "test"}
f(**kwargs<CURSOR>)
"#,
);
assert_snapshot!(test.goto_type_definition(), @r#"
info: lint:goto-type-definition: Type definition
--> stdlib/builtins.pyi:1086:7
|
1084 | def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
1085 |
1086 | class dict(MutableMapping[_KT, _VT]):
| ^^^^
1087 | # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics
1088 | # Also multiprocessing.managers.SyncManager.dict()
|
info: Source
--> /main.py:6:5
|
4 | kwargs = { "name": "test"}
5 |
6 | f(**kwargs)
| ^^^^^^
|
"#);
}
#[test]
fn goto_type_of_expression_with_builtin() {
let test = cursor_test(
r#"
def foo(a: str):
a<CURSOR>
"#,
);
assert_snapshot!(test.goto_type_definition(), @r"
info: lint:goto-type-definition: Type definition
--> stdlib/builtins.pyi:438:7
|
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
437 |
438 | class str(Sequence[str]):
| ^^^
439 | @overload
440 | def __new__(cls, object: object = ...) -> Self: ...
|
info: Source
--> /main.py:3:17
|
2 | def foo(a: str):
3 | a
| ^
|
");
}
#[test]
fn goto_type_definition_cursor_between_object_and_attribute() {
let test = cursor_test(
r#"
class X:
def foo(a, b): ...
x = X()
x<CURSOR>.foo()
"#,
);
assert_snapshot!(test.goto_type_definition(), @r###"
info: lint:goto-type-definition: Type definition
--> /main.py:2:19
|
2 | class X:
| ^
3 | def foo(a, b): ...
|
info: Source
--> /main.py:7:13
|
5 | x = X()
6 |
7 | x.foo()
| ^
|
"###);
}
#[test]
fn goto_between_call_arguments() {
let test = cursor_test(
r#"
def foo(a, b): ...
foo<CURSOR>()
"#,
);
assert_snapshot!(test.goto_type_definition(), @r###"
info: lint:goto-type-definition: Type definition
--> /main.py:2:17
|
2 | def foo(a, b): ...
| ^^^
3 |
4 | foo()
|
info: Source
--> /main.py:4:13
|
2 | def foo(a, b): ...
3 |
4 | foo()
| ^^^
|
"###);
}
#[test]
fn goto_type_narrowing() {
let test = cursor_test(
r#"
def foo(a: str | None, b):
if a is not None:
print(a<CURSOR>)
"#,
);
assert_snapshot!(test.goto_type_definition(), @r"
info: lint:goto-type-definition: Type definition
--> stdlib/builtins.pyi:438:7
|
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
437 |
438 | class str(Sequence[str]):
| ^^^
439 | @overload
440 | def __new__(cls, object: object = ...) -> Self: ...
|
info: Source
--> /main.py:4:27
|
2 | def foo(a: str | None, b):
3 | if a is not None:
4 | print(a)
| ^
|
");
}
#[test]
fn goto_type_none() {
let test = cursor_test(
r#"
def foo(a: str | None, b):
a<CURSOR>
"#,
);
assert_snapshot!(test.goto_type_definition(), @r"
info: lint:goto-type-definition: Type definition
--> stdlib/types.pyi:671:11
|
669 | if sys.version_info >= (3, 10):
670 | @final
671 | class NoneType:
| ^^^^^^^^
672 | def __bool__(self) -> Literal[False]: ...
|
info: Source
--> /main.py:3:17
|
2 | def foo(a: str | None, b):
3 | a
| ^
|
info: lint:goto-type-definition: Type definition
--> stdlib/builtins.pyi:438:7
|
436 | def __getitem__(self, key: int, /) -> str | int | None: ...
437 |
438 | class str(Sequence[str]):
| ^^^
439 | @overload
440 | def __new__(cls, object: object = ...) -> Self: ...
|
info: Source
--> /main.py:3:17
|
2 | def foo(a: str | None, b):
3 | a
| ^
|
");
}
impl CursorTest {
fn goto_type_definition(&self) -> String {
let Some(targets) = goto_type_definition(&self.db, self.file, self.cursor_offset)
else {
return "No goto target found".to_string();
};
if targets.is_empty() {
return "No type definitions found".to_string();
}
let source = targets.range;
self.render_diagnostics(
targets
.into_iter()
.map(|target| GotoTypeDefinitionDiagnostic::new(source, &target)),
)
}
}
struct GotoTypeDefinitionDiagnostic {
source: FileRange,
target: FileRange,
}
impl GotoTypeDefinitionDiagnostic {
fn new(source: FileRange, target: &NavigationTarget) -> Self {
Self {
source,
target: FileRange::new(target.file(), target.focus_range()),
}
}
}
impl IntoDiagnostic for GotoTypeDefinitionDiagnostic {
fn into_diagnostic(self) -> Diagnostic {
let mut source = SubDiagnostic::new(Severity::Info, "Source");
source.annotate(Annotation::primary(
Span::from(self.source.file()).with_range(self.source.range()),
));
let mut main = Diagnostic::new(
DiagnosticId::Lint(LintName::of("goto-type-definition")),
Severity::Info,
"Type definition".to_string(),
);
main.annotate(Annotation::primary(
Span::from(self.target.file()).with_range(self.target.range()),
));
main.sub(source);
main
}
}
}

View File

@@ -1,602 +0,0 @@
use crate::goto::{find_goto_target, GotoTarget};
use crate::{Db, MarkupKind, RangedValue};
use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::SemanticModel;
use ruff_db::files::{File, FileRange};
use ruff_db::parsed::parsed_module;
use ruff_text_size::{Ranged, TextSize};
use std::fmt;
use std::fmt::Formatter;
pub fn hover(db: &dyn Db, file: File, offset: TextSize) -> Option<RangedValue<Hover>> {
let parsed = parsed_module(db.upcast(), file);
let goto_target = find_goto_target(parsed, offset)?;
if let GotoTarget::Expression(expr) = goto_target {
if expr.is_literal_expr() {
return None;
}
}
let model = SemanticModel::new(db.upcast(), file);
let ty = goto_target.inferred_type(&model)?;
tracing::debug!(
"Inferred type of covering node is {}",
ty.display(db.upcast())
);
// TODO: Add documentation of the symbol (not the type's definition).
// TODO: Render the symbol's signature instead of just its type.
let contents = vec![HoverContent::Type(ty)];
Some(RangedValue {
range: FileRange::new(file, goto_target.range()),
value: Hover { contents },
})
}
pub struct Hover<'db> {
contents: Vec<HoverContent<'db>>,
}
impl<'db> Hover<'db> {
/// Renders the hover to a string using the specified markup kind.
pub const fn display<'a>(&'a self, db: &'a dyn Db, kind: MarkupKind) -> DisplayHover<'a> {
DisplayHover {
db,
hover: self,
kind,
}
}
fn iter(&self) -> std::slice::Iter<'_, HoverContent<'db>> {
self.contents.iter()
}
}
impl<'db> IntoIterator for Hover<'db> {
type Item = HoverContent<'db>;
type IntoIter = std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
self.contents.into_iter()
}
}
impl<'a, 'db> IntoIterator for &'a Hover<'db> {
type Item = &'a HoverContent<'db>;
type IntoIter = std::slice::Iter<'a, HoverContent<'db>>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
pub struct DisplayHover<'a> {
db: &'a dyn Db,
hover: &'a Hover<'a>,
kind: MarkupKind,
}
impl fmt::Display for DisplayHover<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let mut first = true;
for content in &self.hover.contents {
if !first {
self.kind.horizontal_line().fmt(f)?;
}
content.display(self.db, self.kind).fmt(f)?;
first = false;
}
Ok(())
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum HoverContent<'db> {
Type(Type<'db>),
}
impl<'db> HoverContent<'db> {
fn display(&self, db: &'db dyn Db, kind: MarkupKind) -> DisplayHoverContent<'_, 'db> {
DisplayHoverContent {
db,
content: self,
kind,
}
}
}
pub(crate) struct DisplayHoverContent<'a, 'db> {
db: &'db dyn Db,
content: &'a HoverContent<'db>,
kind: MarkupKind,
}
impl fmt::Display for DisplayHoverContent<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.content {
HoverContent::Type(ty) => self
.kind
.fenced_code_block(ty.display(self.db.upcast()), "text")
.fmt(f),
}
}
}
#[cfg(test)]
mod tests {
use crate::tests::{cursor_test, CursorTest};
use crate::{hover, MarkupKind};
use insta::assert_snapshot;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig, LintName,
Severity, Span,
};
use ruff_text_size::{Ranged, TextRange};
#[test]
fn hover_basic() {
let test = cursor_test(
r#"
a = 10
a<CURSOR>
"#,
);
assert_snapshot!(test.hover(), @r"
Literal[10]
---------------------------------------------
```text
Literal[10]
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:4:9
|
2 | a = 10
3 |
4 | a
| ^- Cursor offset
| |
| source
|
");
}
#[test]
fn hover_member() {
let test = cursor_test(
r#"
class Foo:
a: int = 10
def __init__(a: int, b: str):
self.a = a
self.b: str = b
foo = Foo()
foo.<CURSOR>a
"#,
);
assert_snapshot!(test.hover(), @r"
int
---------------------------------------------
```text
int
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:10:9
|
9 | foo = Foo()
10 | foo.a
| ^^^^-
| | |
| | Cursor offset
| source
|
");
}
#[test]
fn hover_function_typed_variable() {
let test = cursor_test(
r#"
def foo(a, b): ...
foo<CURSOR>
"#,
);
assert_snapshot!(test.hover(), @r###"
def foo(a, b) -> Unknown
---------------------------------------------
```text
def foo(a, b) -> Unknown
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:4:13
|
2 | def foo(a, b): ...
3 |
4 | foo
| ^^^- Cursor offset
| |
| source
|
"###);
}
#[test]
fn hover_binary_expression() {
let test = cursor_test(
r#"
def foo(a: int, b: int, c: int):
a + b ==<CURSOR> c
"#,
);
assert_snapshot!(test.hover(), @r"
bool
---------------------------------------------
```text
bool
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:3:17
|
2 | def foo(a: int, b: int, c: int):
3 | a + b == c
| ^^^^^^^^-^
| | |
| | Cursor offset
| source
|
");
}
#[test]
fn hover_keyword_parameter() {
let test = cursor_test(
r#"
def test(a: int): ...
test(a<CURSOR>= 123)
"#,
);
// TODO: This should reveal `int` because the user hovers over the parameter and not the value.
assert_snapshot!(test.hover(), @r"
Literal[123]
---------------------------------------------
```text
Literal[123]
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:4:18
|
2 | def test(a: int): ...
3 |
4 | test(a= 123)
| ^- Cursor offset
| |
| source
|
");
}
#[test]
fn hover_union() {
let test = cursor_test(
r#"
def foo(a, b): ...
def bar(a, b): ...
if random.choice([True, False]):
a = foo
else:
a = bar
a<CURSOR>
"#,
);
assert_snapshot!(test.hover(), @r###"
(def foo(a, b) -> Unknown) | (def bar(a, b) -> Unknown)
---------------------------------------------
```text
(def foo(a, b) -> Unknown) | (def bar(a, b) -> Unknown)
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:12:13
|
10 | a = bar
11 |
12 | a
| ^- Cursor offset
| |
| source
|
"###);
}
#[test]
fn hover_module() {
let mut test = cursor_test(
r#"
import lib
li<CURSOR>b
"#,
);
test.write_file("lib.py", "a = 10").unwrap();
assert_snapshot!(test.hover(), @r"
<module 'lib'>
---------------------------------------------
```text
<module 'lib'>
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:4:13
|
2 | import lib
3 |
4 | lib
| ^^-
| | |
| | Cursor offset
| source
|
");
}
#[test]
fn hover_type_of_expression_with_type_var_type() {
let test = cursor_test(
r#"
type Alias[T: int = bool] = list[T<CURSOR>]
"#,
);
assert_snapshot!(test.hover(), @r"
T
---------------------------------------------
```text
T
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:2:46
|
2 | type Alias[T: int = bool] = list[T]
| ^- Cursor offset
| |
| source
|
");
}
#[test]
fn hover_type_of_expression_with_type_param_spec() {
let test = cursor_test(
r#"
type Alias[**P = [int, str]] = Callable[P<CURSOR>, int]
"#,
);
assert_snapshot!(test.hover(), @r"
@Todo
---------------------------------------------
```text
@Todo
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:2:53
|
2 | type Alias[**P = [int, str]] = Callable[P, int]
| ^- Cursor offset
| |
| source
|
");
}
#[test]
fn hover_type_of_expression_with_type_var_tuple() {
let test = cursor_test(
r#"
type Alias[*Ts = ()] = tuple[*Ts<CURSOR>]
"#,
);
assert_snapshot!(test.hover(), @r"
@Todo
---------------------------------------------
```text
@Todo
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:2:43
|
2 | type Alias[*Ts = ()] = tuple[*Ts]
| ^^- Cursor offset
| |
| source
|
");
}
#[test]
fn hover_class_member_declaration() {
let test = cursor_test(
r#"
class Foo:
a<CURSOR>: int
"#,
);
// TODO: This should be int and not `Never`, https://github.com/astral-sh/ruff/issues/17122
assert_snapshot!(test.hover(), @r"
Never
---------------------------------------------
```text
Never
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:3:13
|
2 | class Foo:
3 | a: int
| ^- Cursor offset
| |
| source
|
");
}
#[test]
fn hover_type_narrowing() {
let test = cursor_test(
r#"
def foo(a: str | None, b):
if a is not None:
print(a<CURSOR>)
"#,
);
assert_snapshot!(test.hover(), @r"
str
---------------------------------------------
```text
str
```
---------------------------------------------
info: lint:hover: Hovered content is
--> /main.py:4:27
|
2 | def foo(a: str | None, b):
3 | if a is not None:
4 | print(a)
| ^- Cursor offset
| |
| source
|
");
}
#[test]
fn hover_whitespace() {
let test = cursor_test(
r#"
class C:
<CURSOR>
foo: str = 'bar'
"#,
);
assert_snapshot!(test.hover(), @"Hover provided no content");
}
#[test]
fn hover_literal_int() {
let test = cursor_test(
r#"
print(
0 + 1<CURSOR>
)
"#,
);
assert_snapshot!(test.hover(), @"Hover provided no content");
}
#[test]
fn hover_literal_ellipsis() {
let test = cursor_test(
r#"
print(
.<CURSOR>..
)
"#,
);
assert_snapshot!(test.hover(), @"Hover provided no content");
}
#[test]
fn hover_docstring() {
let test = cursor_test(
r#"
def f():
"""Lorem ipsum dolor sit amet.<CURSOR>"""
"#,
);
assert_snapshot!(test.hover(), @"Hover provided no content");
}
impl CursorTest {
fn hover(&self) -> String {
use std::fmt::Write;
let Some(hover) = hover(&self.db, self.file, self.cursor_offset) else {
return "Hover provided no content".to_string();
};
let source = hover.range;
let mut buf = String::new();
write!(
&mut buf,
"{plaintext}{line}{markdown}{line}",
plaintext = hover.display(&self.db, MarkupKind::PlainText),
line = MarkupKind::PlainText.horizontal_line(),
markdown = hover.display(&self.db, MarkupKind::Markdown),
)
.unwrap();
let config = DisplayDiagnosticConfig::default()
.color(false)
.format(DiagnosticFormat::Full);
let mut diagnostic = Diagnostic::new(
DiagnosticId::Lint(LintName::of("hover")),
Severity::Info,
"Hovered content is",
);
diagnostic.annotate(
Annotation::primary(Span::from(source.file()).with_range(source.range()))
.message("source"),
);
diagnostic.annotate(
Annotation::secondary(
Span::from(source.file()).with_range(TextRange::empty(self.cursor_offset)),
)
.message("Cursor offset"),
);
write!(buf, "{}", diagnostic.display(&self.db, &config)).unwrap();
buf
}
}
}

View File

@@ -1,279 +0,0 @@
use crate::Db;
use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasType, SemanticModel};
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor, TraversalSignal};
use ruff_python_ast::{AnyNodeRef, Expr, Stmt};
use ruff_text_size::{Ranged, TextRange, TextSize};
use std::fmt;
use std::fmt::Formatter;
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct InlayHint<'db> {
pub position: TextSize,
pub content: InlayHintContent<'db>,
}
impl<'db> InlayHint<'db> {
pub const fn display(&self, db: &'db dyn Db) -> DisplayInlayHint<'_, 'db> {
self.content.display(db)
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum InlayHintContent<'db> {
Type(Type<'db>),
ReturnType(Type<'db>),
}
impl<'db> InlayHintContent<'db> {
pub const fn display(&self, db: &'db dyn Db) -> DisplayInlayHint<'_, 'db> {
DisplayInlayHint { db, hint: self }
}
}
pub struct DisplayInlayHint<'a, 'db> {
db: &'db dyn Db,
hint: &'a InlayHintContent<'db>,
}
impl fmt::Display for DisplayInlayHint<'_, '_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self.hint {
InlayHintContent::Type(ty) => {
write!(f, ": {}", ty.display(self.db.upcast()))
}
InlayHintContent::ReturnType(ty) => {
write!(f, " -> {}", ty.display(self.db.upcast()))
}
}
}
}
pub fn inlay_hints(db: &dyn Db, file: File, range: TextRange) -> Vec<InlayHint<'_>> {
let mut visitor = InlayHintVisitor::new(db, file, range);
let ast = parsed_module(db.upcast(), file);
visitor.visit_body(ast.suite());
visitor.hints
}
struct InlayHintVisitor<'db> {
model: SemanticModel<'db>,
hints: Vec<InlayHint<'db>>,
in_assignment: bool,
range: TextRange,
}
impl<'db> InlayHintVisitor<'db> {
fn new(db: &'db dyn Db, file: File, range: TextRange) -> Self {
Self {
model: SemanticModel::new(db.upcast(), file),
hints: Vec::new(),
in_assignment: false,
range,
}
}
fn add_type_hint(&mut self, position: TextSize, ty: Type<'db>) {
self.hints.push(InlayHint {
position,
content: InlayHintContent::Type(ty),
});
}
}
impl SourceOrderVisitor<'_> for InlayHintVisitor<'_> {
fn enter_node(&mut self, node: AnyNodeRef<'_>) -> TraversalSignal {
if self.range.intersect(node.range()).is_some() {
TraversalSignal::Traverse
} else {
TraversalSignal::Skip
}
}
fn visit_stmt(&mut self, stmt: &Stmt) {
let node = AnyNodeRef::from(stmt);
if !self.enter_node(node).is_traverse() {
return;
}
match stmt {
Stmt::Assign(assign) => {
self.in_assignment = true;
for target in &assign.targets {
self.visit_expr(target);
}
self.in_assignment = false;
return;
}
// TODO
Stmt::FunctionDef(_) => {}
Stmt::For(_) => {}
Stmt::Expr(_) => {
// Don't traverse into expression statements because we don't show any hints.
return;
}
_ => {}
}
source_order::walk_stmt(self, stmt);
}
fn visit_expr(&mut self, expr: &'_ Expr) {
if !self.in_assignment {
return;
}
match expr {
Expr::Name(name) => {
if name.ctx.is_store() {
let ty = expr.inferred_type(&self.model);
self.add_type_hint(expr.range().end(), ty);
}
}
_ => {
source_order::walk_expr(self, expr);
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use insta::assert_snapshot;
use ruff_db::{
files::{system_path_to_file, File},
source::source_text,
};
use ruff_text_size::TextSize;
use crate::db::tests::TestDb;
use red_knot_python_semantic::{
Program, ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings,
};
use ruff_db::system::{DbWithWritableSystem, SystemPathBuf};
use ruff_python_ast::PythonVersion;
pub(super) fn inlay_hint_test(source: &str) -> InlayHintTest {
const START: &str = "<START>";
const END: &str = "<END>";
let mut db = TestDb::new();
let start = source.find(START);
let end = source
.find(END)
.map(|x| if start.is_some() { x - START.len() } else { x })
.unwrap_or(source.len());
let range = TextRange::new(
TextSize::try_from(start.unwrap_or_default()).unwrap(),
TextSize::try_from(end).unwrap(),
);
let source = source.replace(START, "");
let source = source.replace(END, "");
db.write_file("main.py", source)
.expect("write to memory file system to be successful");
let file = system_path_to_file(&db, "main.py").expect("newly written file to exist");
Program::from_settings(
&db,
ProgramSettings {
python_version: PythonVersion::latest(),
python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_roots: vec![SystemPathBuf::from("/")],
custom_typeshed: None,
python_path: PythonPath::KnownSitePackages(vec![]),
},
},
)
.expect("Default settings to be valid");
InlayHintTest { db, file, range }
}
pub(super) struct InlayHintTest {
pub(super) db: TestDb,
pub(super) file: File,
pub(super) range: TextRange,
}
impl InlayHintTest {
fn inlay_hints(&self) -> String {
let hints = inlay_hints(&self.db, self.file, self.range);
let mut buf = source_text(&self.db, self.file).as_str().to_string();
let mut offset = 0;
for hint in hints {
let end_position = (hint.position.to_u32() as usize) + offset;
let hint_str = format!("[{}]", hint.display(&self.db));
buf.insert_str(end_position, &hint_str);
offset += hint_str.len();
}
buf
}
}
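
The offset bookkeeping in `inlay_hints` above is the standard splice trick: hint positions are computed against the original text, so each insertion shifts every later position by the length of the text inserted so far. A standalone sketch with a hypothetical hint list:

```rust
fn main() {
    let mut buf = String::from("x = 1");
    // (original_position, text) pairs, assumed sorted by position.
    let hints = [(1usize, "[: Literal[1]]")];
    let mut offset = 0;
    for (position, text) in hints {
        // Shift the original position by everything inserted before it.
        buf.insert_str(position + offset, text);
        offset += text.len();
    }
    assert_eq!(buf, "x[: Literal[1]] = 1");
}
```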
#[test]
fn test_assign_statement() {
let test = inlay_hint_test("x = 1");
assert_snapshot!(test.inlay_hints(), @r"
x[: Literal[1]] = 1
");
}
#[test]
fn test_tuple_assignment() {
let test = inlay_hint_test("x, y = (1, 'abc')");
assert_snapshot!(test.inlay_hints(), @r#"
x[: Literal[1]], y[: Literal["abc"]] = (1, 'abc')
"#);
}
#[test]
fn test_nested_tuple_assignment() {
let test = inlay_hint_test("x, (y, z) = (1, ('abc', 2))");
assert_snapshot!(test.inlay_hints(), @r#"
x[: Literal[1]], (y[: Literal["abc"]], z[: Literal[2]]) = (1, ('abc', 2))
"#);
}
#[test]
fn test_assign_statement_with_type_annotation() {
let test = inlay_hint_test("x: int = 1");
assert_snapshot!(test.inlay_hints(), @r"
x: int = 1
");
}
#[test]
fn test_assign_statement_out_of_range() {
let test = inlay_hint_test("<START>x = 1<END>\ny = 2");
assert_snapshot!(test.inlay_hints(), @r"
x[: Literal[1]] = 1
y = 2
");
}
}

View File

@@ -1,296 +0,0 @@
mod db;
mod find_node;
mod goto;
mod hover;
mod inlay_hints;
mod markup;
pub use db::Db;
pub use goto::goto_type_definition;
pub use hover::hover;
pub use inlay_hints::inlay_hints;
pub use markup::MarkupKind;
use rustc_hash::FxHashSet;
use std::ops::{Deref, DerefMut};
use red_knot_python_semantic::types::{Type, TypeDefinition};
use ruff_db::files::{File, FileRange};
use ruff_text_size::{Ranged, TextRange};
/// Information associated with a text range.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct RangedValue<T> {
pub range: FileRange,
pub value: T,
}
impl<T> RangedValue<T> {
pub fn file_range(&self) -> FileRange {
self.range
}
}
impl<T> Deref for RangedValue<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}
impl<T> DerefMut for RangedValue<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.value
}
}
impl<T> IntoIterator for RangedValue<T>
where
T: IntoIterator,
{
type Item = T::Item;
type IntoIter = T::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
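
The `Deref`/`DerefMut` impls above let a `RangedValue<T>` be used wherever a `T` is expected while still carrying its `FileRange`. A simplified, self-contained stand-in (a plain tuple instead of `FileRange`) illustrating the pattern:

```rust
use std::ops::Deref;

// Simplified stand-in for `RangedValue<T>`: the wrapper derefs to the value.
struct Ranged<T> {
    range: (u32, u32),
    value: T,
}

impl<T> Deref for Ranged<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.value
    }
}

fn main() {
    let hover = Ranged {
        range: (4, 5),
        value: String::from("Literal[10]"),
    };
    // `String`/`str` methods are available directly through auto-deref...
    assert!(hover.starts_with("Literal"));
    // ...while the range stays accessible on the wrapper itself.
    assert_eq!(hover.range, (4, 5));
}
```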
/// Target to which the editor can navigate.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct NavigationTarget {
file: File,
/// The range that should be focused when navigating to the target.
///
/// This is typically not the full range of the node. For example, it's the range of the class's name in a class definition.
///
/// The `focus_range` must be fully covered by `full_range`.
focus_range: TextRange,
/// The range covering the entire target.
full_range: TextRange,
}
impl NavigationTarget {
pub fn file(&self) -> File {
self.file
}
pub fn focus_range(&self) -> TextRange {
self.focus_range
}
pub fn full_range(&self) -> TextRange {
self.full_range
}
}
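
As the doc comments note, `focus_range` is the small span an editor highlights (e.g. the class name) while `full_range` covers the whole definition, and the former must lie within the latter. A hedged illustration with hand-picked offsets for `class Foo: ...`:

```rust
use ruff_text_size::{TextRange, TextSize};

fn main() {
    // `class Foo: ...`, with offsets chosen by hand for illustration.
    let full_range = TextRange::new(TextSize::from(0), TextSize::from(14));
    let focus_range = TextRange::new(TextSize::from(6), TextSize::from(9)); // `Foo`
    // The invariant stated above: the focus range is covered by the full range.
    assert!(full_range.contains_range(focus_range));
}
```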
#[derive(Debug, Clone)]
pub struct NavigationTargets(smallvec::SmallVec<[NavigationTarget; 1]>);
impl NavigationTargets {
fn single(target: NavigationTarget) -> Self {
Self(smallvec::smallvec![target])
}
fn empty() -> Self {
Self(smallvec::SmallVec::new())
}
fn unique(targets: impl IntoIterator<Item = NavigationTarget>) -> Self {
let unique: FxHashSet<_> = targets.into_iter().collect();
if unique.is_empty() {
Self::empty()
} else {
let mut targets = unique.into_iter().collect::<Vec<_>>();
targets.sort_by_key(|target| (target.file, target.focus_range.start()));
Self(targets.into())
}
}
fn iter(&self) -> std::slice::Iter<'_, NavigationTarget> {
self.0.iter()
}
#[cfg(test)]
fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl IntoIterator for NavigationTargets {
type Item = NavigationTarget;
type IntoIter = smallvec::IntoIter<[NavigationTarget; 1]>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl<'a> IntoIterator for &'a NavigationTargets {
type Item = &'a NavigationTarget;
type IntoIter = std::slice::Iter<'a, NavigationTarget>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl FromIterator<NavigationTarget> for NavigationTargets {
fn from_iter<T: IntoIterator<Item = NavigationTarget>>(iter: T) -> Self {
Self::unique(iter)
}
}
pub trait HasNavigationTargets {
fn navigation_targets(&self, db: &dyn Db) -> NavigationTargets;
}
impl HasNavigationTargets for Type<'_> {
fn navigation_targets(&self, db: &dyn Db) -> NavigationTargets {
match self {
Type::Union(union) => union
.iter(db.upcast())
.flat_map(|target| target.navigation_targets(db))
.collect(),
Type::Intersection(intersection) => {
// Only consider the positive elements because the negative elements are mainly from narrowing constraints.
let mut targets = intersection
.iter_positive(db.upcast())
.filter(|ty| !ty.is_unknown());
let Some(first) = targets.next() else {
return NavigationTargets::empty();
};
match targets.next() {
Some(_) => {
// If there are multiple types in the intersection, we can't navigate to a single one
// because the type is the intersection of all those types.
NavigationTargets::empty()
}
None => first.navigation_targets(db),
}
}
ty => ty
.definition(db.upcast())
.map(|definition| definition.navigation_targets(db))
.unwrap_or_else(NavigationTargets::empty),
}
}
}
impl HasNavigationTargets for TypeDefinition<'_> {
fn navigation_targets(&self, db: &dyn Db) -> NavigationTargets {
let full_range = self.full_range(db.upcast());
NavigationTargets::single(NavigationTarget {
file: full_range.file(),
focus_range: self.focus_range(db.upcast()).unwrap_or(full_range).range(),
full_range: full_range.range(),
})
}
}
#[cfg(test)]
mod tests {
use crate::db::tests::TestDb;
use insta::internals::SettingsBindDropGuard;
use red_knot_python_semantic::{
Program, ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings,
};
use ruff_db::diagnostic::{Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::{DbWithWritableSystem, SystemPath, SystemPathBuf};
use ruff_python_ast::PythonVersion;
use ruff_text_size::TextSize;
pub(super) fn cursor_test(source: &str) -> CursorTest {
let mut db = TestDb::new();
let cursor_offset = source.find("<CURSOR>").expect(
"`source` should contain a `<CURSOR>` marker indicating the position of the cursor.",
);
let mut content = source[..cursor_offset].to_string();
content.push_str(&source[cursor_offset + "<CURSOR>".len()..]);
db.write_file("main.py", &content)
.expect("write to memory file system to be successful");
let file = system_path_to_file(&db, "main.py").expect("newly written file to exist");
Program::from_settings(
&db,
ProgramSettings {
python_version: PythonVersion::latest(),
python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_roots: vec![SystemPathBuf::from("/")],
custom_typeshed: None,
python_path: PythonPath::KnownSitePackages(vec![]),
},
},
)
.expect("Default settings to be valid");
let mut insta_settings = insta::Settings::clone_current();
insta_settings.add_filter(r#"\\(\w\w|\s|\.|")"#, "/$1");
// Filter out TODO types because they are different between debug and release builds.
insta_settings.add_filter(r"@Todo\(.+\)", "@Todo");
let insta_settings_guard = insta_settings.bind_to_scope();
CursorTest {
db,
cursor_offset: TextSize::try_from(cursor_offset)
.expect("source to be smaller than 4GB"),
file,
_insta_settings_guard: insta_settings_guard,
}
}
pub(super) struct CursorTest {
pub(super) db: TestDb,
pub(super) cursor_offset: TextSize,
pub(super) file: File,
_insta_settings_guard: SettingsBindDropGuard,
}
impl CursorTest {
pub(super) fn write_file(
&mut self,
path: impl AsRef<SystemPath>,
content: &str,
) -> std::io::Result<()> {
self.db.write_file(path, content)
}
pub(super) fn render_diagnostics<I, D>(&self, diagnostics: I) -> String
where
I: IntoIterator<Item = D>,
D: IntoDiagnostic,
{
use std::fmt::Write;
let mut buf = String::new();
let config = DisplayDiagnosticConfig::default()
.color(false)
.format(DiagnosticFormat::Full);
for diagnostic in diagnostics {
let diag = diagnostic.into_diagnostic();
write!(buf, "{}", diag.display(&self.db, &config)).unwrap();
}
buf
}
}
pub(super) trait IntoDiagnostic {
fn into_diagnostic(self) -> Diagnostic;
}
}

View File

@@ -1,66 +0,0 @@
use std::fmt;
use std::fmt::Formatter;
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum MarkupKind {
PlainText,
Markdown,
}
impl MarkupKind {
pub(crate) const fn fenced_code_block<T>(self, code: T, language: &str) -> FencedCodeBlock<T>
where
T: fmt::Display,
{
FencedCodeBlock {
language,
code,
kind: self,
}
}
pub(crate) const fn horizontal_line(self) -> HorizontalLine {
HorizontalLine { kind: self }
}
}
pub(crate) struct FencedCodeBlock<'a, T> {
language: &'a str,
code: T,
kind: MarkupKind,
}
impl<T> fmt::Display for FencedCodeBlock<'_, T>
where
T: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {
MarkupKind::PlainText => self.code.fmt(f),
MarkupKind::Markdown => write!(
f,
"```{language}\n{code}\n```",
language = self.language,
code = self.code
),
}
}
}
#[derive(Debug, Copy, Clone)]
pub(crate) struct HorizontalLine {
kind: MarkupKind,
}
impl fmt::Display for HorizontalLine {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self.kind {
MarkupKind::PlainText => {
f.write_str("\n---------------------------------------------\n")
}
MarkupKind::Markdown => {
write!(f, "\n---\n")
}
}
}
}
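
Given the two `Display` impls above, the same content renders either verbatim or wrapped in a Markdown fence. A quick sketch of the expected output (this assumes crate-internal access, since the helpers are `pub(crate)`):

````rust
// Inside this crate:
let plain = MarkupKind::PlainText
    .fenced_code_block("x: int", "python")
    .to_string();
assert_eq!(plain, "x: int");

let markdown = MarkupKind::Markdown
    .fenced_code_block("x: int", "python")
    .to_string();
assert_eq!(markdown, "```python\nx: int\n```");
````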

View File

@@ -13,12 +13,10 @@ license.workspace = true
[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["cache", "serde"] }
ruff_db = { workspace = true, features = ["os", "cache", "serde"] }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true, features = ["serde"] }
ruff_python_formatter = { workspace = true, optional = true }
ruff_text_size = { workspace = true }
red_knot_ide = { workspace = true }
red_knot_python_semantic = { workspace = true, features = ["serde"] }
red_knot_vendored = { workspace = true }
@@ -26,11 +24,10 @@ anyhow = { workspace = true }
crossbeam = { workspace = true }
glob = { workspace = true }
notify = { workspace = true }
pep440_rs = { workspace = true, features = ["version-ranges"] }
pep440_rs = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
@@ -43,14 +40,8 @@ insta = { workspace = true, features = ["redactions", "ron"] }
[features]
default = ["zstd"]
deflate = ["red_knot_vendored/deflate"]
schemars = [
"dep:schemars",
"ruff_db/schemars",
"red_knot_python_semantic/schemars",
]
zstd = ["red_knot_vendored/zstd"]
format = ["ruff_python_formatter"]
deflate = ["red_knot_vendored/deflate"]
[lints]
workspace = true

View File

@@ -1,15 +0,0 @@
# Regression test for https://github.com/astral-sh/ruff/issues/17215
# panicked in commit 1a6a10b30
# error message:
# dependency graph cycle querying all_narrowing_constraints_for_expression(Id(8591))
def f(a: A, b: B, c: C):
unknown_a: UA = make_unknown()
unknown_b: UB = make_unknown()
unknown_c: UC = make_unknown()
unknown_d: UD = make_unknown()
if unknown_a and unknown_b:
if unknown_c:
if unknown_d:
return a, b, c

View File

@@ -1,22 +0,0 @@
# Regression test for https://github.com/astral-sh/ruff/issues/17215
# panicked in commit 1a6a10b30
# error message:
# dependency graph cycle querying all_negative_narrowing_constraints_for_expression(Id(859f))
def f(f1: bool, f2: bool, f3: bool, f4: bool):
o1: UnknownClass = make_o()
o2: UnknownClass = make_o()
o3: UnknownClass = make_o()
o4: UnknownClass = make_o()
if f1 and f2 and f3 and f4:
if o1 == o2:
return None
if o2 == o3:
return None
if o3 == o4:
return None
if o4 == o1:
return None
return o1, o2, o3, o4

View File

@@ -1,8 +1,7 @@
use std::{collections::HashMap, hash::BuildHasher};
use red_knot_python_semantic::{PythonPath, PythonPlatform};
use red_knot_python_semantic::{PythonPlatform, PythonVersion, SitePackages};
use ruff_db::system::SystemPathBuf;
use ruff_python_ast::PythonVersion;
/// Combine two values, preferring the values in `self`.
///
@@ -43,7 +42,7 @@ use ruff_python_ast::PythonVersion;
/// resolved extra search path of `["b", "c", "a"]`, which means `a` will be tried last.
///
/// There's an argument here that the user should be able to specify the order of the paths,
/// because only then is the user in full control of where to insert the path when specifying `extra-paths`
/// because only then is the user in full control of where to insert the path when specyifing `extra-paths`
/// in multiple sources.
///
/// ## Macro
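
A tiny sketch of the resolution order described in the doc comment above (a hypothetical helper, not the actual `Combine` trait): values from the higher-precedence source come first and are therefore tried first, while lower-precedence values are appended.

```rust
// Hypothetical helper mirroring the documented behavior: CLI paths win by
// coming first; `pyproject.toml` paths are appended and tried last.
fn combine_extra_paths<'a>(cli: &[&'a str], pyproject: &[&'a str]) -> Vec<&'a str> {
    let mut combined = cli.to_vec();
    combined.extend_from_slice(pyproject);
    combined
}

fn main() {
    assert_eq!(combine_extra_paths(&["b", "c"], &["a"]), ["b", "c", "a"]);
}
```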
@@ -128,7 +127,7 @@ macro_rules! impl_noop_combine {
impl_noop_combine!(SystemPathBuf);
impl_noop_combine!(PythonPlatform);
impl_noop_combine!(PythonPath);
impl_noop_combine!(SitePackages);
impl_noop_combine!(PythonVersion);
// std types

View File

@@ -2,8 +2,7 @@ use std::panic::RefUnwindSafe;
use std::sync::Arc;
use crate::DEFAULT_LINT_REGISTRY;
use crate::{Project, ProjectMetadata};
use red_knot_ide::Db as IdeDb;
use crate::{check_file, Project, ProjectMetadata};
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
use red_knot_python_semantic::{Db as SemanticDb, Program};
use ruff_db::diagnostic::Diagnostic;
@@ -28,6 +27,7 @@ pub struct ProjectDatabase {
storage: salsa::Storage<ProjectDatabase>,
files: Files,
system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
rule_selection: Arc<RuleSelection>,
}
impl ProjectDatabase {
@@ -35,11 +35,14 @@ impl ProjectDatabase {
where
S: System + 'static + Send + Sync + RefUnwindSafe,
{
let rule_selection = RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY);
let mut db = Self {
project: None,
storage: salsa::Storage::default(),
files: Files::default(),
system: Arc::new(system),
rule_selection: Arc::new(rule_selection),
};
// TODO: Use the `program_settings` to compute the key for the database's persistent
@@ -56,13 +59,14 @@ impl ProjectDatabase {
}
/// Checks all open files in the project and its dependencies.
pub fn check(&self) -> Result<Vec<Diagnostic>, Cancelled> {
pub fn check(&self) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
self.with_db(|db| db.project().check(db))
}
#[tracing::instrument(level = "debug", skip(self))]
pub fn check_file(&self, file: File) -> Result<Vec<Diagnostic>, Cancelled> {
self.with_db(|db| self.project().check_file(db, file))
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();
self.with_db(|db| check_file(db, file))
}
/// Returns a mutable reference to the system.
@@ -104,19 +108,6 @@ impl Upcast<dyn SourceDb> for ProjectDatabase {
}
}
impl Upcast<dyn IdeDb> for ProjectDatabase {
fn upcast(&self) -> &(dyn IdeDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn IdeDb + 'static) {
self
}
}
#[salsa::db]
impl IdeDb for ProjectDatabase {}
#[salsa::db]
impl SemanticDb for ProjectDatabase {
fn is_file_open(&self, file: File) -> bool {
@@ -127,8 +118,8 @@ impl SemanticDb for ProjectDatabase {
project.is_file_open(self, file)
}
fn rule_selection(&self) -> Arc<RuleSelection> {
self.project().rules(self)
fn rule_selection(&self) -> &RuleSelection {
&self.rule_selection
}
fn lint_registry(&self) -> &LintRegistry {
@@ -149,10 +140,6 @@ impl SourceDb for ProjectDatabase {
fn files(&self) -> &Files {
&self.files
}
fn python_version(&self) -> ruff_python_ast::PythonVersion {
Program::get(self).python_version(self)
}
}
#[salsa::db]
@@ -163,7 +150,7 @@ impl salsa::Database for ProjectDatabase {
}
let event = event();
if matches!(event.kind, salsa::EventKind::WillCheckCancellation) {
if matches!(event.kind, salsa::EventKind::WillCheckCancellation { .. }) {
return;
}
@@ -178,32 +165,6 @@ impl Db for ProjectDatabase {
}
}
#[cfg(feature = "format")]
mod format {
use crate::ProjectDatabase;
use ruff_db::files::File;
use ruff_db::Upcast;
use ruff_python_formatter::{Db as FormatDb, PyFormatOptions};
#[salsa::db]
impl FormatDb for ProjectDatabase {
fn format_options(&self, file: File) -> PyFormatOptions {
let source_ty = file.source_type(self);
PyFormatOptions::from_source_type(source_ty)
}
}
impl Upcast<dyn FormatDb> for ProjectDatabase {
fn upcast(&self) -> &(dyn FormatDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn FormatDb + 'static) {
self
}
}
}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::Arc;
@@ -211,7 +172,7 @@ pub(crate) mod tests {
use salsa::Event;
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
use red_knot_python_semantic::{Db as SemanticDb, Program};
use red_knot_python_semantic::Db as SemanticDb;
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
@@ -229,6 +190,7 @@ pub(crate) mod tests {
files: Files,
system: TestSystem,
vendored: VendoredFileSystem,
rule_selection: RuleSelection,
project: Option<Project>,
}
@@ -240,6 +202,7 @@ pub(crate) mod tests {
vendored: red_knot_vendored::file_system().clone(),
files: Files::default(),
events: Arc::default(),
rule_selection: RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY),
project: None,
};
@@ -285,10 +248,6 @@ pub(crate) mod tests {
fn files(&self) -> &Files {
&self.files
}
fn python_version(&self) -> ruff_python_ast::PythonVersion {
Program::get(self).python_version(self)
}
}
impl Upcast<dyn SemanticDb> for TestDb {
@@ -315,8 +274,8 @@ pub(crate) mod tests {
!file.path(self).is_vendored_path()
}
fn rule_selection(&self) -> Arc<RuleSelection> {
self.project().rules(self)
fn rule_selection(&self) -> &RuleSelection {
&self.rule_selection
}
fn lint_registry(&self) -> &LintRegistry {

View File

@@ -2,11 +2,10 @@ use crate::db::{Db, ProjectDatabase};
use crate::metadata::options::Options;
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
use crate::{Project, ProjectMetadata};
use std::collections::BTreeSet;
use crate::walk::ProjectFilesWalker;
use red_knot_python_semantic::Program;
use ruff_db::files::{File, Files};
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::SystemPath;
use ruff_db::Db as _;
use rustc_hash::FxHashSet;
@@ -15,7 +14,7 @@ impl ProjectDatabase {
#[tracing::instrument(level = "debug", skip(self, changes, cli_options))]
pub fn apply_changes(&mut self, changes: Vec<ChangeEvent>, cli_options: Option<&Options>) {
let mut project = self.project();
let project_root = project.root(self).to_path_buf();
let project_path = project.root(self).to_path_buf();
let program = Program::get(self);
let custom_stdlib_versions_path = program
.custom_stdlib_search_path(self)
@@ -30,7 +29,7 @@ impl ProjectDatabase {
// Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
let mut synced_files = FxHashSet::default();
let mut sync_recursively = BTreeSet::default();
let mut synced_recursively = FxHashSet::default();
let mut sync_path = |db: &mut ProjectDatabase, path: &SystemPath| {
if synced_files.insert(path.to_path_buf()) {
@@ -38,13 +37,17 @@ impl ProjectDatabase {
}
};
for change in changes {
tracing::trace!("Handle change: {:?}", change);
let mut sync_recursively = |db: &mut ProjectDatabase, path: &SystemPath| {
if synced_recursively.insert(path.to_path_buf()) {
Files::sync_recursively(db, path);
}
};
for change in changes {
if let Some(path) = change.system_path() {
if matches!(
path.file_name(),
Some(".gitignore" | ".ignore" | "knot.toml" | "pyproject.toml")
Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
) {
// Changes to ignore files or settings can change the project structure or add/remove files.
project_changed = true;
@@ -66,27 +69,16 @@ impl ProjectDatabase {
match kind {
CreatedKind::File => sync_path(self, &path),
CreatedKind::Directory | CreatedKind::Any => {
sync_recursively.insert(path.clone());
sync_recursively(self, &path);
}
}
// Unlike other files, it's not only important to update the status of existing
// and known `File`s (`sync_recursively`), it's also important to discover new files
// that were added in the project's root (or any of the paths included for checking).
//
// This is important because `Project::check` iterates over all included files.
// The code below walks the `added_paths` and adds all files that
// should be included in the project. We can skip this check for
// paths that aren't part of the project or shouldn't be included
// when checking the project.
if project.is_path_included(self, &path) {
if self.system().is_file(&path) {
// Add the parent directory because `walkdir` always visits explicitly passed files
// even if they match an exclude filter.
added_paths.insert(path.parent().unwrap().to_path_buf());
} else {
added_paths.insert(path);
}
if self.system().is_file(&path) {
// Add the parent directory because `walkdir` always visits explicitly passed files
// even if they match an exclude filter.
added_paths.insert(path.parent().unwrap().to_path_buf());
} else {
added_paths.insert(path);
}
}
@@ -110,7 +102,7 @@ impl ProjectDatabase {
project.remove_file(self, file);
}
} else {
sync_recursively.insert(path.clone());
sync_recursively(self, &path);
if custom_stdlib_versions_path
.as_ref()
@@ -119,19 +111,11 @@ impl ProjectDatabase {
custom_stdlib_change = true;
}
if project.is_path_included(self, &path) || path == project_root {
// TODO: Shouldn't it be enough to simply traverse the project files and remove all
// that start with the given path?
tracing::debug!(
"Reload project because of a path that could have been a directory."
);
// Perform a full-reload in case the deleted directory contained the pyproject.toml.
// We may want to make this more clever in the future, to e.g. iterate over the
// indexed files and remove the ones that start with the same path, unless
// the deleted path is the project configuration.
project_changed = true;
}
// Perform a full-reload in case the deleted directory contained the pyproject.toml.
// We may want to make this more clever in the future, to e.g. iterate over the
// indexed files and remove the ones that start with the same path, unless
// the deleted path is the project configuration.
project_changed = true;
}
}
@@ -148,46 +132,24 @@ impl ProjectDatabase {
ChangeEvent::Rescan => {
project_changed = true;
Files::sync_all(self);
sync_recursively.clear();
break;
}
}
}
let sync_recursively = sync_recursively.into_iter();
let mut last = None;
for path in sync_recursively {
// Avoid re-syncing paths that are sub-paths of each other.
if let Some(last) = &last {
if path.starts_with(last) {
continue;
}
}
Files::sync_recursively(self, &path);
last = Some(path);
}
if project_changed {
match ProjectMetadata::discover(&project_root, self.system()) {
match ProjectMetadata::discover(&project_path, self.system()) {
Ok(mut metadata) => {
if let Some(cli_options) = cli_options {
metadata.apply_cli_options(cli_options.clone());
}
if let Err(error) = metadata.apply_configuration_files(self.system()) {
tracing::error!(
"Failed to apply configuration files, continuing without applying them: {error}"
);
}
let program_settings = metadata.to_program_settings(self.system());
let program = Program::get(self);
if let Err(error) = program.update_from_settings(self, program_settings) {
tracing::error!("Failed to update the program settings, keeping the old program settings: {error}");
}
};
if metadata.root() == project.root(self) {
tracing::debug!("Reloading project after structural change");
@@ -217,24 +179,43 @@ impl ProjectDatabase {
}
}
let diagnostics = if let Some(walker) = ProjectFilesWalker::incremental(self, added_paths) {
// Use directory walking to discover newly added files.
let (files, diagnostics) = walker.collect_vec(self);
let mut added_paths = added_paths.into_iter();
for file in files {
project.add_file(self, file);
// Use directory walking to discover newly added files.
if let Some(path) = added_paths.next() {
let mut walker = self.system().walk_directory(&path);
for extra_path in added_paths {
walker = walker.add(&extra_path);
}
diagnostics
} else {
Vec::new()
};
let added_paths = std::sync::Mutex::new(Vec::default());
// Note: We simply replace all IO-related diagnostics here. This isn't ideal, because
// it removes IO errors that may still be relevant. However, tracking IO errors correctly
// across revisions doesn't feel essential, considering that they're rare. That said, we could
// implement a `BTreeMap` or similar and only prune the diagnostics from paths that we've
// re-scanned (or that were removed, etc.).
project.replace_index_diagnostics(self, diagnostics);
walker.run(|| {
Box::new(|entry| {
let Ok(entry) = entry else {
return WalkState::Continue;
};
if !entry.file_type().is_file() {
return WalkState::Continue;
}
let mut paths = added_paths.lock().unwrap();
paths.push(entry.into_path());
WalkState::Continue
})
});
for path in added_paths.into_inner().unwrap() {
let file = system_path_to_file(self, &path);
if let Ok(file) = file {
project.add_file(self, file);
}
}
}
}
}

View File

@@ -8,7 +8,10 @@ use salsa::Setter;
use ruff_db::files::File;
use crate::db::Db;
use crate::{IOErrorDiagnostic, Project};
use crate::Project;
/// Cheap cloneable hash set of files.
type FileSet = Arc<FxHashSet<File>>;
/// The indexed files of a project.
///
@@ -32,9 +35,9 @@ impl IndexedFiles {
}
}
fn indexed(inner: Arc<IndexedInner>) -> Self {
fn indexed(files: FileSet) -> Self {
Self {
state: std::sync::Mutex::new(State::Indexed(inner)),
state: std::sync::Mutex::new(State::Indexed(files)),
}
}
@@ -43,8 +46,8 @@ impl IndexedFiles {
match &*state {
State::Lazy => Index::Lazy(LazyFiles { files: state }),
State::Indexed(inner) => Index::Indexed(Indexed {
inner: Arc::clone(inner),
State::Indexed(files) => Index::Indexed(Indexed {
files: Arc::clone(files),
_lifetime: PhantomData,
}),
}
@@ -91,7 +94,7 @@ impl IndexedFiles {
Some(IndexedMut {
db: Some(db),
project,
indexed,
files: indexed,
did_change: false,
})
}
@@ -109,7 +112,7 @@ enum State {
Lazy,
/// The files are indexed. Stores the known files of a package.
Indexed(Arc<IndexedInner>),
Indexed(FileSet),
}
pub(super) enum Index<'db> {
@@ -126,48 +129,32 @@ pub(super) struct LazyFiles<'db> {
impl<'db> LazyFiles<'db> {
/// Sets the indexed files of a package to `files`.
pub(super) fn set(
mut self,
files: FxHashSet<File>,
diagnostics: Vec<IOErrorDiagnostic>,
) -> Indexed<'db> {
pub(super) fn set(mut self, files: FxHashSet<File>) -> Indexed<'db> {
let files = Indexed {
inner: Arc::new(IndexedInner { files, diagnostics }),
files: Arc::new(files),
_lifetime: PhantomData,
};
*self.files = State::Indexed(Arc::clone(&files.inner));
*self.files = State::Indexed(Arc::clone(&files.files));
files
}
}
/// The indexed files of the project.
/// The indexed files of a package.
///
/// Note: This type is intentionally non-cloneable. Making it cloneable requires
/// revisiting the locking behavior in [`IndexedFiles::indexed_mut`].
#[derive(Debug)]
#[derive(Debug, PartialEq, Eq)]
pub struct Indexed<'db> {
inner: Arc<IndexedInner>,
files: FileSet,
// Preserve the lifetime of `PackageFiles`.
_lifetime: PhantomData<&'db ()>,
}
#[derive(Debug)]
struct IndexedInner {
files: FxHashSet<File>,
diagnostics: Vec<IOErrorDiagnostic>,
}
impl Indexed<'_> {
pub(super) fn diagnostics(&self) -> &[IOErrorDiagnostic] {
&self.inner.diagnostics
}
}
impl Deref for Indexed<'_> {
type Target = FxHashSet<File>;
fn deref(&self) -> &Self::Target {
&self.inner.files
&self.files
}
}
@@ -178,7 +165,7 @@ impl<'a> IntoIterator for &'a Indexed<'_> {
type IntoIter = IndexedIter<'a>;
fn into_iter(self) -> Self::IntoIter {
self.inner.files.iter().copied()
self.files.iter().copied()
}
}
@@ -189,13 +176,13 @@ impl<'a> IntoIterator for &'a Indexed<'_> {
pub(super) struct IndexedMut<'db> {
db: Option<&'db mut dyn Db>,
project: Project,
indexed: Arc<IndexedInner>,
files: FileSet,
did_change: bool,
}
impl IndexedMut<'_> {
pub(super) fn insert(&mut self, file: File) -> bool {
if self.inner_mut().files.insert(file) {
if self.files_mut().insert(file) {
self.did_change = true;
true
} else {
@@ -204,7 +191,7 @@ impl IndexedMut<'_> {
}
pub(super) fn remove(&mut self, file: File) -> bool {
if self.inner_mut().files.remove(&file) {
if self.files_mut().remove(&file) {
self.did_change = true;
true
} else {
@@ -212,13 +199,8 @@ impl IndexedMut<'_> {
}
}
pub(super) fn set_diagnostics(&mut self, diagnostics: Vec<IOErrorDiagnostic>) {
self.inner_mut().diagnostics = diagnostics;
}
fn inner_mut(&mut self) -> &mut IndexedInner {
Arc::get_mut(&mut self.indexed)
.expect("All references to `FilesSet` should have been dropped")
fn files_mut(&mut self) -> &mut FxHashSet<File> {
Arc::get_mut(&mut self.files).expect("All references to `FilesSet` to have been dropped")
}
fn set_impl(&mut self) {
@@ -226,16 +208,16 @@ impl IndexedMut<'_> {
return;
};
let indexed = Arc::clone(&self.indexed);
let files = Arc::clone(&self.files);
if self.did_change {
// If there are changes, set the new file_set to trigger a salsa revision change.
self.project
.set_file_set(db)
.to(IndexedFiles::indexed(indexed));
.to(IndexedFiles::indexed(files));
} else {
// The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state.
*self.project.file_set(db).state.lock().unwrap() = State::Indexed(indexed);
*self.project.file_set(db).state.lock().unwrap() = State::Indexed(files);
}
}
}
@@ -255,7 +237,7 @@ mod tests {
use crate::files::Index;
use crate::ProjectMetadata;
use ruff_db::files::system_path_to_file;
use ruff_db::system::{DbWithWritableSystem as _, SystemPathBuf};
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use ruff_python_ast::name::Name;
#[test]
@@ -270,7 +252,7 @@ mod tests {
let file = system_path_to_file(&db, "test.py").unwrap();
let files = match project.file_set(&db).get() {
Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file]), Vec::new()),
Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file])),
Index::Indexed(files) => files,
};
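The `Indexed` guard above stays non-cloneable because `IndexedMut` mutates through `Arc::get_mut`, which only succeeds while exactly one strong reference exists. A self-contained sketch of that invariant:
use std::sync::Arc;
fn main() {
    let mut files = Arc::new(vec![1, 2, 3]);
    // Sole owner: in-place mutation is possible.
    assert!(Arc::get_mut(&mut files).is_some());
    // A live clone (what a cloneable `Indexed` would hand out) blocks it...
    let snapshot = Arc::clone(&files);
    assert!(Arc::get_mut(&mut files).is_none());
    // ...and dropping the clone restores the exclusive access that
    // `files_mut`/`inner_mut` above `expect`.
    drop(snapshot);
    assert!(Arc::get_mut(&mut files).is_some());
}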

View File

@@ -1,34 +1,31 @@
#![allow(clippy::ref_option)]
use crate::metadata::options::OptionDiagnostic;
use crate::walk::{ProjectFilesFilter, ProjectFilesWalker};
pub use db::{Db, ProjectDatabase};
use files::{Index, Indexed, IndexedFiles};
use metadata::settings::Settings;
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder, RuleSelection};
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder};
use red_knot_python_semantic::register_lints;
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::{
create_parse_diagnostic, create_unsupported_syntax_diagnostic, Annotation, Diagnostic,
DiagnosticId, Severity, Span,
};
use ruff_db::files::File;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::source::{source_text, SourceTextError};
use ruff_db::system::{SystemPath, SystemPathBuf};
use rustc_hash::FxHashSet;
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::{FileType, SystemPath};
use ruff_python_ast::PySourceType;
use ruff_text_size::TextRange;
use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::Durability;
use salsa::Setter;
use std::borrow::Cow;
use std::sync::Arc;
use thiserror::Error;
pub use db::{Db, ProjectDatabase};
use files::{Index, Indexed, IndexedFiles};
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
pub mod combine;
mod db;
mod files;
pub mod metadata;
mod walk;
pub mod watch;
pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> =
@@ -69,46 +66,11 @@ pub struct Project {
/// The metadata describing the project, including the unresolved options.
#[return_ref]
pub metadata: ProjectMetadata,
/// The resolved project settings.
#[return_ref]
pub settings: Settings,
/// The paths that should be included when checking this project.
///
/// The default (when this list is empty) is to include all files in the project root
/// (that satisfy the configured include and exclude patterns).
/// However, it's sometimes desired to only check a subset of the project, e.g. to see
/// the diagnostics for a single file or a folder.
///
/// This list gets initialized by the paths passed to `knot check <paths>`.
///
/// ## How is this different from `open_files`?
///
/// The `included_paths` is closely related to `open_files`. One difference is that
/// `open_files` is already a resolved set of files whereas `included_paths` is only a list of paths
/// that are resolved to files by indexing them. The other difference is that
/// new files added to any directory in `included_paths` will be indexed and added to the project
/// whereas `open_files` needs to be updated manually (e.g. by the IDE).
///
/// In short, `open_files` is cheaper in contexts where the set of files is known, like
/// in an IDE when the user only wants to check the open tabs. This could be modeled
/// with `included_paths` too but it would require an explicit walk dir step that's simply unnecessary.
#[default]
#[return_ref]
included_paths_list: Vec<SystemPathBuf>,
/// Diagnostics that were generated when resolving the project settings.
#[return_ref]
settings_diagnostics: Vec<OptionDiagnostic>,
}
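// Hedged usage sketch of the two mechanisms described above (`db`, `project`,
// and `file` are assumed to exist; the method names appear later in this diff):
//
//   // CLI-style: paths are resolved by indexing, and files created later
//   // under `src/` are picked up automatically when the project reloads.
//   project.set_included_paths(&mut db, vec![SystemPathBuf::from("src")]);
//
//   // IDE-style: an already-resolved file set; newly opened tabs must be
//   // registered explicitly.
//   project.open_file(&mut db, file);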
#[salsa::tracked]
impl Project {
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
let (settings, settings_diagnostics) = metadata.options().to_settings(db);
Project::builder(metadata, settings, settings_diagnostics)
Project::builder(metadata)
.durability(Durability::MEDIUM)
.open_fileset_durability(Durability::LOW)
.file_set_durability(Durability::LOW)
@@ -123,41 +85,11 @@ impl Project {
self.metadata(db).name()
}
/// Returns the resolved linter rules for the project.
///
/// This is a salsa query to prevent re-computing queries if other, unrelated
/// settings change. For example, we don't want changes to the terminal settings
/// to invalidate any type checking queries.
#[salsa::tracked]
pub fn rules(self, db: &dyn Db) -> Arc<RuleSelection> {
self.settings(db).to_rules()
}
/// Returns `true` if `path` is both part of the project and included (see `included_paths_list`).
///
/// Unlike [Self::files], this method does not respect `.gitignore` files. It only checks
/// the project's include and exclude settings as well as the paths that were passed to `knot check <paths>`.
/// This means that this method is an over-approximation of `Self::files` and may return `true` for paths
/// that won't be included when checking the project because they're ignored in a `.gitignore` file.
pub fn is_path_included(self, db: &dyn Db, path: &SystemPath) -> bool {
ProjectFilesFilter::from_project(db, self).is_included(path)
}
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
tracing::debug!("Reloading project");
assert_eq!(self.root(db), metadata.root());
if &metadata != self.metadata(db) {
let (settings, settings_diagnostics) = metadata.options().to_settings(db);
if self.settings(db) != &settings {
self.set_settings(db).to(settings);
}
if self.settings_diagnostics(db) != &settings_diagnostics {
self.set_settings_diagnostics(db).to(settings_diagnostics);
}
self.set_metadata(db).to(metadata);
}
@@ -165,46 +97,29 @@ impl Project {
}
/// Checks all open files in the project and its dependencies.
pub(crate) fn check(self, db: &ProjectDatabase) -> Vec<Diagnostic> {
pub fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
let project_span = tracing::debug_span!("Project::check");
let _span = project_span.enter();
tracing::debug!("Checking project '{name}'", name = self.name(db));
let mut diagnostics: Vec<Diagnostic> = Vec::new();
diagnostics.extend(
self.settings_diagnostics(db)
.iter()
.map(OptionDiagnostic::to_diagnostic),
);
let files = ProjectFiles::new(db, self);
diagnostics.extend(
files
.diagnostics()
.iter()
.map(IOErrorDiagnostic::to_diagnostic),
);
let result = Arc::new(std::sync::Mutex::new(diagnostics));
let result = Arc::new(std::sync::Mutex::new(Vec::new()));
let inner_result = Arc::clone(&result);
let db = db.clone();
let project_span = project_span.clone();
rayon::scope(move |scope| {
let files = ProjectFiles::new(&db, self);
for file in &files {
let result = inner_result.clone();
let db = db.clone();
let project_span = project_span.clone();
scope.spawn(move |_| {
let check_file_span =
tracing::debug_span!(parent: &project_span, "check_file", ?file);
let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
let _entered = check_file_span.entered();
let file_diagnostics = check_file_impl(&db, file);
let file_diagnostics = check_file(&db, file);
result.lock().unwrap().extend(file_diagnostics);
});
}
@@ -213,19 +128,6 @@ impl Project {
Arc::into_inner(result).unwrap().into_inner().unwrap()
}
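// A self-contained sketch of the fan-out pattern used by `check` above:
// one spawned task per item, results funneled through a shared `Vec`, and
// the `Arc` unwrapped once the scope guarantees every task has finished.
// (Stand-in work replaces `check_file_impl`; assumes the `rayon` crate.)
fn check_all(items: Vec<u32>) -> Vec<u32> {
    use std::sync::{Arc, Mutex};
    let results = Arc::new(Mutex::new(Vec::new()));
    let inner = Arc::clone(&results);
    rayon::scope(move |scope| {
        for item in items {
            let results = inner.clone();
            scope.spawn(move |_| {
                results.lock().unwrap().push(item * 2); // stand-in for a file check
            });
        }
    });
    // `scope` returned, so all task-local clones are gone; this is the last Arc.
    Arc::into_inner(results).unwrap().into_inner().unwrap()
}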
pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Diagnostic> {
let mut file_diagnostics: Vec<_> = self
.settings_diagnostics(db)
.iter()
.map(OptionDiagnostic::to_diagnostic)
.collect();
let check_diagnostics = check_file_impl(db, file);
file_diagnostics.extend(check_diagnostics);
file_diagnostics
}
/// Opens a file in the project.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
@@ -251,30 +153,6 @@ impl Project {
removed
}
pub fn set_included_paths(self, db: &mut dyn Db, paths: Vec<SystemPathBuf>) {
tracing::debug!("Setting included paths: {paths}", paths = paths.len());
self.set_included_paths_list(db).to(paths);
self.reload_files(db);
}
/// Returns the paths that should be checked.
///
/// The default is to check the entire project in which case this method returns
/// the project root. However, users can specify to only check specific sub-folders or
/// even files of a project by using `knot check <paths>`. In that case, this method
/// returns the provided absolute paths.
///
/// Note: The CLI doesn't prohibit users from specifying paths outside the project root.
/// This can be useful for checking arbitrary files, but it isn't something we recommend.
/// We should try to support this use case but it's okay if there are some limitations around it.
fn included_paths_or_root(self, db: &dyn Db) -> &[SystemPathBuf] {
match &**self.included_paths_list(db) {
[] => std::slice::from_ref(&self.metadata(db).root),
paths => paths,
}
}
/// Returns the open files in the project or `None` if the entire project should be checked.
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
self.open_fileset(db).as_deref()
@@ -328,7 +206,7 @@ impl Project {
self.files(db).contains(&file)
}
#[tracing::instrument(level = "debug", skip(self, db))]
#[tracing::instrument(level = "debug", skip(db))]
pub fn remove_file(self, db: &mut dyn Db, file: File) {
tracing::debug!(
"Removing file `{}` from project `{}`",
@@ -357,17 +235,6 @@ impl Project {
index.insert(file);
}
/// Replaces the diagnostics from indexing the project files with `diagnostics`.
///
/// This is a no-op if the project files haven't been indexed yet.
pub fn replace_index_diagnostics(self, db: &mut dyn Db, diagnostics: Vec<IOErrorDiagnostic>) {
let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
return;
};
index.set_diagnostics(diagnostics);
}
/// Returns the files belonging to this project.
pub fn files(self, db: &dyn Db) -> Indexed<'_> {
let files = self.file_set(db);
@@ -375,14 +242,12 @@ impl Project {
let indexed = match files.get() {
Index::Lazy(vacant) => {
let _entered =
tracing::debug_span!("Project::index_files", project = %self.name(db))
tracing::debug_span!("Project::index_files", package = %self.name(db))
.entered();
let walker = ProjectFilesWalker::new(db);
let (files, diagnostics) = walker.collect_set(db);
tracing::info!("Indexed {} file(s)", files.len());
vacant.set(files, diagnostics)
let files = discover_project_files(db, self);
tracing::info!("Found {} files in project `{}`", files.len(), self.name(db));
vacant.set(files)
}
Index::Indexed(indexed) => indexed,
};
@@ -400,51 +265,82 @@ impl Project {
}
}
fn check_file_impl(db: &dyn Db, file: File) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = Vec::new();
pub(crate) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
// Abort checking if there are IO errors.
let source = source_text(db.upcast(), file);
if let Some(read_error) = source.read_error() {
diagnostics.push(
IOErrorDiagnostic {
file: Some(file),
error: read_error.clone().into(),
}
.to_diagnostic(),
);
diagnostics.push(Box::new(IOErrorDiagnostic {
file,
error: read_error.clone(),
}));
return diagnostics;
}
let parsed = parsed_module(db.upcast(), file);
diagnostics.extend(
parsed
.errors()
.iter()
.map(|error| create_parse_diagnostic(file, error)),
);
diagnostics.extend(
parsed
.unsupported_syntax_errors()
.iter()
.map(|error| create_unsupported_syntax_diagnostic(file, error)),
);
diagnostics.extend(check_types(db.upcast(), file).into_iter().cloned());
diagnostics.sort_unstable_by_key(|diagnostic| {
diagnostics.extend(parsed.errors().iter().map(|error| {
let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
diagnostic
.primary_span()
.and_then(|span| span.range())
.unwrap_or_default()
.start()
});
}));
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
boxed
}));
diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());
diagnostics
}
fn discover_project_files(db: &dyn Db, project: Project) -> FxHashSet<File> {
let paths = std::sync::Mutex::new(Vec::new());
db.system().walk_directory(project.root(db)).run(|| {
Box::new(|entry| {
match entry {
Ok(entry) => {
// Skip over any non-Python files to avoid creating too many entries in `Files`.
match entry.file_type() {
FileType::File => {
if entry
.path()
.extension()
.and_then(PySourceType::try_from_extension)
.is_some()
{
let mut paths = paths.lock().unwrap();
paths.push(entry.into_path());
}
}
FileType::Directory | FileType::Symlink => {}
}
}
Err(error) => {
// TODO Handle error
tracing::error!("Failed to walk path: {error}");
}
}
WalkState::Continue
})
});
let paths = paths.into_inner().unwrap();
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
for path in paths {
// If this returns `Err`, then the file was deleted between the `walk_directory` call and now.
// We can ignore this.
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
files.insert(file);
}
}
files
}
#[derive(Debug)]
enum ProjectFiles<'a> {
OpenFiles(&'a FxHashSet<File>),
@@ -459,13 +355,6 @@ impl<'a> ProjectFiles<'a> {
ProjectFiles::Indexed(project.files(db))
}
}
fn diagnostics(&self) -> &[IOErrorDiagnostic] {
match self {
ProjectFiles::OpenFiles(_) => &[],
ProjectFiles::Indexed(indexed) => indexed.diagnostics(),
}
}
}
impl<'a> IntoIterator for &'a ProjectFiles<'a> {
@@ -498,43 +387,45 @@ impl Iterator for ProjectFilesIter<'_> {
}
}
#[derive(Debug, Clone)]
#[derive(Debug)]
pub struct IOErrorDiagnostic {
file: Option<File>,
error: IOErrorKind,
file: File,
error: SourceTextError,
}
impl IOErrorDiagnostic {
fn to_diagnostic(&self) -> Diagnostic {
let mut diag = Diagnostic::new(DiagnosticId::Io, Severity::Error, &self.error);
if let Some(file) = self.file {
diag.annotate(Annotation::primary(Span::from(file)));
}
diag
impl Diagnostic for IOErrorDiagnostic {
fn id(&self) -> DiagnosticId {
DiagnosticId::Io
}
}
#[derive(Error, Debug, Clone)]
enum IOErrorKind {
#[error(transparent)]
Walk(#[from] walk::WalkError),
fn message(&self) -> Cow<str> {
self.error.to_string().into()
}
#[error(transparent)]
SourceText(#[from] SourceTextError),
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {
None
}
fn severity(&self) -> Severity {
Severity::Error
}
}
#[cfg(test)]
mod tests {
use crate::db::tests::TestDb;
use crate::{check_file_impl, ProjectMetadata};
use crate::{check_file, ProjectMetadata};
use red_knot_python_semantic::types::check_types;
use red_knot_python_semantic::{Program, ProgramSettings, PythonPlatform, SearchPathSettings};
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::system_path_to_file;
use ruff_db::source::source_text;
use ruff_db::system::{DbWithTestSystem, DbWithWritableSystem as _, SystemPath, SystemPathBuf};
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::testing::assert_function_query_was_not_run;
use ruff_python_ast::name::Name;
use ruff_python_ast::PythonVersion;
#[test]
fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
@@ -542,16 +433,6 @@ mod tests {
let mut db = TestDb::new(project);
let path = SystemPath::new("test.py");
Program::from_settings(
&db,
ProgramSettings {
python_version: PythonVersion::default(),
python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings::new(vec![SystemPathBuf::from(".")]),
},
)
.expect("Failed to configure program settings");
db.write_file(path, "x = 10")?;
let file = system_path_to_file(&db, path).unwrap();
@@ -561,9 +442,9 @@ mod tests {
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(
check_file_impl(&db, file)
check_file(&db, file)
.into_iter()
.map(|diagnostic| diagnostic.primary_message().to_string())
.map(|diagnostic| diagnostic.message().into_owned())
.collect::<Vec<_>>(),
vec!["Failed to read file: No such file or directory".to_string()]
);
@@ -577,9 +458,9 @@ mod tests {
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(
check_file_impl(&db, file)
check_file(&db, file)
.into_iter()
.map(|diagnostic| diagnostic.primary_message().to_string())
.map(|diagnostic| diagnostic.message().into_owned())
.collect::<Vec<_>>(),
vec![] as Vec<String>
);

View File

@@ -1,21 +1,15 @@
use configuration_file::{ConfigurationFile, ConfigurationFileError};
use red_knot_python_semantic::ProgramSettings;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
use std::sync::Arc;
use thiserror::Error;
use crate::combine::Combine;
use crate::metadata::pyproject::{Project, PyProject, PyProjectError, ResolveRequiresPythonError};
use crate::metadata::value::ValueSource;
use crate::metadata::pyproject::{Project, PyProject, PyProjectError};
use options::KnotTomlError;
use options::Options;
mod configuration_file;
pub mod options;
pub mod pyproject;
pub mod settings;
pub mod value;
#[derive(Debug, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
@@ -26,15 +20,6 @@ pub struct ProjectMetadata {
/// The raw options
pub(super) options: Options,
/// Paths of configurations other than the project's configuration that were combined into [`Self::options`].
///
/// This field stores the paths of the configuration files, mainly for
/// knowing which files to watch for changes.
///
/// The path ordering doesn't imply precedence.
#[cfg_attr(test, serde(skip_serializing_if = "Vec::is_empty"))]
pub(super) extra_configuration_paths: Vec<SystemPathBuf>,
}
impl ProjectMetadata {
@@ -43,16 +28,12 @@ impl ProjectMetadata {
Self {
name,
root,
extra_configuration_paths: Vec::default(),
options: Options::default(),
}
}
/// Loads a project from a `pyproject.toml` file.
pub(crate) fn from_pyproject(
pyproject: PyProject,
root: SystemPathBuf,
) -> Result<Self, ResolveRequiresPythonError> {
pub(crate) fn from_pyproject(pyproject: PyProject, root: SystemPathBuf) -> Self {
Self::from_options(
pyproject
.tool
@@ -64,38 +45,22 @@ impl ProjectMetadata {
}
/// Loads a project from a set of options with an optional pyproject-project table.
pub fn from_options(
mut options: Options,
pub(crate) fn from_options(
options: Options,
root: SystemPathBuf,
project: Option<&Project>,
) -> Result<Self, ResolveRequiresPythonError> {
) -> Self {
let name = project
.and_then(|project| project.name.as_deref())
.and_then(|project| project.name.as_ref())
.map(|name| Name::new(&**name))
.unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));
// If the `options` don't specify a python version but the `project.requires-python` field is set,
// use that as a lower bound instead.
if let Some(project) = project {
if options
.environment
.as_ref()
.is_none_or(|env| env.python_version.is_none())
{
if let Some(requires_python) = project.resolve_requires_python_lower_bound()? {
let mut environment = options.environment.unwrap_or_default();
environment.python_version = Some(requires_python);
options.environment = Some(environment);
}
}
}
Ok(Self {
// TODO(https://github.com/astral-sh/ruff/issues/15491): Respect requires-python
Self {
name,
root,
options,
extra_configuration_paths: Vec::new(),
})
}
}
/// Discovers the closest project at `path` and returns its metadata.
@@ -122,10 +87,7 @@ impl ProjectMetadata {
let pyproject_path = project_root.join("pyproject.toml");
let pyproject = if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
match PyProject::from_toml_str(
&pyproject_str,
ValueSource::File(Arc::new(pyproject_path.clone())),
) {
match PyProject::from_toml_str(&pyproject_str) {
Ok(pyproject) => Some(pyproject),
Err(error) => {
return Err(ProjectDiscoveryError::InvalidPyProject {
@@ -141,10 +103,7 @@ impl ProjectMetadata {
// A `knot.toml` takes precedence over a `pyproject.toml`.
let knot_toml_path = project_root.join("knot.toml");
if let Ok(knot_str) = system.read_to_string(&knot_toml_path) {
let options = match Options::from_toml_str(
&knot_str,
ValueSource::File(Arc::new(knot_toml_path.clone())),
) {
let options = match Options::from_toml_str(&knot_str) {
Ok(options) => options,
Err(error) => {
return Err(ProjectDiscoveryError::InvalidKnotToml {
@@ -163,34 +122,19 @@ impl ProjectMetadata {
}
tracing::debug!("Found project at '{}'", project_root);
let metadata = ProjectMetadata::from_options(
return Ok(ProjectMetadata::from_options(
options,
project_root.to_path_buf(),
pyproject
.as_ref()
.and_then(|pyproject| pyproject.project.as_ref()),
)
.map_err(|err| {
ProjectDiscoveryError::InvalidRequiresPythonConstraint {
source: err,
path: pyproject_path,
}
})?;
return Ok(metadata);
));
}
if let Some(pyproject) = pyproject {
let has_knot_section = pyproject.knot().is_some();
let metadata =
ProjectMetadata::from_pyproject(pyproject, project_root.to_path_buf())
.map_err(
|err| ProjectDiscoveryError::InvalidRequiresPythonConstraint {
source: err,
path: pyproject_path,
},
)?;
ProjectMetadata::from_pyproject(pyproject, project_root.to_path_buf());
if has_knot_section {
tracing::debug!("Found project at '{}'", project_root);
@@ -238,10 +182,6 @@ impl ProjectMetadata {
&self.options
}
pub fn extra_configuration_paths(&self) -> &[SystemPathBuf] {
&self.extra_configuration_paths
}
pub fn to_program_settings(&self, system: &dyn System) -> ProgramSettings {
self.options.to_program_settings(self.root(), system)
}
@@ -251,31 +191,9 @@ impl ProjectMetadata {
self.options = options.combine(std::mem::take(&mut self.options));
}
/// Applies the options from the configuration files to the project's options.
///
/// This includes:
///
/// * The user-level configuration
pub fn apply_configuration_files(
&mut self,
system: &dyn System,
) -> Result<(), ConfigurationFileError> {
if let Some(user) = ConfigurationFile::user(system)? {
tracing::debug!(
"Applying user-level configuration loaded from `{path}`.",
path = user.path()
);
self.apply_configuration_file(user);
}
Ok(())
}
/// Applies a lower-precedence configuration file to the project's options.
fn apply_configuration_file(&mut self, options: ConfigurationFile) {
self.extra_configuration_paths
.push(options.path().to_owned());
self.options.combine_with(options.into_options());
/// Combine the project options with the user options where project options take precedence.
pub fn apply_user_options(&mut self, options: Options) {
self.options.combine_with(options);
}
}
@@ -295,22 +213,16 @@ pub enum ProjectDiscoveryError {
source: Box<KnotTomlError>,
path: SystemPathBuf,
},
#[error("Invalid `requires-python` version specifier (`{path}`): {source}")]
InvalidRequiresPythonConstraint {
source: ResolveRequiresPythonError,
path: SystemPathBuf,
},
}
#[cfg(test)]
mod tests {
//! Integration tests for project discovery
use crate::snapshot_project;
use anyhow::{anyhow, Context};
use insta::assert_ron_snapshot;
use ruff_db::system::{SystemPathBuf, TestSystem};
use ruff_python_ast::PythonVersion;
use crate::{ProjectDiscoveryError, ProjectMetadata};
@@ -321,7 +233,7 @@ mod tests {
system
.memory_file_system()
.write_files_all([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
.write_files([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
.context("Failed to write files")?;
let project =
@@ -329,15 +241,7 @@ mod tests {
assert_eq!(project.root(), &*root);
with_escaped_paths(|| {
assert_ron_snapshot!(&project, @r#"
ProjectMetadata(
name: Name("app"),
root: "/app",
options: Options(),
)
"#);
});
snapshot_project!(project);
Ok(())
}
@@ -349,7 +253,7 @@ mod tests {
system
.memory_file_system()
.write_files_all([
.write_files([
(
root.join("pyproject.toml"),
r#"
@@ -366,16 +270,7 @@ mod tests {
ProjectMetadata::discover(&root, &system).context("Failed to discover project")?;
assert_eq!(project.root(), &*root);
with_escaped_paths(|| {
assert_ron_snapshot!(&project, @r#"
ProjectMetadata(
name: Name("backend"),
root: "/app",
options: Options(),
)
"#);
});
snapshot_project!(project);
// Discovering the same package from a subdirectory should give the same result
let from_src = ProjectMetadata::discover(&root.join("db"), &system)
@@ -393,7 +288,7 @@ mod tests {
system
.memory_file_system()
.write_files_all([
.write_files([
(
root.join("pyproject.toml"),
r#"
@@ -432,7 +327,7 @@ expected `.`, `]`
system
.memory_file_system()
.write_files_all([
.write_files([
(
root.join("pyproject.toml"),
r#"
@@ -458,19 +353,7 @@ expected `.`, `]`
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(sub_project, @r#"
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(
src: Some(SrcOptions(
root: Some("src"),
)),
),
)
"#);
});
snapshot_project!(sub_project);
Ok(())
}
@@ -482,7 +365,7 @@ expected `.`, `]`
system
.memory_file_system()
.write_files_all([
.write_files([
(
root.join("pyproject.toml"),
r#"
@@ -508,19 +391,7 @@ expected `.`, `]`
let root = ProjectMetadata::discover(&root, &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(root, @r#"
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
src: Some(SrcOptions(
root: Some("src"),
)),
),
)
"#);
});
snapshot_project!(root);
Ok(())
}
@@ -532,7 +403,7 @@ expected `.`, `]`
system
.memory_file_system()
.write_files_all([
.write_files([
(
root.join("pyproject.toml"),
r#"
@@ -552,15 +423,7 @@ expected `.`, `]`
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(sub_project, @r#"
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(),
)
"#);
});
snapshot_project!(sub_project);
Ok(())
}
@@ -572,7 +435,7 @@ expected `.`, `]`
system
.memory_file_system()
.write_files_all([
.write_files([
(
root.join("pyproject.toml"),
r#"
@@ -595,19 +458,7 @@ expected `.`, `]`
let root = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(root, @r#"
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
environment: Some(EnvironmentOptions(
r#python-version: Some("3.10"),
)),
),
)
"#);
});
snapshot_project!(root);
Ok(())
}
@@ -623,308 +474,31 @@ expected `.`, `]`
system
.memory_file_system()
.write_files_all([
.write_files([
(
root.join("pyproject.toml"),
r#"
[project]
name = "super-app"
requires-python = ">=3.12"
[project]
name = "super-app"
requires-python = ">=3.12"
[tool.knot.src]
root = "this_option_is_ignored"
"#,
[tool.knot.src]
root = "this_option_is_ignored"
"#,
),
(
root.join("knot.toml"),
r#"
[src]
root = "src"
"#,
[src]
root = "src"
"#,
),
])
.context("Failed to write files")?;
let root = ProjectMetadata::discover(&root, &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(root, @r#"
ProjectMetadata(
name: Name("super-app"),
root: "/app",
options: Options(
environment: Some(EnvironmentOptions(
r#python-version: Some("3.12"),
)),
src: Some(SrcOptions(
root: Some("src"),
)),
),
)
"#);
});
Ok(())
}
#[test]
fn requires_python_major_minor() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=3.12"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY312)
);
Ok(())
}
#[test]
fn requires_python_major_only() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=3"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::from((3, 0)))
);
Ok(())
}
/// A `requires-python` constraint with major, minor and patch can be simplified
/// to major and minor (e.g. 3.12.1 -> 3.12).
#[test]
fn requires_python_major_minor_patch() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=3.12.8"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY312)
);
Ok(())
}
#[test]
fn requires_python_beta_version() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">= 3.13.0b0"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY313)
);
Ok(())
}
#[test]
fn requires_python_greater_than_major_minor() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
# This is somewhat nonsensical because 3.12.1 > 3.12 is true.
# That's why simplifying the constraint to >= 3.12 is correct
requires-python = ">3.12"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY312)
);
Ok(())
}
/// `python-version` takes precedence if both `requires-python` and `python-version` are configured.
#[test]
fn requires_python_and_python_version() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=3.12"
[tool.knot.environment]
python-version = "3.10"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY310)
);
Ok(())
}
#[test]
fn requires_python_less_than() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
requires-python = "<3.12"
"#,
)
.context("Failed to write file")?;
let Err(error) = ProjectMetadata::discover(&root, &system) else {
return Err(anyhow!("Expected project discovery to fail because the `requires-python` doesn't specify a lower bound (it only specifies an upper bound)."));
};
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): value `<3.12` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.");
Ok(())
}
#[test]
fn requires_python_no_specifiers() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ""
"#,
)
.context("Failed to write file")?;
let Err(error) = ProjectMetadata::discover(&root, &system) else {
return Err(anyhow!("Expected project discovery to fail because the `requires-python` specifiers are empty and don't define a lower bound."));
};
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): value `` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.");
Ok(())
}
#[test]
fn requires_python_too_large_major_version() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file_all(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=999.0"
"#,
)
.context("Failed to write file")?;
let Err(error) = ProjectMetadata::discover(&root, &system) else {
return Err(anyhow!("Expected project discovery to fail because of the requires-python major version that is larger than 255."));
};
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): The major version `999` is larger than the maximum supported value 255");
snapshot_project!(root);
Ok(())
}
@@ -934,12 +508,15 @@ expected `.`, `]`
assert_eq!(error.to_string().replace('\\', "/"), message);
}
fn with_escaped_paths<R>(f: impl FnOnce() -> R) -> R {
let mut settings = insta::Settings::clone_current();
settings.add_dynamic_redaction(".root", |content, _path| {
content.as_str().unwrap().replace('\\', "/")
/// Snapshots a project but with all paths using unix separators.
#[macro_export]
macro_rules! snapshot_project {
($project:expr) => {{
assert_ron_snapshot!($project,{
".root" => insta::dynamic_redaction(|content, _content_path| {
content.as_str().unwrap().replace("\\", "/")
}),
});
settings.bind(f)
}
}};
}
}

View File

@@ -1,69 +0,0 @@
use std::sync::Arc;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use thiserror::Error;
use crate::metadata::value::ValueSource;
use super::options::{KnotTomlError, Options};
/// A `knot.toml` configuration file with the options it contains.
pub(crate) struct ConfigurationFile {
path: SystemPathBuf,
options: Options,
}
impl ConfigurationFile {
/// Loads the user-level configuration file if it exists.
///
/// Returns `None` if the file does not exist or if the concept of user-level configurations
/// doesn't exist on `system`.
pub(crate) fn user(system: &dyn System) -> Result<Option<Self>, ConfigurationFileError> {
let Some(configuration_directory) = system.user_config_directory() else {
return Ok(None);
};
let knot_toml_path = configuration_directory.join("knot").join("knot.toml");
tracing::debug!(
"Searching for a user-level configuration at `{path}`",
path = &knot_toml_path
);
let Ok(knot_toml_str) = system.read_to_string(&knot_toml_path) else {
return Ok(None);
};
match Options::from_toml_str(
&knot_toml_str,
ValueSource::File(Arc::new(knot_toml_path.clone())),
) {
Ok(options) => Ok(Some(Self {
path: knot_toml_path,
options,
})),
Err(error) => Err(ConfigurationFileError::InvalidKnotToml {
source: Box::new(error),
path: knot_toml_path,
}),
}
}
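// Example of the lookup above (typical XDG layout; the user is hypothetical):
// `user_config_directory()` resolves to `~/.config`, so the file read is
// `~/.config/knot/knot.toml`. A missing file yields `Ok(None)`, not an error.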
/// Returns the path to the configuration file.
pub(crate) fn path(&self) -> &SystemPath {
&self.path
}
pub(crate) fn into_options(self) -> Options {
self.options
}
}
#[derive(Debug, Error)]
pub enum ConfigurationFileError {
#[error("{path} is not a valid `knot.toml`: {source}")]
InvalidKnotToml {
source: Box<KnotTomlError>,
path: SystemPathBuf,
},
}

View File

@@ -1,78 +1,40 @@
use crate::metadata::value::{RangedValue, RelativePathBuf, ValueSource, ValueSourceGuard};
use crate::Db;
use red_knot_python_semantic::lint::{GetLintError, Level, LintSource, RuleSelection};
use red_knot_python_semantic::{ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings};
use ruff_db::diagnostic::{Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, Severity, Span};
use ruff_db::files::system_path_to_file;
use ruff_db::system::{System, SystemPath};
use red_knot_python_semantic::{
ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_macros::Combine;
use ruff_python_ast::PythonVersion;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
use thiserror::Error;
use super::settings::{Settings, TerminalSettings};
/// The options for the project.
#[derive(Debug, Default, Clone, PartialEq, Eq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
/// Configures the type checking environment.
#[serde(skip_serializing_if = "Option::is_none")]
pub environment: Option<EnvironmentOptions>,
#[serde(skip_serializing_if = "Option::is_none")]
pub src: Option<SrcOptions>,
/// Configures the enabled lints and their severity.
#[serde(skip_serializing_if = "Option::is_none")]
pub rules: Option<Rules>,
#[serde(skip_serializing_if = "Option::is_none")]
pub terminal: Option<TerminalOptions>,
}
impl Options {
pub(crate) fn from_toml_str(content: &str, source: ValueSource) -> Result<Self, KnotTomlError> {
let _guard = ValueSourceGuard::new(source, true);
pub(crate) fn from_toml_str(content: &str) -> Result<Self, KnotTomlError> {
let options = toml::from_str(content)?;
Ok(options)
}
pub fn deserialize_with<'de, D>(source: ValueSource, deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let _guard = ValueSourceGuard::new(source, false);
Self::deserialize(deserializer)
}
pub(crate) fn to_program_settings(
&self,
project_root: &SystemPath,
system: &dyn System,
) -> ProgramSettings {
let python_version = self
let (python_version, python_platform) = self
.environment
.as_ref()
.and_then(|env| env.python_version.as_deref().copied())
.map(|env| (env.python_version, env.python_platform.as_ref()))
.unwrap_or_default();
let python_platform = self
.environment
.as_ref()
.and_then(|env| env.python_platform.as_deref().cloned())
.unwrap_or_else(|| {
let default = PythonPlatform::default();
tracing::info!(
"Defaulting to default python version for this platform: '{default}'",
);
default
});
ProgramSettings {
python_version,
python_platform,
python_version: python_version.unwrap_or_default(),
python_platform: python_platform.cloned().unwrap_or_default(),
search_paths: self.to_search_path_settings(project_root, system),
}
}
@@ -82,19 +44,19 @@ impl Options {
project_root: &SystemPath,
system: &dyn System,
) -> SearchPathSettings {
let src_roots = if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_ref())
{
vec![src_root.absolute(project_root, system)]
} else {
let src = project_root.join("src");
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
if system.is_directory(&src) {
vec![project_root.to_path_buf(), src]
let src_roots =
if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_deref()) {
vec![src_root.to_path_buf()]
} else {
vec![project_root.to_path_buf()]
}
};
let src = project_root.join("src");
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
if system.is_directory(&src) {
vec![project_root.to_path_buf(), src]
} else {
vec![project_root.to_path_buf()]
}
};
let (extra_paths, python, typeshed) = self
.environment
@@ -102,280 +64,50 @@ impl Options {
.map(|env| {
(
env.extra_paths.clone(),
env.python.clone(),
env.venv_path.clone(),
env.typeshed.clone(),
)
})
.unwrap_or_default();
SearchPathSettings {
extra_paths: extra_paths
.unwrap_or_default()
.into_iter()
.map(|path| path.absolute(project_root, system))
.collect(),
extra_paths: extra_paths.unwrap_or_default(),
src_roots,
custom_typeshed: typeshed.map(|path| path.absolute(project_root, system)),
python_path: python
.map(|python_path| {
PythonPath::from_cli_flag(python_path.absolute(project_root, system))
})
.or_else(|| {
std::env::var("VIRTUAL_ENV")
.ok()
.map(PythonPath::from_virtual_env_var)
})
.unwrap_or_else(|| PythonPath::Discover(project_root.to_path_buf())),
typeshed,
site_packages: python
.map(|venv_path| SitePackages::Derived { venv_path })
.unwrap_or(SitePackages::Known(vec![])),
}
}
#[must_use]
pub(crate) fn to_settings(&self, db: &dyn Db) -> (Settings, Vec<OptionDiagnostic>) {
let (rules, diagnostics) = self.to_rule_selection(db);
let mut settings = Settings::new(rules);
if let Some(terminal) = self.terminal.as_ref() {
settings.set_terminal(TerminalSettings {
output_format: terminal
.output_format
.as_deref()
.copied()
.unwrap_or_default(),
error_on_warning: terminal.error_on_warning.unwrap_or_default(),
});
}
(settings, diagnostics)
}
#[must_use]
fn to_rule_selection(&self, db: &dyn Db) -> (RuleSelection, Vec<OptionDiagnostic>) {
let registry = db.lint_registry();
let mut diagnostics = Vec::new();
// Initialize the selection with the defaults
let mut selection = RuleSelection::from_registry(registry);
let rules = self
.rules
.as_ref()
.into_iter()
.flat_map(|rules| rules.inner.iter());
for (rule_name, level) in rules {
let source = rule_name.source();
match registry.get(rule_name) {
Ok(lint) => {
let lint_source = match source {
ValueSource::File(_) => LintSource::File,
ValueSource::Cli => LintSource::Cli,
};
if let Ok(severity) = Severity::try_from(**level) {
selection.enable(lint, severity, lint_source);
} else {
selection.disable(lint);
}
}
Err(error) => {
// `system_path_to_file` can return `Err` if the file was deleted since the configuration
// was read. This should be rare and it should be okay to default to not showing a configuration
// file in that case.
let file = source
.file()
.and_then(|path| system_path_to_file(db.upcast(), path).ok());
// TODO: Add a note if the value was configured on the CLI
let diagnostic = match error {
GetLintError::Unknown(_) => OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!("Unknown lint rule `{rule_name}`"),
Severity::Warning,
),
GetLintError::PrefixedWithCategory { suggestion, .. } => {
OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!(
"Unknown lint rule `{rule_name}`. Did you mean `{suggestion}`?"
),
Severity::Warning,
)
}
GetLintError::Removed(_) => OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!("Unknown lint rule `{rule_name}`"),
Severity::Warning,
),
};
let span = file.map(Span::from).map(|span| {
if let Some(range) = rule_name.range() {
span.with_range(range)
} else {
span
}
});
diagnostics.push(diagnostic.with_span(span));
}
}
}
(selection, diagnostics)
}
}
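// Illustration (hypothetical rule names): given a `[rules]` table such as
//
//   [rules]
//   division-by-zero = "error"
//   no-such-rule = "warn"
//
// the first entry enables its lint at `Severity::Error`, while the second
// falls into the `Err` arm above and is reported as a
// `DiagnosticId::UnknownRule` warning with a span into the config file.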
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct EnvironmentOptions {
/// Specifies the version of Python that will be used to analyze the source code.
/// The version should be specified as a string in the format `M.m` where `M` is the major version
/// and `m` is the minor (e.g. "3.0" or "3.6").
/// If a version is provided, knot will generate errors if the source code makes use of language features
/// that are not supported in that version.
/// It will also tailor its use of type stub files, which conditionalize type definitions based on the version.
#[serde(skip_serializing_if = "Option::is_none")]
pub python_version: Option<RangedValue<PythonVersion>>,
pub python_version: Option<PythonVersion>,
/// Specifies the target platform that will be used to analyze the source code.
/// If specified, Red Knot will tailor its use of type stub files,
/// which conditionalize type definitions based on the platform.
///
/// If no platform is specified, knot will use the current platform:
/// - `win32` for Windows
/// - `darwin` for macOS
/// - `android` for Android
/// - `ios` for iOS
/// - `linux` for everything else
#[serde(skip_serializing_if = "Option::is_none")]
pub python_platform: Option<RangedValue<PythonPlatform>>,
pub python_platform: Option<PythonPlatform>,
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
/// or pyright's stubPath configuration setting.
#[serde(skip_serializing_if = "Option::is_none")]
pub extra_paths: Option<Vec<RelativePathBuf>>,
pub extra_paths: Option<Vec<SystemPathBuf>>,
/// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
/// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
/// bundled as a zip file in the binary.
#[serde(skip_serializing_if = "Option::is_none")]
pub typeshed: Option<RelativePathBuf>,
pub typeshed: Option<SystemPathBuf>,
/// Path to the Python installation from which Red Knot resolves type information and third-party dependencies.
///
/// Red Knot will search in the path's `site-packages` directories for type information and
/// third-party imports.
///
/// This option is commonly used to specify the path to a virtual environment.
#[serde(skip_serializing_if = "Option::is_none")]
pub python: Option<RelativePathBuf>,
// TODO: Rename to python, see https://github.com/astral-sh/ruff/issues/15530
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub venv_path: Option<SystemPathBuf>,
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SrcOptions {
/// The root of the project, used for finding first-party modules.
#[serde(skip_serializing_if = "Option::is_none")]
pub root: Option<RelativePathBuf>,
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", transparent)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Rules {
#[cfg_attr(feature = "schemars", schemars(with = "schema::Rules"))]
inner: FxHashMap<RangedValue<String>, RangedValue<Level>>,
}
impl FromIterator<(RangedValue<String>, RangedValue<Level>)> for Rules {
fn from_iter<T: IntoIterator<Item = (RangedValue<String>, RangedValue<Level>)>>(
iter: T,
) -> Self {
Self {
inner: iter.into_iter().collect(),
}
}
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct TerminalOptions {
/// The format to use for printing diagnostic messages.
///
/// Defaults to `full`.
#[serde(skip_serializing_if = "Option::is_none")]
pub output_format: Option<RangedValue<DiagnosticFormat>>,
/// Use exit code 1 if there are any warning-level diagnostics.
///
/// Defaults to `false`.
pub error_on_warning: Option<bool>,
}
#[cfg(feature = "schemars")]
mod schema {
use crate::DEFAULT_LINT_REGISTRY;
use red_knot_python_semantic::lint::Level;
use schemars::gen::SchemaGenerator;
use schemars::schema::{
InstanceType, Metadata, ObjectValidation, Schema, SchemaObject, SubschemaValidation,
};
use schemars::JsonSchema;
pub(super) struct Rules;
impl JsonSchema for Rules {
fn schema_name() -> String {
"Rules".to_string()
}
fn json_schema(gen: &mut SchemaGenerator) -> Schema {
let registry = &*DEFAULT_LINT_REGISTRY;
let level_schema = gen.subschema_for::<Level>();
let properties: schemars::Map<String, Schema> = registry
.lints()
.iter()
.map(|lint| {
(
lint.name().to_string(),
Schema::Object(SchemaObject {
metadata: Some(Box::new(Metadata {
title: Some(lint.summary().to_string()),
description: Some(lint.documentation()),
deprecated: lint.status.is_deprecated(),
default: Some(lint.default_level.to_string().into()),
..Metadata::default()
})),
subschemas: Some(Box::new(SubschemaValidation {
one_of: Some(vec![level_schema.clone()]),
..Default::default()
})),
..Default::default()
}),
)
})
.collect();
Schema::Object(SchemaObject {
instance_type: Some(InstanceType::Object.into()),
object: Some(Box::new(ObjectValidation {
properties,
// Allow unknown rules: Red Knot will warn about them.
// It gives a better experience when using an older Red Knot version because
// the schema will not deny rules that have been removed in newer versions.
additional_properties: Some(Box::new(level_schema)),
..ObjectValidation::default()
})),
..Default::default()
})
}
}
pub root: Option<SystemPathBuf>,
}
#[derive(Error, Debug)]
@@ -383,37 +115,3 @@ pub enum KnotTomlError {
#[error(transparent)]
TomlSyntax(#[from] toml::de::Error),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct OptionDiagnostic {
id: DiagnosticId,
message: String,
severity: Severity,
span: Option<Span>,
}
impl OptionDiagnostic {
pub fn new(id: DiagnosticId, message: String, severity: Severity) -> Self {
Self {
id,
message,
severity,
span: None,
}
}
#[must_use]
fn with_span(self, span: Option<Span>) -> Self {
OptionDiagnostic { span, ..self }
}
pub(crate) fn to_diagnostic(&self) -> Diagnostic {
if let Some(ref span) = self.span {
let mut diag = Diagnostic::new(self.id, self.severity, "");
diag.annotate(Annotation::primary(span.clone()).message(&self.message));
diag
} else {
Diagnostic::new(self.id, self.severity, &self.message)
}
}
}

View File

@@ -1,12 +1,10 @@
use crate::metadata::options::Options;
use crate::metadata::value::{RangedValue, ValueSource, ValueSourceGuard};
use pep440_rs::{release_specifiers_to_ranges, Version, VersionSpecifiers};
use ruff_python_ast::PythonVersion;
use pep440_rs::{Version, VersionSpecifiers};
use serde::{Deserialize, Deserializer, Serialize};
use std::collections::Bound;
use std::ops::Deref;
use thiserror::Error;
use crate::metadata::options::Options;
/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
#[serde(rename_all = "kebab-case")]
@@ -30,11 +28,7 @@ pub enum PyProjectError {
}
impl PyProject {
pub(crate) fn from_toml_str(
content: &str,
source: ValueSource,
) -> Result<Self, PyProjectError> {
let _guard = ValueSourceGuard::new(source, true);
pub(crate) fn from_toml_str(content: &str) -> Result<Self, PyProjectError> {
toml::from_str(content).map_err(PyProjectError::TomlSyntax)
}
}
@@ -49,78 +43,11 @@ pub struct Project {
///
/// Note: Intentionally optional to be more permissive during deserialization.
/// `PackageMetadata::from_pyproject` reports missing names.
pub name: Option<RangedValue<PackageName>>,
pub name: Option<PackageName>,
/// The version of the project
pub version: Option<RangedValue<Version>>,
pub version: Option<Version>,
/// The Python versions this project is compatible with.
pub requires_python: Option<RangedValue<VersionSpecifiers>>,
}
impl Project {
pub(super) fn resolve_requires_python_lower_bound(
&self,
) -> Result<Option<RangedValue<PythonVersion>>, ResolveRequiresPythonError> {
let Some(requires_python) = self.requires_python.as_ref() else {
return Ok(None);
};
tracing::debug!("Resolving requires-python constraint: `{requires_python}`");
let ranges = release_specifiers_to_ranges((**requires_python).clone());
let Some((lower, _)) = ranges.bounding_range() else {
return Ok(None);
};
let version = match lower {
// Ex) `>=3.10.1` -> `>=3.10`
Bound::Included(version) => version,
// Ex) `>3.10.1` -> `>=3.10` or `>3.10` -> `>=3.10`
// The second example looks obscure at first but it is required because
// `3.10.1 > 3.10` is true but we only have two digits here. So including 3.10 is the
// right move. Overall, using `>` without a patch release is most likely bogus.
Bound::Excluded(version) => version,
// Ex) `<3.10` or ``
Bound::Unbounded => {
return Err(ResolveRequiresPythonError::NoLowerBound(
requires_python.to_string(),
))
}
};
// Take the major and minor version
let mut versions = version.release().iter().take(2);
let Some(major) = versions.next().copied() else {
return Ok(None);
};
let minor = versions.next().copied().unwrap_or_default();
tracing::debug!("Resolved requires-python constraint to: {major}.{minor}");
let major =
u8::try_from(major).map_err(|_| ResolveRequiresPythonError::TooLargeMajor(major))?;
let minor =
u8::try_from(minor).map_err(|_| ResolveRequiresPythonError::TooLargeMinor(minor))?;
Ok(Some(
requires_python
.clone()
.map_value(|_| PythonVersion::from((major, minor))),
))
}
}
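// Worked examples for the resolution above (matching the `requires_python_*`
// tests later in this diff):
//
//   ">=3.12"      -> 3.12  (included lower bound)
//   ">=3.12.8"    -> 3.12  (only major.minor are kept)
//   ">3.12"       -> 3.12  (excluded bound treated like an included one)
//   ">= 3.13.0b0" -> 3.13  (pre-release lower bound)
//   "<3.12", ""   -> `ResolveRequiresPythonError::NoLowerBound`
//   ">=999.0"     -> `ResolveRequiresPythonError::TooLargeMajor`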
#[derive(Debug, Error)]
pub enum ResolveRequiresPythonError {
#[error("The major version `{0}` is larger than the maximum supported value 255")]
TooLargeMajor(u64),
#[error("The minor version `{0}` is larger than the maximum supported value 255")]
TooLargeMinor(u64),
#[error("value `{0}` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.")]
NoLowerBound(String),
pub requires_python: Option<VersionSpecifiers>,
}
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]

View File

@@ -1,55 +0,0 @@
use std::sync::Arc;
use red_knot_python_semantic::lint::RuleSelection;
use ruff_db::diagnostic::DiagnosticFormat;
/// The resolved [`super::Options`] for the project.
///
/// Unlike [`super::Options`], the struct has default values filled in and
/// uses representations that are optimized for reads (instead of preserving the source representation).
/// It's also not required that this structure precisely resembles the TOML schema, although
/// it's encouraged to use a similar structure.
///
/// It's worth considering adding a salsa query for specific settings to
/// limit the blast radius when only some settings change. For example,
/// changing the terminal settings shouldn't invalidate any core type-checking queries.
/// This can be achieved by adding a salsa query for the type-checking-specific settings.
///
/// Settings that are part of [`red_knot_python_semantic::ProgramSettings`] are not included here.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Settings {
rules: Arc<RuleSelection>,
terminal: TerminalSettings,
}
impl Settings {
pub fn new(rules: RuleSelection) -> Self {
Self {
rules: Arc::new(rules),
terminal: TerminalSettings::default(),
}
}
pub fn rules(&self) -> &RuleSelection {
&self.rules
}
pub fn to_rules(&self) -> Arc<RuleSelection> {
self.rules.clone()
}
pub fn terminal(&self) -> &TerminalSettings {
&self.terminal
}
pub fn set_terminal(&mut self, terminal: TerminalSettings) {
self.terminal = terminal;
}
}
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct TerminalSettings {
pub output_format: DiagnosticFormat,
pub error_on_warning: bool,
}

View File

@@ -1,346 +0,0 @@
use crate::combine::Combine;
use crate::Db;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_macros::Combine;
use ruff_text_size::{TextRange, TextSize};
use serde::{Deserialize, Deserializer};
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use toml::Spanned;
#[derive(Clone, Debug)]
pub enum ValueSource {
/// Value loaded from a project's configuration file.
///
/// Ideally, we'd use [`ruff_db::files::File`] but we can't because the database hasn't been
/// created when loading the configuration.
File(Arc<SystemPathBuf>),
/// The value comes from a CLI argument; whether it was passed as a short argument,
/// a long argument (`--extra-paths`), or via `--config key=value` is left open.
Cli,
}
impl ValueSource {
pub fn file(&self) -> Option<&SystemPath> {
match self {
ValueSource::File(path) => Some(&**path),
ValueSource::Cli => None,
}
}
}
thread_local! {
/// Serde doesn't provide any easy means to pass a value to a [`Deserialize`] implementation,
/// but we want to associate each deserialized [`RelativePath`] with the source from
/// which it originated. We use a thread local variable to work around this limitation.
///
/// Use the [`ValueSourceGuard`] to initialize the thread local before calling into any
/// deserialization code. It ensures that the thread local variable gets cleaned up
/// once deserialization is done (once the guard gets dropped).
static VALUE_SOURCE: RefCell<Option<(ValueSource, bool)>> = const { RefCell::new(None) };
}
/// Guard to safely change the [`VALUE_SOURCE`] for the current thread.
#[must_use]
pub(super) struct ValueSourceGuard {
prev_value: Option<(ValueSource, bool)>,
}
impl ValueSourceGuard {
pub(super) fn new(source: ValueSource, is_toml: bool) -> Self {
let prev = VALUE_SOURCE.replace(Some((source, is_toml)));
Self { prev_value: prev }
}
}
impl Drop for ValueSourceGuard {
fn drop(&mut self) {
VALUE_SOURCE.set(self.prev_value.take());
}
}
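A hedged sketch of how the guard is meant to wrap deserialization; `load_from_file` and the commented `Options` line are hypothetical callers, not part of this diff:

```rust
// Hypothetical caller, for illustration only. `Options` stands in for
// whatever `Deserialize` type contains `RangedValue` fields.
fn load_from_file(path: Arc<SystemPathBuf>, _toml_source: &str) {
    // Install the source for this thread; `true` marks TOML input,
    // so `RangedValue` deserialization will go through `toml::Spanned`.
    let _guard = ValueSourceGuard::new(ValueSource::File(path), true);
    // let options: Options = toml::from_str(_toml_source).unwrap();
    // The previous source is restored when `_guard` drops, even on unwind.
}
```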
/// A value that "remembers" where it comes from (source) and its range in source.
///
/// ## Equality, Hash, and Ordering
/// The equality, hash, and ordering are solely based on the value. They disregard the value's range
/// or source.
///
/// This ensures that two resolved configurations are identical even if the position of a value has changed
/// or if the values were loaded from different sources.
#[derive(Clone, serde::Serialize)]
#[serde(transparent)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct RangedValue<T> {
value: T,
#[serde(skip)]
source: ValueSource,
/// The byte range of `value` in `source`.
///
/// Can be `None` because not all sources support a range.
/// For example, arguments provided on the CLI won't have a range attached.
#[serde(skip)]
range: Option<TextRange>,
}
impl<T> RangedValue<T> {
pub fn new(value: T, source: ValueSource) -> Self {
Self::with_range(value, source, TextRange::default())
}
pub fn cli(value: T) -> Self {
Self::with_range(value, ValueSource::Cli, TextRange::default())
}
pub fn with_range(value: T, source: ValueSource, range: TextRange) -> Self {
Self {
value,
range: Some(range),
source,
}
}
pub fn range(&self) -> Option<TextRange> {
self.range
}
pub fn source(&self) -> &ValueSource {
&self.source
}
#[must_use]
pub fn with_source(mut self, source: ValueSource) -> Self {
self.source = source;
self
}
#[must_use]
pub fn map_value<R>(self, f: impl FnOnce(T) -> R) -> RangedValue<R> {
RangedValue {
value: f(self.value),
source: self.source,
range: self.range,
}
}
pub fn into_inner(self) -> T {
self.value
}
}
impl<T> Combine for RangedValue<T> {
fn combine(self, _other: Self) -> Self
where
Self: Sized,
{
self
}
fn combine_with(&mut self, _other: Self) {}
}
impl<T> IntoIterator for RangedValue<T>
where
T: IntoIterator,
{
type Item = T::Item;
type IntoIter = T::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
// The type already has an `iter` method thanks to `Deref`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a RangedValue<T>
where
&'a T: IntoIterator,
{
type Item = <&'a T as IntoIterator>::Item;
type IntoIter = <&'a T as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
// The type already has an `iter_mut` method thanks to `DerefMut`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a mut RangedValue<T>
where
&'a mut T: IntoIterator,
{
type Item = <&'a mut T as IntoIterator>::Item;
type IntoIter = <&'a mut T as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
impl<T> fmt::Debug for RangedValue<T>
where
T: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.value.fmt(f)
}
}
impl<T> fmt::Display for RangedValue<T>
where
T: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.value.fmt(f)
}
}
impl<T> Deref for RangedValue<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}
impl<T> DerefMut for RangedValue<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.value
}
}
impl<T, U: ?Sized> AsRef<U> for RangedValue<T>
where
T: AsRef<U>,
{
fn as_ref(&self) -> &U {
self.value.as_ref()
}
}
impl<T: PartialEq> PartialEq for RangedValue<T> {
fn eq(&self, other: &Self) -> bool {
self.value.eq(&other.value)
}
}
impl<T: PartialEq<T>> PartialEq<T> for RangedValue<T> {
fn eq(&self, other: &T) -> bool {
self.value.eq(other)
}
}
impl<T: Eq> Eq for RangedValue<T> {}
impl<T: Hash> Hash for RangedValue<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.value.hash(state);
}
}
impl<T: PartialOrd> PartialOrd for RangedValue<T> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.value.partial_cmp(&other.value)
}
}
impl<T: PartialOrd<T>> PartialOrd<T> for RangedValue<T> {
fn partial_cmp(&self, other: &T) -> Option<Ordering> {
self.value.partial_cmp(other)
}
}
impl<T: Ord> Ord for RangedValue<T> {
fn cmp(&self, other: &Self) -> Ordering {
self.value.cmp(&other.value)
}
}
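A minimal sketch of the documented semantics, assuming `SystemPathBuf: From<&str>`: two values that differ only in origin compare equal, so a resolved configuration is unaffected by where each value came from:

```rust
let from_cli = RangedValue::cli(42);
let from_file = RangedValue::new(
    42,
    ValueSource::File(Arc::new(SystemPathBuf::from("pyproject.toml"))),
);
// Source and range are ignored by `PartialEq`, `Hash`, and `Ord`.
assert_eq!(from_cli, from_file);
```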
impl<'de, T> Deserialize<'de> for RangedValue<T>
where
T: Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
VALUE_SOURCE.with_borrow(|source| {
let (source, has_span) = source.clone().unwrap();
if has_span {
let spanned: Spanned<T> = Spanned::deserialize(deserializer)?;
let span = spanned.span();
let range = TextRange::new(
TextSize::try_from(span.start)
.expect("Configuration file to be smaller than 4GB"),
TextSize::try_from(span.end)
.expect("Configuration file to be smaller than 4GB"),
);
Ok(Self::with_range(spanned.into_inner(), source, range))
} else {
Ok(Self::new(T::deserialize(deserializer)?, source))
}
})
}
}
/// A possibly relative path in a configuration file.
///
/// Relative paths in configuration files or from CLI options
/// require different anchoring:
///
/// * CLI: The path is relative to the current working directory
/// * Configuration file: The path is relative to the project's root.
#[derive(
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
PartialEq,
Eq,
PartialOrd,
Ord,
Hash,
Combine,
)]
#[serde(transparent)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct RelativePathBuf(RangedValue<SystemPathBuf>);
impl RelativePathBuf {
pub fn new(path: impl AsRef<SystemPath>, source: ValueSource) -> Self {
Self(RangedValue::new(path.as_ref().to_path_buf(), source))
}
pub fn cli(path: impl AsRef<SystemPath>) -> Self {
Self::new(path, ValueSource::Cli)
}
/// Returns the relative path as specified by the user.
pub fn path(&self) -> &SystemPath {
&self.0
}
/// Returns the owned relative path.
pub fn into_path_buf(self) -> SystemPathBuf {
self.0.into_inner()
}
/// Resolves the absolute path for `self` based on its origin.
pub fn absolute_with_db(&self, db: &dyn Db) -> SystemPathBuf {
self.absolute(db.project().root(db), db.system())
}
/// Resolves the absolute path for `self` based on its origin.
pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> SystemPathBuf {
let relative_to = match &self.0.source {
ValueSource::File(_) => project_root,
ValueSource::Cli => system.current_directory(),
};
SystemPath::absolute(&self.0, relative_to)
}
}
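A sketch of the anchoring rule above with illustrative paths; `project_root` and `system` stand in for a real project root and `System` implementation:

```rust
let config_path = Arc::new(SystemPathBuf::from("/app/pyproject.toml"));
let from_file = RelativePathBuf::new("src", ValueSource::File(config_path));
let from_cli = RelativePathBuf::cli("src");
// from_file.absolute(project_root, system) anchors at `project_root`,
//   e.g. `/app/src`;
// from_cli.absolute(project_root, system) anchors at the current
//   working directory reported by `system` instead.
```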

View File

@@ -0,0 +1,14 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),
),
)

View File

@@ -0,0 +1,14 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: sub_project
---
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),
),
)

View File

@@ -0,0 +1,18 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
environment: Some(EnvironmentOptions(
r#python-version: Some("3.10"),
r#python-platform: None,
r#extra-paths: None,
typeshed: None,
r#venv-path: None,
)),
src: None,
),
)

View File

@@ -0,0 +1,12 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: sub_project
---
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(
environment: None,
src: None,
),
)

View File

@@ -0,0 +1,14 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
name: Name("super-app"),
root: "/app",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),
),
)

View File

@@ -0,0 +1,12 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: project
---
ProjectMetadata(
name: Name("backend"),
root: "/app",
options: Options(
environment: None,
src: None,
),
)

View File

@@ -0,0 +1,12 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: project
---
ProjectMetadata(
name: Name("app"),
root: "/app",
options: Options(
environment: None,
src: None,
),
)

View File

@@ -1,256 +0,0 @@
use crate::{Db, IOErrorDiagnostic, IOErrorKind, Project};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::walk_directory::{ErrorKind, WalkDirectoryBuilder, WalkState};
use ruff_db::system::{FileType, SystemPath, SystemPathBuf};
use ruff_python_ast::PySourceType;
use rustc_hash::{FxBuildHasher, FxHashSet};
use std::path::PathBuf;
use thiserror::Error;
/// Filter that decides which files are included in the project.
///
/// In the future, this will hold a reference to the `include` and `exclude` patterns.
///
/// This struct mainly exists because `dyn Db` isn't `Send` or `Sync`, making it impossible
/// to access fields from within the walker.
#[derive(Default, Debug)]
pub(crate) struct ProjectFilesFilter<'a> {
/// The same as [`Project::included_paths_or_root`].
included_paths: &'a [SystemPathBuf],
/// The filter skips checking if the path is in `included_paths` if set to `true`.
///
/// Skipping this check is useful when the walker only walks over `included_paths`.
skip_included_paths: bool,
}
impl<'a> ProjectFilesFilter<'a> {
pub(crate) fn from_project(db: &'a dyn Db, project: Project) -> Self {
Self {
included_paths: project.included_paths_or_root(db),
skip_included_paths: false,
}
}
/// Returns `true` if a file is part of the project and included in the paths to check.
///
/// A file is included in the checked files if it is a sub path of the project's root
/// (when no CLI path arguments are specified) or if it is a sub path of any path provided on the CLI (`knot check <paths>`) AND:
///
/// * It matches a positive `include` pattern and isn't excluded by a later negative `include` pattern.
/// * It doesn't match a positive `exclude` pattern or is re-included by a later negative `exclude` pattern.
///
/// ## Note
///
/// This method may return `true` for files that don't end up being included when walking the
/// project tree because it doesn't consider `.gitignore` and other ignore files when deciding
/// if a file's included.
pub(crate) fn is_included(&self, path: &SystemPath) -> bool {
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum CheckPathMatch {
/// The path is a partial match of the checked path (it's a sub path)
Partial,
/// The path matches a check path exactly.
Full,
}
let m = if self.skip_included_paths {
Some(CheckPathMatch::Partial)
} else {
self.included_paths
.iter()
.filter_map(|included_path| {
if let Ok(relative_path) = path.strip_prefix(included_path) {
// Exact matches are always included
if relative_path.as_str().is_empty() {
Some(CheckPathMatch::Full)
} else {
Some(CheckPathMatch::Partial)
}
} else {
None
}
})
.max()
};
match m {
None => false,
Some(CheckPathMatch::Partial) => {
// TODO: For partial matches, only include the file if it is included by the project's include/exclude settings.
true
}
Some(CheckPathMatch::Full) => true,
}
}
}
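A within-crate sketch of this ranking (the struct literal only compiles inside this module, and the paths are illustrative): an exact hit on an included path counts as `Full`, a sub-path as `Partial`, and anything else is excluded:

```rust
let included = vec![SystemPathBuf::from("/app/src")];
let filter = ProjectFilesFilter {
    included_paths: &included,
    skip_included_paths: false,
};
assert!(filter.is_included(SystemPath::new("/app/src"))); // Full match
assert!(filter.is_included(SystemPath::new("/app/src/a.py"))); // Partial match
assert!(!filter.is_included(SystemPath::new("/app/tests/b.py"))); // No match
```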
pub(crate) struct ProjectFilesWalker<'a> {
walker: WalkDirectoryBuilder,
filter: ProjectFilesFilter<'a>,
}
impl<'a> ProjectFilesWalker<'a> {
pub(crate) fn new(db: &'a dyn Db) -> Self {
let project = db.project();
let mut filter = ProjectFilesFilter::from_project(db, project);
// It's unnecessary to filter on included paths because it only iterates over those to start with.
filter.skip_included_paths = true;
Self::from_paths(db, project.included_paths_or_root(db), filter)
.expect("included_paths_or_root to never return an empty iterator")
}
/// Creates a walker for indexing the project files incrementally.
///
/// The main difference to a full project walk is that `paths` may contain paths
/// that aren't part of the included files.
pub(crate) fn incremental<P>(db: &'a dyn Db, paths: impl IntoIterator<Item = P>) -> Option<Self>
where
P: AsRef<SystemPath>,
{
let project = db.project();
let filter = ProjectFilesFilter::from_project(db, project);
Self::from_paths(db, paths, filter)
}
fn from_paths<P>(
db: &'a dyn Db,
paths: impl IntoIterator<Item = P>,
filter: ProjectFilesFilter<'a>,
) -> Option<Self>
where
P: AsRef<SystemPath>,
{
let mut paths = paths.into_iter();
let mut walker = db.system().walk_directory(paths.next()?.as_ref());
for path in paths {
walker = walker.add(path);
}
Some(Self { walker, filter })
}
/// Walks the project paths and collects the paths of all files that
/// are included in the project.
pub(crate) fn walk_paths(self) -> (Vec<SystemPathBuf>, Vec<IOErrorDiagnostic>) {
let paths = std::sync::Mutex::new(Vec::new());
let diagnostics = std::sync::Mutex::new(Vec::new());
self.walker.run(|| {
Box::new(|entry| {
match entry {
Ok(entry) => {
if !self.filter.is_included(entry.path()) {
tracing::debug!("Ignoring not-included path: {}", entry.path());
return WalkState::Skip;
}
// Skip over any non-Python files to avoid creating too many entries in `Files`.
match entry.file_type() {
FileType::File => {
if entry
.path()
.extension()
.and_then(PySourceType::try_from_extension)
.is_some()
{
let mut paths = paths.lock().unwrap();
paths.push(entry.into_path());
}
}
FileType::Directory | FileType::Symlink => {}
}
}
Err(error) => match error.kind() {
ErrorKind::Loop { .. } => {
unreachable!("Loops shouldn't be possible without following symlinks.")
}
ErrorKind::Io { path, err } => {
let mut diagnostics = diagnostics.lock().unwrap();
let error = if let Some(path) = path {
WalkError::IOPathError {
path: path.clone(),
error: err.to_string(),
}
} else {
WalkError::IOError {
error: err.to_string(),
}
};
diagnostics.push(IOErrorDiagnostic {
file: None,
error: IOErrorKind::Walk(error),
});
}
ErrorKind::NonUtf8Path { path } => {
diagnostics.lock().unwrap().push(IOErrorDiagnostic {
file: None,
error: IOErrorKind::Walk(WalkError::NonUtf8Path {
path: path.clone(),
}),
});
}
},
}
WalkState::Continue
})
});
(
paths.into_inner().unwrap(),
diagnostics.into_inner().unwrap(),
)
}
pub(crate) fn collect_vec(self, db: &dyn Db) -> (Vec<File>, Vec<IOErrorDiagnostic>) {
let (paths, diagnostics) = self.walk_paths();
(
paths
.into_iter()
.filter_map(move |path| {
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
// We can ignore this.
system_path_to_file(db.upcast(), &path).ok()
})
.collect(),
diagnostics,
)
}
pub(crate) fn collect_set(self, db: &dyn Db) -> (FxHashSet<File>, Vec<IOErrorDiagnostic>) {
let (paths, diagnostics) = self.walk_paths();
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
for path in paths {
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
files.insert(file);
}
}
(files, diagnostics)
}
}
#[derive(Error, Debug, Clone)]
pub(crate) enum WalkError {
#[error("`{path}`: {error}")]
IOPathError { path: SystemPathBuf, error: String },
#[error("Failed to walk project directory: {error}")]
IOError { error: String },
#[error("`{path}` is not a valid UTF-8 path")]
NonUtf8Path { path: PathBuf },
}

View File

@@ -6,7 +6,7 @@ use tracing::info;
use red_knot_python_semantic::system_module_search_paths;
use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_db::Upcast;
use ruff_db::{Db as _, Upcast};
use crate::db::{Db, ProjectDatabase};
use crate::watch::Watcher;
@@ -42,9 +42,9 @@ impl ProjectWatcher {
pub fn update(&mut self, db: &ProjectDatabase) {
let search_paths: Vec<_> = system_module_search_paths(db.upcast()).collect();
let project_path = db.project().root(db);
let project_path = db.project().root(db).to_path_buf();
let new_cache_key = Self::compute_cache_key(project_path, &search_paths);
let new_cache_key = Self::compute_cache_key(&project_path, &search_paths);
if self.cache_key == Some(new_cache_key) {
return;
@@ -68,47 +68,31 @@ impl ProjectWatcher {
self.has_errored_paths = false;
let config_paths = db
.project()
.metadata(db)
.extra_configuration_paths()
.iter()
.map(SystemPathBuf::as_path);
// Watch both the project root and any paths provided by the user on the CLI (removing any redundant nested paths).
// This is necessary to observe changes to files that are outside the project root.
// We always need to watch the project root to observe changes to its configuration.
let included_paths = ruff_db::system::deduplicate_nested_paths(
std::iter::once(project_path).chain(
db.project()
.included_paths_list(db)
.iter()
.map(SystemPathBuf::as_path),
),
);
let project_path = db
.system()
.canonicalize_path(&project_path)
.unwrap_or(project_path);
// Find the non-overlapping module search paths and filter out paths that are already covered by the project.
// Module search paths are already canonicalized.
let unique_module_paths = ruff_db::system::deduplicate_nested_paths(
search_paths
.into_iter()
.filter(|path| !path.starts_with(project_path)),
);
.filter(|path| !path.starts_with(&project_path)),
)
.map(SystemPath::to_path_buf);
// Now add the new paths, first starting with the project path and then
// adding the library search paths, and finally the paths for configurations.
for path in included_paths
.chain(unique_module_paths)
.chain(config_paths)
{
// adding the library search paths.
for path in std::iter::once(project_path).chain(unique_module_paths) {
// Log a warning. It's not worth aborting if registering a single folder fails because
// Ruff otherwise still works as expected.
if let Err(error) = self.watcher.watch(path) {
if let Err(error) = self.watcher.watch(&path) {
// TODO: Log a user-facing warning.
tracing::warn!("Failed to setup watcher for path `{path}`: {error}. You have to restart Ruff after making changes to files under this path or you might see stale results.");
self.has_errored_paths = true;
} else {
self.watched_paths.push(path.to_path_buf());
self.watched_paths.push(path);
}
}
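For context, a sketch of the deduplication this loop relies on, assuming `deduplicate_nested_paths` accepts any iterator of paths as its use above suggests: nested paths collapse into their ancestor so each directory tree is registered with the watcher only once:

```rust
let paths = [
    SystemPath::new("/app"),
    SystemPath::new("/app/src"), // nested under `/app`, so dropped
    SystemPath::new("/lib"),
];
let unique: Vec<_> = ruff_db::system::deduplicate_nested_paths(paths).collect();
// unique == ["/app", "/lib"]; each tree gets a single watch registration.
```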

View File

@@ -1,14 +1,12 @@
use anyhow::{anyhow, Context};
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_python_semantic::{HasType, SemanticModel};
use red_knot_python_semantic::{HasTy, SemanticModel};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
use ruff_python_ast::visitor::source_order;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::{
self as ast, Alias, Comprehension, Expr, Parameter, ParameterWithDefault, Stmt,
};
use ruff_python_ast::{self as ast, Alias, Expr, Parameter, ParameterWithDefault, Stmt};
fn setup_db(project_root: &SystemPath, system: TestSystem) -> anyhow::Result<ProjectDatabase> {
let project = ProjectMetadata::discover(project_root, &system)?;
@@ -119,7 +117,7 @@ fn run_corpus_tests(pattern: &str) -> anyhow::Result<()> {
let code = std::fs::read_to_string(source)?;
let mut check_with_file_name = |path: &SystemPath| {
memory_fs.write_file_all(path, &code).unwrap();
memory_fs.write_file(path, &code).unwrap();
File::sync_path(&mut db, path);
// this test is only asserting that we can pull every expression type without a panic
@@ -199,10 +197,10 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
fn visit_stmt(&mut self, stmt: &Stmt) {
match stmt {
Stmt::FunctionDef(function) => {
let _ty = function.inferred_type(&self.model);
let _ty = function.ty(&self.model);
}
Stmt::ClassDef(class) => {
let _ty = class.inferred_type(&self.model);
let _ty = class.ty(&self.model);
}
Stmt::Assign(assign) => {
for target in &assign.targets {
@@ -218,17 +216,6 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
self.visit_body(&for_stmt.orelse);
return;
}
Stmt::With(with_stmt) => {
for item in &with_stmt.items {
if let Some(target) = &item.optional_vars {
self.visit_target(target);
}
self.visit_expr(&item.context_expr);
}
self.visit_body(&with_stmt.body);
return;
}
Stmt::AnnAssign(_)
| Stmt::Return(_)
| Stmt::Delete(_)
@@ -236,6 +223,7 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
| Stmt::TypeAlias(_)
| Stmt::While(_)
| Stmt::If(_)
| Stmt::With(_)
| Stmt::Match(_)
| Stmt::Raise(_)
| Stmt::Try(_)
@@ -255,33 +243,25 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
}
fn visit_expr(&mut self, expr: &Expr) {
let _ty = expr.inferred_type(&self.model);
let _ty = expr.ty(&self.model);
source_order::walk_expr(self, expr);
}
fn visit_comprehension(&mut self, comprehension: &Comprehension) {
self.visit_expr(&comprehension.iter);
self.visit_target(&comprehension.target);
for if_expr in &comprehension.ifs {
self.visit_expr(if_expr);
}
}
fn visit_parameter(&mut self, parameter: &Parameter) {
let _ty = parameter.inferred_type(&self.model);
let _ty = parameter.ty(&self.model);
source_order::walk_parameter(self, parameter);
}
fn visit_parameter_with_default(&mut self, parameter_with_default: &ParameterWithDefault) {
let _ty = parameter_with_default.inferred_type(&self.model);
let _ty = parameter_with_default.ty(&self.model);
source_order::walk_parameter_with_default(self, parameter_with_default);
}
fn visit_alias(&mut self, alias: &Alias) {
let _ty = alias.inferred_type(&self.model);
let _ty = alias.ty(&self.model);
source_order::walk_alias(self, alias);
}
@@ -289,4 +269,16 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
/// Whether or not the .py/.pyi version of this file is expected to fail
#[rustfmt::skip]
const KNOWN_FAILURES: &[(&str, bool, bool)] = &[];
const KNOWN_FAILURES: &[(&str, bool, bool)] = &[
// related to circular references in class definitions
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_26.py", true, true),
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_27.py", true, true),
("crates/ruff_linter/resources/test/fixtures/pyflakes/F811_19.py", true, false),
("crates/ruff_linter/resources/test/fixtures/pyupgrade/UP039.py", true, false),
// related to circular references in type aliases (salsa cycle panic):
("crates/ruff_python_parser/resources/inline/err/type_alias_invalid_value_expr.py", true, true),
("crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TC008.py", true, true),
// related to circular references in f-string annotations (invalid syntax)
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_15.py", true, true),
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_14.py", false, true),
];

View File

@@ -12,9 +12,9 @@ license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_index = { workspace = true, features = ["salsa"] }
ruff_index = { workspace = true }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true, features = ["salsa"] }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
@@ -31,22 +31,19 @@ drop_bomb = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true, features = ["compact_str"] }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }
memchr = { workspace = true }
strum = { workspace = true}
strum_macros = { workspace = true}
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing", "os"] }
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }
red_knot_test = { workspace = true }
red_knot_vendored = { workspace = true }
@@ -59,7 +56,7 @@ quickcheck = { version = "1.0.3", default-features = false }
quickcheck_macros = { version = "1.0.0" }
[features]
serde = ["ruff_db/serde", "dep:serde", "ruff_python_ast/serde"]
serde = ["ruff_db/serde", "dep:serde"]
[lints]
workspace = true

View File

@@ -61,13 +61,7 @@ class MDTestRunner:
return False
# Run it again with 'json' format to find the mdtest executable:
try:
json_output = self._run_cargo_test(message_format="json")
except subprocess.CalledProcessError as _:
# `cargo test` can still fail if something changed in between the two runs.
# Here we don't have a human-readable output, so just show a generic message:
self.console.print("[red]Error[/red]: Failed to compile tests")
return False
json_output = self._run_cargo_test(message_format="json")
if json_output:
self._get_executable_path_from_json(json_output)
@@ -155,7 +149,6 @@ class MDTestRunner:
def watch(self) -> Never:
self._recompile_tests("Compiling tests...", message_on_success=False)
self._run_mdtest()
self.console.print("[dim]Ready to watch for changes...[/dim]")
for changes in watch(CRATE_ROOT):

View File

@@ -29,7 +29,7 @@ It is invalid to parameterize `Annotated` with less than two arguments.
```py
from typing_extensions import Annotated
# error: [invalid-type-form] "`typing.Annotated` requires at least two arguments when used in a type expression"
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
def _(x: Annotated):
reveal_type(x) # revealed: Unknown
@@ -39,11 +39,11 @@ def _(flag: bool):
else:
X = bool
# error: [invalid-type-form] "`typing.Annotated` requires at least two arguments when used in a type expression"
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
def f(y: X):
reveal_type(y) # revealed: Unknown | bool
# error: [invalid-type-form] "`typing.Annotated` requires at least two arguments when used in a type expression"
# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
def _(x: Annotated | bool):
reveal_type(x) # revealed: Unknown | bool
@@ -73,10 +73,12 @@ Inheriting from `Annotated[T, ...]` is equivalent to inheriting from `T` itself.
```py
from typing_extensions import Annotated
# TODO: False positive
# error: [invalid-base]
class C(Annotated[int, "foo"]): ...
# TODO: Should be `tuple[Literal[C], Literal[int], Literal[object]]`
reveal_type(C.__mro__) # revealed: tuple[Literal[C], @Todo(Inference of subscript on special form), Literal[object]]
reveal_type(C.__mro__) # revealed: tuple[Literal[C], Unknown, Literal[object]]
```
### Not parameterized

View File

@@ -1,307 +0,0 @@
# Callable
References:
- <https://typing.python.org/en/latest/spec/callables.html#callable>
Note that `typing.Callable` is deprecated at runtime, in favour of `collections.abc.Callable` (see:
<https://docs.python.org/3/library/typing.html#deprecated-aliases>). However, removal of
`typing.Callable` is not currently planned, and the canonical location of the stub for the symbol in
typeshed is still `typing.pyi`.
## Invalid forms
The `Callable` special form requires _exactly_ two arguments where the first argument is either a
parameter type list, parameter specification, `typing.Concatenate`, or `...` and the second argument
is the return type. Here, we explore various invalid forms.
### Empty
A bare `Callable` without any type arguments:
```py
from typing import Callable
def _(c: Callable):
reveal_type(c) # revealed: (...) -> Unknown
```
### Invalid parameter type argument
When it's not a list:
```py
from typing import Callable
# error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
def _(c: Callable[int, str]):
reveal_type(c) # revealed: (...) -> Unknown
```
Or, when it's a literal type:
```py
# error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
def _(c: Callable[42, str]):
reveal_type(c) # revealed: (...) -> Unknown
```
Or, when one of the parameter types in the list is invalid:
```py
# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
# error: [invalid-type-form] "Boolean literals are not allowed in this context in a type expression"
def _(c: Callable[[int, 42, str, False], None]):
# revealed: (int, Unknown, str, Unknown, /) -> None
reveal_type(c)
```
### Missing return type
Using a parameter list:
```py
from typing import Callable
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
def _(c: Callable[[int, str]]):
reveal_type(c) # revealed: (...) -> Unknown
```
Or, an ellipsis:
```py
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
def _(c: Callable[...]):
reveal_type(c) # revealed: (...) -> Unknown
```
Or something else that's invalid in a type expression generally:
```py
# fmt: off
def _(c: Callable[ # error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
{1, 2} # error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
]
):
reveal_type(c) # revealed: (...) -> Unknown
```
### More than two arguments
We can't reliably infer the callable type if there are more than two arguments because we don't
know which argument corresponds to the parameters and which to the return type.
```py
from typing import Callable
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
def _(c: Callable[[int], str, str]):
reveal_type(c) # revealed: (...) -> Unknown
```
### List as the second argument
```py
from typing import Callable
# fmt: off
def _(c: Callable[
int, # error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
[str] # error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
]
):
reveal_type(c) # revealed: (...) -> Unknown
```
### List as both arguments
```py
from typing import Callable
# error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
def _(c: Callable[[int], [str]]):
reveal_type(c) # revealed: (int, /) -> Unknown
```
### Three list arguments
```py
from typing import Callable
# fmt: off
def _(c: Callable[ # error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
[int],
[str], # error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
[bytes] # error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
]
):
reveal_type(c) # revealed: (...) -> Unknown
```
## Simple
A simple `Callable` with multiple parameters and a return type:
```py
from typing import Callable
def _(c: Callable[[int, str], int]):
reveal_type(c) # revealed: (int, str, /) -> int
```
## Union
```py
from typing import Callable, Union
def _(
c: Callable[[Union[int, str]], int] | None,
d: None | Callable[[Union[int, str]], int],
e: None | Callable[[Union[int, str]], int] | int,
):
reveal_type(c) # revealed: ((int | str, /) -> int) | None
reveal_type(d) # revealed: None | ((int | str, /) -> int)
reveal_type(e) # revealed: None | ((int | str, /) -> int) | int
```
## Intersection
```py
from typing import Callable, Union
from knot_extensions import Intersection, Not
def _(
c: Intersection[Callable[[Union[int, str]], int], int],
d: Intersection[int, Callable[[Union[int, str]], int]],
e: Intersection[int, Callable[[Union[int, str]], int], str],
f: Intersection[Not[Callable[[int, str], Intersection[int, str]]]],
):
reveal_type(c) # revealed: ((int | str, /) -> int) & int
reveal_type(d) # revealed: int & ((int | str, /) -> int)
reveal_type(e) # revealed: int & ((int | str, /) -> int) & str
reveal_type(f) # revealed: ~((int, str, /) -> int & str)
```
## Nested
A nested `Callable` as one of the parameter types:
```py
from typing import Callable
def _(c: Callable[[Callable[[int], str]], int]):
reveal_type(c) # revealed: ((int, /) -> str, /) -> int
```
And, as the return type:
```py
def _(c: Callable[[int, str], Callable[[int], int]]):
reveal_type(c) # revealed: (int, str, /) -> (int, /) -> int
```
## Gradual form
The `Callable` special form supports the use of `...` in place of the list of parameter types. This
is a [gradual form] indicating that the type is consistent with any input signature:
```py
from typing import Callable
def gradual_form(c: Callable[..., str]):
reveal_type(c) # revealed: (...) -> str
```
## Using `typing.Concatenate`
Using `Concatenate` as the first argument to `Callable`:
```py
from typing_extensions import Callable, Concatenate
def _(c: Callable[Concatenate[int, str, ...], int]):
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
```
And, as one of the parameter types:
```py
def _(c: Callable[[Concatenate[int, str, ...], int], int]):
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
```
## Using `typing.ParamSpec`
```toml
[environment]
python-version = "3.12"
```
Using a `ParamSpec` in a `Callable` annotation:
```py
from typing_extensions import Callable
# TODO: Not an error; remove once `ParamSpec` is supported
# error: [invalid-type-form]
def _[**P1](c: Callable[P1, int]):
reveal_type(c) # revealed: (...) -> Unknown
```
And, using the legacy syntax:
```py
from typing_extensions import ParamSpec
P2 = ParamSpec("P2")
# TODO: Not an error; remove once `ParamSpec` is supported
# error: [invalid-type-form]
def _(c: Callable[P2, int]):
reveal_type(c) # revealed: (...) -> Unknown
```
## Using `typing.Unpack`
Using the unpack operator (`*`):
```py
from typing_extensions import Callable, TypeVarTuple
Ts = TypeVarTuple("Ts")
def _(c: Callable[[int, *Ts], int]):
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
```
And, using the legacy syntax using `Unpack`:
```py
from typing_extensions import Unpack
def _(c: Callable[[int, Unpack[Ts]], int]):
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
```
## Member lookup
```py
from typing import Callable
def _(c: Callable[[int], int]):
reveal_type(c.__init__) # revealed: def __init__(self) -> None
reveal_type(c.__class__) # revealed: type
# TODO: The member lookup for `Callable` uses `object` which does not have a `__call__`
# attribute. We could special case `__call__` in this context. Refer to
# https://github.com/astral-sh/ruff/pull/16493#discussion_r1985098508 for more details.
# error: [unresolved-attribute] "Type `(int, /) -> int` has no attribute `__call__`"
reveal_type(c.__call__) # revealed: Unknown
```
[gradual form]: https://typing.python.org/en/latest/spec/glossary.html#term-gradual-form

View File

@@ -1,179 +0,0 @@
# Deferred annotations
## Deferred annotations in stubs always resolve
`mod.pyi`:
```pyi
def get_foo() -> Foo: ...
class Foo: ...
```
```py
from mod import get_foo
reveal_type(get_foo()) # revealed: Foo
```
## Deferred annotations in regular code fail
In (regular) source files, annotations are *not* deferred. This also tests that imports from
`__future__` that are not `annotations` are ignored.
```py
from __future__ import with_statement as annotations
# error: [unresolved-reference]
def get_foo() -> Foo: ...
class Foo: ...
reveal_type(get_foo()) # revealed: Unknown
```
## Deferred annotations in regular code with `__future__.annotations`
If `__future__.annotations` is imported, annotations *are* deferred.
```py
from __future__ import annotations
def get_foo() -> Foo:
return Foo()
class Foo: ...
reveal_type(get_foo()) # revealed: Foo
```
## Deferred self-reference annotations in a class definition
```toml
[environment]
python-version = "3.12"
```
```py
from __future__ import annotations
class Foo:
this: Foo
# error: [unresolved-reference]
_ = Foo()
# error: [unresolved-reference]
[Foo for _ in range(1)]
a = int
def f(self, x: Foo):
reveal_type(x) # revealed: Foo
def g(self) -> Foo:
_: Foo = self
return self
class Bar:
foo: Foo
b = int
def f(self, x: Foo):
return self
# error: [unresolved-reference]
def g(self) -> Bar:
return self
# error: [unresolved-reference]
def h[T: Bar](self):
pass
class Baz[T: Foo]:
pass
# error: [unresolved-reference]
type S = a
type T = b
def h[T: Bar]():
# error: [unresolved-reference]
return Bar()
type Baz = Foo
```
## Non-deferred self-reference annotations in a class definition
```toml
[environment]
python-version = "3.12"
```
```py
class Foo:
# error: [unresolved-reference]
this: Foo
ok: "Foo"
# error: [unresolved-reference]
_ = Foo()
# error: [unresolved-reference]
[Foo for _ in range(1)]
a = int
# error: [unresolved-reference]
def f(self, x: Foo):
reveal_type(x) # revealed: Unknown
# error: [unresolved-reference]
def g(self) -> Foo:
_: Foo = self
return self
class Bar:
# error: [unresolved-reference]
foo: Foo
b = int
# error: [unresolved-reference]
def f(self, x: Foo):
return self
# error: [unresolved-reference]
def g(self) -> Bar:
return self
# error: [unresolved-reference]
def h[T: Bar](self):
pass
class Baz[T: Foo]:
pass
# error: [unresolved-reference]
type S = a
type T = b
def h[T: Bar]():
# error: [unresolved-reference]
return Bar()
type Qux = Foo
def _():
class C:
# error: [unresolved-reference]
def f(self) -> C:
return self
```
## Base class references
### Not deferred by __future__.annotations
```py
from __future__ import annotations
class A(B): # error: [unresolved-reference]
pass
class B:
pass
```
### Deferred in stub files
```pyi
class A(B): ...
class B: ...
```

View File

@@ -1,90 +0,0 @@
# Special cases for int/float/complex in annotations
In order to support common use cases, an annotation of `float` actually means `int | float`, and an
annotation of `complex` actually means `int | float | complex`. See
[the specification](https://typing.python.org/en/latest/spec/special-types.html#special-cases-for-float-and-complex)
## float
An annotation of `float` means `int | float`, so `int` is assignable to it:
```py
def takes_float(x: float):
pass
def passes_int_to_float(x: int):
# no error!
takes_float(x)
```
It also applies to variable annotations:
```py
def assigns_int_to_float(x: int):
# no error!
y: float = x
```
It doesn't work the other way around:
```py
def takes_int(x: int):
pass
def passes_float_to_int(x: float):
# error: [invalid-argument-type]
takes_int(x)
def assigns_float_to_int(x: float):
# error: [invalid-assignment]
y: int = x
```
Unlike other type checkers, we choose not to obfuscate this special case by displaying `int | float`
as just `float`; we display the actual type:
```py
def f(x: float):
reveal_type(x) # revealed: int | float
```
## complex
An annotation of `complex` means `int | float | complex`, so `int` and `float` are both assignable
to it (but not the other way around):
```py
def takes_complex(x: complex):
pass
def passes_to_complex(x: float, y: int):
# no errors!
takes_complex(x)
takes_complex(y)
def assigns_to_complex(x: float, y: int):
# no errors!
a: complex = x
b: complex = y
def takes_int(x: int):
pass
def takes_float(x: float):
pass
def passes_complex(x: complex):
# error: [invalid-argument-type]
takes_int(x)
# error: [invalid-argument-type]
takes_float(x)
def assigns_complex(x: complex):
# error: [invalid-assignment]
y: int = x
# error: [invalid-assignment]
z: float = x
def f(x: complex):
reveal_type(x) # revealed: int | float | complex
```

View File

@@ -1,114 +0,0 @@
# Tests for invalid types in type expressions
## Invalid types are rejected
Many types are illegal in the context of a type expression:
```py
import typing
from knot_extensions import AlwaysTruthy, AlwaysFalsy
from typing_extensions import Literal, Never
class A: ...
def _(
a: type[int],
b: AlwaysTruthy,
c: AlwaysFalsy,
d: Literal[True],
e: Literal["bar"],
f: Literal[b"foo"],
g: tuple[int, str],
h: Never,
i: int,
j: A,
):
def foo(): ...
def invalid(
a_: a, # error: [invalid-type-form] "Variable of type `type[int]` is not allowed in a type expression"
b_: b, # error: [invalid-type-form]
c_: c, # error: [invalid-type-form]
d_: d, # error: [invalid-type-form]
e_: e, # error: [invalid-type-form]
f_: f, # error: [invalid-type-form]
g_: g, # error: [invalid-type-form]
h_: h, # error: [invalid-type-form]
i_: typing, # error: [invalid-type-form]
j_: foo, # error: [invalid-type-form]
k_: i, # error: [invalid-type-form] "Variable of type `int` is not allowed in a type expression"
l_: j, # error: [invalid-type-form] "Variable of type `A` is not allowed in a type expression"
):
reveal_type(a_) # revealed: Unknown
reveal_type(b_) # revealed: Unknown
reveal_type(c_) # revealed: Unknown
reveal_type(d_) # revealed: Unknown
reveal_type(e_) # revealed: Unknown
reveal_type(f_) # revealed: Unknown
reveal_type(g_) # revealed: Unknown
reveal_type(h_) # revealed: Unknown
reveal_type(i_) # revealed: Unknown
reveal_type(j_) # revealed: Unknown
```
## Invalid AST nodes
```py
def bar() -> None:
return None
def _(
a: 1, # error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
b: 2.3, # error: [invalid-type-form] "Float literals are not allowed in type expressions"
c: 4j, # error: [invalid-type-form] "Complex literals are not allowed in type expressions"
d: True, # error: [invalid-type-form] "Boolean literals are not allowed in this context in a type expression"
e: int | b"foo", # error: [invalid-type-form] "Bytes literals are not allowed in this context in a type expression"
f: 1 and 2, # error: [invalid-type-form] "Boolean operations are not allowed in type expressions"
g: 1 or 2, # error: [invalid-type-form] "Boolean operations are not allowed in type expressions"
h: (foo := 1), # error: [invalid-type-form] "Named expressions are not allowed in type expressions"
i: not 1, # error: [invalid-type-form] "Unary operations are not allowed in type expressions"
j: lambda: 1, # error: [invalid-type-form] "`lambda` expressions are not allowed in type expressions"
k: 1 if True else 2, # error: [invalid-type-form] "`if` expressions are not allowed in type expressions"
l: await 1, # error: [invalid-type-form] "`await` expressions are not allowed in type expressions"
m: (yield 1), # error: [invalid-type-form] "`yield` expressions are not allowed in type expressions"
n: (yield from [1]), # error: [invalid-type-form] "`yield from` expressions are not allowed in type expressions"
o: 1 < 2, # error: [invalid-type-form] "Comparison expressions are not allowed in type expressions"
p: bar(), # error: [invalid-type-form] "Function calls are not allowed in type expressions"
q: int | f"foo", # error: [invalid-type-form] "F-strings are not allowed in type expressions"
r: [1, 2, 3][1:2], # error: [invalid-type-form] "Slices are not allowed in type expressions"
):
reveal_type(a) # revealed: Unknown
reveal_type(b) # revealed: Unknown
reveal_type(c) # revealed: Unknown
reveal_type(d) # revealed: Unknown
reveal_type(e) # revealed: int | Unknown
reveal_type(f) # revealed: Unknown
reveal_type(g) # revealed: Unknown
reveal_type(h) # revealed: Unknown
reveal_type(i) # revealed: Unknown
reveal_type(j) # revealed: Unknown
reveal_type(k) # revealed: Unknown
reveal_type(p) # revealed: Unknown
reveal_type(q) # revealed: int | Unknown
reveal_type(r) # revealed: @Todo(unknown type subscript)
```
## Invalid Collection based AST nodes
```py
def _(
a: {1: 2}, # error: [invalid-type-form] "Dict literals are not allowed in type expressions"
b: {1, 2}, # error: [invalid-type-form] "Set literals are not allowed in type expressions"
c: {k: v for k, v in [(1, 2)]}, # error: [invalid-type-form] "Dict comprehensions are not allowed in type expressions"
d: [k for k in [1, 2]], # error: [invalid-type-form] "List comprehensions are not allowed in type expressions"
e: {k for k in [1, 2]}, # error: [invalid-type-form] "Set comprehensions are not allowed in type expressions"
f: (k for k in [1, 2]), # error: [invalid-type-form] "Generator expressions are not allowed in type expressions"
g: [int, str], # error: [invalid-type-form] "List literals are not allowed in this context in a type expression"
):
reveal_type(a) # revealed: Unknown
reveal_type(b) # revealed: Unknown
reveal_type(c) # revealed: Unknown
reveal_type(d) # revealed: Unknown
reveal_type(e) # revealed: Unknown
reveal_type(f) # revealed: Unknown
reveal_type(g) # revealed: Unknown
```

View File

@@ -1,6 +1,6 @@
# Literal
<https://typing.python.org/en/latest/spec/literal.html#literals>
<https://typing.readthedocs.io/en/latest/spec/literal.html#literals>
## Parameterization
@@ -36,7 +36,7 @@ def f():
reveal_type(a7) # revealed: None
reveal_type(a8) # revealed: Literal[1]
# TODO: This should be Color.RED
reveal_type(b1) # revealed: @Todo(Attribute access on enum classes)
reveal_type(b1) # revealed: Literal[0]
# error: [invalid-type-form]
invalid1: Literal[3 + 4]
@@ -68,7 +68,7 @@ def x(
a3: Literal[Literal["w"], Literal["r"], Literal[Literal["w+"]]],
a4: Literal[True] | Literal[1, 2] | Literal["foo"],
):
reveal_type(a1) # revealed: Literal[1, 2, 3, 5, "foo"] | None
reveal_type(a1) # revealed: Literal[1, 2, 3, "foo", 5] | None
reveal_type(a2) # revealed: Literal["w", "r"]
reveal_type(a3) # revealed: Literal["w", "r", "w+"]
reveal_type(a4) # revealed: Literal[True, 1, 2, "foo"]
@@ -106,9 +106,9 @@ def union_example(
Literal["B"],
Literal[True],
None,
],
]
):
reveal_type(x) # revealed: Unknown | Literal[-1, 0, 1, "A", "B", "foo", "bar", b"A", b"\x00", b"\x07", True] | None
reveal_type(x) # revealed: Unknown | Literal[-1, "A", b"A", b"\x00", b"\x07", 0, 1, "B", "foo", "bar", True] | None
```
## Detecting Literal outside typing and typing_extensions
@@ -116,9 +116,7 @@ def union_example(
Only the `Literal` defined in the `typing` and `typing_extensions` modules is detected as the
special `Literal` form.
`other.pyi`:
```pyi
```pyi path=other.pyi
from typing import _SpecialForm
Literal: _SpecialForm
@@ -127,17 +125,10 @@ Literal: _SpecialForm
```py
from other import Literal
# TODO: can we add a subdiagnostic here saying something like:
#
# `other.Literal` and `typing.Literal` have similar names, but are different symbols and don't have the same semantics
#
# ?
#
# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
a1: Literal[26]
def f():
reveal_type(a1) # revealed: @Todo(unknown type subscript)
reveal_type(a1) # revealed: @Todo(generics)
```
## Detecting typing_extensions.Literal
@@ -156,7 +147,7 @@ def f():
```py
from typing import Literal
# error: [invalid-type-form] "`typing.Literal` requires at least one argument when used in a type expression"
# error: [invalid-type-form] "`Literal` requires at least one argument when used in a type expression"
def _(x: Literal):
reveal_type(x) # revealed: Unknown
```

View File

@@ -72,11 +72,13 @@ reveal_type(baz) # revealed: Literal["bazfoo"]
qux = (foo, bar)
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
reveal_type(foo.join(qux)) # revealed: LiteralString
# TODO: Infer "LiteralString"
reveal_type(foo.join(qux)) # revealed: @Todo(Attribute access on `StringLiteral` types)
template: LiteralString = "{}, {}"
reveal_type(template) # revealed: Literal["{}, {}"]
reveal_type(template.format(foo, bar)) # revealed: LiteralString
# TODO: Infer `LiteralString`
reveal_type(template.format(foo, bar)) # revealed: @Todo(Attribute access on `StringLiteral` types)
```
### Assignability
@@ -145,4 +147,4 @@ def f():
reveal_type(x) # revealed: LiteralString
```
[1]: https://typing.python.org/en/latest/spec/literal.html#literalstring
[1]: https://typing.readthedocs.io/en/latest/spec/literal.html#literalstring

View File

@@ -1,24 +0,0 @@
# NewType
Currently, red-knot doesn't support `typing.NewType` in type annotations.
## Valid forms
```py
from typing_extensions import NewType
from types import GenericAlias
X = GenericAlias(type, ())
A = NewType("A", int)
# TODO: typeshed for `typing.GenericAlias` uses `type` for the first argument. `NewType` should be special-cased
# to be compatible with `type`
# error: [invalid-argument-type] "Argument to this function is incorrect: Expected `type`, found `NewType`"
B = GenericAlias(A, ())
def _(
a: A,
b: B,
):
reveal_type(a) # revealed: @Todo(Support for `typing.NewType` instances in type expressions)
reveal_type(b) # revealed: @Todo(Support for `typing.GenericAlias` instances in type expressions)
```

View File

@@ -45,13 +45,3 @@ def f():
# revealed: int | None
reveal_type(a)
```
## Invalid
```py
from typing import Optional
# error: [invalid-type-form] "`typing.Optional` requires exactly one argument when used in a type expression"
def f(x: Optional) -> None:
reveal_type(x) # revealed: Unknown
```

View File

@@ -1,10 +1,5 @@
# Starred expression annotations
```toml
[environment]
python-version = "3.11"
```
Type annotations for `*args` can be starred expressions themselves:
```py

View File

@@ -67,24 +67,23 @@ import typing
####################
### Built-ins
####################
class ListSubclass(typing.List): ...
# TODO: generic protocols
# revealed: tuple[Literal[ListSubclass], Literal[list], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
# TODO: should have `Generic`, should not have `Unknown`
# revealed: tuple[Literal[ListSubclass], Literal[list], Unknown, Literal[object]]
reveal_type(ListSubclass.__mro__)
class DictSubclass(typing.Dict): ...
# TODO: generic protocols
# revealed: tuple[Literal[DictSubclass], Literal[dict], Literal[MutableMapping], Literal[Mapping], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
# TODO: should have `Generic`, should not have `Unknown`
# revealed: tuple[Literal[DictSubclass], Literal[dict], Unknown, Literal[object]]
reveal_type(DictSubclass.__mro__)
class SetSubclass(typing.Set): ...
# TODO: generic protocols
# revealed: tuple[Literal[SetSubclass], Literal[set], Literal[MutableSet], Literal[AbstractSet], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
# TODO: should have `Generic`, should not have `Unknown`
# revealed: tuple[Literal[SetSubclass], Literal[set], Unknown, Literal[object]]
reveal_type(SetSubclass.__mro__)
class FrozenSetSubclass(typing.FrozenSet): ...
@@ -95,12 +94,11 @@ reveal_type(FrozenSetSubclass.__mro__)
####################
### `collections`
####################
class ChainMapSubclass(typing.ChainMap): ...
# TODO: generic protocols
# revealed: tuple[Literal[ChainMapSubclass], Literal[ChainMap], Literal[MutableMapping], Literal[Mapping], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
# TODO: Should be (ChainMapSubclass, ChainMap, MutableMapping, Mapping, Collection, Sized, Iterable, Container, Generic, object)
# revealed: tuple[Literal[ChainMapSubclass], Literal[ChainMap], Unknown, Literal[object]]
reveal_type(ChainMapSubclass.__mro__)
class CounterSubclass(typing.Counter): ...
@@ -117,8 +115,8 @@ reveal_type(DefaultDictSubclass.__mro__)
class DequeSubclass(typing.Deque): ...
# TODO: generic protocols
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
# TODO: Should be (DequeSubclass, deque, MutableSequence, Sequence, Reversible, Collection, Sized, Iterable, Container, Generic, object)
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Unknown, Literal[object]]
reveal_type(DequeSubclass.__mro__)
class OrderedDictSubclass(typing.OrderedDict): ...

View File

@@ -105,7 +105,7 @@ def f1(
from typing import Literal
def f(v: Literal["a", r"b", b"c", "d" "e", "\N{LATIN SMALL LETTER F}", "\x67", """h"""]):
reveal_type(v) # revealed: Literal["a", "b", "de", "f", "g", "h", b"c"]
reveal_type(v) # revealed: Literal["a", "b", b"c", "de", "f", "g", "h"]
```
## Class variables
@@ -116,8 +116,8 @@ MyType = int
class Aliases:
MyType = str
forward: "MyType" = "value"
not_forward: MyType = "value"
forward: "MyType"
not_forward: MyType
reveal_type(Aliases.forward) # revealed: str
reveal_type(Aliases.not_forward) # revealed: str

View File

@@ -2,16 +2,16 @@
## Annotation
`typing.Union` can be used to construct union types in the same way as the `|` operator.
`typing.Union` can be used to construct union types same as `|` operator.
```py
from typing import Union
a: Union[int, str]
a1: Union[int, bool]
a2: Union[int, Union[bytes, str]]
a2: Union[int, Union[float, str]]
a3: Union[int, None]
a4: Union[Union[bytes, str]]
a4: Union[Union[float, str]]
a5: Union[int]
a6: Union[()]
@@ -21,11 +21,11 @@ def f():
# Since bool is a subtype of int we simplify to int here. But we do allow assigning boolean values (see below).
# revealed: int
reveal_type(a1)
# revealed: int | bytes | str
# revealed: int | float | str
reveal_type(a2)
# revealed: int | None
reveal_type(a3)
# revealed: bytes | str
# revealed: float | str
reveal_type(a4)
# revealed: int
reveal_type(a5)
@@ -59,30 +59,3 @@ def f():
# revealed: int | str
reveal_type(a)
```
## Invalid
```py
from typing import Union
# error: [invalid-type-form] "`typing.Union` requires at least one argument when used in a type expression"
def f(x: Union) -> None:
reveal_type(x) # revealed: Unknown
```
## Implicit type aliases using new-style unions
We don't recognise these as type aliases yet, but we also don't emit false-positive diagnostics if
you use them in type expressions:
```toml
[environment]
python-version = "3.10"
```
```py
X = int | str
def f(y: X):
reveal_type(y) # revealed: @Todo(Support for `types.UnionType` instances in type expressions)
```

View File

@@ -18,7 +18,7 @@ def f(*args: Unpack[Ts]) -> tuple[Unpack[Ts]]:
# TODO: should understand the annotation
reveal_type(args) # revealed: tuple
reveal_type(Alias) # revealed: @Todo(Support for `typing.TypeAlias`)
reveal_type(Alias) # revealed: @Todo(Unsupported or invalid type in a type expression)
def g() -> TypeGuard[int]: ...
def h() -> TypeIs[int]: ...
@@ -33,31 +33,7 @@ def i(callback: Callable[Concatenate[int, P], R_co], *args: P.args, **kwargs: P.
class Foo:
def method(self, x: Self):
reveal_type(x) # revealed: @Todo(Support for `typing.Self`)
```
## Type expressions
One thing that is supported is error messages for using special forms in type expressions.
```py
from typing_extensions import Unpack, TypeGuard, TypeIs, Concatenate, ParamSpec, Generic
def _(
a: Unpack, # error: [invalid-type-form] "`typing.Unpack` requires exactly one argument when used in a type expression"
b: TypeGuard, # error: [invalid-type-form] "`typing.TypeGuard` requires exactly one argument when used in a type expression"
c: TypeIs, # error: [invalid-type-form] "`typing.TypeIs` requires exactly one argument when used in a type expression"
d: Concatenate, # error: [invalid-type-form] "`typing.Concatenate` requires at least two arguments when used in a type expression"
e: ParamSpec,
f: Generic, # error: [invalid-type-form] "`typing.Generic` is not allowed in type expressions"
) -> None:
reveal_type(a) # revealed: Unknown
reveal_type(b) # revealed: Unknown
reveal_type(c) # revealed: Unknown
reveal_type(d) # revealed: Unknown
def foo(a_: e) -> None:
reveal_type(a_) # revealed: @Todo(Support for `typing.ParamSpec`)
reveal_type(x) # revealed: @Todo(Unsupported or invalid type in a type expression)
```
## Inheritance
@@ -66,7 +42,7 @@ You can't inherit from most of these. `typing.Callable` is an exception.
```py
from typing import Callable
from typing_extensions import Self, Unpack, TypeGuard, TypeIs, Concatenate, Generic
from typing_extensions import Self, Unpack, TypeGuard, TypeIs, Concatenate
class A(Self): ... # error: [invalid-base]
class B(Unpack): ... # error: [invalid-base]
@@ -74,18 +50,12 @@ class C(TypeGuard): ... # error: [invalid-base]
class D(TypeIs): ... # error: [invalid-base]
class E(Concatenate): ... # error: [invalid-base]
class F(Callable): ...
class G(Generic): ... # error: [invalid-base] "Cannot inherit from plain `Generic`"
reveal_type(F.__mro__) # revealed: tuple[Literal[F], @Todo(Support for Callable as a base class), Literal[object]]
```
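That `typing.Callable` is the one inheritable special form here can also be observed at runtime; a minimal sketch (the `__call__` body is a placeholder):

```py
from typing import Callable

class F(Callable):
    # collections.abc.Callable is abstract, so a concrete subclass
    # must provide __call__.
    def __call__(self) -> None: ...

print(F.__mro__)  # (F, collections.abc.Callable, object)
```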
## Subscriptability
```toml
[environment]
python-version = "3.12"
```
Some of these are not subscriptable:
```py


@@ -1,6 +1,6 @@
# Unsupported type qualifiers
## Not yet fully supported
## Not yet supported
Several type qualifiers are unsupported by red-knot currently. However, we also don't emit
false-positive errors if you use one in an annotation:
@@ -19,33 +19,6 @@ class Bar(TypedDict):
z: ReadOnly[bytes]
```
## Type expressions
One thing that is supported is error messages for using type qualifiers in type expressions.
```py
from typing_extensions import Final, ClassVar, Required, NotRequired, ReadOnly
def _(
a: (
Final # error: [invalid-type-form] "Type qualifier `typing.Final` is not allowed in type expressions (only in annotation expressions)"
| int
),
b: (
ClassVar # error: [invalid-type-form] "Type qualifier `typing.ClassVar` is not allowed in type expressions (only in annotation expressions)"
| int
),
c: Required, # error: [invalid-type-form] "Type qualifier `typing.Required` is not allowed in type expressions (only in annotation expressions, and only with exactly one argument)"
d: NotRequired, # error: [invalid-type-form] "Type qualifier `typing.NotRequired` is not allowed in type expressions (only in annotation expressions, and only with exactly one argument)"
e: ReadOnly, # error: [invalid-type-form] "Type qualifier `typing.ReadOnly` is not allowed in type expressions (only in annotation expressions, and only with exactly one argument)"
) -> None:
reveal_type(a) # revealed: Unknown | int
reveal_type(b) # revealed: Unknown | int
reveal_type(c) # revealed: Unknown
reveal_type(d) # revealed: Unknown
reveal_type(e) # revealed: Unknown
```
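For contrast, a sketch of positions where these qualifiers *are* accepted, i.e. annotation expressions (`typing_extensions` is assumed here for the TypedDict-only qualifiers):

```py
from typing import ClassVar, Final
from typing_extensions import NotRequired, Required, TypedDict

MAX_RETRIES: Final = 3  # Final qualifies a module-level annotation

class Config:
    default_timeout: ClassVar[int] = 30  # ClassVar qualifies a class attribute

class Movie(TypedDict):
    title: Required[str]   # Required/NotRequired belong inside a TypedDict
    year: NotRequired[int]
```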
## Inheritance
You can't inherit from a type qualifier.


@@ -25,14 +25,7 @@ x = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]` is not
## Tuple annotations are understood
```toml
[environment]
python-version = "3.12"
```
`module.py`:
```py
```py path=module.py
from typing_extensions import Unpack
a: tuple[()] = ()
@@ -47,9 +40,7 @@ i: tuple[str | int, str | int] = (42, 42)
j: tuple[str | int] = (42,)
```
`script.py`:
```py
```py path=script.py
from module import a, b, c, d, e, f, g, h, i, j
reveal_type(a) # revealed: tuple[()]
@@ -57,11 +48,13 @@ reveal_type(b) # revealed: tuple[int]
reveal_type(c) # revealed: tuple[str, int]
reveal_type(d) # revealed: tuple[tuple[str, str], tuple[int, int]]
# TODO: homogeneous tuples, PEP-646 tuples, generics
# TODO: homogeneous tuples, PEP-646 tuples
reveal_type(e) # revealed: @Todo(full tuple[...] support)
reveal_type(f) # revealed: @Todo(full tuple[...] support)
reveal_type(g) # revealed: @Todo(full tuple[...] support)
reveal_type(h) # revealed: tuple[@Todo(specialized non-generic class), @Todo(specialized non-generic class)]
# TODO: support more kinds of type expressions in annotations
reveal_type(h) # revealed: @Todo(full tuple[...] support)
reveal_type(i) # revealed: tuple[str | int, str | int]
reveal_type(j) # revealed: tuple[str | int]
@@ -121,7 +114,7 @@ reveal_type(x) # revealed: Foo
## Annotations in stub files are deferred
```pyi
```pyi path=main.pyi
x: Foo
class Foo: ...
@@ -132,7 +125,7 @@ reveal_type(x) # revealed: Foo
## Annotated assignments in stub files are inferred correctly
```pyi
```pyi path=main.pyi
x: int = 1
reveal_type(x) # revealed: Literal[1]
```
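The tuple annotations in this fixture also hold at runtime; a minimal sketch of the fixed, nested, and homogeneous forms (names mirror the fixture):

```py
a: tuple[()] = ()
c: tuple[str, int] = ("x", 1)
d: tuple[tuple[str, str], tuple[int, int]] = (("a", "b"), (1, 2))
e: tuple[int, ...] = (1, 2, 3)  # homogeneous: any length, all ints
```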


@@ -9,11 +9,7 @@ reveal_type(x) # revealed: Literal[2]
x = 1.0
x /= 2
reveal_type(x) # revealed: int | float
x = (1, 2)
x += (3, 4)
reveal_type(x) # revealed: tuple[Literal[1], Literal[2], Literal[3], Literal[4]]
reveal_type(x) # revealed: float
```
## Dunder methods
@@ -28,12 +24,12 @@ x -= 1
reveal_type(x) # revealed: str
class C:
def __iadd__(self, other: str) -> int:
return 1
def __iadd__(self, other: str) -> float:
return 1.0
x = C()
x += "Hello"
reveal_type(x) # revealed: int
reveal_type(x) # revealed: float
```
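The semantics being tested: `x += y` rebinds `x` to whatever `__iadd__` returns, so the inferred type is that method's return type. Mirroring one version of the fixture as a runnable sketch:

```py
class C:
    def __iadd__(self, other: str) -> float:
        return 1.0

x = C()
x += "Hello"  # dispatches to C.__iadd__; x is rebound to its return value
print(x)      # 1.0
```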
## Unsupported types
@@ -44,7 +40,7 @@ class C:
return 42
x = C()
# error: [unsupported-operator] "Operator `-=` is unsupported between objects of type `C` and `Literal[1]`"
# error: [invalid-argument-type]
x -= 1
reveal_type(x) # revealed: int
@@ -79,7 +75,8 @@ def _(flag: bool):
f = Foo()
# error: [unsupported-operator] "Operator `+=` is unsupported between objects of type `Foo` and `Literal["Hello, world!"]`"
# TODO: We should emit an `unsupported-operator` error here, possibly with the information
# that `Foo.__iadd__` may be unbound as additional context.
f += "Hello, world!"
reveal_type(f) # revealed: int | Unknown
@@ -133,10 +130,10 @@ def _(flag: bool):
if flag:
f = Foo()
else:
f = 42
f = 42.0
f += 12
reveal_type(f) # revealed: str | Literal[54]
reveal_type(f) # revealed: str | float
```
## Partially bound target union with `__add__`
@@ -165,18 +162,3 @@ def f(flag: bool, flag2: bool):
reveal_type(f) # revealed: int | str | float
```
## Implicit dunder calls on class objects
```py
class Meta(type):
def __iadd__(cls, other: int) -> str:
return ""
class C(metaclass=Meta): ...
cls = C
cls += 1
reveal_type(cls) # revealed: str
```
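The metaclass fixture above runs as-is, since augmented assignment looks `__iadd__` up on `type(cls)`, which is the metaclass here; making the dispatch visible (the `print` is added for demonstration):

```py
class Meta(type):
    def __iadd__(cls, other: int) -> str:
        return ""

class C(metaclass=Meta): ...

cls = C
cls += 1          # found on type(cls) == Meta, not on C itself
print(repr(cls))  # '' — cls is rebound to the str returned by __iadd__
```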

File diff suppressed because it is too large.

Some files were not shown because too many files have changed in this diff.