Compare commits

1 commit
0.9.5...perf-node-

| Author | SHA1 | Date |
|---|---|---|
|  | 82f33db5e6 |  |
@@ -8,7 +8,3 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]

[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']
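The `--cfg getrandom_backend="wasm_js"` rustflag selects getrandom's JavaScript-based entropy backend on `wasm32-unknown-unknown`. Per the getrandom documentation linked above, the flag works together with a matching crate feature; a minimal sketch of the dependency side (assuming getrandom 0.3, which is where this backend/feature pairing lives):

    [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
    getrandom = { version = "0.3", features = ["wasm_js"] }  # feature must match the rustflag cfg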
@@ -6,10 +6,3 @@ failure-output = "immediate-final"
fail-fast = false

status-level = "skip"

# Mark tests that take longer than 1s as slow.
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "1s", terminate-after = 60 }

# Show slow jobs in the final summary
final-status-level = "slow"
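These settings are read by cargo-nextest. A sketch of how a profile carrying them is selected at run time (assuming they sit under a `ci` profile, which the `NEXTEST_PROFILE: "ci"` entries later in this diff suggest):

    # Select the profile explicitly...
    cargo nextest run --profile ci
    # ...or through the environment, as the CI workflow below does:
    NEXTEST_PROFILE=ci cargo nextest run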
@@ -20,7 +20,7 @@
"extensions": [
"ms-python.python",
"rust-lang.rust-analyzer",
"fill-labs.dependi",
"serayuzgur.crates",
"tamasfe.even-better-toml",
"Swellaby.vscode-rust-test-adapter",
"charliermarsh.ruff"
@@ -17,7 +17,4 @@ indent_size = 4
trim_trailing_whitespace = false

[*.md]
max_line_length = 100

[*.toml]
indent_size = 4
max_line_length = 100
.gitattributes (vendored, 4 changes)
@@ -14,7 +14,5 @@ crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text

crates/ruff_python_parser/resources/inline linguist-generated=true

ruff.schema.json -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_ast/src/generated.rs -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_formatter/src/generated.rs -diff linguist-generated=true text=auto eol=lf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown
.github/CODEOWNERS (vendored, 10 changes)
@@ -9,15 +9,13 @@
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
/crates/ruff_annotate_snippets/ @BurntSushi

# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood

# Script for fuzzing the parser/red-knot etc.
/python/py-fuzzer/ @AlexWaygood
# Script for fuzzing the parser
/scripts/fuzz-parser/ @AlexWaygood

# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/red_knot* @carljm @MichaReiser @AlexWaygood
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood
.github/ISSUE_TEMPLATE.md (vendored, new file, 12 changes)
@@ -0,0 +1,12 @@
<!--
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.

If you're filing a bug report, please consider including the following information:

* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
* The current Ruff version (`ruff --version`).
-->
.github/ISSUE_TEMPLATE/config.yml (vendored, 2 changes)
@@ -1,2 +0,0 @@
# This file cannot use the extension `.yaml`.
blank_issues_enabled: false
.github/ISSUE_TEMPLATE/issue.yaml (vendored, 22 changes)
@@ -1,22 +0,0 @@
name: New issue
description: A generic issue

body:
- type: markdown
attributes:
value: |
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.

If you're filing a bug report, please consider including the following information:

* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
* The current Ruff version (`ruff --version`).

- type: textarea
attributes:
label: Description
description: A description of the issue
.github/actionlint.yaml (vendored, 10 changes)
@@ -1,10 +0,0 @@
# Configuration for the actionlint tool, which we run via pre-commit
# to verify the correctness of the syntax in our GitHub Actions workflows.

self-hosted-runner:
# Various runners we use that aren't recognized out-of-the-box by actionlint:
labels:
- depot-ubuntu-latest-8
- depot-ubuntu-22.04-16
- github-windows-2025-x86_64-8
- github-windows-2025-x86_64-16
.github/renovate.json5 (vendored, 10 changes)
@@ -45,7 +45,7 @@
groupName: "Artifact GitHub Actions dependencies",
matchManagers: ["github-actions"],
matchDatasources: ["gitea-tags", "github-tags"],
matchPackageNames: ["actions/.*-artifact"],
matchPackagePatterns: ["actions/.*-artifact"],
description: "Weekly update of artifact-related GitHub Actions dependencies",
},
{
@@ -61,7 +61,7 @@
{
// Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
// See: https://github.com/astral-sh/uv/issues/3642
matchPackageNames: ["zip"],
matchPackagePatterns: ["zip"],
matchManagers: ["cargo"],
enabled: false,
},
@@ -70,7 +70,7 @@
// with `mkdocs-material-insider`.
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
matchManagers: ["pip_requirements"],
matchPackageNames: ["mkdocs-material"],
matchPackagePatterns: ["mkdocs-material"],
enabled: false,
},
{
@@ -87,13 +87,13 @@
{
groupName: "Monaco",
matchManagers: ["npm"],
matchPackageNames: ["monaco"],
matchPackagePatterns: ["monaco"],
description: "Weekly update of the Monaco editor",
},
{
groupName: "strum",
matchManagers: ["cargo"],
matchPackageNames: ["strum"],
matchPackagePatterns: ["strum"],
description: "Weekly update of strum dependencies",
},
{
.github/workflows/build-binaries.yml (vendored, 32 changes)
@@ -23,8 +23,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

env:
PACKAGE_NAME: ruff
MODULE_NAME: ruff
@@ -42,7 +40,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -55,9 +52,9 @@ jobs:
args: --out dist
- name: "Test sdist"
run: |
pip install dist/"${PACKAGE_NAME}"-*.tar.gz --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload sdist"
uses: actions/upload-artifact@v4
with:
@@ -66,12 +63,11 @@ jobs:

macos-x86_64:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
runs-on: macos-12
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -113,7 +109,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -127,7 +122,7 @@ jobs:
args: --release --locked --out dist
- name: "Test wheel - aarch64"
run: |
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
@@ -169,7 +164,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -188,9 +182,9 @@ jobs:
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
run: |
python -m pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
@@ -222,7 +216,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -238,9 +231,9 @@ jobs:
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
@@ -297,7 +290,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -362,7 +354,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -428,7 +419,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
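The test steps above appear in two variants: one that reads `${PACKAGE_NAME}`/`${MODULE_NAME}` from the job's `env:` block with shell quoting, and one that interpolates `${{ env.* }}` directly into the script text. The quoted-env form is the usual hardening against template injection, since the value reaches the script through the environment rather than being spliced into the shell source. A minimal sketch of the pattern (step name is illustrative):

    - name: "Test sdist"
      env:
        PACKAGE_NAME: ruff   # expanded once, outside the script body
      run: |
        pip install dist/"${PACKAGE_NAME}"-*.tar.gz --force-reinstall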
.github/workflows/build-docker.yml (vendored, 265 changes)
@@ -17,26 +17,16 @@ on:
paths:
- .github/workflows/build-docker.yml

env:
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff

jobs:
docker-build:
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
docker-publish:
name: Build Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false

- uses: docker/setup-buildx-action@v3

@@ -46,254 +36,33 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff

- name: Check tag consistency
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
env:
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
run: |
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${TAG}" != "${version}" ]; then
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${TAG}" >&2
echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2
echo "${version}" >&2
exit 1
else
echo "Releasing ${version}"
fi

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
tags: |
type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}

- name: Normalize Platform Pair (replace / with -)
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_TUPLE=${platform//\//-}" >> "$GITHUB_ENV"

# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: .
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}

- name: Export digests
env:
digest: ${{ steps.build.outputs.digest }}
run: |
mkdir -p /tmp/digests
touch "/tmp/digests/${digest#sha256:}"

- name: Upload digests
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_TUPLE }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1

docker-publish:
name: Publish Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-build
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- uses: docker/setup-buildx-action@v3

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
# shellcheck disable=SC2046
docker buildx imagetools create \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)

docker-publish-extra:
name: Publish additional Docker image based on ${{ matrix.image-mapping }}
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
strategy:
fail-fast: false
matrix:
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
image-mapping:
- alpine:3.20,alpine3.20,alpine
- debian:bookworm-slim,bookworm-slim,debian-slim
- buildpack-deps:bookworm,bookworm,debian
steps:
- uses: docker/setup-buildx-action@v3

- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Generate Dynamic Dockerfile Tags
shell: bash
env:
TAG_VALUE: ${{ fromJson(inputs.plan).announcement_tag }}
run: |
set -euo pipefail

# Extract the image and tags from the matrix variable
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"

# Generate Dockerfile content
cat <<EOF > Dockerfile
FROM ${BASE_IMAGE}
COPY --from=${RUFF_BASE_IMG}:latest /ruff /usr/local/bin/ruff
ENTRYPOINT []
CMD ["/usr/local/bin/ruff"]
EOF

# Initialize a variable to store all tag docker metadata patterns
TAG_PATTERNS=""

# Loop through all base tags and append its docker metadata pattern to the list
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
IFS=','; for TAG in ${BASE_TAGS}; do
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${TAG_VALUE}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${TAG_VALUE}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
done

# Remove the trailing newline from the pattern list
TAG_PATTERNS="${TAG_PATTERNS%\\n}"

# Export image cache name
echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> "$GITHUB_ENV"

# Export tag patterns using the multiline env var syntax
{
echo "TAG_PATTERNS<<EOF"
echo -e "${TAG_PATTERNS}"
echo EOF
} >> "$GITHUB_ENV"

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
flavor: |
latest=false
tags: |
${{ env.TAG_PATTERNS }}

- name: Build and push
- name: "Build and push Docker image"
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64
# We do not really need to cache here as the Dockerfile is tiny
#cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
#cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
push: true
tags: ${{ steps.meta.outputs.tags }}
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}
annotations: ${{ steps.meta.outputs.annotations }}

# This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
# show the ruff base image first since GitHub always shows the last updated image digests
# This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
docker-republish:
name: Annotate Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish-extra
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- uses: docker/setup-buildx-action@v3

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done

# shellcheck disable=SC2046
docker buildx imagetools create \
"${annotations[@]}" \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)
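The `jq`/`printf` expansion used in the "Create manifest list and push" steps is compact enough to be easy to misread. A standalone sketch with made-up tags and digests (the real values come from `docker/metadata-action` and the uploaded digest files, whose names the `*` glob expands to):

    DOCKER_METADATA_OUTPUT_JSON='{"tags":["ghcr.io/astral-sh/ruff:0.9.5","ghcr.io/astral-sh/ruff:0.9"]}'
    jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
    # prints: -t ghcr.io/astral-sh/ruff:0.9.5 -t ghcr.io/astral-sh/ruff:0.9
    RUFF_BASE_IMG=ghcr.io/astral-sh/ruff
    printf "${RUFF_BASE_IMG}@sha256:%s " aaa bbb
    # prints: ghcr.io/astral-sh/ruff@sha256:aaa ghcr.io/astral-sh/ruff@sha256:bbb
    # Combined, the command becomes:
    #   docker buildx imagetools create -t <tag1> -t <tag2> <img>@sha256:<d1> <img>@sha256:<d2>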
.github/workflows/ci.yaml (vendored, 264 changes)
@@ -1,7 +1,5 @@
name: CI

permissions: {}

on:
push:
branches: [main]
@@ -18,7 +16,7 @@ env:
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.12"
PYTHON_VERSION: "3.11"

jobs:
determine_changes:
@@ -34,15 +32,12 @@ jobs:
# Flag that is raised when any code is changed
# This is a superset of the linter and formatter
code: ${{ steps.changed.outputs.code_any_changed }}
# Flag that is raised when any code that affects the fuzzer is changed
fuzz: ${{ steps.changed.outputs.fuzz_any_changed }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false

- uses: tj-actions/changed-files@v45
- uses: tj-actions/changed-files@v44
id: changed
with:
files_yaml: |
@@ -54,14 +49,13 @@ jobs:
- crates/ruff_text_size/**
- crates/ruff_python_ast/**
- crates/ruff_python_parser/**
- python/py-fuzzer/**
- scripts/fuzz-parser/**
- .github/workflows/ci.yaml

linter:
- Cargo.toml
- Cargo.lock
- crates/**
- "!crates/red_knot*/**"
- "!crates/ruff_python_formatter/**"
- "!crates/ruff_formatter/**"
- "!crates/ruff_dev/**"
@@ -85,15 +79,9 @@ jobs:
- python/**
- .github/workflows/ci.yaml

fuzz:
- fuzz/Cargo.toml
- fuzz/Cargo.lock
- fuzz/fuzz_targets/**

code:
- "**/*"
- "!**/*.md"
- "crates/red_knot_python_semantic/resources/mdtest/**/*.md"
- "!docs/**"
- "!assets/**"

@@ -103,8 +91,6 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- run: cargo fmt --all --check
@@ -117,13 +103,11 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: |
rustup component add clippy
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
- name: "Clippy (wasm)"
@@ -131,15 +115,12 @@ jobs:

cargo-test-linux:
name: "cargo test (linux)"
runs-on: depot-ubuntu-22.04-16
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -152,6 +133,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -166,7 +148,7 @@ jobs:
# sync, not just public items. Eventually we should do this for all
# crates; for now add crates here as they are warning-clean to prevent
# regression.
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
@@ -175,56 +157,24 @@
name: ruff
path: target/debug/ruff

cargo-test-linux-release:
name: "cargo test (linux, release)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest

cargo-test-windows:
name: "cargo test (windows)"
runs-on: github-windows-2025-x86_64-16
runs-on: windows-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
run: |
@@ -235,23 +185,19 @@ jobs:
name: "cargo test (wasm)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- uses: Swatinem/rust-cache@v2
- name: "Test ruff_wasm"
run: |
cd crates/ruff_wasm
@@ -264,40 +210,34 @@ jobs:
cargo-build-release:
name: "cargo build (release)"
runs-on: macos-latest
if: ${{ github.ref == 'refs/heads/main' }}
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: "Build"
run: cargo build --release --locked

cargo-build-msrv:
name: "cargo build (msrv)"
runs-on: depot-ubuntu-latest-8
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: SebRollen/toml-action@v1.2.0
id: msrv
with:
file: "Cargo.toml"
field: "workspace.package.rust-version"
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
env:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
run: rustup default ${{ steps.msrv.outputs.value }}
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
@@ -308,28 +248,26 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
MSRV: ${{ steps.msrv.outputs.value }}
run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest
run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest

cargo-fuzz-build:
name: "cargo fuzz build"
cargo-fuzz:
name: "cargo fuzz"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@main
with:
@@ -340,20 +278,24 @@ jobs:
- run: cargo fuzz build -s none

fuzz-parser:
name: "fuzz parser"
name: "Fuzz the parser"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
timeout-minutes: 20
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
persist-credentials: false
- uses: astral-sh/setup-uv@v5
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- uses: actions/download-artifact@v4
name: Download Ruff binary to test
id: download-cached-binary
@@ -361,42 +303,24 @@
name: ruff
path: ruff-to-test
- name: Fuzz
env:
DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x "${DOWNLOAD_PATH}/ruff"
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff

(
uvx \
--python="${PYTHON_VERSION}" \
--from=./python/py-fuzzer \
fuzz \
--test-executable="${DOWNLOAD_PATH}/ruff" \
--bin=ruff \
0-500
)
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
scripts:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup component add rustfmt
# Run all code generation scripts, and verify that the current output is
# already checked into git.
- run: python crates/ruff_python_ast/generate.py
- run: python crates/ruff_python_formatter/generate.py
- run: test -z "$(git status --porcelain)"
# Verify that adding a plugin or rule produces clean code.
- run: ./scripts/add_rule.py --name DoTheThing --prefix F --code 999 --linter pyflakes
- uses: Swatinem/rust-cache@v2
- run: ./scripts/add_rule.py --name DoTheThing --prefix PL --code C0999 --linter pylint
- run: cargo check
- run: cargo fmt --all --check
- run: |
@@ -407,18 +331,16 @@ jobs:

ecosystem:
name: "ecosystem"
runs-on: depot-ubuntu-latest-8
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -430,7 +352,7 @@ jobs:
name: ruff
path: target/debug

- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v6
name: Download baseline Ruff binary
with:
name: ruff
@@ -444,72 +366,64 @@ jobs:

- name: Run `ruff check` stable ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

# Set pipefail to avoid hiding errors with tee
set -eo pipefail

ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable

cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY"
cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
echo "### Linter (stable)" > ecosystem-result
cat ecosystem-result-check-stable >> ecosystem-result
echo "" >> ecosystem-result

- name: Run `ruff check` preview ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

# Set pipefail to avoid hiding errors with tee
set -eo pipefail

ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview

cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY"
cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
echo "### Linter (preview)" >> ecosystem-result
cat ecosystem-result-check-preview >> ecosystem-result
echo "" >> ecosystem-result

- name: Run `ruff format` stable ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

# Set pipefail to avoid hiding errors with tee
set -eo pipefail

ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable

cat ecosystem-result-format-stable > "$GITHUB_STEP_SUMMARY"
cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
echo "### Formatter (stable)" >> ecosystem-result
cat ecosystem-result-format-stable >> ecosystem-result
echo "" >> ecosystem-result

- name: Run `ruff format` preview ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff

# Set pipefail to avoid hiding errors with tee
set -eo pipefail

ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview

cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY"
cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
echo "### Formatter (preview)" >> ecosystem-result
cat ecosystem-result-format-preview >> ecosystem-result
echo "" >> ecosystem-result
@@ -537,8 +451,6 @@ jobs:
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@main
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -547,11 +459,8 @@ jobs:
name: "python package"
runs-on: ubuntu-latest
timeout-minutes: 20
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -565,7 +474,7 @@ jobs:
args: --out dist
- name: "Test wheel"
run: |
pip install --force-reinstall --find-links dist "${PACKAGE_NAME}"
pip install --force-reinstall --find-links dist ${{ env.PACKAGE_NAME }}
ruff --help
python -m ruff --help
- name: "Remove wheels from cache"
@@ -577,14 +486,12 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install pre-commit"
run: pip install pre-commit
- name: "Cache pre-commit"
@@ -594,14 +501,13 @@ jobs:
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: "Run pre-commit"
run: |
echo '```console' > "$GITHUB_STEP_SUMMARY"
echo '```console' > $GITHUB_STEP_SUMMARY
# Enable color output for pre-commit and remove it for the summary
# Use --hook-stage=manual to enable slower pre-commit hooks that are skipped by default
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always --hook-stage=manual | \
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> "$GITHUB_STEP_SUMMARY") >&1
exit_code="${PIPESTATUS[0]}"
echo '```' >> "$GITHUB_STEP_SUMMARY"
exit "$exit_code"
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always | \
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> $GITHUB_STEP_SUMMARY) >&1
exit_code=${PIPESTATUS[0]}
echo '```' >> $GITHUB_STEP_SUMMARY
exit $exit_code
docs:
name: "mkdocs"
@@ -611,12 +517,7 @@ jobs:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- uses: Swatinem/rust-cache@v2
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
@@ -624,14 +525,13 @@ jobs:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@v5
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: uv pip install -r docs/requirements.txt --system
run: pip install -r docs/requirements.txt
- name: "Update README File"
run: python scripts/transform_readme.py --target mkdocs
- name: "Generate docs"
@@ -649,21 +549,20 @@ jobs:
name: "formatter instabilities and black similarity"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Run checks"
- name: "Cache rust"
uses: Swatinem/rust-cache@v2
- name: "Formatter progress"
run: scripts/formatter_ecosystem_checks.sh
- name: "Github step summary"
run: cat target/formatter-ecosystem/stats.txt > "$GITHUB_STEP_SUMMARY"
run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
- name: "Remove checkouts from cache"
run: rm -r target/formatter-ecosystem
run: rm -r target/progress_projects

check-ruff-lsp:
name: "test ruff-lsp"
@@ -672,7 +571,7 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@v2
env:
@@ -681,7 +580,6 @@ jobs:
- uses: actions/checkout@v4
name: "Download ruff-lsp source"
with:
persist-credentials: false
repository: "astral-sh/ruff-lsp"

- uses: actions/setup-python@v5
@@ -700,29 +598,23 @@ jobs:
just install

- name: Run ruff-lsp tests
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Setup development binary
pip uninstall --yes ruff
chmod +x "${DOWNLOAD_PATH}/ruff"
export PATH="${DOWNLOAD_PATH}:${PATH}"
chmod +x ${{ steps.ruff-target.outputs.download-path }}/ruff
export PATH=${{ steps.ruff-target.outputs.download-path }}:$PATH
ruff version

just test

benchmarks:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4
with:
persist-credentials: false

- uses: Swatinem/rust-cache@v2

- name: "Install Rust toolchain"
run: rustup show
@@ -732,6 +624,8 @@ jobs:
with:
tool: cargo-codspeed

- uses: Swatinem/rust-cache@v2

- name: "Build benchmarks"
run: cargo codspeed build --features codspeed -p ruff_benchmark
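The pre-commit step above is a good illustration of the `"$GITHUB_STEP_SUMMARY"` / `PIPESTATUS` idiom that recurs throughout this diff: hook output is teed into the job summary with ANSI escapes stripped, and the pipeline's real exit status is recovered afterwards. A minimal standalone sketch (using `false` as a stand-in failing command):

    # tee would otherwise mask the exit code of the left-hand command
    false | tee /dev/null
    exit_code="${PIPESTATUS[0]}"   # exit status of `false`, not of `tee`
    echo "first command exited with ${exit_code}"
    # In a workflow, quoting "$GITHUB_STEP_SUMMARY" also guards against paths with spaces.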
.github/workflows/daily_fuzz.yaml (vendored, 22 changes)
@@ -32,9 +32,13 @@ jobs:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
persist-credentials: false
- uses: astral-sh/setup-uv@v5
python-version: "3.12"
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -45,17 +49,7 @@ jobs:
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
run: cargo build --locked
- name: Fuzz
run: |
# shellcheck disable=SC2046
(
uvx \
--python=3.12 \
--from=./python/py-fuzzer \
fuzz \
--test-executable=target/debug/ruff \
--bin=ruff \
$(shuf -i 0-9999999999999999999 -n 1000)
)
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff

create-issue-on-failure:
name: Create an issue if the daily fuzz surfaced any bugs
@@ -73,6 +67,6 @@ jobs:
owner: "astral-sh",
repo: "ruff",
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
labels: ["bug", "parser", "fuzzer"],
})
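The `$(shuf ...)` substitution feeds the fuzzer a fresh batch of random seeds on every run. A smaller worked example of the same idiom (toy range and count; output differs per run):

    shuf -i 0-99 -n 5
    # prints five distinct integers drawn from 0..99, one per line, e.g.:
    # 42
    # 7
    # 91
    # 3
    # 58
    # Word-splitting of the unquoted $(...) then passes them as separate seed arguments.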
.github/workflows/daily_property_tests.yaml (vendored, 71 changes)
@@ -1,71 +0,0 @@
name: Daily property test run

on:
workflow_dispatch:
schedule:
- cron: "0 12 * * *"
pull_request:
paths:
- ".github/workflows/daily_property_tests.yaml"

permissions:
contents: read

concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
FORCE_COLOR: 1

jobs:
property_tests:
name: Property tests
runs-on: ubuntu-latest
timeout-minutes: 20
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: Build Red Knot
# A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
# mode (1.5 min vs 14 min), so the overall time is shorter with a release build.
run: cargo build --locked --release --package red_knot_python_semantic --tests
- name: Run property tests
shell: bash
run: |
export QUICKCHECK_TESTS=100000
for _ in {1..5}; do
cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
done

create-issue-on-failure:
name: Create an issue if the daily property test run surfaced any bugs
runs-on: ubuntu-latest
needs: property_tests
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.property_tests.result == 'failure' }}
permissions:
issues: write
steps:
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
await github.rest.issues.create({
owner: "astral-sh",
repo: "ruff",
title: `Daily property test run failed on ${new Date().toDateString()}`,
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
labels: ["bug", "red-knot", "testing"],
})
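The build-mode tradeoff in the comment above works out as simple arithmetic, and `QUICKCHECK_TESTS` is the environment variable the quickcheck crate reads to raise its iteration count. A sketch of the equivalent local invocation:

    # Using the timings from the workflow comment:
    #   release: ~2 min build + ~1.5 min tests ≈ 3.5 min total
    #   debug:   ~1 min build + ~14 min tests  ≈ 15 min total
    QUICKCHECK_TESTS=100000 cargo test --locked --release \
      --package red_knot_python_semantic -- --ignored types::property_tests::stable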
.github/workflows/pr-comment.yaml (vendored, 17 changes)
@@ -10,13 +10,14 @@ on:
description: The ecosystem workflow that triggers the workflow run
required: true

permissions:
pull-requests: write

jobs:
comment:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v6
name: Download pull request number
with:
name: pr-number
@@ -29,10 +30,10 @@ jobs:
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi

- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v6
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
@@ -65,9 +66,9 @@ jobs:
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt

echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
echo 'comment<<EOF' >> $GITHUB_OUTPUT
cat comment.txt >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT

- name: Find existing comment
uses: peter-evans/find-comment@v3
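The `comment<<EOF` lines use GitHub Actions' heredoc-style syntax for multi-line step outputs: everything between the delimiter lines becomes the value of the `comment` output. A minimal sketch (output name and content are illustrative):

    {
      echo 'comment<<EOF'
      echo 'line one'
      echo 'line two'
      echo 'EOF'
    } >> "$GITHUB_OUTPUT"
    # Later steps can then read ${{ steps.<step-id>.outputs.comment }}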
53 .github/workflows/publish-docs.yml (vendored)

@@ -26,38 +26,38 @@ jobs:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.ref }}
          persist-credentials: true

      - uses: actions/setup-python@v5
        with:
          python-version: 3.12

      - name: "Set docs version"
        env:
          version: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
        run: |
          # if version is missing, use 'latest'
          if [ -z "$version" ]; then
            echo "Using 'latest' as version"
            version="latest"
          version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
          # if version is missing, exit with error
          if [[ -z "$version" ]]; then
            echo "Can't build docs without a version."
            exit 1
          fi

          # Use version as display name for now
          display_name="$version"

          echo "version=$version" >> "$GITHUB_ENV"
          echo "display_name=$display_name" >> "$GITHUB_ENV"
          echo "version=$version" >> $GITHUB_ENV
          echo "display_name=$display_name" >> $GITHUB_ENV

      - name: "Set branch name"
        run: |
          version="${{ env.version }}"
          display_name="${{ env.display_name }}"
          timestamp="$(date +%s)"

          # create branch_display_name from display_name by replacing all
          # characters disallowed in git branch names with hyphens
          branch_display_name="$(echo "${display_name}" | tr -c '[:alnum:]._' '-' | tr -s '-')"
          branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"

          echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
          echo "timestamp=$timestamp" >> "$GITHUB_ENV"
          echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
          echo "timestamp=$timestamp" >> $GITHUB_ENV

      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
@@ -92,7 +92,9 @@ jobs:
        run: mkdocs build --strict -f mkdocs.public.yml

      - name: "Clone docs repo"
        run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
        run: |
          version="${{ env.version }}"
          git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs

      - name: "Copy docs"
        run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
@@ -100,10 +102,12 @@ jobs:
      - name: "Commit docs"
        working-directory: astral-docs
        run: |
          branch_name="${{ env.branch_name }}"

          git config user.name "astral-docs-bot"
          git config user.email "176161322+astral-docs-bot@users.noreply.github.com"

          git checkout -b "${branch_name}"
          git checkout -b $branch_name
          git add site/ruff
          git commit -m "Update ruff documentation for $version"

@@ -112,8 +116,12 @@ jobs:
        env:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          version="${{ env.version }}"
          display_name="${{ env.display_name }}"
          branch_name="${{ env.branch_name }}"

          # set the PR title
          pull_request_title="Update ruff documentation for ${display_name}"
          pull_request_title="Update ruff documentation for $display_name"

          # Delete any existing pull requests that are open for this version
          # by checking against pull_request_title because the new PR will
@@ -122,15 +130,13 @@ jobs:
          xargs -I {} gh pr close {}

          # push the branch to GitHub
          git push origin "${branch_name}"
          git push origin $branch_name

          # create the PR
          gh pr create \
            --base=main \
            --head="${branch_name}" \
            --title="${pull_request_title}" \
            --body="Automated documentation update for ${display_name}" \
            --label="documentation"
          gh pr create --base main --head $branch_name \
            --title "$pull_request_title" \
            --body "Automated documentation update for $display_name" \
            --label "documentation"

      - name: "Merge Pull Request"
        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
@@ -138,7 +144,8 @@ jobs:
        env:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          branch_name="${{ env.branch_name }}"
          # auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
          # give the PR a few seconds to be created before trying to auto-merge it
          sleep 10
          gh pr merge --squash "${branch_name}"
          gh pr merge --squash $branch_name
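The branch-name sanitization in the "Set branch name" step is a compact shell idiom: `tr -c '[:alnum:]._' '-'` replaces every character outside the allowed set with a hyphen, and `tr -s '-'` squeezes runs of hyphens into one. A minimal sketch (the input tag is hypothetical):

```shell
# Everything that is not alphanumeric, '.', or '_' becomes '-'; repeats collapse.
printf 'v0.9.5 (beta)' | tr -c '[:alnum:]._' '-' | tr -s '-'
# prints: v0.9.5-beta-
```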
6 .github/workflows/publish-playground.yml (vendored)

@@ -25,13 +25,11 @@ jobs:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@v4
        with:
          node-version: 20
          node-version: 18
          cache: "npm"
          cache-dependency-path: playground/package-lock.json
      - uses: jetli/wasm-pack-action@v0.4.0
@@ -49,7 +47,7 @@ jobs:
        working-directory: playground
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@v3.13.1
        uses: cloudflare/wrangler-action@v3.7.0
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
8 .github/workflows/publish-pypi.yml (vendored)

@@ -21,12 +21,14 @@ jobs:
      # For PyPI's trusted publishing.
      id-token: write
    steps:
      - name: "Install uv"
        uses: astral-sh/setup-uv@v5
      - uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          path: wheels
          merge-multiple: true
      - name: Publish to PyPi
        run: uv publish -v wheels/*
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          skip-existing: true
          packages-dir: wheels
          verbose: true
4 .github/workflows/publish-wasm.yml (vendored)

@@ -30,8 +30,6 @@ jobs:
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: jetli/wasm-pack-action@v0.4.0
@@ -45,7 +43,7 @@ jobs:
      - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
      - uses: actions/setup-node@v4
        with:
          node-version: 20
          node-version: 18
          registry-url: "https://registry.npmjs.org"
      - name: "Publish (dry-run)"
        if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
36 .github/workflows/release.yml (vendored)

@@ -1,12 +1,10 @@
# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with dist (archives, installers, hashes)
# * builds artifacts with cargo-dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
@@ -24,10 +22,10 @@ permissions:
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't dist-able).
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (dist-able) packages in the workspace with that version (this mode is
# (cargo-dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
@@ -48,7 +46,7 @@ on:
        type: string

jobs:
  # Run 'dist plan' (or host) to determine what tasks we need to do
  # Run 'cargo dist plan' (or host) to determine what tasks we need to do
  plan:
    runs-on: "ubuntu-20.04"
    outputs:
@@ -62,16 +60,16 @@ jobs:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Install dist
      - name: Install cargo-dist
        # we specify bash to get pipefail; it guards against the `curl` command
        # failing. otherwise `sh` won't catch that `curl` returned non-0
        shell: bash
        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
      - name: Cache dist
      - name: Cache cargo-dist
        uses: actions/upload-artifact@v4
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/dist
          path: ~/.cargo/bin/cargo-dist
      # sure would be cool if github gave us proper conditionals...
      # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
      # functionality based on whether this is a pull_request, and whether it's from a fork.
@@ -79,8 +77,8 @@ jobs:
      # but also really annoying to build CI around when it needs secrets to work right.)
      - id: plan
        run: |
          dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
          echo "dist ran successfully"
          cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
          echo "cargo dist ran successfully"
          cat plan-dist-manifest.json
          echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
      - name: "Upload dist-manifest.json"
@@ -124,12 +122,12 @@ jobs:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Install cached dist
      - name: Install cached cargo-dist
        uses: actions/download-artifact@v4
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/
      - run: chmod +x ~/.cargo/bin/dist
      - run: chmod +x ~/.cargo/bin/cargo-dist
      # Get all the local artifacts for the global tasks to use (for e.g. checksums)
      - name: Fetch local artifacts
        uses: actions/download-artifact@v4
@@ -140,8 +138,8 @@ jobs:
      - id: cargo-dist
        shell: bash
        run: |
          dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
          echo "dist ran successfully"
          cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
          echo "cargo dist ran successfully"

          # Parse out what we just built and upload it to scratch storage
          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
@@ -174,12 +172,12 @@ jobs:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Install cached dist
      - name: Install cached cargo-dist
        uses: actions/download-artifact@v4
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/
      - run: chmod +x ~/.cargo/bin/dist
      - run: chmod +x ~/.cargo/bin/cargo-dist
      # Fetch artifacts from scratch-storage
      - name: Fetch artifacts
        uses: actions/download-artifact@v4
@@ -191,7 +189,7 @@ jobs:
      - id: host
        shell: bash
        run: |
          dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
          cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
          echo "artifacts uploaded and released successfully"
          cat dist-manifest.json
          echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
21 .github/workflows/sync_typeshed.yaml (vendored)

@@ -25,13 +25,11 @@ jobs:
        name: Checkout Ruff
        with:
          path: ruff
          persist-credentials: true
      - uses: actions/checkout@v4
        name: Checkout typeshed
        with:
          repository: python/typeshed
          path: typeshed
          persist-credentials: false
      - name: Setup git
        run: |
          git config --global user.name typeshedbot
@@ -39,13 +37,13 @@ jobs:
      - name: Sync typeshed
        id: sync
        run: |
          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
          mkdir ruff/crates/red_knot_vendored/vendor/typeshed
          cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
          cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
          cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
          mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
          cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
          cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
          cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
      - name: Commit the changes
        id: commit
        if: ${{ steps.sync.outcome == 'success' }}
@@ -59,7 +57,7 @@ jobs:
        run: |
          cd ruff
          git push --force origin typeshedbot/sync-typeshed
          gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
          gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
          gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"

  create-issue-on-failure:
@@ -78,6 +76,5 @@ jobs:
            owner: "astral-sh",
            repo: "ruff",
            title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
            body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
            labels: ["bug", "red-knot"],
            body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
          })
19 .github/zizmor.yml (vendored)

@@ -1,19 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
  dangerous-triggers:
    ignore:
      - pr-comment.yaml
  cache-poisoning:
    ignore:
      - build-docker.yml
      - publish-playground.yml
  excessive-permissions:
    # it's hard to test what the impact of removing these ignores would be
    # without actually running the release workflow...
    ignore:
      - build-docker.yml
      - publish-playground.yml
      - publish-docs.yml
4 .gitignore (vendored)

@@ -29,10 +29,6 @@ tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg

# insta
*.rs.pending-snap


###
# Rust.gitignore
###
@@ -21,11 +21,3 @@ MD014: false
MD024:
  # Allow when nested under different parents e.g. CHANGELOG.md
  siblings_only: true

# MD046/code-block-style
#
# Ignore this because it conflicts with the code block style used in content
# tabs of mkdocs-material which is to add a blank line after the content title.
#
# Ref: https://github.com/astral-sh/ruff/pull/15011#issuecomment-2544790854
MD046: false
@@ -1,11 +1,9 @@
fail_fast: false
fail_fast: true

exclude: |
  (?x)^(
    .github/workflows/release.yml|
    crates/red_knot_vendored/vendor/.*|
    crates/red_knot_project/resources/.*|
    crates/ruff_benchmark/resources/.*|
    crates/red_knot_python_semantic/vendor/.*|
    crates/red_knot_workspace/resources/.*|
    crates/ruff_linter/resources/.*|
    crates/ruff_linter/src/rules/.*/snapshots/.*|
    crates/ruff_notebook/resources/.*|
@@ -19,17 +17,17 @@ exclude: |

repos:
  - repo: https://github.com/abravalheri/validate-pyproject
    rev: v0.23
    rev: v0.19
    hooks:
      - id: validate-pyproject

  - repo: https://github.com/executablebooks/mdformat
    rev: 0.7.22
    rev: 0.7.17
    hooks:
      - id: mdformat
        additional_dependencies:
          - mdformat-mkdocs==4.0.0
          - mdformat-footnote==0.1.1
          - mdformat-mkdocs
          - mdformat-admon
        exclude: |
          (?x)^(
            docs/formatter/black\.md
@@ -37,7 +35,7 @@ repos:
          )$

  - repo: https://github.com/igorshubovych/markdownlint-cli
    rev: v0.44.0
    rev: v0.41.0
    hooks:
      - id: markdownlint-fix
        exclude: |
@@ -46,21 +44,8 @@ repos:
          | docs/\w+\.md
          )$

  - repo: https://github.com/adamchainz/blacken-docs
    rev: 1.19.1
    hooks:
      - id: blacken-docs
        args: ["--pyi", "--line-length", "130"]
        files: '^crates/.*/resources/mdtest/.*\.md'
        exclude: |
          (?x)^(
            .*?invalid(_.+)*_syntax\.md
          )$
        additional_dependencies:
          - black==25.1.0

  - repo: https://github.com/crate-ci/typos
    rev: v1.29.5
    rev: v1.23.6
    hooks:
      - id: typos

@@ -74,7 +59,7 @@ repos:
        pass_filenames: false # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.9.4
    rev: v0.6.1
    hooks:
      - id: ruff-format
      - id: ruff
@@ -83,43 +68,11 @@ repos:
        require_serial: true

  # Prettier
  - repo: https://github.com/rbubley/mirrors-prettier
    rev: v3.4.2
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v3.1.0
    hooks:
      - id: prettier
        types: [yaml]

  # zizmor detects security vulnerabilities in GitHub Actions workflows.
  # Additional configuration for the tool is found in `.github/zizmor.yml`
  - repo: https://github.com/woodruffw/zizmor-pre-commit
    rev: v1.3.0
    hooks:
      - id: zizmor

  - repo: https://github.com/python-jsonschema/check-jsonschema
    rev: 0.31.1
    hooks:
      - id: check-github-workflows

  # `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
  # Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
  - repo: https://github.com/rhysd/actionlint
    rev: v1.7.7
    hooks:
      - id: actionlint
        stages:
          # This hook is disabled by default, since it's quite slow.
          # To run all hooks *including* this hook, use `uvx pre-commit run -a --hook-stage=manual`.
          # To run *just* this hook, use `uvx pre-commit run -a actionlint --hook-stage=manual`.
          - manual
        args:
          - "-ignore=SC2129" # ignorable stylistic lint from shellcheck
          - "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
        additional_dependencies:
          # actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
          # and checks these with shellcheck. This is arguably its most useful feature,
          # but the integration only works if shellcheck is installed
          - "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"

ci:
  skip: [cargo-fmt, dev-generate-all]
@@ -1,47 +1,5 @@
# Breaking Changes

## 0.9.0

Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.

## 0.8.0

- **Default to Python 3.9**

  Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))

- **Changed location of `pydoclint` diagnostics**

  [`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first-line of the problematic docstring. Previously, this was not the case.

  If you've opted into these preview rules but have them suppressed using
  [`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
  some places, this change may mean that you need to move the `noqa` suppression
  comments. Most users should be unaffected by this change.

- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**

  Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).

  This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.

- **Changes to the line width calculation**

  Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).

## 0.7.0

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
  ([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
  This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
  See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
  `TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
  the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
  [`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
  instead.

## 0.6.0

- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
@@ -196,7 +154,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.

See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.

### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))

Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
1015 CHANGELOG.md (diff suppressed because it is too large)
@@ -29,14 +29,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```

You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
run Python utility commands.
And you'll need pre-commit to run some validation checks:

```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
```

You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:

```shell
uv tool install pre-commit
pre-commit install
```

@@ -64,7 +66,7 @@ and that it passes both the lint and test validation checks:

```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```

These checks will run on GitHub Actions when you open your pull request, but running them locally
@@ -139,7 +141,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:

1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).

1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
   `#[derive(ViolationMetadata)]` to see examples.
   `#[violation]` to see examples.

1. In that file, define a function that adds the violation to the diagnostic list as appropriate
   (e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
@@ -265,20 +267,26 @@ To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

1. Install MkDocs and Material for MkDocs with:

   ```shell
   pip install -r docs/requirements.txt
   ```

1. Generate the MkDocs site with:

   ```shell
   uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
   python scripts/generate_mkdocs.py
   ```

1. Run the development server with:

   ```shell
   # For contributors.
   uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
   mkdocs serve -f mkdocs.public.yml

   # For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
   uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
   mkdocs serve -f mkdocs.insiders.yml
   ```

The documentation should then be available locally at
@@ -360,8 +368,9 @@ GitHub Actions will run your changes against a number of real-world projects from
report on any linter or formatter differences. You can also run those checks locally via:

```shell
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
```

See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
@@ -388,18 +397,12 @@ which makes it a good target for benchmarking.
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```

Install `hyperfine`:

```shell
cargo install hyperfine
```

To benchmark the release build:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"

Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
  Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
@@ -418,7 +421,7 @@ To benchmark against the ecosystem's existing tools:

```shell
hyperfine --ignore-failure --warmup 5 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "pyflakes crates/ruff_linter/resources/test/cpython" \
  "autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
  "pycodestyle crates/ruff_linter/resources/test/cpython" \
@@ -464,10 +467,10 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```

You can run `uv venv --project ./scripts/benchmarks`, activate the venv and then run `uv sync --project ./scripts/benchmarks` to create a working environment for the
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
above. All reported benchmarks were computed using the versions specified by
`./scripts/benchmarks/pyproject.toml` on Python 3.11.

@@ -521,8 +524,6 @@ You can run the benchmarks with

cargo benchmark
```

`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`

#### Benchmark-driven Development

Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
@@ -561,7 +562,7 @@ cargo install critcmp

#### Tips

- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
  to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
@@ -863,7 +864,7 @@ each configuration file.

The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
(inclusive of the root itself). The module path can be thought of as "the path you would use to
import the module" (e.g., `import foo.bar.baz`).
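The `critcmp` tool installed in the benchmarking section compares Criterion's saved baselines, which is the usual way to measure a change like this branch against `main`. A minimal sketch, assuming the hypothetical baseline names `before` and `after`:

```shell
# Save a baseline from the current state of the code.
cargo bench -p ruff_benchmark --bench linter -- --save-baseline=before

# ...apply your change, then save a second baseline...
cargo bench -p ruff_benchmark --bench linter -- --save-baseline=after

# Compare the two saved baselines.
critcmp before after
```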
2287 Cargo.lock (generated; diff suppressed because it is too large)
153 Cargo.toml

@@ -4,7 +4,7 @@ resolver = "2"

[workspace.package]
edition = "2021"
rust-version = "1.80"
rust-version = "1.76"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -13,12 +13,10 @@ license = "MIT"

[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_annotate_snippets = { path = "crates/ruff_annotate_snippets" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db", default-features = false }
ruff_db = { path = "crates/ruff_db" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
@@ -35,19 +33,15 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }
ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
red_knot_vendored = { path = "crates/red_knot_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }

red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_test = { path = "crates/red_knot_test" }
red_knot_project = { path = "crates/red_knot_project", default-features = false }
red_knot_workspace = { path = "crates/red_knot_workspace" }

aho-corasick = { version = "1.1.3" }
anstream = { version = "0.6.18" }
anstyle = { version = "1.0.10" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
@@ -57,9 +51,9 @@ camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "4.0.0" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
colored = { version = "3.0.0" }
colored = { version = "2.1.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
@@ -67,33 +61,24 @@ compact_str = "0.8.0"
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.8.0" }
fern = { version = "0.7.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = { version = "0.15.0", default-features = false, features = [
    "raw-entry",
    "equivalent",
    "inline-more",
] }
hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta-cmd = { version = "0.6.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.14.0" }
itertools = { version = "0.13.0" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
libc = { version = "0.2.153" }
@@ -101,29 +86,29 @@ libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
    "proposed",
  "proposed",
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "8.0.0" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.7.1" }
pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.13.4" }
quick-junit = { version = "0.5.0" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
quote = { version = "1.0.23" }
rand = { version = "0.9.0" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "88a1d7774d78f048fbd77d40abca9ebd729fd1f0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -131,47 +116,38 @@ serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
    "macros",
  "macros",
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
    "diff",
    "term-svg",
    "cmd",
    "examples",
] }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
    "env-filter",
    "fmt",
] }
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
tracing-tree = { version = "0.4.0" }
tryfn = { version = "0.2.1" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.2.0" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
    "v4",
    "fast-rng",
    "macro-diagnostics",
    "js",
  "v4",
  "fast-rng",
  "macro-diagnostics",
  "js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
@@ -179,17 +155,10 @@ wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }

[workspace.metadata.cargo-shear]
ignored = ["getrandom"]


[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = [
    "cfg(fuzzing)",
    "cfg(codspeed)",
] }
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -205,9 +174,8 @@ missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
@@ -220,13 +188,6 @@ get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
# nursery rules
redundant_clone = "warn"
debug_assert_with_mut_call = "warn"
unused_peekable = "warn"

# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"

[profile.release]
# Note that we set these explicitly, and these values
@@ -266,12 +227,12 @@ debug = 1
[profile.dist]
inherits = "release"

# Config for 'dist'
# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.25.2-prerelease.3"
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.18.0"
# CI backends to support
ci = "github"
ci = ["github"]
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
@@ -280,33 +241,33 @@ windows-archive = ".zip"
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
    "aarch64-apple-darwin",
    "aarch64-pc-windows-msvc",
    "aarch64-unknown-linux-gnu",
    "aarch64-unknown-linux-musl",
    "arm-unknown-linux-musleabihf",
    "armv7-unknown-linux-gnueabihf",
    "armv7-unknown-linux-musleabihf",
    "i686-pc-windows-msvc",
    "i686-unknown-linux-gnu",
    "i686-unknown-linux-musl",
    "powerpc64-unknown-linux-gnu",
    "powerpc64le-unknown-linux-gnu",
    "s390x-unknown-linux-gnu",
    "x86_64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "x86_64-unknown-linux-gnu",
    "x86_64-unknown-linux-musl",
  "aarch64-apple-darwin",
  "aarch64-pc-windows-msvc",
  "aarch64-unknown-linux-gnu",
  "aarch64-unknown-linux-musl",
  "arm-unknown-linux-musleabihf",
  "armv7-unknown-linux-gnueabihf",
  "armv7-unknown-linux-musleabihf",
  "i686-pc-windows-msvc",
  "i686-unknown-linux-gnu",
  "i686-unknown-linux-musl",
  "powerpc64-unknown-linux-gnu",
  "powerpc64le-unknown-linux-gnu",
  "s390x-unknown-linux-gnu",
  "x86_64-apple-darwin",
  "x86_64-pc-windows-msvc",
  "x86_64-unknown-linux-gnu",
  "x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether dist should create a Github Release or use an existing draft
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
# Which actions to run on pull requests
# Publish jobs to run in CI
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase dist should use to create the GitHub release
# The stage during which the GitHub Release should be created
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
@@ -314,15 +275,9 @@ build-local-artifacts = false
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
post-announce-jobs = [
    "./notify-dependents",
    "./publish-docs",
    "./publish-playground",
]
# Announcement jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]
@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM ubuntu AS build
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME
46 README.md

@@ -110,28 +110,15 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)

## Getting Started<a id="getting-started"></a>
## Getting Started

For more, see the [documentation](https://docs.astral.sh/ruff/).

### Installation

Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI.

Invoke Ruff directly with [`uvx`](https://docs.astral.sh/uv/):
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:

```shell
uvx ruff check # Lint all files in the current directory.
uvx ruff format # Format all files in the current directory.
```

Or install Ruff with `uv` (recommended), `pip`, or `pipx`:

```shell
# With uv.
uv tool install ruff@latest # Install Ruff globally.
uv add --dev ruff # Or add Ruff to your project.

# With pip.
pip install ruff

@@ -149,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.9.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.9.5/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -183,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.9.5
  rev: v0.6.1
  hooks:
    # Run the linter.
    - id: ruff
@@ -195,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/astral-sh/ruff-action):
[`ruff-action`](https://github.com/chartboost/ruff-action):

```yaml
name: Ruff
@@ -205,10 +192,10 @@ jobs:
  runs-on: ubuntu-latest
  steps:
    - uses: actions/checkout@v4
    - uses: astral-sh/ruff-action@v3
    - uses: chartboost/ruff-action@v1
```

### Configuration<a id="configuration"></a>
### Configuration

Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -251,8 +238,8 @@ exclude = [
line-length = 88
indent-width = 4

# Assume Python 3.9
target-version = "py39"
# Assume Python 3.8
target-version = "py38"

[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
@@ -304,7 +291,7 @@ features that may change prior to stabilization.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.

## Rules<a id="rules"></a>
## Rules

<!-- Begin section: Rules -->

@@ -380,21 +367,21 @@ quality tools, including:

For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).

## Contributing<a id="contributing"></a>
## Contributing

Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).

You can also join us on [**Discord**](https://discord.com/invite/astral-sh).

## Support<a id="support"></a>
## Support

Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).

## Acknowledgements<a id="acknowledgements"></a>
## Acknowledgements

Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -418,7 +405,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a

Ruff is released under the MIT license.

## Who's Using Ruff?<a id="whos-using-ruff"></a>
## Who's Using Ruff?

Ruff is used by a number of major open-source projects and companies, including:

@@ -430,7 +417,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
@@ -538,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```

## License<a id="license"></a>
## License

This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
18 _typos.toml

@@ -1,10 +1,6 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = [
    "crates/red_knot_vendored/vendor/**/*",
    "**/resources/**/*",
    "**/snapshots/**/*",
]
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
@@ -12,18 +8,14 @@ hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
pn = "pn" # `import panel as pd` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"

[default]
extend-ignore-re = [
    # Line ignore with trailing "spellchecker:disable-line"
    "(?Rm)^.*#\\s*spellchecker:disable-line$",
    "LICENSEs",
  # Line ignore with trailing "spellchecker:disable-line"
  "(?Rm)^.*#\\s*spellchecker:disable-line$",
  "LICENSEs",
]

[default.extend-identifiers]
"FrIeNdLy" = "FrIeNdLy"
36 clippy.toml

@@ -1,25 +1,21 @@
doc-valid-idents = [
    "..",
    "CodeQL",
    "FastAPI",
    "IPython",
    "LangChain",
    "LibCST",
    "McCabe",
    "NumPy",
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
    "SNMPv1",
    "SNMPv2",
    "SNMPv3",
    "PyFlakes"
  "..",
  "CodeQL",
  "FastAPI",
  "IPython",
  "LangChain",
  "LibCST",
  "McCabe",
  "NumPy",
  "SCREAMING_SNAKE_CASE",
  "SQLAlchemy",
  "StackOverflow",
  "PyCharm",
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
  # Interned is read-only. The wrapped `Rc` never gets updated.
  "ruff_formatter::format_element::Interned",
  # The expression is read-only.
  "ruff_python_ast::hashable::HashableExpr",
]
@@ -13,8 +13,9 @@ license.workspace = true

[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_project = { workspace = true, features = ["zstd"] }
red_knot_workspace = { workspace = true }
red_knot_server = { workspace = true }

ruff_db = { workspace = true, features = ["os", "cache"] }

anyhow = { workspace = true }
@@ -32,15 +33,8 @@ tracing-flame = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_trivia = { workspace = true }

insta = { workspace = true, features = ["filters"] }
insta-cmd = { workspace = true }
filetime = { workspace = true }
regex = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }

[lints]
workspace = true
@@ -1,104 +0,0 @@
use std::{
    fs,
    path::{Path, PathBuf},
    process::Command,
};

fn main() {
    // The workspace root directory is not available without walking up the tree
    // https://github.com/rust-lang/cargo/issues/3946
    let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
        .join("..")
        .join("..");

    commit_info(&workspace_root);

    #[allow(clippy::disallowed_methods)]
    let target = std::env::var("TARGET").unwrap();
    println!("cargo::rustc-env=RUST_HOST_TARGET={target}");
}

fn commit_info(workspace_root: &Path) {
    // If not in a git repository, do not attempt to retrieve commit information
    let git_dir = workspace_root.join(".git");
    if !git_dir.exists() {
        return;
    }

    if let Some(git_head_path) = git_head(&git_dir) {
        println!("cargo:rerun-if-changed={}", git_head_path.display());

        let git_head_contents = fs::read_to_string(git_head_path);
        if let Ok(git_head_contents) = git_head_contents {
            // The contents are either a commit or a reference in the following formats
            // - "<commit>" when the head is detached
            // - "ref <ref>" when working on a branch
            // If a commit, checking if the HEAD file has changed is sufficient
            // If a ref, we need to add the head file for that ref to rebuild on commit
            let mut git_ref_parts = git_head_contents.split_whitespace();
            git_ref_parts.next();
            if let Some(git_ref) = git_ref_parts.next() {
                let git_ref_path = git_dir.join(git_ref);
                println!("cargo:rerun-if-changed={}", git_ref_path.display());
            }
        }
    }

    let output = match Command::new("git")
        .arg("log")
        .arg("-1")
        .arg("--date=short")
        .arg("--abbrev=9")
        .arg("--format=%H %h %cd %(describe)")
        .output()
    {
        Ok(output) if output.status.success() => output,
        _ => return,
    };
    let stdout = String::from_utf8(output.stdout).unwrap();
    let mut parts = stdout.split_whitespace();
    let mut next = || parts.next().unwrap();
    let _commit_hash = next();
    println!("cargo::rustc-env=RED_KNOT_COMMIT_SHORT_HASH={}", next());
    println!("cargo::rustc-env=RED_KNOT_COMMIT_DATE={}", next());

    // Describe can fail for some commits
    // https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
    if let Some(describe) = parts.next() {
        let mut describe_parts = describe.split('-');
        let _last_tag = describe_parts.next().unwrap();

        // If this is the tagged commit, this component will be missing
        println!(
            "cargo::rustc-env=RED_KNOT_LAST_TAG_DISTANCE={}",
            describe_parts.next().unwrap_or("0")
        );
    }
}

fn git_head(git_dir: &Path) -> Option<PathBuf> {
    // The typical case is a standard git repository.
    let git_head_path = git_dir.join("HEAD");
    if git_head_path.exists() {
        return Some(git_head_path);
    }
    if !git_dir.is_file() {
        return None;
    }
    // If `.git/HEAD` doesn't exist and `.git` is actually a file,
    // then let's try to attempt to read it as a worktree. If it's
    // a worktree, then its contents will look like this, e.g.:
    //
    //     gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
    //
    // And the HEAD file we want to watch will be at:
    //
    //     /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
    let contents = fs::read_to_string(git_dir).ok()?;
    let (label, worktree_path) = contents.split_once(':')?;
    if label != "gitdir" {
        return None;
    }
    let worktree_path = worktree_path.trim();
    Some(PathBuf::from(worktree_path))
}

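For context, the `commit_info` function above parses one whitespace-separated line of `git log` output; a minimal sketch of what that invocation produces and how it maps onto the emitted env vars (all values below are invented for illustration):

```bash
# Hypothetical output line: <full hash> <short hash> <date> [<describe>]
#   f1e2d3c4b5a6978869504132231405968778695a f1e2d3c4b 2025-01-28 0.9.5-12-gf1e2d3c4b
# which `commit_info` turns into:
#   RED_KNOT_COMMIT_SHORT_HASH=f1e2d3c4b
#   RED_KNOT_COMMIT_DATE=2025-01-28
#   RED_KNOT_LAST_TAG_DISTANCE=12
git log -1 --date=short --abbrev=9 --format='%H %h %cd %(describe)'
```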
@@ -13,17 +13,12 @@ The CLI supports different verbosity levels.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of it is wasted; it's really slow, and we dump e.g. the entire resolution graph. Only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.

## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
## `RED_KNOT_LOG`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confused logging output where messages from different threads are intertwined.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
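A minimal sketch combining the two variables (the crate name in the filter directive is illustrative; any target accepted by `tracing` filter directives works):

```bash
# Show trace-level messages from one crate only, and check files on a
# single thread so the log lines stay in order.
RED_KNOT_LOG=red_knot_python_semantic=trace RAYON_NUM_THREADS=1 red_knot -vv
```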
### Examples

#### Show all debug messages
@@ -103,7 +98,7 @@ called **once**.

## Profiling

Red Knot generates a folded stack trace to the current directory named `tracing.folded` when setting the environment variable `RED_KNOT_LOG_PROFILE` to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```
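The folded trace can then be rendered as a flamegraph SVG; a sketch assuming the `inferno` tools are installed (the output file name is arbitrary):

```bash
cargo install inferno
inferno-flamegraph < tracing.folded > tracing-flamegraph.svg
```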
@@ -1,201 +0,0 @@
use crate::logging::Verbosity;
use crate::python_version::PythonVersion;
use clap::{ArgAction, ArgMatches, Error, Parser};
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
use red_knot_python_semantic::lint;
use ruff_db::system::SystemPathBuf;

#[derive(Debug, Parser)]
#[command(
    author,
    name = "red-knot",
    about = "An extremely fast Python type checker."
)]
#[command(version)]
pub(crate) struct Args {
    #[command(subcommand)]
    pub(crate) command: Command,
}

#[derive(Debug, clap::Subcommand)]
pub(crate) enum Command {
    /// Check a project for type errors.
    Check(CheckCommand),

    /// Start the language server
    Server,

    /// Display Red Knot's version
    Version,
}

#[derive(Debug, Parser)]
pub(crate) struct CheckCommand {
    /// Run the command within the given project directory.
    ///
    /// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
    /// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
    ///
    /// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
    #[arg(long, value_name = "PROJECT")]
    pub(crate) project: Option<SystemPathBuf>,

    /// Path to the virtual environment the project uses.
    ///
    /// If provided, red-knot will use the `site-packages` directory of this virtual environment
    /// to resolve type information for the project's third-party dependencies.
    #[arg(long, value_name = "PATH")]
    pub(crate) venv_path: Option<SystemPathBuf>,

    /// Custom directory to use for stdlib typeshed stubs.
    #[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
    pub(crate) typeshed: Option<SystemPathBuf>,

    /// Additional path to use as a module-resolution source (can be passed multiple times).
    #[arg(long, value_name = "PATH")]
    pub(crate) extra_search_path: Option<Vec<SystemPathBuf>>,

    /// Python version to assume when resolving types.
    #[arg(long, value_name = "VERSION", alias = "target-version")]
    pub(crate) python_version: Option<PythonVersion>,

    #[clap(flatten)]
    pub(crate) verbosity: Verbosity,

    #[clap(flatten)]
    pub(crate) rules: RulesArg,

    /// Use exit code 1 if there are any warning-level diagnostics.
    #[arg(long, conflicts_with = "exit_zero")]
    pub(crate) error_on_warning: bool,

    /// Always use exit code 0, even when there are error-level diagnostics.
    #[arg(long)]
    pub(crate) exit_zero: bool,

    /// Run in watch mode by re-running whenever files change.
    #[arg(long, short = 'W')]
    pub(crate) watch: bool,
}

impl CheckCommand {
    pub(crate) fn into_options(self) -> Options {
        let rules = if self.rules.is_empty() {
            None
        } else {
            Some(
                self.rules
                    .into_iter()
                    .map(|(rule, level)| (RangedValue::cli(rule), RangedValue::cli(level)))
                    .collect(),
            )
        };

        Options {
            environment: Some(EnvironmentOptions {
                python_version: self
                    .python_version
                    .map(|version| RangedValue::cli(version.into())),
                venv_path: self.venv_path.map(RelativePathBuf::cli),
                typeshed: self.typeshed.map(RelativePathBuf::cli),
                extra_paths: self.extra_search_path.map(|extra_search_paths| {
                    extra_search_paths
                        .into_iter()
                        .map(RelativePathBuf::cli)
                        .collect()
                }),
                ..EnvironmentOptions::default()
            }),
            rules,
            ..Default::default()
        }
    }
}

/// A list of rules to enable or disable with a given severity.
///
/// This type is used to parse the `--error`, `--warn`, and `--ignore` arguments
/// while preserving the order in which they were specified (arguments last override previous severities).
#[derive(Debug)]
pub(crate) struct RulesArg(Vec<(String, lint::Level)>);

impl RulesArg {
    fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    fn into_iter(self) -> impl Iterator<Item = (String, lint::Level)> {
        self.0.into_iter()
    }
}

impl clap::FromArgMatches for RulesArg {
    fn from_arg_matches(matches: &ArgMatches) -> Result<Self, Error> {
        let mut rules = Vec::new();

        for (level, arg_id) in [
            (lint::Level::Ignore, "ignore"),
            (lint::Level::Warn, "warn"),
            (lint::Level::Error, "error"),
        ] {
            let indices = matches.indices_of(arg_id).into_iter().flatten();
            let levels = matches.get_many::<String>(arg_id).into_iter().flatten();
            rules.extend(
                indices
                    .zip(levels)
                    .map(|(index, rule)| (index, rule, level)),
            );
        }

        // Sort by their index so that values specified later override earlier ones.
        rules.sort_by_key(|(index, _, _)| *index);

        Ok(Self(
            rules
                .into_iter()
                .map(|(_, rule, level)| (rule.to_owned(), level))
                .collect(),
        ))
    }

    fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), Error> {
        self.0 = Self::from_arg_matches(matches)?.0;
        Ok(())
    }
}

impl clap::Args for RulesArg {
    fn augment_args(cmd: clap::Command) -> clap::Command {
        const HELP_HEADING: &str = "Enabling / disabling rules";

        cmd.arg(
            clap::Arg::new("error")
                .long("error")
                .action(ArgAction::Append)
                .help("Treat the given rule as having severity 'error'. Can be specified multiple times.")
                .value_name("RULE")
                .help_heading(HELP_HEADING),
        )
        .arg(
            clap::Arg::new("warn")
                .long("warn")
                .action(ArgAction::Append)
                .help("Treat the given rule as having severity 'warn'. Can be specified multiple times.")
                .value_name("RULE")
                .help_heading(HELP_HEADING),
        )
        .arg(
            clap::Arg::new("ignore")
                .long("ignore")
                .action(ArgAction::Append)
                .help("Disables the rule. Can be specified multiple times.")
                .value_name("RULE")
                .help_heading(HELP_HEADING),
        )
    }

    fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
        Self::augment_args(cmd)
    }
}

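Because `RulesArg::from_arg_matches` above sorts entries by their argument index, the last flag naming a rule wins; a hypothetical invocation (the rule name is taken from the tests later in this diff):

```bash
# `--error` raises the rule first, but the later `--ignore` overrides it,
# so `division-by-zero` ends up disabled.
red_knot check --error division-by-zero --ignore division-by-zero
```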
@@ -1,29 +1,96 @@
use std::io::{self, BufWriter, Write};
use std::process::{ExitCode, Termination};

use anyhow::Result;
use std::sync::Mutex;

use crate::args::{Args, CheckCommand, Command};
use crate::logging::setup_tracing;
use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use red_knot_project::metadata::options::Options;
use red_knot_project::watch;
use red_knot_project::watch::ProjectWatcher;
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_server::run_server;
use ruff_db::diagnostic::{Diagnostic, Severity};
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;

mod args;
use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::site_packages::VirtualEnvironment;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;

use crate::logging::{setup_tracing, Verbosity};

mod logging;
mod python_version;
mod target_version;
mod verbosity;
mod version;

#[derive(Debug, Parser)]
#[command(
    author,
    name = "red-knot",
    about = "An extremely fast Python type checker."
)]
#[command(version)]
struct Args {
    #[command(subcommand)]
    pub(crate) command: Option<Command>,

    #[arg(
        long,
        help = "Changes the current working directory.",
        long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
        value_name = "PATH"
    )]
    current_directory: Option<SystemPathBuf>,

    #[arg(
        long,
        help = "Path to the virtual environment the project uses",
        long_help = "\
Path to the virtual environment the project uses. \
If provided, red-knot will use the `site-packages` directory of this virtual environment \
to resolve type information for the project's third-party dependencies.",
        value_name = "PATH"
    )]
    venv_path: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "DIRECTORY",
        help = "Custom directory to use for stdlib typeshed stubs"
    )]
    custom_typeshed_dir: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "PATH",
        help = "Additional path to use as a module-resolution source (can be passed multiple times)"
    )]
    extra_search_path: Vec<SystemPathBuf>,

    #[arg(
        long,
        help = "Python version to assume when resolving types",
        default_value_t = TargetVersion::default(),
        value_name = "VERSION"
    )]
    target_version: TargetVersion,

    #[clap(flatten)]
    verbosity: Verbosity,

    #[arg(
        long,
        help = "Run in watch mode by re-running whenever files change",
        short = 'W'
    )]
    watch: bool,
}

#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Start the language server
    Server,
}

#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
@@ -48,24 +115,22 @@ pub fn main() -> ExitStatus {
}

fn run() -> anyhow::Result<ExitStatus> {
    let args = Args::parse_from(std::env::args());
    let Args {
        command,
        current_directory,
        custom_typeshed_dir,
        extra_search_path: extra_paths,
        venv_path,
        target_version,
        verbosity,
        watch,
    } = Args::parse_from(std::env::args().collect::<Vec<_>>());

    match args.command {
        Command::Server => run_server().map(|()| ExitStatus::Success),
        Command::Check(check_args) => run_check(check_args),
        Command::Version => version().map(|()| ExitStatus::Success),
    if matches!(command, Some(Command::Server)) {
        return run_server().map(|()| ExitStatus::Success);
    }
}

pub(crate) fn version() -> Result<()> {
    let mut stdout = BufWriter::new(io::stdout().lock());
    let version_info = crate::version::version();
    writeln!(stdout, "red knot {}", &version_info)?;
    Ok(())
}

fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
    let verbosity = args.verbosity.level();
    let verbosity = verbosity.level();
    countme::enable(verbosity.is_trace());
    let _guard = setup_tracing(verbosity)?;

@@ -75,41 +140,53 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
        SystemPathBuf::from_path_buf(cwd)
            .map_err(|path| {
                anyhow!(
                    "The current working directory `{}` contains non-Unicode characters. Red Knot only supports Unicode paths.",
                    "The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
                    path.display()
                )
            })?
    };

    let cwd = args
        .project
        .as_ref()
    let cwd = current_directory
        .map(|cwd| {
            if cwd.as_std_path().is_dir() {
                Ok(SystemPath::absolute(cwd, &cli_base_path))
                Ok(SystemPath::absolute(&cwd, &cli_base_path))
            } else {
                Err(anyhow!("Provided project path `{cwd}` is not a directory"))
                Err(anyhow!(
                    "Provided current-directory path '{cwd}' is not a directory."
                ))
            }
        })
        .transpose()?
        .unwrap_or_else(|| cli_base_path.clone());

    let system = OsSystem::new(cwd);
    let watch = args.watch;
    let exit_zero = args.exit_zero;
    let min_error_severity = if args.error_on_warning {
        Severity::Warning
    } else {
        Severity::Error
    let system = OsSystem::new(cwd.clone());
    let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;

    // TODO: Verify the remaining search path settings eagerly.
    let site_packages = venv_path
        .map(|path| {
            VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
                .and_then(|venv| venv.site_packages_directories(&system))
        })
        .transpose()?
        .unwrap_or_default();

    // TODO: Respect the settings from the workspace metadata. when resolving the program settings.
    let program_settings = ProgramSettings {
        target_version: target_version.into(),
        search_paths: SearchPathSettings {
            extra_paths,
            src_root: workspace_metadata.root().to_path_buf(),
            custom_typeshed: custom_typeshed_dir,
            site_packages,
        },
    };

    let cli_options = args.into_options();
    let mut workspace_metadata = ProjectMetadata::discover(system.current_directory(), &system)?;
    workspace_metadata.apply_cli_options(cli_options.clone());
    // TODO: Use the `program_settings` to compute the key for the database's persistent
    // cache and load the cache if it exists.
    let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;

    let mut db = ProjectDatabase::new(workspace_metadata, system)?;

    let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options, min_error_severity);
    let (main_loop, main_loop_cancellation_token) = MainLoop::new();

    // Listen to Ctrl+C and abort the watch mode.
    let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -131,11 +208,7 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {

    std::mem::forget(db);

    if exit_zero {
        Ok(ExitStatus::Success)
    } else {
        Ok(exit_status)
    }
    Ok(exit_status)
}

#[derive(Copy, Clone)]
@@ -164,21 +237,11 @@ struct MainLoop {
    receiver: crossbeam_channel::Receiver<MainLoopMessage>,

    /// The file system watcher, if running in watch mode.
    watcher: Option<ProjectWatcher>,

    cli_options: Options,

    /// The minimum severity to consider an error when deciding the exit status.
    ///
    /// TODO(micha): Get from the terminal settings.
    min_error_severity: Severity,
    watcher: Option<WorkspaceWatcher>,
}

impl MainLoop {
    fn new(
        cli_options: Options,
        min_error_severity: Severity,
    ) -> (Self, MainLoopCancellationToken) {
    fn new() -> (Self, MainLoopCancellationToken) {
        let (sender, receiver) = crossbeam_channel::bounded(10);

        (
@@ -186,28 +249,26 @@ impl MainLoop {
                sender: sender.clone(),
                receiver,
                watcher: None,
                cli_options,
                min_error_severity,
            },
            MainLoopCancellationToken { sender },
        )
    }

    fn watch(mut self, db: &mut ProjectDatabase) -> anyhow::Result<ExitStatus> {
    fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
        tracing::debug!("Starting watch mode");
        let sender = self.sender.clone();
        let watcher = watch::directory_watcher(move |event| {
            sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
        })?;

        self.watcher = Some(ProjectWatcher::new(watcher, db));
        self.watcher = Some(WorkspaceWatcher::new(watcher, db));

        self.run(db);

        Ok(ExitStatus::Success)
    }

    fn run(mut self, db: &mut ProjectDatabase) -> ExitStatus {
    fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
        self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();

        let result = self.main_loop(db);
@@ -217,7 +278,7 @@ impl MainLoop {
        result
    }

    fn main_loop(&mut self, db: &mut ProjectDatabase) -> ExitStatus {
    fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
        // Schedule the first check.
        tracing::debug!("Starting main loop");

@@ -226,10 +287,10 @@ impl MainLoop {
        while let Ok(message) = self.receiver.recv() {
            match message {
                MainLoopMessage::CheckWorkspace => {
                    let db = db.clone();
                    let db = db.snapshot();
                    let sender = self.sender.clone();

                    // Spawn a new task that checks the project. This needs to be done in a separate thread
                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || {
                        if let Ok(result) = db.check() {
@@ -245,14 +306,10 @@ impl MainLoop {
                    result,
                    revision: check_revision,
                } => {
                    let failed = result
                        .iter()
                        .any(|diagnostic| diagnostic.severity() >= self.min_error_severity);

                    let has_diagnostics = !result.is_empty();
                    if check_revision == revision {
                        #[allow(clippy::print_stdout)]
                        for diagnostic in result {
                            println!("{}", diagnostic.display(db));
                            tracing::error!("{}", diagnostic);
                        }
                    } else {
                        tracing::debug!(
@@ -261,7 +318,7 @@ impl MainLoop {
                    }

                    if self.watcher.is_none() {
                        return if failed {
                        return if has_diagnostics {
                            ExitStatus::Failure
                        } else {
                            ExitStatus::Success
@@ -274,7 +331,7 @@ impl MainLoop {
                MainLoopMessage::ApplyChanges(changes) => {
                    revision += 1;
                    // Automatically cancels any pending queries and waits for them to complete.
                    db.apply_changes(changes, Some(&self.cli_options));
                    db.apply_changes(changes);
                    if let Some(watcher) = self.watcher.as_mut() {
                        watcher.update(db);
                    }
@@ -311,10 +368,7 @@ impl MainLoopCancellationToken {
#[derive(Debug)]
enum MainLoopMessage {
    CheckWorkspace,
    CheckCompleted {
        result: Vec<Box<dyn Diagnostic>>,
        revision: u64,
    },
    CheckCompleted { result: Vec<String>, revision: u64 },
    ApplyChanges(Vec<watch::ChangeEvent>),
    Exit,
}

@@ -1,68 +0,0 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum PythonVersion {
    #[value(name = "3.7")]
    Py37,
    #[value(name = "3.8")]
    Py38,
    #[default]
    #[value(name = "3.9")]
    Py39,
    #[value(name = "3.10")]
    Py310,
    #[value(name = "3.11")]
    Py311,
    #[value(name = "3.12")]
    Py312,
    #[value(name = "3.13")]
    Py313,
}

impl PythonVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "3.7",
            Self::Py38 => "3.8",
            Self::Py39 => "3.9",
            Self::Py310 => "3.10",
            Self::Py311 => "3.11",
            Self::Py312 => "3.12",
            Self::Py313 => "3.13",
        }
    }
}

impl std::fmt::Display for PythonVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl From<PythonVersion> for red_knot_python_semantic::PythonVersion {
    fn from(value: PythonVersion) -> Self {
        match value {
            PythonVersion::Py37 => Self::PY37,
            PythonVersion::Py38 => Self::PY38,
            PythonVersion::Py39 => Self::PY39,
            PythonVersion::Py310 => Self::PY310,
            PythonVersion::Py311 => Self::PY311,
            PythonVersion::Py312 => Self::PY312,
            PythonVersion::Py313 => Self::PY313,
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::python_version::PythonVersion;

    #[test]
    fn same_default_as_python_version() {
        assert_eq!(
            red_knot_python_semantic::PythonVersion::from(PythonVersion::default()),
            red_knot_python_semantic::PythonVersion::default()
        );
    }
}

48 crates/red_knot/src/target_version.rs (new file)
@@ -0,0 +1,48 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl TargetVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "py37",
            Self::Py38 => "py38",
            Self::Py39 => "py39",
            Self::Py310 => "py310",
            Self::Py311 => "py311",
            Self::Py312 => "py312",
            Self::Py313 => "py313",
        }
    }
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::PY37,
            TargetVersion::Py38 => Self::PY38,
            TargetVersion::Py39 => Self::PY39,
            TargetVersion::Py310 => Self::PY310,
            TargetVersion::Py311 => Self::PY311,
            TargetVersion::Py312 => Self::PY312,
            TargetVersion::Py313 => Self::PY313,
        }
    }
}

@@ -1,105 +0,0 @@
//! Code for representing Red Knot's release version number.
use std::fmt;

/// Information about the git repository where Red Knot was built from.
pub(crate) struct CommitInfo {
    short_commit_hash: String,
    commit_date: String,
    commits_since_last_tag: u32,
}

/// Red Knot's version.
pub(crate) struct VersionInfo {
    /// Red Knot's version, such as "0.5.1"
    version: String,
    /// Information about the git commit we may have been built from.
    ///
    /// `None` if not built from a git repo or if retrieval failed.
    commit_info: Option<CommitInfo>,
}

impl fmt::Display for VersionInfo {
    /// Formatted version information: `<version>[+<commits>] (<commit> <date>)`
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.version)?;

        if let Some(ref ci) = self.commit_info {
            if ci.commits_since_last_tag > 0 {
                write!(f, "+{}", ci.commits_since_last_tag)?;
            }
            write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
        }

        Ok(())
    }
}

/// Returns information about Red Knot's version.
pub(crate) fn version() -> VersionInfo {
    // Environment variables are only read at compile-time
    macro_rules! option_env_str {
        ($name:expr) => {
            option_env!($name).map(|s| s.to_string())
        };
    }

    // This version is pulled from Cargo.toml and set by Cargo
    let version = option_env_str!("CARGO_PKG_VERSION").unwrap();

    // Commit info is pulled from git and set by `build.rs`
    let commit_info =
        option_env_str!("RED_KNOT_COMMIT_SHORT_HASH").map(|short_commit_hash| CommitInfo {
            short_commit_hash,
            commit_date: option_env_str!("RED_KNOT_COMMIT_DATE").unwrap(),
            commits_since_last_tag: option_env_str!("RED_KNOT_LAST_TAG_DISTANCE")
                .as_deref()
                .map_or(0, |value| value.parse::<u32>().unwrap_or(0)),
        });

    VersionInfo {
        version,
        commit_info,
    }
}

#[cfg(test)]
mod tests {
    use insta::assert_snapshot;

    use super::{CommitInfo, VersionInfo};

    #[test]
    fn version_formatting() {
        let version = VersionInfo {
            version: "0.0.0".to_string(),
            commit_info: None,
        };
        assert_snapshot!(version, @"0.0.0");
    }

    #[test]
    fn version_formatting_with_commit_info() {
        let version = VersionInfo {
            version: "0.0.0".to_string(),
            commit_info: Some(CommitInfo {
                short_commit_hash: "53b0f5d92".to_string(),
                commit_date: "2023-10-19".to_string(),
                commits_since_last_tag: 0,
            }),
        };
        assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
    }

    #[test]
    fn version_formatting_with_commits_since_last_tag() {
        let version = VersionInfo {
            version: "0.0.0".to_string(),
            commit_info: Some(CommitInfo {
                short_commit_hash: "53b0f5d92".to_string(),
                commit_date: "2023-10-19".to_string(),
                commits_since_last_tag: 24,
            }),
        };
        assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
    }
}

@@ -1,769 +0,0 @@
use anyhow::Context;
use insta::internals::SettingsBindDropGuard;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use std::path::{Path, PathBuf};
use std::process::Command;
use tempfile::TempDir;

/// Specifying an option on the CLI should take precedence over the same setting in the
/// project's configuration.
#[test]
fn config_override() -> anyhow::Result<()> {
    let case = TestCase::with_files([
        (
            "pyproject.toml",
            r#"
            [tool.knot.environment]
            python-version = "3.11"
            "#,
        ),
        (
            "test.py",
            r#"
            import sys

            # Access `sys.last_exc` that was only added in Python 3.12
            print(sys.last_exc)
            "#,
        ),
    ])?;

    assert_cmd_snapshot!(case.command(), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    error: lint:unresolved-attribute
     --> <temp_dir>/test.py:5:7
      |
    4 | # Access `sys.last_exc` that was only added in Python 3.12
    5 | print(sys.last_exc)
      |       ^^^^^^^^^^^^ Type `<module 'sys'>` has no attribute `last_exc`
      |

    ----- stderr -----
    "###);

    assert_cmd_snapshot!(case.command().arg("--python-version").arg("3.12"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    ");

    Ok(())
}

/// Paths specified on the CLI are relative to the current working directory and not the project root.
///
/// We test this by adding an extra search path from the CLI to the libs directory when
/// running the CLI from the child directory (using relative paths).
///
/// Project layout:
/// ```
/// - libs
///   |- utils.py
/// - child
///   | - test.py
/// - pyproject.toml
/// ```
///
/// And the command is run in the `child` directory.
#[test]
fn cli_arguments_are_relative_to_the_current_directory() -> anyhow::Result<()> {
    let case = TestCase::with_files([
        (
            "pyproject.toml",
            r#"
            [tool.knot.environment]
            python-version = "3.11"
            "#,
        ),
        (
            "libs/utils.py",
            r#"
            def add(a: int, b: int) -> int:
                a + b
            "#,
        ),
        (
            "child/test.py",
            r#"
            from utils import add

            stat = add(10, 15)
            "#,
        ),
    ])?;

    // Make sure that the CLI fails when the `libs` directory is not in the search path.
    assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    error: lint:unresolved-import
     --> <temp_dir>/child/test.py:2:6
      |
    2 | from utils import add
      |      ^^^^^ Cannot resolve import `utils`
    3 |
    4 | stat = add(10, 15)
      |

    ----- stderr -----
    "###);

    assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")).arg("--extra-search-path").arg("../libs"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    ");

    Ok(())
}

/// Paths specified in a configuration file are relative to the project root.
///
/// We test this by adding `libs` (as a relative path) to the extra search path in the configuration and run
/// the CLI from a subdirectory.
///
/// Project layout:
/// ```
/// - libs
///   |- utils.py
/// - child
///   | - test.py
/// - pyproject.toml
/// ```
#[test]
fn paths_in_configuration_files_are_relative_to_the_project_root() -> anyhow::Result<()> {
    let case = TestCase::with_files([
        (
            "pyproject.toml",
            r#"
            [tool.knot.environment]
            python-version = "3.11"
            extra-paths = ["libs"]
            "#,
        ),
        (
            "libs/utils.py",
            r#"
            def add(a: int, b: int) -> int:
                a + b
            "#,
        ),
        (
            "child/test.py",
            r#"
            from utils import add

            stat = add(10, 15)
            "#,
        ),
    ])?;

    assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    ");

    Ok(())
}

/// The rule severity can be changed in the configuration file
#[test]
fn configuration_rule_severity() -> anyhow::Result<()> {
    let case = TestCase::with_file(
        "test.py",
        r#"
        y = 4 / 0

        for a in range(0, y):
            x = a

        print(x) # possibly-unresolved-reference
        "#,
    )?;

    // Assert that there's a possibly unresolved reference diagnostic
    // and that division-by-zero has a severity of error by default.
    assert_cmd_snapshot!(case.command(), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    error: lint:division-by-zero
     --> <temp_dir>/test.py:2:5
      |
    2 | y = 4 / 0
      |     ^^^^^ Cannot divide object of type `Literal[4]` by zero
    3 |
    4 | for a in range(0, y):
      |

    warning: lint:possibly-unresolved-reference
     --> <temp_dir>/test.py:7:7
      |
    5 |     x = a
    6 |
    7 | print(x) # possibly-unresolved-reference
      |       - Name `x` used when possibly not defined
      |

    ----- stderr -----
    "###);

    case.write_file(
        "pyproject.toml",
        r#"
        [tool.knot.rules]
        division-by-zero = "warn" # demote to warn
        possibly-unresolved-reference = "ignore"
        "#,
    )?;

    assert_cmd_snapshot!(case.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    warning: lint:division-by-zero
     --> <temp_dir>/test.py:2:5
      |
    2 | y = 4 / 0
      |     ----- Cannot divide object of type `Literal[4]` by zero
    3 |
    4 | for a in range(0, y):
      |

    ----- stderr -----
    "###);

    Ok(())
}

/// The rule severity can be changed using `--ignore`, `--warn`, and `--error`
#[test]
fn cli_rule_severity() -> anyhow::Result<()> {
    let case = TestCase::with_file(
        "test.py",
        r#"
        import does_not_exit

        y = 4 / 0

        for a in range(0, y):
            x = a

        print(x) # possibly-unresolved-reference
        "#,
    )?;

    // Assert that there's a possibly unresolved reference diagnostic
    // and that division-by-zero has a severity of error by default.
    assert_cmd_snapshot!(case.command(), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    error: lint:unresolved-import
     --> <temp_dir>/test.py:2:8
      |
    2 | import does_not_exit
      |        ^^^^^^^^^^^^^ Cannot resolve import `does_not_exit`
    3 |
    4 | y = 4 / 0
      |

    error: lint:division-by-zero
     --> <temp_dir>/test.py:4:5
      |
    2 | import does_not_exit
    3 |
    4 | y = 4 / 0
      |     ^^^^^ Cannot divide object of type `Literal[4]` by zero
    5 |
    6 | for a in range(0, y):
      |

    warning: lint:possibly-unresolved-reference
     --> <temp_dir>/test.py:9:7
      |
    7 |     x = a
    8 |
    9 | print(x) # possibly-unresolved-reference
      |       - Name `x` used when possibly not defined
      |

    ----- stderr -----
    "###);

    assert_cmd_snapshot!(
        case
            .command()
            .arg("--ignore")
            .arg("possibly-unresolved-reference")
            .arg("--warn")
            .arg("division-by-zero")
            .arg("--warn")
            .arg("unresolved-import"),
        @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    warning: lint:unresolved-import
     --> <temp_dir>/test.py:2:8
      |
    2 | import does_not_exit
      |        ------------- Cannot resolve import `does_not_exit`
    3 |
    4 | y = 4 / 0
      |

    warning: lint:division-by-zero
     --> <temp_dir>/test.py:4:5
      |
    2 | import does_not_exit
    3 |
    4 | y = 4 / 0
      |     ----- Cannot divide object of type `Literal[4]` by zero
    5 |
    6 | for a in range(0, y):
      |

    ----- stderr -----
    "###
    );

    Ok(())
}

/// The rule severity can be changed using `--ignore`, `--warn`, and `--error` and
/// values specified last override previous severities.
#[test]
fn cli_rule_severity_precedence() -> anyhow::Result<()> {
    let case = TestCase::with_file(
        "test.py",
        r#"
        y = 4 / 0

        for a in range(0, y):
            x = a

        print(x) # possibly-unresolved-reference
        "#,
    )?;

    // Assert that there's a possibly unresolved reference diagnostic
    // and that division-by-zero has a severity of error by default.
    assert_cmd_snapshot!(case.command(), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    error: lint:division-by-zero
     --> <temp_dir>/test.py:2:5
      |
    2 | y = 4 / 0
      |     ^^^^^ Cannot divide object of type `Literal[4]` by zero
    3 |
    4 | for a in range(0, y):
      |

    warning: lint:possibly-unresolved-reference
     --> <temp_dir>/test.py:7:7
      |
    5 |     x = a
    6 |
    7 | print(x) # possibly-unresolved-reference
      |       - Name `x` used when possibly not defined
      |

    ----- stderr -----
    "###);

    assert_cmd_snapshot!(
        case
            .command()
            .arg("--error")
            .arg("possibly-unresolved-reference")
            .arg("--warn")
            .arg("division-by-zero")
            // Override the error severity with warning
            .arg("--ignore")
            .arg("possibly-unresolved-reference"),
        @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    warning: lint:division-by-zero
     --> <temp_dir>/test.py:2:5
      |
    2 | y = 4 / 0
      |     ----- Cannot divide object of type `Literal[4]` by zero
    3 |
    4 | for a in range(0, y):
      |

    ----- stderr -----
    "###
    );

    Ok(())
}

/// Red Knot warns about unknown rules specified in a configuration file
#[test]
fn configuration_unknown_rules() -> anyhow::Result<()> {
    let case = TestCase::with_files([
        (
            "pyproject.toml",
            r#"
            [tool.knot.rules]
            division-by-zer = "warn" # incorrect rule name
            "#,
        ),
        ("test.py", "print(10)"),
    ])?;

    assert_cmd_snapshot!(case.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    warning: unknown-rule
     --> <temp_dir>/pyproject.toml:3:1
      |
    2 | [tool.knot.rules]
    3 | division-by-zer = "warn" # incorrect rule name
      | --------------- Unknown lint rule `division-by-zer`
      |

    ----- stderr -----
    "###);

    Ok(())
}

/// Red Knot warns about unknown rules specified in a CLI argument
#[test]
fn cli_unknown_rules() -> anyhow::Result<()> {
    let case = TestCase::with_file("test.py", "print(10)")?;

    assert_cmd_snapshot!(case.command().arg("--ignore").arg("division-by-zer"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    warning: unknown-rule: Unknown lint rule `division-by-zer`

    ----- stderr -----
    "###);

    Ok(())
}

#[test]
fn exit_code_only_warnings() -> anyhow::Result<()> {
    let case = TestCase::with_file("test.py", r"print(x) # [unresolved-reference]")?;

    assert_cmd_snapshot!(case.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    warning: lint:unresolved-reference
     --> <temp_dir>/test.py:1:7
      |
    1 | print(x) # [unresolved-reference]
      |       - Name `x` used when not defined
      |

    ----- stderr -----
    "###);

    Ok(())
}

#[test]
fn exit_code_only_info() -> anyhow::Result<()> {
    let case = TestCase::with_file(
        "test.py",
        r#"
        from typing_extensions import reveal_type
        reveal_type(1)
        "#,
    )?;

    assert_cmd_snapshot!(case.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    info: revealed-type
     --> <temp_dir>/test.py:3:1
      |
    2 | from typing_extensions import reveal_type
    3 | reveal_type(1)
      | -------------- info: Revealed type is `Literal[1]`
      |

    ----- stderr -----
    "###);

    Ok(())
}

#[test]
fn exit_code_only_info_and_error_on_warning_is_true() -> anyhow::Result<()> {
    let case = TestCase::with_file(
        "test.py",
        r#"
        from typing_extensions import reveal_type
        reveal_type(1)
        "#,
    )?;

    assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    info: revealed-type
     --> <temp_dir>/test.py:3:1
      |
    2 | from typing_extensions import reveal_type
    3 | reveal_type(1)
      | -------------- info: Revealed type is `Literal[1]`
      |

    ----- stderr -----
    "###);

    Ok(())
}

#[test]
fn exit_code_no_errors_but_error_on_warning_is_true() -> anyhow::Result<()> {
    let case = TestCase::with_file("test.py", r"print(x) # [unresolved-reference]")?;

    assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    warning: lint:unresolved-reference
     --> <temp_dir>/test.py:1:7
      |
    1 | print(x) # [unresolved-reference]
      |       - Name `x` used when not defined
      |

    ----- stderr -----
    "###);

    Ok(())
}

#[test]
fn exit_code_both_warnings_and_errors() -> anyhow::Result<()> {
    let case = TestCase::with_file(
        "test.py",
        r#"
        print(x) # [unresolved-reference]
        print(4[1]) # [non-subscriptable]
        "#,
    )?;

    assert_cmd_snapshot!(case.command(), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    warning: lint:unresolved-reference
     --> <temp_dir>/test.py:2:7
      |
    2 | print(x) # [unresolved-reference]
      |       - Name `x` used when not defined
    3 | print(4[1]) # [non-subscriptable]
      |

    error: lint:non-subscriptable
     --> <temp_dir>/test.py:3:7
      |
    2 | print(x) # [unresolved-reference]
    3 | print(4[1]) # [non-subscriptable]
      |       ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
      |

    ----- stderr -----
    "###);

    Ok(())
}

#[test]
fn exit_code_both_warnings_and_errors_and_error_on_warning_is_true() -> anyhow::Result<()> {
    let case = TestCase::with_file(
        "test.py",
        r###"
        print(x) # [unresolved-reference]
        print(4[1]) # [non-subscriptable]
        "###,
    )?;

    assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    warning: lint:unresolved-reference
     --> <temp_dir>/test.py:2:7
      |
    2 | print(x) # [unresolved-reference]
      |       - Name `x` used when not defined
    3 | print(4[1]) # [non-subscriptable]
      |

    error: lint:non-subscriptable
     --> <temp_dir>/test.py:3:7
      |
    2 | print(x) # [unresolved-reference]
    3 | print(4[1]) # [non-subscriptable]
      |       ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
      |

    ----- stderr -----
    "###);

    Ok(())
}

#[test]
fn exit_code_exit_zero_is_true() -> anyhow::Result<()> {
    let case = TestCase::with_file(
        "test.py",
        r#"
        print(x) # [unresolved-reference]
        print(4[1]) # [non-subscriptable]
        "#,
    )?;

    assert_cmd_snapshot!(case.command().arg("--exit-zero"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    warning: lint:unresolved-reference
     --> <temp_dir>/test.py:2:7
      |
    2 | print(x) # [unresolved-reference]
      |       - Name `x` used when not defined
    3 | print(4[1]) # [non-subscriptable]
      |

    error: lint:non-subscriptable
     --> <temp_dir>/test.py:3:7
      |
    2 | print(x) # [unresolved-reference]
    3 | print(4[1]) # [non-subscriptable]
      |       ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
      |

    ----- stderr -----
    "###);

    Ok(())
}

struct TestCase {
    _temp_dir: TempDir,
    _settings_scope: SettingsBindDropGuard,
    project_dir: PathBuf,
}

impl TestCase {
    fn new() -> anyhow::Result<Self> {
        let temp_dir = TempDir::new()?;

        // Canonicalize the tempdir path because macos uses symlinks for tempdirs
        // and that doesn't play well with our snapshot filtering.
        let project_dir = temp_dir
            .path()
            .canonicalize()
            .context("Failed to canonicalize project path")?;

        let mut settings = insta::Settings::clone_current();
        settings.add_filter(&tempdir_filter(&project_dir), "<temp_dir>/");
        settings.add_filter(r#"\\(\w\w|\s|\.|")"#, "/$1");

        let settings_scope = settings.bind_to_scope();

        Ok(Self {
            project_dir,
            _temp_dir: temp_dir,
            _settings_scope: settings_scope,
        })
    }

    fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
        let case = Self::new()?;
        case.write_files(files)?;
        Ok(case)
    }

    fn with_file(path: impl AsRef<Path>, content: &str) -> anyhow::Result<Self> {
        let case = Self::new()?;
        case.write_file(path, content)?;
        Ok(case)
    }

    fn write_files<'a>(
        &self,
        files: impl IntoIterator<Item = (&'a str, &'a str)>,
    ) -> anyhow::Result<()> {
        for (path, content) in files {
            self.write_file(path, content)?;
        }

        Ok(())
    }

    fn write_file(&self, path: impl AsRef<Path>, content: &str) -> anyhow::Result<()> {
        let path = path.as_ref();
        let path = self.project_dir.join(path);

        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)
                .with_context(|| format!("Failed to create directory `{}`", parent.display()))?;
        }
        std::fs::write(&path, &*ruff_python_trivia::textwrap::dedent(content))
            .with_context(|| format!("Failed to write file `{path}`", path = path.display()))?;

        Ok(())
    }

    fn project_dir(&self) -> &Path {
        &self.project_dir
    }

    fn command(&self) -> Command {
        let mut command = Command::new(get_cargo_bin("red_knot"));
        command.current_dir(&self.project_dir).arg("check");
        command
    }
}

fn tempdir_filter(path: &Path) -> String {
    format!(r"{}\\?/?", regex::escape(path.to_str().unwrap()))
}

File diff suppressed because it is too large
@@ -1,47 +0,0 @@
[package]
name = "red_knot_project"
version = "0.0.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache", "serde"] }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true, features = ["serde"] }
ruff_text_size = { workspace = true }
red_knot_python_semantic = { workspace = true, features = ["serde"] }
red_knot_vendored = { workspace = true }

anyhow = { workspace = true }
crossbeam = { workspace = true }
glob = { workspace = true }
notify = { workspace = true }
pep440_rs = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
serde = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
glob = { workspace = true }
insta = { workspace = true, features = ["redactions", "ron"] }

[features]
default = ["zstd"]
zstd = ["red_knot_vendored/zstd"]
deflate = ["red_knot_vendored/deflate"]

[lints]
workspace = true

@@ -1 +0,0 @@
../../../../ruff_python_parser/resources/invalid/statements/invalid_assignment_targets.py
@@ -1,2 +0,0 @@
x = 0
(x := x + 1)
@@ -1 +0,0 @@
../../../../ruff_python_parser/resources/invalid/expressions/named/invalid_target.py
@@ -1 +0,0 @@
../../../../ruff_python_parser/resources/invalid/statements/invalid_augmented_assignment_target.py
@@ -1 +0,0 @@
x if $z
@@ -1,3 +0,0 @@
x = 0
if x := x + 1:
    pass
@@ -1,6 +0,0 @@
while True:

    class A:
        x: int

        break
@@ -1,6 +0,0 @@
while True:

    def b():
        x: int

        break
@@ -1,6 +0,0 @@
for _ in range(1):

    class A:
        x: int

        break
@@ -1,6 +0,0 @@
for _ in range(1):

    def b():
        x: int

        break
@@ -1 +0,0 @@
for
@@ -1,11 +0,0 @@
def bool(x) -> bool:
    return True


class MyClass: ...


def MyClass() -> MyClass: ...


def x(self) -> x: ...
@@ -1,8 +0,0 @@
# Regression test for https://github.com/astral-sh/ruff/issues/14115
#
# This is invalid syntax, but should not lead to a crash.

def f() -> *int: ...


f()
@@ -1,2 +0,0 @@
def bool(x=bool):
    return x
@@ -1,2 +0,0 @@
with foo() as self.bar:
    pass
@@ -1 +0,0 @@
../../../../ruff_notebook/resources/test/fixtures/jupyter/unused_variable.ipynb
@@ -1,3 +0,0 @@
match x:
    case [1, 0] if x := x[:0]:
        y = 1
@@ -1,3 +0,0 @@
match some_int:
    case x:=2:
        pass
@@ -1,17 +0,0 @@
"""
Regression test that makes sure we do not short-circuit here after
determining that the overall type will be `Never` and still infer
a type for the second tuple element `2`.

Relevant discussion:
https://github.com/astral-sh/ruff/pull/15218#discussion_r1900811073
"""

from typing_extensions import Never


def never() -> Never:
    return never()


(never(), 2)
@@ -1,2 +0,0 @@
type foo = int
type ListOrSet[T] = list[T] | set[T]
@@ -1 +0,0 @@
../../../../ruff_python_parser/resources/inline/err/type_param_invalid_bound_expr.py
@@ -1,3 +0,0 @@
msg = "hello"

f"{msg!r:>{10+10}}"
@@ -1 +0,0 @@
x: f"Literal[{1 + 2}]" = 3
@@ -1,3 +0,0 @@
from typing import Union

x: Union[int, str] = 1
@@ -1 +0,0 @@
../../../../ruff_python_parser/resources/inline/err/ann_assign_stmt_invalid_target.py
@@ -1,190 +0,0 @@
use std::{collections::HashMap, hash::BuildHasher};

use red_knot_python_semantic::{PythonPlatform, PythonVersion, SitePackages};
use ruff_db::system::SystemPathBuf;

/// Combine two values, preferring the values in `self`.
///
/// The logic should follow that of Cargo's `config.toml`:
///
/// > If a key is specified in multiple config files, the values will get merged together.
/// > Numbers, strings, and booleans will use the value in the deeper config directory taking
/// > precedence over ancestor directories, where the home directory is the lowest priority.
/// > Arrays will be joined together with higher precedence items being placed later in the
/// > merged array.
///
/// ## uv Compatibility
///
/// The merging behavior differs from uv in that values with higher precedence in arrays
/// are placed later in the merged array. This is because we want to support overriding
/// earlier values and values from other configurations, including unsetting them.
/// For example: patterns coming last in file inclusion and exclusion patterns
/// allow overriding earlier patterns, matching the `gitignore` behavior.
/// Generally speaking, it feels more intuitive if later values override earlier values
/// than the other way around: `knot --exclude png --exclude "!important.png"`.
///
/// The main downside of this approach is that the ordering can be surprising in cases
/// where the option has a "first match" semantic rather than a "last match wins" one.
/// One such example is `extra-paths`, where the semantics are given by Python:
/// the module on the first matching search path wins.
///
/// ```toml
/// [environment]
/// extra-paths = ["b", "c"]
/// ```
///
/// ```bash
/// knot --extra-paths a
/// ```
///
/// That's why a user might expect that this configuration results in `["a", "b", "c"]`,
/// because the CLI has higher precedence. However, the current implementation results in a
/// resolved extra search path of `["b", "c", "a"]`, which means `a` will be tried last.
///
/// There's an argument here that the user should be able to specify the order of the paths,
/// because only then is the user in full control of where to insert the path when specifying
/// `extra-paths` in multiple sources.
///
/// ## Macro
/// You can automatically derive `Combine` for structs with named fields by using `derive(ruff_macros::Combine)`.
pub trait Combine {
    #[must_use]
    fn combine(mut self, other: Self) -> Self
    where
        Self: Sized,
    {
        self.combine_with(other);
        self
    }

    fn combine_with(&mut self, other: Self);
}

impl<T> Combine for Option<T>
where
    T: Combine,
{
    fn combine(self, other: Self) -> Self
    where
        Self: Sized,
    {
        match (self, other) {
            (Some(a), Some(b)) => Some(a.combine(b)),
            (None, Some(b)) => Some(b),
            (a, _) => a,
        }
    }

    fn combine_with(&mut self, other: Self) {
        match (self, other) {
            (Some(a), Some(b)) => {
                a.combine_with(b);
            }
            (a @ None, Some(b)) => {
                *a = Some(b);
            }
            _ => {}
        }
    }
}

impl<T> Combine for Vec<T> {
    fn combine_with(&mut self, mut other: Self) {
        // `self` takes precedence over `other`, but values with higher precedence must be placed after.
        // Swap the vectors so that `other` is the one that gets extended, so that the values of `self` come after.
        std::mem::swap(self, &mut other);
        self.extend(other);
    }
}

impl<K, V, S> Combine for HashMap<K, V, S>
where
    K: Eq + std::hash::Hash,
    S: BuildHasher,
{
    fn combine_with(&mut self, mut other: Self) {
        // `self` takes precedence over `other`, but `extend` overrides existing values.
        // Swap the hash maps so that `self` is the one that gets extended.
        std::mem::swap(self, &mut other);
        self.extend(other);
    }
}

/// Implements [`Combine`] for a value that always returns `self` when combined with another value.
macro_rules! impl_noop_combine {
    ($name:ident) => {
        impl Combine for $name {
            #[inline(always)]
            fn combine_with(&mut self, _other: Self) {}

            #[inline(always)]
            fn combine(self, _other: Self) -> Self {
                self
            }
        }
    };
}

impl_noop_combine!(SystemPathBuf);
impl_noop_combine!(PythonPlatform);
impl_noop_combine!(SitePackages);
impl_noop_combine!(PythonVersion);

// std types
impl_noop_combine!(bool);
impl_noop_combine!(usize);
impl_noop_combine!(u8);
impl_noop_combine!(u16);
impl_noop_combine!(u32);
impl_noop_combine!(u64);
impl_noop_combine!(u128);
impl_noop_combine!(isize);
impl_noop_combine!(i8);
impl_noop_combine!(i16);
impl_noop_combine!(i32);
impl_noop_combine!(i64);
impl_noop_combine!(i128);
impl_noop_combine!(String);

#[cfg(test)]
mod tests {
    use crate::combine::Combine;
    use std::collections::HashMap;

    #[test]
    fn combine_option() {
        assert_eq!(Some(1).combine(Some(2)), Some(1));
        assert_eq!(None.combine(Some(2)), Some(2));
        assert_eq!(Some(1).combine(None), Some(1));
    }

    #[test]
    fn combine_vec() {
        assert_eq!(None.combine(Some(vec![1, 2, 3])), Some(vec![1, 2, 3]));
        assert_eq!(Some(vec![1, 2, 3]).combine(None), Some(vec![1, 2, 3]));
        assert_eq!(
            Some(vec![1, 2, 3]).combine(Some(vec![4, 5, 6])),
            Some(vec![4, 5, 6, 1, 2, 3])
        );
    }

    #[test]
    fn combine_map() {
        let a: HashMap<u32, _> = HashMap::from_iter([(1, "a"), (2, "a"), (3, "a")]);
        let b: HashMap<u32, _> = HashMap::from_iter([(0, "b"), (2, "b"), (5, "b")]);

        assert_eq!(None.combine(Some(b.clone())), Some(b.clone()));
        assert_eq!(Some(a.clone()).combine(None), Some(a.clone()));
        assert_eq!(
            Some(a).combine(Some(b)),
            Some(HashMap::from_iter([
                (0, "b"),
                // The value from `a` takes precedence
                (1, "a"),
                (2, "a"),
                (3, "a"),
                (5, "b")
            ]))
        );
    }
}
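For illustration, a minimal sketch of how layered configuration composes under the semantics documented above. `LayeredConfig` and its fields are hypothetical; `Combine` and the `Option`/`Vec` implementations are the ones defined in this file:

#[derive(Debug, Default)]
struct LayeredConfig {
    // Scalar: the higher-precedence source wins outright.
    max_line_length: Option<u32>,
    // Array: higher-precedence entries are appended *after* lower-precedence ones.
    extra_paths: Vec<String>,
}

impl Combine for LayeredConfig {
    fn combine_with(&mut self, other: Self) {
        self.max_line_length.combine_with(other.max_line_length);
        self.extra_paths.combine_with(other.extra_paths);
    }
}

// CLI options (higher precedence) are `self`; the config file is `other`.
// Scalars keep the CLI value; `extra_paths` from the file come first, CLI entries last.
fn resolve(cli: LayeredConfig, file: LayeredConfig) -> LayeredConfig {
    cli.combine(file)
}

This mirrors how `ProjectMetadata::apply_cli_options` (later in this diff) combines the CLI options over the file-based options.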
@@ -1,221 +0,0 @@
use crate::db::{Db, ProjectDatabase};
use crate::metadata::options::Options;
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
use crate::{Project, ProjectMetadata};

use red_knot_python_semantic::Program;
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::SystemPath;
use ruff_db::Db as _;
use rustc_hash::FxHashSet;

impl ProjectDatabase {
    #[tracing::instrument(level = "debug", skip(self, changes, cli_options))]
    pub fn apply_changes(&mut self, changes: Vec<ChangeEvent>, cli_options: Option<&Options>) {
        let mut project = self.project();
        let project_path = project.root(self).to_path_buf();
        let program = Program::get(self);
        let custom_stdlib_versions_path = program
            .custom_stdlib_search_path(self)
            .map(|path| path.join("VERSIONS"));

        // Are there structural changes to the project?
        let mut project_changed = false;
        // Changes to a custom stdlib path's VERSIONS
        let mut custom_stdlib_change = false;
        // Paths that were added
        let mut added_paths = FxHashSet::default();

        // Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
        let mut synced_files = FxHashSet::default();
        let mut synced_recursively = FxHashSet::default();

        let mut sync_path = |db: &mut ProjectDatabase, path: &SystemPath| {
            if synced_files.insert(path.to_path_buf()) {
                File::sync_path(db, path);
            }
        };

        let mut sync_recursively = |db: &mut ProjectDatabase, path: &SystemPath| {
            if synced_recursively.insert(path.to_path_buf()) {
                Files::sync_recursively(db, path);
            }
        };

        for change in changes {
            if let Some(path) = change.system_path() {
                if matches!(
                    path.file_name(),
                    Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
                ) {
                    // Changes to ignore files or settings can change the project structure or add/remove files.
                    project_changed = true;

                    continue;
                }

                if Some(path) == custom_stdlib_versions_path.as_deref() {
                    custom_stdlib_change = true;
                }
            }

            match change {
                ChangeEvent::Changed { path, kind: _ } | ChangeEvent::Opened(path) => {
                    sync_path(self, &path);
                }

                ChangeEvent::Created { kind, path } => {
                    match kind {
                        CreatedKind::File => sync_path(self, &path),
                        CreatedKind::Directory | CreatedKind::Any => {
                            sync_recursively(self, &path);
                        }
                    }

                    if self.system().is_file(&path) {
                        // Add the parent directory because `walkdir` always visits explicitly passed files
                        // even if they match an exclude filter.
                        added_paths.insert(path.parent().unwrap().to_path_buf());
                    } else {
                        added_paths.insert(path);
                    }
                }

                ChangeEvent::Deleted { kind, path } => {
                    let is_file = match kind {
                        DeletedKind::File => true,
                        DeletedKind::Directory => {
                            // File watchers emit an event for every deleted file. No need to scan the entire dir.
                            continue;
                        }
                        DeletedKind::Any => self
                            .files
                            .try_system(self, &path)
                            .is_some_and(|file| file.exists(self)),
                    };

                    if is_file {
                        sync_path(self, &path);

                        if let Some(file) = self.files().try_system(self, &path) {
                            project.remove_file(self, file);
                        }
                    } else {
                        sync_recursively(self, &path);

                        if custom_stdlib_versions_path
                            .as_ref()
                            .is_some_and(|versions_path| versions_path.starts_with(&path))
                        {
                            custom_stdlib_change = true;
                        }

                        // Perform a full reload in case the deleted directory contained the pyproject.toml.
                        // We may want to make this more clever in the future, e.g. iterate over the
                        // indexed files and remove the ones that start with the same path, unless
                        // the deleted path is the project configuration.
                        project_changed = true;
                    }
                }

                ChangeEvent::CreatedVirtual(path) | ChangeEvent::ChangedVirtual(path) => {
                    File::sync_virtual_path(self, &path);
                }

                ChangeEvent::DeletedVirtual(path) => {
                    if let Some(virtual_file) = self.files().try_virtual_file(&path) {
                        virtual_file.close(self);
                    }
                }

                ChangeEvent::Rescan => {
                    project_changed = true;
                    Files::sync_all(self);
                    break;
                }
            }
        }

        if project_changed {
            match ProjectMetadata::discover(&project_path, self.system()) {
                Ok(mut metadata) => {
                    if let Some(cli_options) = cli_options {
                        metadata.apply_cli_options(cli_options.clone());
                    }

                    let program_settings = metadata.to_program_settings(self.system());

                    let program = Program::get(self);
                    if let Err(error) = program.update_from_settings(self, program_settings) {
                        tracing::error!("Failed to update the program settings, keeping the old program settings: {error}");
                    };

                    if metadata.root() == project.root(self) {
                        tracing::debug!("Reloading project after structural change");
                        project.reload(self, metadata);
                    } else {
                        tracing::debug!("Replace project after structural change");
                        project = Project::from_metadata(self, metadata);
                        self.project = Some(project);
                    }
                }
                Err(error) => {
                    tracing::error!(
                        "Failed to load project, keeping old project configuration: {error}"
                    );
                }
            }

            return;
        } else if custom_stdlib_change {
            let search_paths = project
                .metadata(self)
                .to_program_settings(self.system())
                .search_paths;

            if let Err(error) = program.update_search_paths(self, &search_paths) {
                tracing::error!("Failed to set the new search paths: {error}");
            }
        }

        let mut added_paths = added_paths.into_iter();

        // Use directory walking to discover newly added files.
        if let Some(path) = added_paths.next() {
            let mut walker = self.system().walk_directory(&path);

            for extra_path in added_paths {
                walker = walker.add(&extra_path);
            }

            let added_paths = std::sync::Mutex::new(Vec::default());

            walker.run(|| {
                Box::new(|entry| {
                    let Ok(entry) = entry else {
                        return WalkState::Continue;
                    };

                    if !entry.file_type().is_file() {
                        return WalkState::Continue;
                    }

                    let mut paths = added_paths.lock().unwrap();

                    paths.push(entry.into_path());

                    WalkState::Continue
                })
            });

            for path in added_paths.into_inner().unwrap() {
                let file = system_path_to_file(self, &path);

                if let Ok(file) = file {
                    project.add_file(self, file);
                }
            }
        }
    }
}
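The deduplication at the top of `apply_changes` leans on `HashSet::insert` returning `false` for already-seen values, so a burst of watcher events for one file triggers a single sync. A self-contained sketch of the same pattern, using `std::collections::HashSet` so it runs without the workspace crates:

use std::collections::HashSet;

fn main() {
    let mut seen = HashSet::new();
    // `insert` returns `true` only for the first occurrence of a value,
    // so duplicate watcher events collapse into one sync per path.
    let mut sync_once = |path: &str| {
        if seen.insert(path.to_string()) {
            println!("syncing {path}");
        }
    };

    for event_path in ["/app/a.py", "/app/a.py", "/app/b.py"] {
        sync_once(event_path);
    }
    // Prints "syncing /app/a.py" and "syncing /app/b.py", once each.
}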
@@ -1,510 +0,0 @@
#![allow(clippy::ref_option)]

use crate::metadata::options::OptionDiagnostic;
pub use db::{Db, ProjectDatabase};
use files::{Index, Indexed, IndexedFiles};
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder, RuleSelection};
use red_knot_python_semantic::register_lints;
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::source::{source_text, SourceTextError};
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::{FileType, SystemPath};
use ruff_python_ast::PySourceType;
use ruff_text_size::TextRange;
use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::Durability;
use salsa::Setter;
use std::borrow::Cow;
use std::sync::Arc;

pub mod combine;

mod db;
mod files;
pub mod metadata;
pub mod watch;

pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> =
    std::sync::LazyLock::new(default_lints_registry);

pub fn default_lints_registry() -> LintRegistry {
    let mut builder = LintRegistryBuilder::default();
    register_lints(&mut builder);
    builder.build()
}

/// The project as a Salsa ingredient.
///
/// ## How is a project different from a program?
/// There are two (related) motivations:
///
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
///    without introducing a cyclic dependency. The project is defined in a higher-level crate
///    where it can reference these setting types.
/// 2. Running `ruff check` with different target versions results in different programs (settings), but
///    it remains the same project. That's why the program is a narrowed view of the project, only
///    holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Project {
    /// The files that are open in the project.
    ///
    /// Setting the open files to a non-`None` value changes `check` to only check the
    /// open files rather than all files in the project.
    #[return_ref]
    #[default]
    open_fileset: Option<Arc<FxHashSet<File>>>,

    /// The first-party files of this project.
    #[default]
    #[return_ref]
    file_set: IndexedFiles,

    /// The metadata describing the project, including the unresolved options.
    #[return_ref]
    pub metadata: ProjectMetadata,
}

#[salsa::tracked]
impl Project {
    pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
        Project::builder(metadata)
            .durability(Durability::MEDIUM)
            .open_fileset_durability(Durability::LOW)
            .file_set_durability(Durability::LOW)
            .new(db)
    }

    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.metadata(db).root()
    }

    pub fn name(self, db: &dyn Db) -> &str {
        self.metadata(db).name()
    }

    pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
        tracing::debug!("Reloading project");
        assert_eq!(self.root(db), metadata.root());

        if &metadata != self.metadata(db) {
            self.set_metadata(db).to(metadata);
        }

        self.reload_files(db);
    }

    pub fn rule_selection(self, db: &dyn Db) -> &RuleSelection {
        let (selection, _) = self.rule_selection_with_diagnostics(db);
        selection
    }

    #[salsa::tracked(return_ref)]
    fn rule_selection_with_diagnostics(
        self,
        db: &dyn Db,
    ) -> (RuleSelection, Vec<OptionDiagnostic>) {
        self.metadata(db).options().to_rule_selection(db)
    }

    /// Checks all open files in the project and its dependencies.
    pub(crate) fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
        let project_span = tracing::debug_span!("Project::check");
        let _span = project_span.enter();

        tracing::debug!("Checking project '{name}'", name = self.name(db));

        let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
        let (_, options_diagnostics) = self.rule_selection_with_diagnostics(db);
        diagnostics.extend(options_diagnostics.iter().map(|diagnostic| {
            let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
            diagnostic
        }));

        let result = Arc::new(std::sync::Mutex::new(diagnostics));
        let inner_result = Arc::clone(&result);

        let db = db.clone();
        let project_span = project_span.clone();

        rayon::scope(move |scope| {
            let files = ProjectFiles::new(&db, self);
            for file in &files {
                let result = inner_result.clone();
                let db = db.clone();
                let project_span = project_span.clone();

                scope.spawn(move |_| {
                    let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
                    let _entered = check_file_span.entered();

                    let file_diagnostics = check_file_impl(&db, file);
                    result.lock().unwrap().extend(file_diagnostics);
                });
            }
        });

        Arc::into_inner(result).unwrap().into_inner().unwrap()
    }

    pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
        let (_, options_diagnostics) = self.rule_selection_with_diagnostics(db);

        let mut file_diagnostics: Vec<_> = options_diagnostics
            .iter()
            .map(|diagnostic| {
                let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
                diagnostic
            })
            .collect();

        let check_diagnostics = check_file_impl(db, file);
        file_diagnostics.extend(check_diagnostics);

        file_diagnostics
    }

    /// Opens a file in the project.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the project.
    pub fn open_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!("Opening file `{}`", file.path(db));

        let mut open_files = self.take_open_files(db);
        open_files.insert(file);
        self.set_open_files(db, open_files);
    }

    /// Closes a file in the project.
    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
        tracing::debug!("Closing file `{}`", file.path(db));

        let mut open_files = self.take_open_files(db);
        let removed = open_files.remove(&file);

        if removed {
            self.set_open_files(db, open_files);
        }

        removed
    }

    /// Returns the open files in the project or `None` if the entire project should be checked.
    pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
        self.open_fileset(db).as_deref()
    }

    /// Sets the open files in the project.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the project.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
        tracing::debug!("Set open project files (count: {})", open_files.len());

        self.set_open_fileset(db).to(Some(Arc::new(open_files)));
    }

    /// This takes the open files from the project and returns them.
    ///
    /// This changes the behavior of `check` to check all files in the project instead of just the open files.
    fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
        tracing::debug!("Take open project files");

        // Salsa will cancel any pending queries and remove its own reference to `open_files`
        // so that the reference counter to `open_files` now drops to 1.
        let open_files = self.set_open_fileset(db).to(None);

        if let Some(open_files) = open_files {
            Arc::try_unwrap(open_files).unwrap()
        } else {
            FxHashSet::default()
        }
    }

    /// Returns `true` if the file is open in the project.
    ///
    /// A file is considered open when:
    /// * it is explicitly set as an open file using [`open_file`](Self::open_file)
    /// * it has a [`SystemPath`] and belongs to a package's `src` files
    /// * it has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
    pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
        if let Some(open_files) = self.open_files(db) {
            open_files.contains(&file)
        } else if file.path(db).is_system_path() {
            self.contains_file(db, file)
        } else {
            file.path(db).is_system_virtual_path()
        }
    }

    /// Returns `true` if `file` is a first-party file that is part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).contains(&file)
    }

    #[tracing::instrument(level = "debug", skip(db))]
    pub fn remove_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!(
            "Removing file `{}` from project `{}`",
            file.path(db),
            self.name(db)
        );

        let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
            return;
        };

        index.remove(file);
    }

    pub fn add_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!(
            "Adding file `{}` to project `{}`",
            file.path(db),
            self.name(db)
        );

        let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
            return;
        };

        index.insert(file);
    }

    /// Returns the files belonging to this project.
    pub fn files(self, db: &dyn Db) -> Indexed<'_> {
        let files = self.file_set(db);

        let indexed = match files.get() {
            Index::Lazy(vacant) => {
                let _entered =
                    tracing::debug_span!("Project::index_files", package = %self.name(db))
                        .entered();

                let files = discover_project_files(db, self);
                tracing::info!("Found {} files in project `{}`", files.len(), self.name(db));
                vacant.set(files)
            }
            Index::Indexed(indexed) => indexed,
        };

        indexed
    }

    pub fn reload_files(self, db: &mut dyn Db) {
        tracing::debug!("Reloading files for project `{}`", self.name(db));

        if !self.file_set(db).is_lazy() {
            // Force a re-index of the files in the next revision.
            self.set_file_set(db).to(IndexedFiles::lazy());
        }
    }
}

fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
    let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();

    // Abort checking if there are IO errors.
    let source = source_text(db.upcast(), file);

    if let Some(read_error) = source.read_error() {
        diagnostics.push(Box::new(IOErrorDiagnostic {
            file,
            error: read_error.clone(),
        }));
        return diagnostics;
    }

    let parsed = parsed_module(db.upcast(), file);
    diagnostics.extend(parsed.errors().iter().map(|error| {
        let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
        diagnostic
    }));

    diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
        let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
        boxed
    }));

    diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());

    diagnostics
}

fn discover_project_files(db: &dyn Db, project: Project) -> FxHashSet<File> {
    let paths = std::sync::Mutex::new(Vec::new());

    db.system().walk_directory(project.root(db)).run(|| {
        Box::new(|entry| {
            match entry {
                Ok(entry) => {
                    // Skip over any non-Python files to avoid creating too many entries in `Files`.
                    match entry.file_type() {
                        FileType::File => {
                            if entry
                                .path()
                                .extension()
                                .and_then(PySourceType::try_from_extension)
                                .is_some()
                            {
                                let mut paths = paths.lock().unwrap();
                                paths.push(entry.into_path());
                            }
                        }
                        FileType::Directory | FileType::Symlink => {}
                    }
                }
                Err(error) => {
                    // TODO: Handle the error
                    tracing::error!("Failed to walk path: {error}");
                }
            }

            WalkState::Continue
        })
    });

    let paths = paths.into_inner().unwrap();
    let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);

    for path in paths {
        // If this returns `Err`, then the file was deleted between the `walk_directory` call and now.
        // We can ignore this.
        if let Ok(file) = system_path_to_file(db.upcast(), &path) {
            files.insert(file);
        }
    }

    files
}

#[derive(Debug)]
enum ProjectFiles<'a> {
    OpenFiles(&'a FxHashSet<File>),
    Indexed(files::Indexed<'a>),
}

impl<'a> ProjectFiles<'a> {
    fn new(db: &'a dyn Db, project: Project) -> Self {
        if let Some(open_files) = project.open_files(db) {
            ProjectFiles::OpenFiles(open_files)
        } else {
            ProjectFiles::Indexed(project.files(db))
        }
    }
}

impl<'a> IntoIterator for &'a ProjectFiles<'a> {
    type Item = File;
    type IntoIter = ProjectFilesIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        match self {
            ProjectFiles::OpenFiles(files) => ProjectFilesIter::OpenFiles(files.iter()),
            ProjectFiles::Indexed(indexed) => ProjectFilesIter::Indexed {
                files: indexed.into_iter(),
            },
        }
    }
}

enum ProjectFilesIter<'db> {
    OpenFiles(std::collections::hash_set::Iter<'db, File>),
    Indexed { files: files::IndexedIter<'db> },
}

impl Iterator for ProjectFilesIter<'_> {
    type Item = File;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            ProjectFilesIter::OpenFiles(files) => files.next().copied(),
            ProjectFilesIter::Indexed { files } => files.next(),
        }
    }
}

#[derive(Debug)]
pub struct IOErrorDiagnostic {
    file: File,
    error: SourceTextError,
}

impl Diagnostic for IOErrorDiagnostic {
    fn id(&self) -> DiagnosticId {
        DiagnosticId::Io
    }

    fn message(&self) -> Cow<str> {
        self.error.to_string().into()
    }

    fn file(&self) -> Option<File> {
        Some(self.file)
    }

    fn range(&self) -> Option<TextRange> {
        None
    }

    fn severity(&self) -> Severity {
        Severity::Error
    }
}

#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::{check_file_impl, ProjectMetadata};
    use red_knot_python_semantic::types::check_types;
    use ruff_db::diagnostic::Diagnostic;
    use ruff_db::files::system_path_to_file;
    use ruff_db::source::source_text;
    use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
    use ruff_db::testing::assert_function_query_was_not_run;
    use ruff_python_ast::name::Name;

    #[test]
    fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
        let project = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("/"));
        let mut db = TestDb::new(project);
        let path = SystemPath::new("test.py");

        db.write_file(path, "x = 10")?;
        let file = system_path_to_file(&db, path).unwrap();

        // Now the file gets deleted before we had a chance to read its source text.
        db.memory_file_system().remove_file(path)?;
        file.sync(&mut db);

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file_impl(&db, file)
                .into_iter()
                .map(|diagnostic| diagnostic.message().into_owned())
                .collect::<Vec<_>>(),
            vec!["Failed to read file: No such file or directory".to_string()]
        );

        let events = db.take_salsa_events();
        assert_function_query_was_not_run(&db, check_types, file, &events);

        // The user now creates a new file with an empty text. The source text
        // content returned by `source_text` remains unchanged, but the diagnostics should get updated.
        db.write_file(path, "").unwrap();

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file_impl(&db, file)
                .into_iter()
                .map(|diagnostic| diagnostic.message().into_owned())
                .collect::<Vec<_>>(),
            vec![] as Vec<String>
        );

        Ok(())
    }
}
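The open-file machinery above boils down to a simple dispatch: while `open_fileset` is `None`, `check` iterates the lazily indexed project files; as soon as any file is opened, only the open set is checked. A reduced sketch of that dispatch, with plain stand-ins for the Salsa-backed types:

use std::collections::HashSet;

struct ProjectState {
    // `None` means "check everything"; `Some` restricts checking to the open set.
    open_files: Option<HashSet<String>>,
    all_files: HashSet<String>,
}

impl ProjectState {
    // Mirrors `ProjectFiles::new`: prefer the open set when it exists.
    fn files_to_check(&self) -> impl Iterator<Item = &String> + '_ {
        match &self.open_files {
            Some(open) => open.iter(),
            None => self.all_files.iter(),
        }
    }
}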
@@ -1,531 +0,0 @@
use red_knot_python_semantic::ProgramSettings;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
use std::sync::Arc;
use thiserror::Error;

use crate::combine::Combine;
use crate::metadata::pyproject::{Project, PyProject, PyProjectError};
use crate::metadata::value::ValueSource;
use options::KnotTomlError;
use options::Options;

pub mod options;
pub mod pyproject;
pub mod value;

#[derive(Debug, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct ProjectMetadata {
    pub(super) name: Name,

    pub(super) root: SystemPathBuf,

    /// The raw options
    pub(super) options: Options,
}

impl ProjectMetadata {
    /// Creates a project with the given name and root that uses the default options.
    pub fn new(name: Name, root: SystemPathBuf) -> Self {
        Self {
            name,
            root,
            options: Options::default(),
        }
    }

    /// Loads a project from a `pyproject.toml` file.
    pub(crate) fn from_pyproject(pyproject: PyProject, root: SystemPathBuf) -> Self {
        Self::from_options(
            pyproject
                .tool
                .and_then(|tool| tool.knot)
                .unwrap_or_default(),
            root,
            pyproject.project.as_ref(),
        )
    }

    /// Loads a project from a set of options with an optional pyproject-project table.
    pub(crate) fn from_options(
        options: Options,
        root: SystemPathBuf,
        project: Option<&Project>,
    ) -> Self {
        let name = project
            .and_then(|project| project.name.as_ref())
            .map(|name| Name::new(&***name))
            .unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));

        // TODO(https://github.com/astral-sh/ruff/issues/15491): Respect requires-python
        Self {
            name,
            root,
            options,
        }
    }

    /// Discovers the closest project at `path` and returns its metadata.
    ///
    /// The algorithm traverses upwards in the `path`'s ancestor chain and uses the following precedence
    /// to resolve the project's root:
    ///
    /// 1. The closest `pyproject.toml` with a `tool.knot` section or `knot.toml`.
    /// 1. The closest `pyproject.toml`.
    /// 1. Fallback: use `path` as the root and use the default settings.
    pub fn discover(
        path: &SystemPath,
        system: &dyn System,
    ) -> Result<ProjectMetadata, ProjectDiscoveryError> {
        tracing::debug!("Searching for a project in '{path}'");

        if !system.is_directory(path) {
            return Err(ProjectDiscoveryError::NotADirectory(path.to_path_buf()));
        }

        let mut closest_project: Option<ProjectMetadata> = None;

        for project_root in path.ancestors() {
            let pyproject_path = project_root.join("pyproject.toml");

            let pyproject = if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
                match PyProject::from_toml_str(
                    &pyproject_str,
                    ValueSource::File(Arc::new(pyproject_path.clone())),
                ) {
                    Ok(pyproject) => Some(pyproject),
                    Err(error) => {
                        return Err(ProjectDiscoveryError::InvalidPyProject {
                            path: pyproject_path,
                            source: Box::new(error),
                        })
                    }
                }
            } else {
                None
            };

            // A `knot.toml` takes precedence over a `pyproject.toml`.
            let knot_toml_path = project_root.join("knot.toml");
            if let Ok(knot_str) = system.read_to_string(&knot_toml_path) {
                let options = match Options::from_toml_str(
                    &knot_str,
                    ValueSource::File(Arc::new(knot_toml_path.clone())),
                ) {
                    Ok(options) => options,
                    Err(error) => {
                        return Err(ProjectDiscoveryError::InvalidKnotToml {
                            path: knot_toml_path,
                            source: Box::new(error),
                        })
                    }
                };

                if pyproject
                    .as_ref()
                    .is_some_and(|project| project.knot().is_some())
                {
                    // TODO: Consider using a diagnostic here
                    tracing::warn!("Ignoring the `tool.knot` section in `{pyproject_path}` because `{knot_toml_path}` takes precedence.");
                }

                tracing::debug!("Found project at '{}'", project_root);
                return Ok(ProjectMetadata::from_options(
                    options,
                    project_root.to_path_buf(),
                    pyproject
                        .as_ref()
                        .and_then(|pyproject| pyproject.project.as_ref()),
                ));
            }

            if let Some(pyproject) = pyproject {
                let has_knot_section = pyproject.knot().is_some();
                let metadata =
                    ProjectMetadata::from_pyproject(pyproject, project_root.to_path_buf());

                if has_knot_section {
                    tracing::debug!("Found project at '{}'", project_root);

                    return Ok(metadata);
                }

                // Not a project itself, keep looking for an enclosing project.
                if closest_project.is_none() {
                    closest_project = Some(metadata);
                }
            }
        }

        // No project found, but maybe a pyproject.toml was found.
        let metadata = if let Some(closest_project) = closest_project {
            tracing::debug!(
                "Project without `tool.knot` section: '{}'",
                closest_project.root()
            );

            closest_project
        } else {
            tracing::debug!("The ancestor directories contain no `pyproject.toml`. Falling back to a virtual project.");

            // Create a project with a default configuration
            Self::new(
                path.file_name().unwrap_or("root").into(),
                path.to_path_buf(),
            )
        };

        Ok(metadata)
    }

    pub fn root(&self) -> &SystemPath {
        &self.root
    }

    pub fn name(&self) -> &str {
        &self.name
    }

    pub fn options(&self) -> &Options {
        &self.options
    }

    pub fn to_program_settings(&self, system: &dyn System) -> ProgramSettings {
        self.options.to_program_settings(self.root(), system)
    }

    /// Combine the project options with the CLI options, where the CLI options take precedence.
    pub fn apply_cli_options(&mut self, options: Options) {
        self.options = options.combine(std::mem::take(&mut self.options));
    }

    /// Combine the project options with the user options, where the project options take precedence.
    pub fn apply_user_options(&mut self, options: Options) {
        self.options.combine_with(options);
    }
}

#[derive(Debug, Error)]
pub enum ProjectDiscoveryError {
    #[error("project path '{0}' is not a directory")]
    NotADirectory(SystemPathBuf),

    #[error("{path} is not a valid `pyproject.toml`: {source}")]
    InvalidPyProject {
        source: Box<PyProjectError>,
        path: SystemPathBuf,
    },

    #[error("{path} is not a valid `knot.toml`: {source}")]
    InvalidKnotToml {
        source: Box<KnotTomlError>,
        path: SystemPathBuf,
    },
}

#[cfg(test)]
mod tests {
    //! Integration tests for project discovery

    use crate::snapshot_project;
    use anyhow::{anyhow, Context};
    use insta::assert_ron_snapshot;
    use ruff_db::system::{SystemPathBuf, TestSystem};

    use crate::{ProjectDiscoveryError, ProjectMetadata};

    #[test]
    fn project_without_pyproject() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
            .context("Failed to write files")?;

        let project =
            ProjectMetadata::discover(&root, &system).context("Failed to discover project")?;

        assert_eq!(project.root(), &*root);

        snapshot_project!(project);

        Ok(())
    }

    #[test]
    fn project_with_pyproject() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "backend"

                    "#,
                ),
                (root.join("db/__init__.py"), ""),
            ])
            .context("Failed to write files")?;

        let project =
            ProjectMetadata::discover(&root, &system).context("Failed to discover project")?;

        assert_eq!(project.root(), &*root);
        snapshot_project!(project);

        // Discovering the same package from a subdirectory should give the same result
        let from_src = ProjectMetadata::discover(&root.join("db"), &system)
            .context("Failed to discover project from src sub-directory")?;

        assert_eq!(from_src, project);

        Ok(())
    }

    #[test]
    fn project_with_invalid_pyproject() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "backend"

                    [tool.knot
                    "#,
                ),
                (root.join("db/__init__.py"), ""),
            ])
            .context("Failed to write files")?;

        let Err(error) = ProjectMetadata::discover(&root, &system) else {
            return Err(anyhow!("Expected project discovery to fail because of invalid syntax in the pyproject.toml"));
        };

        assert_error_eq(
            &error,
            r#"/app/pyproject.toml is not a valid `pyproject.toml`: TOML parse error at line 5, column 31
  |
5 |                     [tool.knot
  |                               ^
invalid table header
expected `.`, `]`
"#,
        );

        Ok(())
    }

    #[test]
    fn nested_projects_in_sub_project() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "project-root"

                    [tool.knot.src]
                    root = "src"
                    "#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "nested-project"

                    [tool.knot.src]
                    root = "src"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;

        snapshot_project!(sub_project);

        Ok(())
    }

    #[test]
    fn nested_projects_in_root_project() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "project-root"

                    [tool.knot.src]
                    root = "src"
                    "#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "nested-project"

                    [tool.knot.src]
                    root = "src"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let root = ProjectMetadata::discover(&root, &system)?;

        snapshot_project!(root);

        Ok(())
    }

    #[test]
    fn nested_projects_without_knot_sections() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "project-root"
                    "#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "nested-project"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;

        snapshot_project!(sub_project);

        Ok(())
    }

    #[test]
    fn nested_projects_with_outer_knot_section() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "project-root"

                    [tool.knot.environment]
                    python-version = "3.10"
                    "#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "nested-project"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let root = ProjectMetadata::discover(&root.join("packages/a"), &system)?;

        snapshot_project!(root);

        Ok(())
    }

    /// A `knot.toml` takes precedence over any `pyproject.toml`.
    ///
    /// However, the `pyproject.toml` is still loaded to get the project name and, in the future,
    /// the requires-python constraint.
    #[test]
    fn project_with_knot_and_pyproject_toml() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "super-app"
                    requires-python = ">=3.12"

                    [tool.knot.src]
                    root = "this_option_is_ignored"
                    "#,
                ),
                (
                    root.join("knot.toml"),
                    r#"
                    [src]
                    root = "src"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let root = ProjectMetadata::discover(&root, &system)?;

        snapshot_project!(root);

        Ok(())
    }

    #[track_caller]
    fn assert_error_eq(error: &ProjectDiscoveryError, message: &str) {
        assert_eq!(error.to_string().replace('\\', "/"), message);
    }

    /// Snapshots a project but with all paths using unix separators.
    #[macro_export]
    macro_rules! snapshot_project {
        ($project:expr) => {{
            assert_ron_snapshot!($project, {
                ".root" => insta::dynamic_redaction(|content, _content_path| {
                    content.as_str().unwrap().replace("\\", "/")
                }),
            });
        }};
    }
}
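The precedence described in `discover`'s doc comment is easiest to see end to end: when a directory contains both files, the options come from `knot.toml` and only the PEP 621 metadata (the name) is taken from `pyproject.toml`. A sketch in the style of the tests above, assuming the same `TestSystem` helpers and that `ProjectMetadata` is in scope:

use ruff_db::system::{SystemPathBuf, TestSystem};

fn knot_toml_wins() -> anyhow::Result<()> {
    let system = TestSystem::default();
    let root = SystemPathBuf::from("/app");

    system.memory_file_system().write_files([
        // Only the project name is taken from the pyproject.toml...
        (
            root.join("pyproject.toml"),
            r#"
            [project]
            name = "example"
            "#,
        ),
        // ...while the options come from knot.toml, which takes precedence.
        (
            root.join("knot.toml"),
            r#"
            [src]
            root = "src"
            "#,
        ),
    ])?;

    let metadata = ProjectMetadata::discover(&root, &system)?;
    assert_eq!(metadata.name(), "example");

    Ok(())
}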
@@ -1,288 +0,0 @@
use crate::metadata::value::{RangedValue, RelativePathBuf, ValueSource, ValueSourceGuard};
use crate::Db;
use red_knot_python_semantic::lint::{GetLintError, Level, LintSource, RuleSelection};
use red_knot_python_semantic::{
    ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
};
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::{System, SystemPath};
use ruff_macros::Combine;
use ruff_text_size::TextRange;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::fmt::Debug;
use thiserror::Error;

/// The options for the project.
#[derive(Debug, Default, Clone, PartialEq, Eq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct Options {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub environment: Option<EnvironmentOptions>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub src: Option<SrcOptions>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub rules: Option<Rules>,
}

impl Options {
    pub(crate) fn from_toml_str(content: &str, source: ValueSource) -> Result<Self, KnotTomlError> {
        let _guard = ValueSourceGuard::new(source);
        let options = toml::from_str(content)?;
        Ok(options)
    }

    pub(crate) fn to_program_settings(
        &self,
        project_root: &SystemPath,
        system: &dyn System,
    ) -> ProgramSettings {
        let (python_version, python_platform) = self
            .environment
            .as_ref()
            .map(|env| {
                (
                    env.python_version.as_deref().copied(),
                    env.python_platform.as_deref(),
                )
            })
            .unwrap_or_default();

        ProgramSettings {
            python_version: python_version.unwrap_or_default(),
            python_platform: python_platform.cloned().unwrap_or_default(),
            search_paths: self.to_search_path_settings(project_root, system),
        }
    }

    fn to_search_path_settings(
        &self,
        project_root: &SystemPath,
        system: &dyn System,
    ) -> SearchPathSettings {
        let src_roots = if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_ref())
        {
            vec![src_root.absolute(project_root, system)]
        } else {
            let src = project_root.join("src");

            // Default to `src` and the project root if `src` exists and the root hasn't been specified.
            if system.is_directory(&src) {
                vec![project_root.to_path_buf(), src]
            } else {
                vec![project_root.to_path_buf()]
            }
        };

        let (extra_paths, python, typeshed) = self
            .environment
            .as_ref()
            .map(|env| {
                (
                    env.extra_paths.clone(),
                    env.venv_path.clone(),
                    env.typeshed.clone(),
                )
            })
            .unwrap_or_default();

        SearchPathSettings {
            extra_paths: extra_paths
                .unwrap_or_default()
                .into_iter()
                .map(|path| path.absolute(project_root, system))
                .collect(),
            src_roots,
            custom_typeshed: typeshed.map(|path| path.absolute(project_root, system)),
            site_packages: python
                .map(|venv_path| SitePackages::Derived {
                    venv_path: venv_path.absolute(project_root, system),
                })
                .unwrap_or(SitePackages::Known(vec![])),
        }
    }

    #[must_use]
    pub(crate) fn to_rule_selection(&self, db: &dyn Db) -> (RuleSelection, Vec<OptionDiagnostic>) {
        let registry = db.lint_registry();
        let mut diagnostics = Vec::new();

        // Initialize the selection with the defaults
        let mut selection = RuleSelection::from_registry(registry);

        let rules = self
            .rules
            .as_ref()
            .into_iter()
            .flat_map(|rules| rules.inner.iter());

        for (rule_name, level) in rules {
            let source = rule_name.source();
            match registry.get(rule_name) {
                Ok(lint) => {
                    let lint_source = match source {
                        ValueSource::File(_) => LintSource::File,
                        ValueSource::Cli => LintSource::Cli,
                    };
                    if let Ok(severity) = Severity::try_from(**level) {
                        selection.enable(lint, severity, lint_source);
                    } else {
                        selection.disable(lint);
                    }
                }
                Err(error) => {
                    // `system_path_to_file` can return `Err` if the file was deleted since the configuration
                    // was read. This should be rare and it should be okay to default to not showing a configuration
                    // file in that case.
                    let file = source
                        .file()
                        .and_then(|path| system_path_to_file(db.upcast(), path).ok());

                    // TODO: Add a note if the value was configured on the CLI
                    let diagnostic = match error {
                        GetLintError::Unknown(_) => OptionDiagnostic::new(
                            DiagnosticId::UnknownRule,
                            format!("Unknown lint rule `{rule_name}`"),
                            Severity::Warning,
                        ),
                        GetLintError::PrefixedWithCategory { suggestion, .. } => {
                            OptionDiagnostic::new(
                                DiagnosticId::UnknownRule,
                                format!(
                                    "Unknown lint rule `{rule_name}`. Did you mean `{suggestion}`?"
                                ),
                                Severity::Warning,
                            )
                        }

                        GetLintError::Removed(_) => OptionDiagnostic::new(
                            DiagnosticId::UnknownRule,
                            format!("Unknown lint rule `{rule_name}`"),
                            Severity::Warning,
                        ),
                    };

                    diagnostics.push(diagnostic.with_file(file).with_range(rule_name.range()));
                }
            }
        }

        (selection, diagnostics)
    }
}

#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct EnvironmentOptions {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub python_version: Option<RangedValue<PythonVersion>>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub python_platform: Option<RangedValue<PythonPlatform>>,

    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's `MYPYPATH` environment variable or
    /// pyright's `stubPath` configuration setting.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub extra_paths: Option<Vec<RelativePathBuf>>,

    /// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
    /// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
    /// bundled as a zip file in the binary.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub typeshed: Option<RelativePathBuf>,

    // TODO: Rename to python, see https://github.com/astral-sh/ruff/issues/15530
    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub venv_path: Option<RelativePathBuf>,
}

#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct SrcOptions {
    /// The root of the project, used for finding first-party modules.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub root: Option<RelativePathBuf>,
}

#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", transparent)]
pub struct Rules {
    inner: FxHashMap<RangedValue<String>, RangedValue<Level>>,
}

impl FromIterator<(RangedValue<String>, RangedValue<Level>)> for Rules {
    fn from_iter<T: IntoIterator<Item = (RangedValue<String>, RangedValue<Level>)>>(
        iter: T,
    ) -> Self {
        Self {
            inner: iter.into_iter().collect(),
        }
    }
}

#[derive(Error, Debug)]
pub enum KnotTomlError {
    #[error(transparent)]
    TomlSyntax(#[from] toml::de::Error),
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct OptionDiagnostic {
    id: DiagnosticId,
    message: String,
    severity: Severity,
    file: Option<File>,
    range: Option<TextRange>,
}

impl OptionDiagnostic {
    pub fn new(id: DiagnosticId, message: String, severity: Severity) -> Self {
        Self {
            id,
            message,
            severity,
            file: None,
            range: None,
        }
    }

    #[must_use]
    fn with_file(mut self, file: Option<File>) -> Self {
        self.file = file;
        self
    }

    #[must_use]
    fn with_range(mut self, range: Option<TextRange>) -> Self {
        self.range = range;
        self
    }
}

impl Diagnostic for OptionDiagnostic {
    fn id(&self) -> DiagnosticId {
        self.id
    }

    fn message(&self) -> Cow<str> {
        Cow::Borrowed(&self.message)
    }

    fn file(&self) -> Option<File> {
        self.file
    }

    fn range(&self) -> Option<TextRange> {
        self.range
    }

    fn severity(&self) -> Severity {
        self.severity
    }
}
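The rule loop in `to_rule_selection` maps each configured level onto either `enable` with a `Severity` or `disable`: the `Severity::try_from(**level)` conversion presumably fails for an "ignore" level, which is how a rule gets turned off. A self-contained sketch of that dispatch; the `Level` and `Severity` enums here are simplified stand-ins, not the real types:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Level { Ignore, Warn, Error }

#[derive(Clone, Copy, Debug, PartialEq)]
enum Severity { Warning, Error }

impl TryFrom<Level> for Severity {
    type Error = ();

    // `Ignore` has no severity; failing the conversion signals "disable the rule".
    fn try_from(level: Level) -> Result<Severity, ()> {
        match level {
            Level::Ignore => Err(()),
            Level::Warn => Ok(Severity::Warning),
            Level::Error => Ok(Severity::Error),
        }
    }
}

fn apply(level: Level) -> &'static str {
    match Severity::try_from(level) {
        Ok(Severity::Warning) => "enabled as warning",
        Ok(Severity::Error) => "enabled as error",
        Err(()) => "disabled",
    }
}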
@@ -1,199 +0,0 @@
use pep440_rs::{Version, VersionSpecifiers};
use serde::{Deserialize, Deserializer, Serialize};
use std::ops::Deref;
use thiserror::Error;

use crate::metadata::options::Options;
use crate::metadata::value::{RangedValue, ValueSource, ValueSourceGuard};

/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
#[serde(rename_all = "kebab-case")]
pub struct PyProject {
    /// PEP 621-compliant project metadata.
    pub project: Option<Project>,
    /// Tool-specific metadata.
    pub tool: Option<Tool>,
}

impl PyProject {
    pub(crate) fn knot(&self) -> Option<&Options> {
        self.tool.as_ref().and_then(|tool| tool.knot.as_ref())
    }
}

#[derive(Error, Debug)]
pub enum PyProjectError {
    #[error(transparent)]
    TomlSyntax(#[from] toml::de::Error),
}

impl PyProject {
    pub(crate) fn from_toml_str(
        content: &str,
        source: ValueSource,
    ) -> Result<Self, PyProjectError> {
        let _guard = ValueSourceGuard::new(source);
        toml::from_str(content).map_err(PyProjectError::TomlSyntax)
    }
}

/// PEP 621 project metadata (`project`).
///
/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml>.
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub struct Project {
    /// The name of the project.
    ///
    /// Note: Intentionally optional to be more permissive during deserialization.
    /// `PackageMetadata::from_pyproject` reports missing names.
    pub name: Option<RangedValue<PackageName>>,
    /// The version of the project.
    pub version: Option<RangedValue<Version>>,
    /// The Python versions this project is compatible with.
    pub requires_python: Option<RangedValue<VersionSpecifiers>>,
}

#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct Tool {
    pub knot: Option<Options>,
}

/// The normalized name of a package.
///
/// Converts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.
/// For example, `---`, `.`, and `__` are all converted to a single `-`.
///
/// See: <https://packaging.python.org/en/latest/specifications/name-normalization/>
#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct PackageName(String);

impl PackageName {
    /// Create a validated, normalized package name.
    pub(crate) fn new(name: String) -> Result<Self, InvalidPackageNameError> {
        if name.is_empty() {
            return Err(InvalidPackageNameError::Empty);
        }

        if name.starts_with(['-', '_', '.']) {
            return Err(InvalidPackageNameError::NonAlphanumericStart(
                name.chars().next().unwrap(),
            ));
        }

        if name.ends_with(['-', '_', '.']) {
            return Err(InvalidPackageNameError::NonAlphanumericEnd(
                name.chars().last().unwrap(),
            ));
        }

        let Some(start) = name.find(|c: char| {
            !c.is_ascii() || c.is_ascii_uppercase() || matches!(c, '-' | '_' | '.')
        }) else {
            return Ok(Self(name));
        };

        let (already_normalized, maybe_normalized) = name.split_at(start);

        let mut normalized = String::with_capacity(name.len());
        normalized.push_str(already_normalized);
        let mut last = None;

        for c in maybe_normalized.chars() {
            if !c.is_ascii() {
                return Err(InvalidPackageNameError::InvalidCharacter(c));
            }

            if c.is_ascii_uppercase() {
                normalized.push(c.to_ascii_lowercase());
            } else if matches!(c, '-' | '_' | '.') {
                if matches!(last, Some('-' | '_' | '.')) {
                    // Only keep a single instance of `-`, `_`, and `.`
                } else {
                    normalized.push('-');
                }
            } else {
                normalized.push(c);
            }

            last = Some(c);
        }

        Ok(Self(normalized))
    }

    /// Returns the underlying package name.
    pub(crate) fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<PackageName> for String {
    fn from(value: PackageName) -> Self {
        value.0
    }
}

impl<'de> Deserialize<'de> for PackageName {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Self::new(s).map_err(serde::de::Error::custom)
    }
}

impl std::fmt::Display for PackageName {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}

impl Deref for PackageName {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

#[derive(Error, Debug)]
pub(crate) enum InvalidPackageNameError {
    #[error("name must start with letter or number but it starts with '{0}'")]
    NonAlphanumericStart(char),
    #[error("name must end with letter or number but it ends with '{0}'")]
    NonAlphanumericEnd(char),
    #[error("valid name consists only of ASCII letters and numbers, period, underscore and hyphen but name contains '{0}'")]
    InvalidCharacter(char),
    #[error("name must not be empty")]
    Empty,
}

#[cfg(test)]
mod tests {
    use super::PackageName;

    #[test]
    fn normalize() {
        let inputs = [
            "friendly-bard",
            "Friendly-Bard",
            "FRIENDLY-BARD",
            "friendly.bard",
            "friendly_bard",
            "friendly--bard",
            "friendly-.bard",
            "FrIeNdLy-._.-bArD",
        ];

        for input in inputs {
            assert_eq!(
                PackageName::new(input.to_string()).unwrap(),
                PackageName::new("friendly-bard".to_string()).unwrap(),
            );
        }
    }
}
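// Illustrative sketch (not part of this diff): parsing a `pyproject.toml`
// fragment with the API above. The TOML content and the choice of
// `ValueSource::Cli` are hypothetical; note that `PackageName`'s
// `Deserialize` impl normalizes `My__Project` to `my-project`.
fn example_parse_pyproject() -> Result<(), PyProjectError> {
    let content = r#"
[project]
name = "My__Project"
requires-python = ">=3.11"

[tool.knot]
"#;
    let pyproject = PyProject::from_toml_str(content, ValueSource::Cli)?;
    assert!(pyproject.knot().is_some());
    Ok(())
}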
@@ -1,337 +0,0 @@
use crate::combine::Combine;
use crate::Db;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_text_size::{TextRange, TextSize};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use toml::Spanned;

#[derive(Clone, Debug)]
pub enum ValueSource {
    /// Value loaded from a project's configuration file.
    ///
    /// Ideally, we'd use [`ruff_db::files::File`], but we can't because the database hasn't been
    /// created when loading the configuration.
    File(Arc<SystemPathBuf>),
    /// The value comes from a CLI argument; it's left open whether it was specified using a
    /// short argument, a long argument (`--extra-paths`), or `--config key=value`.
    Cli,
}

impl ValueSource {
    pub fn file(&self) -> Option<&SystemPath> {
        match self {
            ValueSource::File(path) => Some(&**path),
            ValueSource::Cli => None,
        }
    }
}

thread_local! {
    /// Serde doesn't provide any easy means to pass a value to a [`Deserialize`] implementation,
    /// but we want to associate each deserialized [`RelativePath`] with the source from
    /// which it originated. We use a thread-local variable to work around this limitation.
    ///
    /// Use the [`ValueSourceGuard`] to initialize the thread local before calling into any
    /// deserialization code. It ensures that the thread-local variable gets cleaned up
    /// once deserialization is done (once the guard gets dropped).
    static VALUE_SOURCE: RefCell<Option<ValueSource>> = const { RefCell::new(None) };
}

/// Guard to safely change the [`VALUE_SOURCE`] for the current thread.
#[must_use]
pub(super) struct ValueSourceGuard {
    prev_value: Option<ValueSource>,
}

impl ValueSourceGuard {
    pub(super) fn new(source: ValueSource) -> Self {
        let prev = VALUE_SOURCE.replace(Some(source));
        Self { prev_value: prev }
    }
}

impl Drop for ValueSourceGuard {
    fn drop(&mut self) {
        VALUE_SOURCE.set(self.prev_value.take());
    }
}

/// A value that "remembers" where it comes from (source) and its range in source.
///
/// ## Equality, Hash, and Ordering
/// The equality, hash, and ordering are solely based on the value. They disregard the value's range
/// or source.
///
/// This ensures that two resolved configurations are identical even if the position of a value has changed
/// or if the values were loaded from different sources.
#[derive(Clone)]
pub struct RangedValue<T> {
    value: T,
    source: ValueSource,

    /// The byte range of `value` in `source`.
    ///
    /// Can be `None` because not all sources support a range.
    /// For example, arguments provided on the CLI won't have a range attached.
    range: Option<TextRange>,
}

impl<T> RangedValue<T> {
    pub fn new(value: T, source: ValueSource) -> Self {
        Self::with_range(value, source, TextRange::default())
    }

    pub fn cli(value: T) -> Self {
        Self::with_range(value, ValueSource::Cli, TextRange::default())
    }

    pub fn with_range(value: T, source: ValueSource, range: TextRange) -> Self {
        Self {
            value,
            range: Some(range),
            source,
        }
    }

    pub fn range(&self) -> Option<TextRange> {
        self.range
    }

    pub fn source(&self) -> &ValueSource {
        &self.source
    }

    #[must_use]
    pub fn with_source(mut self, source: ValueSource) -> Self {
        self.source = source;
        self
    }

    pub fn into_inner(self) -> T {
        self.value
    }
}

impl<T> Combine for RangedValue<T> {
    fn combine(self, _other: Self) -> Self
    where
        Self: Sized,
    {
        self
    }
    fn combine_with(&mut self, _other: Self) {}
}

impl<T> IntoIterator for RangedValue<T>
where
    T: IntoIterator,
{
    type Item = T::Item;
    type IntoIter = T::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.value.into_iter()
    }
}

// The type already has an `iter` method thanks to `Deref`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a RangedValue<T>
where
    &'a T: IntoIterator,
{
    type Item = <&'a T as IntoIterator>::Item;
    type IntoIter = <&'a T as IntoIterator>::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.value.into_iter()
    }
}

// The type already has an `into_iter_mut` method thanks to `DerefMut`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a mut RangedValue<T>
where
    &'a mut T: IntoIterator,
{
    type Item = <&'a mut T as IntoIterator>::Item;
    type IntoIter = <&'a mut T as IntoIterator>::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.value.into_iter()
    }
}

impl<T> fmt::Debug for RangedValue<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.value.fmt(f)
    }
}

impl<T> fmt::Display for RangedValue<T>
where
    T: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.value.fmt(f)
    }
}

impl<T> Deref for RangedValue<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        &self.value
    }
}

impl<T> DerefMut for RangedValue<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.value
    }
}

impl<T, U: ?Sized> AsRef<U> for RangedValue<T>
where
    T: AsRef<U>,
{
    fn as_ref(&self) -> &U {
        self.value.as_ref()
    }
}

impl<T: PartialEq> PartialEq for RangedValue<T> {
    fn eq(&self, other: &Self) -> bool {
        self.value.eq(&other.value)
    }
}

impl<T: PartialEq<T>> PartialEq<T> for RangedValue<T> {
    fn eq(&self, other: &T) -> bool {
        self.value.eq(other)
    }
}

impl<T: Eq> Eq for RangedValue<T> {}

impl<T: Hash> Hash for RangedValue<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.value.hash(state);
    }
}

impl<T: PartialOrd> PartialOrd for RangedValue<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.value.partial_cmp(&other.value)
    }
}

impl<T: PartialOrd<T>> PartialOrd<T> for RangedValue<T> {
    fn partial_cmp(&self, other: &T) -> Option<Ordering> {
        self.value.partial_cmp(other)
    }
}

impl<T: Ord> Ord for RangedValue<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.value.cmp(&other.value)
    }
}

impl<'de, T> Deserialize<'de> for RangedValue<T>
where
    T: Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let spanned: Spanned<T> = Spanned::deserialize(deserializer)?;
        let span = spanned.span();
        let range = TextRange::new(
            TextSize::try_from(span.start).expect("Configuration file to be smaller than 4GB"),
            TextSize::try_from(span.end).expect("Configuration file to be smaller than 4GB"),
        );

        Ok(VALUE_SOURCE.with_borrow(|source| {
            let source = source.clone().unwrap();

            Self::with_range(spanned.into_inner(), source, range)
        }))
    }
}

impl<T> Serialize for RangedValue<T>
where
    T: Serialize,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        self.value.serialize(serializer)
    }
}

/// A possibly relative path in a configuration file.
///
/// Relative paths in configuration files or from CLI options
/// require different anchoring:
///
/// * CLI: The path is relative to the current working directory.
/// * Configuration file: The path is relative to the project's root.
#[derive(
    Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash,
)]
#[serde(transparent)]
pub struct RelativePathBuf(RangedValue<SystemPathBuf>);

impl RelativePathBuf {
    pub fn new(path: impl AsRef<SystemPath>, source: ValueSource) -> Self {
        Self(RangedValue::new(path.as_ref().to_path_buf(), source))
    }

    pub fn cli(path: impl AsRef<SystemPath>) -> Self {
        Self::new(path, ValueSource::Cli)
    }

    /// Returns the relative path as specified by the user.
    pub fn path(&self) -> &SystemPath {
        &self.0
    }

    /// Returns the owned relative path.
    pub fn into_path_buf(self) -> SystemPathBuf {
        self.0.into_inner()
    }

    /// Resolves the absolute path for `self` based on its origin.
    pub fn absolute_with_db(&self, db: &dyn Db) -> SystemPathBuf {
        self.absolute(db.project().root(db), db.system())
    }

    /// Resolves the absolute path for `self` based on its origin.
    pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> SystemPathBuf {
        let relative_to = match &self.0.source {
            ValueSource::File(_) => project_root,
            ValueSource::Cli => system.current_directory(),
        };

        SystemPath::absolute(&self.0, relative_to)
    }
}

impl Combine for RelativePathBuf {
    fn combine(self, other: Self) -> Self {
        Self(self.0.combine(other.0))
    }

    fn combine_with(&mut self, other: Self) {
        self.0.combine_with(other.0);
    }
}
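// Illustrative sketch (not part of this diff): the same relative path resolves
// differently depending on its origin, per the doc comment above. Assumes a
// `system: &dyn System` whose current working directory is `/cwd`, and that
// `&str` implements `AsRef<SystemPath>` as in `ruff_db`.
fn example_anchoring(system: &dyn System) {
    let project_root = SystemPath::new("/project");

    // CLI values are anchored at the current working directory...
    let from_cli = RelativePathBuf::cli("src");
    assert_eq!(
        from_cli.absolute(project_root, system),
        SystemPathBuf::from("/cwd/src")
    );

    // ...while values from a configuration file are anchored at the project root.
    let from_file = RelativePathBuf::new(
        "src",
        ValueSource::File(Arc::new(SystemPathBuf::from("/project/pyproject.toml"))),
    );
    assert_eq!(
        from_file.absolute(project_root, system),
        SystemPathBuf::from("/project/src")
    );
}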
@@ -1,13 +0,0 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
    name: Name("project-root"),
    root: "/app",
    options: Options(
        src: Some(SrcOptions(
            root: Some("src"),
        )),
    ),
)
@@ -1,13 +0,0 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: sub_project
---
ProjectMetadata(
    name: Name("nested-project"),
    root: "/app/packages/a",
    options: Options(
        src: Some(SrcOptions(
            root: Some("src"),
        )),
    ),
)
@@ -1,13 +0,0 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
    name: Name("project-root"),
    root: "/app",
    options: Options(
        environment: Some(EnvironmentOptions(
            r#python-version: Some("3.10"),
        )),
    ),
)
@@ -1,9 +0,0 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: sub_project
---
ProjectMetadata(
    name: Name("nested-project"),
    root: "/app/packages/a",
    options: Options(),
)
@@ -1,13 +0,0 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
    name: Name("super-app"),
    root: "/app",
    options: Options(
        src: Some(SrcOptions(
            root: Some("src"),
        )),
    ),
)
@@ -1,9 +0,0 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: project
---
ProjectMetadata(
    name: Name("backend"),
    root: "/app",
    options: Options(),
)
@@ -1,9 +0,0 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: project
---
ProjectMetadata(
    name: Name("app"),
    root: "/app",
    options: Options(),
)
@@ -1,286 +0,0 @@
use anyhow::{anyhow, Context};
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_python_semantic::{HasType, SemanticModel};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
use ruff_python_ast::visitor::source_order;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::{self as ast, Alias, Expr, Parameter, ParameterWithDefault, Stmt};

fn setup_db(project_root: &SystemPath, system: TestSystem) -> anyhow::Result<ProjectDatabase> {
    let project = ProjectMetadata::discover(project_root, &system)?;
    ProjectDatabase::new(project, system)
}

fn get_cargo_workspace_root() -> anyhow::Result<SystemPathBuf> {
    Ok(SystemPathBuf::from(String::from_utf8(
        std::process::Command::new("cargo")
            .args(["locate-project", "--workspace", "--message-format", "plain"])
            .output()?
            .stdout,
    )?)
    .parent()
    .unwrap()
    .to_owned())
}

/// Test that all snippets in the test corpus can be checked without a panic (except for [`KNOWN_FAILURES`]).
#[test]
fn corpus_no_panic() -> anyhow::Result<()> {
    let crate_root = String::from(env!("CARGO_MANIFEST_DIR"));
    run_corpus_tests(&format!("{crate_root}/resources/test/corpus/**/*.py"))
}

#[test]
fn parser_no_panic() -> anyhow::Result<()> {
    let workspace_root = get_cargo_workspace_root()?;
    run_corpus_tests(&format!(
        "{workspace_root}/crates/ruff_python_parser/resources/**/*.py"
    ))
}

#[test]
fn linter_af_no_panic() -> anyhow::Result<()> {
    let workspace_root = get_cargo_workspace_root()?;
    run_corpus_tests(&format!(
        "{workspace_root}/crates/ruff_linter/resources/test/fixtures/[a-f]*/**/*.py"
    ))
}

#[test]
fn linter_gz_no_panic() -> anyhow::Result<()> {
    let workspace_root = get_cargo_workspace_root()?;
    run_corpus_tests(&format!(
        "{workspace_root}/crates/ruff_linter/resources/test/fixtures/[g-z]*/**/*.py"
    ))
}

#[test]
#[ignore = "Enable running once there are fewer failures"]
fn linter_stubs_no_panic() -> anyhow::Result<()> {
    let workspace_root = get_cargo_workspace_root()?;
    run_corpus_tests(&format!(
        "{workspace_root}/crates/ruff_linter/resources/test/fixtures/**/*.pyi"
    ))
}

#[test]
#[ignore = "Enable running over typeshed stubs once there are fewer failures"]
fn typeshed_no_panic() -> anyhow::Result<()> {
    let workspace_root = get_cargo_workspace_root()?;
    run_corpus_tests(&format!(
        "{workspace_root}/crates/red_knot_vendored/vendor/typeshed/**/*.pyi"
    ))
}

#[allow(clippy::print_stdout)]
fn run_corpus_tests(pattern: &str) -> anyhow::Result<()> {
    let root = SystemPathBuf::from("/src");

    let system = TestSystem::default();
    let memory_fs = system.memory_file_system();
    memory_fs.create_directory_all(root.as_ref())?;

    let mut db = setup_db(&root, system.clone())?;

    let workspace_root = get_cargo_workspace_root()?;
    let workspace_root = workspace_root.to_string();

    let corpus = glob::glob(pattern).context("Failed to compile pattern")?;

    for path in corpus {
        let path = path.context("Failed to glob path")?;
        let path = SystemPathBuf::from_path_buf(path).map_err(|path| {
            anyhow!(
                "Failed to convert path '{path}' to system path",
                path = path.display()
            )
        })?;

        let relative_path = path.strip_prefix(&workspace_root)?;

        let (py_expected_to_fail, pyi_expected_to_fail) = KNOWN_FAILURES
            .iter()
            .find_map(|(path, py_fail, pyi_fail)| {
                if *path == relative_path.as_str().replace('\\', "/") {
                    Some((*py_fail, *pyi_fail))
                } else {
                    None
                }
            })
            .unwrap_or((false, false));

        let source = path.as_path();
        let source_filename = source.file_name().unwrap();

        let code = std::fs::read_to_string(source)?;

        let mut check_with_file_name = |path: &SystemPath| {
            memory_fs.write_file(path, &code).unwrap();
            File::sync_path(&mut db, path);

            // This test is only asserting that we can pull every expression type without a panic
            // (and some non-expressions that clearly define a single type).
            let file = system_path_to_file(&db, path).unwrap();

            let result = std::panic::catch_unwind(|| pull_types(&db, file));

            let expected_to_fail = if path.extension().map(|e| e == "pyi").unwrap_or(false) {
                pyi_expected_to_fail
            } else {
                py_expected_to_fail
            };
            if let Err(err) = result {
                if !expected_to_fail {
                    println!("Check failed for {relative_path:?}. Consider fixing it or adding it to KNOWN_FAILURES");
                    std::panic::resume_unwind(err);
                }
            } else {
                assert!(!expected_to_fail, "Expected to panic, but did not. Consider removing this path from KNOWN_FAILURES");
            }

            memory_fs.remove_file(path).unwrap();
            file.sync(&mut db);
        };

        if source.extension() == Some("pyi") {
            println!("checking {relative_path}");
            let pyi_dest = root.join(source_filename);
            check_with_file_name(&pyi_dest);
        } else {
            println!("checking {relative_path}");
            let py_dest = root.join(source_filename);
            check_with_file_name(&py_dest);

            let pyi_dest = root.join(format!("{source_filename}i"));
            println!("re-checking as stub file: {pyi_dest}");
            check_with_file_name(&pyi_dest);
        }
    }

    Ok(())
}

fn pull_types(db: &ProjectDatabase, file: File) {
    let mut visitor = PullTypesVisitor::new(db, file);

    let ast = parsed_module(db, file);

    visitor.visit_body(ast.suite());
}

struct PullTypesVisitor<'db> {
    model: SemanticModel<'db>,
}

impl<'db> PullTypesVisitor<'db> {
    fn new(db: &'db ProjectDatabase, file: File) -> Self {
        Self {
            model: SemanticModel::new(db, file),
        }
    }

    fn visit_target(&mut self, target: &Expr) {
        match target {
            Expr::List(ast::ExprList { elts, .. }) | Expr::Tuple(ast::ExprTuple { elts, .. }) => {
                for element in elts {
                    self.visit_target(element);
                }
            }
            _ => self.visit_expr(target),
        }
    }
}

impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
    fn visit_stmt(&mut self, stmt: &Stmt) {
        match stmt {
            Stmt::FunctionDef(function) => {
                let _ty = function.inferred_type(&self.model);
            }
            Stmt::ClassDef(class) => {
                let _ty = class.inferred_type(&self.model);
            }
            Stmt::Assign(assign) => {
                for target in &assign.targets {
                    self.visit_target(target);
                }
                self.visit_expr(&assign.value);
                return;
            }
            Stmt::For(for_stmt) => {
                self.visit_target(&for_stmt.target);
                self.visit_expr(&for_stmt.iter);
                self.visit_body(&for_stmt.body);
                self.visit_body(&for_stmt.orelse);
                return;
            }
            Stmt::AnnAssign(_)
            | Stmt::Return(_)
            | Stmt::Delete(_)
            | Stmt::AugAssign(_)
            | Stmt::TypeAlias(_)
            | Stmt::While(_)
            | Stmt::If(_)
            | Stmt::With(_)
            | Stmt::Match(_)
            | Stmt::Raise(_)
            | Stmt::Try(_)
            | Stmt::Assert(_)
            | Stmt::Import(_)
            | Stmt::ImportFrom(_)
            | Stmt::Global(_)
            | Stmt::Nonlocal(_)
            | Stmt::Expr(_)
            | Stmt::Pass(_)
            | Stmt::Break(_)
            | Stmt::Continue(_)
            | Stmt::IpyEscapeCommand(_) => {}
        }

        source_order::walk_stmt(self, stmt);
    }

    fn visit_expr(&mut self, expr: &Expr) {
        let _ty = expr.inferred_type(&self.model);

        source_order::walk_expr(self, expr);
    }

    fn visit_parameter(&mut self, parameter: &Parameter) {
        let _ty = parameter.inferred_type(&self.model);

        source_order::walk_parameter(self, parameter);
    }

    fn visit_parameter_with_default(&mut self, parameter_with_default: &ParameterWithDefault) {
        let _ty = parameter_with_default.inferred_type(&self.model);

        source_order::walk_parameter_with_default(self, parameter_with_default);
    }

    fn visit_alias(&mut self, alias: &Alias) {
        let _ty = alias.inferred_type(&self.model);

        source_order::walk_alias(self, alias);
    }
}

/// Whether or not the `.py`/`.pyi` version of this file is expected to fail.
#[rustfmt::skip]
const KNOWN_FAILURES: &[(&str, bool, bool)] = &[
    // related to circular references in nested functions
    ("crates/ruff_linter/resources/test/fixtures/flake8_return/RET503.py", false, true),
    // related to circular references in class definitions
    ("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_26.py", true, true),
    ("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_27.py", true, true),
    ("crates/ruff_linter/resources/test/fixtures/pyflakes/F811_19.py", true, false),
    ("crates/ruff_linter/resources/test/fixtures/pyupgrade/UP039.py", true, false),
    // related to circular references in type aliases (salsa cycle panic):
    ("crates/ruff_python_parser/resources/inline/err/type_alias_invalid_value_expr.py", true, true),
    ("crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TC008.py", true, true),
    // related to circular references in f-string annotations (invalid syntax)
    ("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_15.py", true, true),
    ("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_14.py", false, true),
];
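// Illustrative sketch (not part of this diff): a newly discovered panic would be
// registered by appending a `(path, py_fails, pyi_fails)` tuple to `KNOWN_FAILURES`;
// the fixture path below is hypothetical.
//
//     ("crates/ruff_linter/resources/test/fixtures/example/NEW_CASE.py", true, false),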
@@ -13,50 +13,40 @@ license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_python_literal = { workspace = true }
ruff_python_trivia = { workspace = true }

anyhow = { workspace = true }
bitflags = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
drop_bomb = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
once_cell = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
serde = { workspace = true, optional = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }
memchr = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }

[dev-dependencies]
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }
red_knot_test = { workspace = true }
red_knot_vendored = { workspace = true }

anyhow = { workspace = true }
dir-test = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
quickcheck = { version = "1.0.3", default-features = false }
quickcheck_macros = { version = "1.0.0" }

[features]
serde = ["ruff_db/serde", "dep:serde"]
walkdir = { workspace = true }
zip = { workspace = true }

[lints]
workspace = true
@@ -1,5 +1,9 @@
# Vendored types for the stdlib
# Red Knot

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_vendored/vendor/typeshed`. The file `crates/red_knot_vendored/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
Semantic analysis for the red-knot project.

## Vendored types for the stdlib

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
@@ -1,4 +1,87 @@
/// Rebuild the crate if a test file is added or removed from `resources/mdtest`.
pub fn main() {
    println!("cargo::rerun-if-changed=resources/mdtest");
//! Build script to package our vendored typeshed files
//! into a zip archive that can be included in the Ruff binary.
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;

use path_slash::PathExt;
use zip::result::ZipResult;
use zip::write::{FileOptions, ZipWriter};
use zip::CompressionMethod;

const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";

/// Recursively zip the contents of an entire directory.
///
/// This routine is adapted from a recipe at
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
    // [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM), which complicates the build
    // by a lot. Deflated compression is slower, but it shouldn't matter much for the WASM use case
    // (WASM itself is already slower than a native build for a specific platform).
    // We can't use `#[cfg(...)]` here because the target arch in a build script is the
    // architecture of the system running the build script and not the architecture of the build target.
    // That's why we use the `TARGET` environment variable here.
    let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Zstd
    };

    let options = FileOptions::default()
        .compression_method(method)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
        let dir_entry = entry.unwrap();
        let absolute_path = dir_entry.path();
        let normalized_relative_path = absolute_path
            .strip_prefix(Path::new(directory_path))
            .unwrap()
            .to_slash()
            .expect("Unexpected non-utf8 typeshed path!");

        // Write file or directory explicitly.
        // Some unzip tools unzip files with directory paths correctly, some do not!
        if absolute_path.is_file() {
            println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
            zip.start_file(normalized_relative_path, options)?;
            let mut f = File::open(absolute_path)?;
            std::io::copy(&mut f, &mut zip).unwrap();
        } else if !normalized_relative_path.is_empty() {
            // Only if not root! Avoids path spec / warning
            // and mapname conversion failed error on unzip
            println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
            zip.add_directory(normalized_relative_path, options)?;
        }
    }
    zip.finish()
}

fn main() {
    println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
    assert!(
        Path::new(TYPESHED_SOURCE_DIR).is_dir(),
        "Where is typeshed?"
    );
    let out_dir = std::env::var("OUT_DIR").unwrap();

    // N.B. Deliberately using `format!()` instead of `Path::join()` here,
    // so that we use `/` as a path separator on all platforms.
    // That enables us to load the typeshed zip at compile time in `module.rs`
    // (otherwise we'd have to dynamically determine the exact path to the typeshed zip
    // based on the default path separator for the specific platform we're on,
    // which can't be done at compile time.)
    let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");

    let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
    zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
}
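// Illustrative sketch (not part of this diff): the stable, `/`-separated
// location produced above is what allows the consuming crate (`module.rs`,
// per the comment) to embed the archive at compile time, roughly like:
static TYPESHED_ZIP_BYTES: &[u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));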
@@ -1,220 +0,0 @@
"""A runner for Markdown-based tests for Red Knot"""
# /// script
# requires-python = ">=3.11"
# dependencies = [
#     "rich",
#     "watchfiles",
# ]
# ///

from __future__ import annotations

import json
import os
import subprocess
from pathlib import Path
from typing import Final, Literal, Never, assert_never

from rich.console import Console
from watchfiles import Change, watch

CRATE_NAME: Final = "red_knot_python_semantic"
CRATE_ROOT: Final = Path(__file__).resolve().parent
MDTEST_DIR: Final = CRATE_ROOT / "resources" / "mdtest"


class MDTestRunner:
    mdtest_executable: Path | None
    console: Console

    def __init__(self) -> None:
        self.mdtest_executable = None
        self.console = Console()

    def _run_cargo_test(self, *, message_format: Literal["human", "json"]) -> str:
        return subprocess.check_output(
            [
                "cargo",
                "test",
                "--package",
                CRATE_NAME,
                "--no-run",
                "--color=always",
                "--message-format",
                message_format,
            ],
            cwd=CRATE_ROOT,
            env=dict(os.environ, CLI_COLOR="1"),
            stderr=subprocess.STDOUT,
            text=True,
        )

    def _recompile_tests(
        self, status_message: str, *, message_on_success: bool = True
    ) -> bool:
        with self.console.status(status_message):
            # Run it with 'human' format in case there are errors:
            try:
                self._run_cargo_test(message_format="human")
            except subprocess.CalledProcessError as e:
                print(e.output)
                return False

            # Run it again with 'json' format to find the mdtest executable:
            try:
                json_output = self._run_cargo_test(message_format="json")
            except subprocess.CalledProcessError as _:
                # `cargo test` can still fail if something changed in between the two runs.
                # Here we don't have a human-readable output, so just show a generic message:
                self.console.print("[red]Error[/red]: Failed to compile tests")
                return False

            if json_output:
                self._get_executable_path_from_json(json_output)

        if message_on_success:
            self.console.print("[dim]Tests compiled successfully[/dim]")
        return True

    def _get_executable_path_from_json(self, json_output: str) -> None:
        for json_line in json_output.splitlines():
            try:
                data = json.loads(json_line)
            except json.JSONDecodeError:
                continue
            if data.get("target", {}).get("name") == "mdtest":
                self.mdtest_executable = Path(data["executable"])
                break
        else:
            raise RuntimeError(
                "Could not find mdtest executable after successful compilation"
            )

    def _run_mdtest(
        self, arguments: list[str] | None = None, *, capture_output: bool = False
    ) -> subprocess.CompletedProcess:
        assert self.mdtest_executable is not None

        arguments = arguments or []
        return subprocess.run(
            [self.mdtest_executable, *arguments],
            cwd=CRATE_ROOT,
            env=dict(os.environ, CLICOLOR_FORCE="1"),
            capture_output=capture_output,
            text=True,
            check=False,
        )

    def _run_mdtests_for_file(self, markdown_file: Path) -> None:
        path_mangled = (
            markdown_file.as_posix()
            .replace("/", "_")
            .replace("-", "_")
            .removesuffix(".md")
        )
        test_name = f"mdtest__{path_mangled}"

        output = self._run_mdtest(["--exact", test_name], capture_output=True)

        if output.returncode == 0:
            if "running 0 tests\n" in output.stdout:
                self.console.log(
                    f"[yellow]Warning[/yellow]: No tests were executed with filter '{test_name}'"
                )
            else:
                self.console.print(
                    f"Test for [bold green]{markdown_file}[/bold green] succeeded"
                )
        else:
            self.console.print()
            self.console.rule(
                f"Test for [bold red]{markdown_file}[/bold red] failed",
                style="gray",
            )
            self._print_trimmed_cargo_test_output(
                output.stdout + output.stderr, test_name
            )

    def _print_trimmed_cargo_test_output(self, output: str, test_name: str) -> None:
        # Skip 'cargo test' boilerplate at the beginning:
        lines = output.splitlines()
        start_index = 0
        for i, line in enumerate(lines):
            if f"{test_name} stdout" in line:
                start_index = i
                break

        for line in lines[start_index + 1 :]:
            if "MDTEST_TEST_FILTER" in line:
                continue
            if line.strip() == "-" * 50:
                # Skip 'cargo test' boilerplate at the end
                break

            print(line)

    def watch(self) -> Never:
        self._recompile_tests("Compiling tests...", message_on_success=False)
        self.console.print("[dim]Ready to watch for changes...[/dim]")

        for changes in watch(CRATE_ROOT):
            new_md_files = set()
            changed_md_files = set()
            rust_code_has_changed = False

            for change, path_str in changes:
                path = Path(path_str)

                if path.suffix == ".rs":
                    rust_code_has_changed = True
                    continue

                if path.suffix != ".md":
                    continue

                try:
                    relative_path = Path(path).relative_to(MDTEST_DIR)
                except ValueError:
                    continue

                match change:
                    case Change.added:
                        # When saving a file, some editors (looking at you, Vim) might first
                        # save the file with a temporary name (e.g. `file.md~`) and then rename
                        # it to the final name. This creates a `deleted` and `added` change.
                        # We treat those files as `changed` here.
                        if (Change.deleted, path_str) in changes:
                            changed_md_files.add(relative_path)
                        else:
                            new_md_files.add(relative_path)
                    case Change.modified:
                        changed_md_files.add(relative_path)
                    case Change.deleted:
                        # No need to do anything when a Markdown test is deleted
                        pass
                    case _ as unreachable:
                        assert_never(unreachable)

            if rust_code_has_changed:
                if self._recompile_tests("Rust code has changed, recompiling tests..."):
                    self._run_mdtest()
            elif new_md_files:
                files = " ".join(file.as_posix() for file in new_md_files)
                self._recompile_tests(
                    f"New Markdown test [yellow]{files}[/yellow] detected, recompiling tests..."
                )

            for path in new_md_files | changed_md_files:
                self._run_mdtests_for_file(path)


def main() -> None:
    try:
        runner = MDTestRunner()
        runner.watch()
    except KeyboardInterrupt:
        print()


if __name__ == "__main__":
    main()
@@ -1,141 +0,0 @@
|
||||
version = 1
|
||||
requires-python = ">=3.11"
|
||||
|
||||
[manifest]
|
||||
requirements = [
|
||||
{ name = "rich" },
|
||||
{ name = "watchfiles" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyio"
|
||||
version = "4.8.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "idna" },
|
||||
{ name = "sniffio" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.10"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markdown-it-py"
|
||||
version = "3.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "mdurl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mdurl"
|
||||
version = "0.1.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pygments"
|
||||
version = "2.19.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rich"
|
||||
version = "13.9.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "markdown-it-py" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.12.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "watchfiles"
|
||||
version = "1.0.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f5/26/c705fc77d0a9ecdb9b66f1e2976d95b81df3cae518967431e7dbf9b5e219/watchfiles-1.0.4.tar.gz", hash = "sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205", size = 94625 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/bb/8461adc4b1fed009546fb797fc0d5698dcfe5e289cb37e1b8f16a93cdc30/watchfiles-1.0.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2a9f93f8439639dc244c4d2902abe35b0279102bca7bbcf119af964f51d53c19", size = 394869 },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/88/9ebf36b3547176d1709c320de78c1fa3263a46be31b5b1267571d9102686/watchfiles-1.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eea33ad8c418847dd296e61eb683cae1c63329b6d854aefcd412e12d94ee235", size = 384905 },
|
||||
{ url = "https://files.pythonhosted.org/packages/03/8a/04335ce23ef78d8c69f0913e8b20cf7d9233e3986543aeef95ef2d6e43d2/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31f1a379c9dcbb3f09cf6be1b7e83b67c0e9faabed0471556d9438a4a4e14202", size = 449944 },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/4e/c8d5dcd14fe637f4633616dabea8a4af0a10142dccf3b43e0f081ba81ab4/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab594e75644421ae0a2484554832ca5895f8cab5ab62de30a1a57db460ce06c6", size = 456020 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/74/3e91e09e1861dd7fbb1190ce7bd786700dc0fbc2ccd33bb9fff5de039229/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc2eb5d14a8e0d5df7b36288979176fbb39672d45184fc4b1c004d7c3ce29317", size = 482983 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/3d/e64de2d1ce4eb6a574fd78ce3a28c279da263be9ef3cfcab6f708df192f2/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f68d8e9d5a321163ddacebe97091000955a1b74cd43724e346056030b0bacee", size = 520320 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/bd/52235f7063b57240c66a991696ed27e2a18bd6fcec8a1ea5a040b70d0611/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9ce064e81fe79faa925ff03b9f4c1a98b0bbb4a1b8c1b015afa93030cb21a49", size = 500988 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/b0/ff04194141a5fe650c150400dd9e42667916bc0f52426e2e174d779b8a74/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b77d5622ac5cc91d21ae9c2b284b5d5c51085a0bdb7b518dba263d0af006132c", size = 452573 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/9d/966164332c5a178444ae6d165082d4f351bd56afd9c3ec828eecbf190e6a/watchfiles-1.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1941b4e39de9b38b868a69b911df5e89dc43767feeda667b40ae032522b9b5f1", size = 615114 },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/df/f569ae4c1877f96ad4086c153a8eee5a19a3b519487bf5c9454a3438c341/watchfiles-1.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f8c4998506241dedf59613082d1c18b836e26ef2a4caecad0ec41e2a15e4226", size = 613076 },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/ae/8ce5f29e65d5fa5790e3c80c289819c55e12be2e1b9f5b6a0e55e169b97d/watchfiles-1.0.4-cp311-cp311-win32.whl", hash = "sha256:4ebbeca9360c830766b9f0df3640b791be569d988f4be6c06d6fae41f187f105", size = 271013 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/c6/79dc4a7c598a978e5fafa135090aaf7bbb03b8dec7bada437dfbe578e7ed/watchfiles-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:05d341c71f3d7098920f8551d4df47f7b57ac5b8dad56558064c3431bdfc0b74", size = 284229 },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/3d/928633723211753f3500bfb138434f080363b87a1b08ca188b1ce54d1e05/watchfiles-1.0.4-cp311-cp311-win_arm64.whl", hash = "sha256:32b026a6ab64245b584acf4931fe21842374da82372d5c039cba6bf99ef722f3", size = 276824 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/1a/8f4d9a1461709756ace48c98f07772bc6d4519b1e48b5fa24a4061216256/watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2", size = 391345 },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/d2/6750b7b3527b1cdaa33731438432e7238a6c6c40a9924049e4cebfa40805/watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9", size = 381515 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/17/80500e42363deef1e4b4818729ed939aaddc56f82f4e72b2508729dd3c6b/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712", size = 449767 },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/37/1427fa4cfa09adbe04b1e97bced19a29a3462cc64c78630787b613a23f18/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12", size = 455677 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/7a/39e9397f3a19cb549a7d380412fd9e507d4854eddc0700bfad10ef6d4dba/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844", size = 482219 },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/2d/7113931a77e2ea4436cad0c1690c09a40a7f31d366f79c6f0a5bc7a4f6d5/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733", size = 518830 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/1b/50733b1980fa81ef3c70388a546481ae5fa4c2080040100cd7bf3bf7b321/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af", size = 497997 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/b4/9396cc61b948ef18943e7c85ecfa64cf940c88977d882da57147f62b34b1/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a", size = 452249 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/69/0c65a5a29e057ad0dc691c2fa6c23b2983c7dabaa190ba553b29ac84c3cc/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff", size = 614412 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/b9/319fcba6eba5fad34327d7ce16a6b163b39741016b1996f4a3c96b8dd0e1/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e", size = 611982 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/47/143c92418e30cb9348a4387bfa149c8e0e404a7c5b0585d46d2f7031b4b9/watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94", size = 271822 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/94/b0165481bff99a64b29e46e07ac2e0df9f7a957ef13bec4ceab8515f44e3/watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c", size = 285441 },
{ url = "https://files.pythonhosted.org/packages/11/de/09fe56317d582742d7ca8c2ca7b52a85927ebb50678d9b0fa8194658f536/watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90", size = 277141 },
{ url = "https://files.pythonhosted.org/packages/08/98/f03efabec64b5b1fa58c0daab25c68ef815b0f320e54adcacd0d6847c339/watchfiles-1.0.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:8012bd820c380c3d3db8435e8cf7592260257b378b649154a7948a663b5f84e9", size = 390954 },
{ url = "https://files.pythonhosted.org/packages/16/09/4dd49ba0a32a45813debe5fb3897955541351ee8142f586303b271a02b40/watchfiles-1.0.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa216f87594f951c17511efe5912808dfcc4befa464ab17c98d387830ce07b60", size = 381133 },
{ url = "https://files.pythonhosted.org/packages/76/59/5aa6fc93553cd8d8ee75c6247763d77c02631aed21551a97d94998bf1dae/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c9953cf85529c05b24705639ffa390f78c26449e15ec34d5339e8108c7c407", size = 449516 },
{ url = "https://files.pythonhosted.org/packages/4c/aa/df4b6fe14b6317290b91335b23c96b488d365d65549587434817e06895ea/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf684aa9bba4cd95ecb62c822a56de54e3ae0598c1a7f2065d51e24637a3c5d", size = 454820 },
{ url = "https://files.pythonhosted.org/packages/5e/71/185f8672f1094ce48af33252c73e39b48be93b761273872d9312087245f6/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f44a39aee3cbb9b825285ff979ab887a25c5d336e5ec3574f1506a4671556a8d", size = 481550 },
{ url = "https://files.pythonhosted.org/packages/85/d7/50ebba2c426ef1a5cb17f02158222911a2e005d401caf5d911bfca58f4c4/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38320582736922be8c865d46520c043bff350956dfc9fbaee3b2df4e1740a4b", size = 518647 },
{ url = "https://files.pythonhosted.org/packages/f0/7a/4c009342e393c545d68987e8010b937f72f47937731225b2b29b7231428f/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39f4914548b818540ef21fd22447a63e7be6e24b43a70f7642d21f1e73371590", size = 497547 },
{ url = "https://files.pythonhosted.org/packages/0f/7c/1cf50b35412d5c72d63b2bf9a4fffee2e1549a245924960dd087eb6a6de4/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12969a3765909cf5dc1e50b2436eb2c0e676a3c75773ab8cc3aa6175c16e902", size = 452179 },
{ url = "https://files.pythonhosted.org/packages/d6/a9/3db1410e1c1413735a9a472380e4f431ad9a9e81711cda2aaf02b7f62693/watchfiles-1.0.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0986902677a1a5e6212d0c49b319aad9cc48da4bd967f86a11bde96ad9676ca1", size = 614125 },
{ url = "https://files.pythonhosted.org/packages/f2/e1/0025d365cf6248c4d1ee4c3d2e3d373bdd3f6aff78ba4298f97b4fad2740/watchfiles-1.0.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:308ac265c56f936636e3b0e3f59e059a40003c655228c131e1ad439957592303", size = 611911 },
{ url = "https://files.pythonhosted.org/packages/55/55/035838277d8c98fc8c917ac9beeb0cd6c59d675dc2421df5f9fcf44a0070/watchfiles-1.0.4-cp313-cp313-win32.whl", hash = "sha256:aee397456a29b492c20fda2d8961e1ffb266223625346ace14e4b6d861ba9c80", size = 271152 },
{ url = "https://files.pythonhosted.org/packages/f0/e5/96b8e55271685ddbadc50ce8bc53aa2dff278fb7ac4c2e473df890def2dc/watchfiles-1.0.4-cp313-cp313-win_amd64.whl", hash = "sha256:d6097538b0ae5c1b88c3b55afa245a66793a8fec7ada6755322e465fb1a0e8cc", size = 285216 },
]
@@ -1,4 +0,0 @@
Markdown files within the `mdtest/` subdirectory are tests of type inference and type checking,
executed by the `tests/mdtest.rs` integration test.

See `crates/red_knot_test/README.md` for documentation of this test format.
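For orientation, a minimal mdtest could look like the following. The snippet is illustrative (it is not one of the deleted files), but it uses the same `reveal_type` assertion convention seen in the tests below:

```py
x = "hello"
reveal_type(x)  # revealed: Literal["hello"]
```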
@@ -1 +0,0 @@
wrap = 100
@@ -1,94 +0,0 @@
# `Annotated`

`Annotated` attaches arbitrary metadata to a given type.
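Although type checkers ignore the metadata, it is preserved at runtime and can be retrieved with `typing.get_type_hints(..., include_extras=True)`. A small illustrative sketch, independent of the assertions below (the function and metadata string are made up):

```py
from typing import Annotated, get_type_hints

def scale(x: Annotated[int, "unit: meters"]) -> int:
    return x * 2

# include_extras=True keeps the Annotated wrapper instead of stripping it.
hints = get_type_hints(scale, include_extras=True)
print(hints["x"].__metadata__)  # ('unit: meters',)
```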
## Usages

`Annotated[T, ...]` is equivalent to `T`: all metadata arguments are simply ignored.

```py
from typing_extensions import Annotated

def _(x: Annotated[int, "foo"]):
    reveal_type(x)  # revealed: int

def _(x: Annotated[int, lambda: 0 + 1 * 2 // 3, _(4)]):
    reveal_type(x)  # revealed: int

def _(x: Annotated[int, "arbitrary", "metadata", "elements", "are", "fine"]):
    reveal_type(x)  # revealed: int

def _(x: Annotated[tuple[str, int], bytes]):
    reveal_type(x)  # revealed: tuple[str, int]
```

## Parameterization

It is invalid to parameterize `Annotated` with fewer than two arguments.

```py
from typing_extensions import Annotated

# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
def _(x: Annotated):
    reveal_type(x)  # revealed: Unknown

def _(flag: bool):
    if flag:
        X = Annotated
    else:
        X = bool

    # error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
    def f(y: X):
        reveal_type(y)  # revealed: Unknown | bool

# error: [invalid-type-form] "`Annotated` requires at least two arguments when used in an annotation or type expression"
def _(x: Annotated | bool):
    reveal_type(x)  # revealed: Unknown | bool

# error: [invalid-type-form]
def _(x: Annotated[()]):
    reveal_type(x)  # revealed: Unknown

# error: [invalid-type-form]
def _(x: Annotated[int]):
    # `Annotated[T]` is invalid and will raise an error at runtime,
    # but we treat it the same as `T` to provide better diagnostics later on.
    # The subscription itself is still reported, regardless.
    # Same for the `(int,)` form below.
    reveal_type(x)  # revealed: int

# error: [invalid-type-form]
def _(x: Annotated[(int,)]):
    reveal_type(x)  # revealed: int
```

## Inheritance

### Correctly parameterized

Inheriting from `Annotated[T, ...]` is equivalent to inheriting from `T` itself.

```py
from typing_extensions import Annotated

# TODO: False positive
# error: [invalid-base]
class C(Annotated[int, "foo"]): ...

# TODO: Should be `tuple[Literal[C], Literal[int], Literal[object]]`
reveal_type(C.__mro__)  # revealed: tuple[Literal[C], Unknown, Literal[object]]
```

### Not parameterized

```py
from typing_extensions import Annotated

# At runtime, this is an error.
# error: [invalid-base]
class C(Annotated): ...

reveal_type(C.__mro__)  # revealed: tuple[Literal[C], Unknown, Literal[object]]
```
@@ -1,83 +0,0 @@
# Any

## Annotation

`typing.Any` is a way to name the Any type.

```py
from typing import Any

x: Any = 1
x = "foo"

def f():
    reveal_type(x)  # revealed: Any
```

## Aliased to a different name

If you alias `typing.Any` to another name, we still recognize that as a spelling of the Any type.

```py
from typing import Any as RenamedAny

x: RenamedAny = 1
x = "foo"

def f():
    reveal_type(x)  # revealed: Any
```

## Shadowed class

If you define your own class named `Any`, using that name in a type expression refers to your
class; it isn't a spelling of the Any type.

```py
class Any: ...

x: Any

def f():
    reveal_type(x)  # revealed: Any

# This verifies that we're not accidentally seeing typing.Any, since str is assignable
# to that but not to our locally defined class.
y: Any = "not an Any"  # error: [invalid-assignment]
```

## Subclass

The spec allows you to define subclasses of `Any`.

TODO: Handle assignments correctly. `Subclass` has an unknown superclass, which might be `int`. The
assignment to `x` should not be allowed, even when the unknown superclass is `int`. The assignment
to `y` should be allowed, since `Subclass` might have `int` as a superclass, and is therefore
assignable to `int`.

```py
from typing import Any

class Subclass(Any): ...

reveal_type(Subclass.__mro__)  # revealed: tuple[Literal[Subclass], Any, Literal[object]]

x: Subclass = 1  # error: [invalid-assignment]
# TODO: no diagnostic
y: int = Subclass()  # error: [invalid-assignment]

def _(s: Subclass):
    reveal_type(s)  # revealed: Subclass
```
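As an aside (not part of the deleted test): the main motivation for allowing `Any` as a base class is mock-style objects that should be accepted wherever any type is expected; typeshed's stubs for `unittest.mock`, for instance, use this pattern. A sketch of the intended semantics follows; the class and function names are hypothetical, and note that subclassing `Any` only works at runtime on Python 3.11+:

```py
from typing import Any

class FakeResource(Any):
    """A test double that should type-check as a value of any type."""

def takes_int(n: int) -> None: ...

# Per the spec, FakeResource's unknown Any base might be int, so instances
# should be assignable to int; as the TODO above notes, this check is still
# evolving in red_knot.
takes_int(FakeResource())
```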
## Invalid

`Any` cannot be parameterized:

```py
from typing import Any

# error: [invalid-type-form] "Type `typing.Any` expected no type parameter"
def f(x: Any[int]):
    reveal_type(x)  # revealed: Unknown
```
@@ -1,46 +0,0 @@
# Deferred annotations

## Deferred annotations in stubs always resolve

`mod.pyi`:

```pyi
def get_foo() -> Foo: ...
class Foo: ...
```

```py
from mod import get_foo

reveal_type(get_foo())  # revealed: Foo
```

## Deferred annotations in regular code fail

In (regular) source files, annotations are *not* deferred. This also tests that imports from
`__future__` that are not `annotations` are ignored.

```py
from __future__ import with_statement as annotations

# error: [unresolved-reference]
def get_foo() -> Foo: ...

class Foo: ...

reveal_type(get_foo())  # revealed: Unknown
```

## Deferred annotations in regular code with `__future__.annotations`

If `__future__.annotations` is imported, annotations *are* deferred.

```py
from __future__ import annotations

def get_foo() -> Foo: ...

class Foo: ...

reveal_type(get_foo())  # revealed: Foo
```
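The runtime counterpart of this behavior: under `from __future__ import annotations` (PEP 563), annotations are stored as unevaluated strings, which is why the forward reference above resolves. A quick illustration, separate from the test:

```py
from __future__ import annotations

def get_foo() -> Foo: ...

class Foo: ...

# The annotation was never evaluated; it is stored as the string "Foo".
print(get_foo.__annotations__["return"])  # prints: Foo
```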
@@ -1,155 +0,0 @@
# Literal

<https://typing.readthedocs.io/en/latest/spec/literal.html#literals>

## Parameterization

```py
from typing import Literal
from enum import Enum

mode: Literal["w", "r"]
a1: Literal[26]
a2: Literal[0x1A]
a3: Literal[-4]
a4: Literal["hello world"]
a5: Literal[b"hello world"]
a6: Literal[True]
a7: Literal[None]
a8: Literal[Literal[1]]

class Color(Enum):
    RED = 0
    GREEN = 1
    BLUE = 2

b1: Literal[Color.RED]

def f():
    reveal_type(mode)  # revealed: Literal["w", "r"]
    reveal_type(a1)  # revealed: Literal[26]
    reveal_type(a2)  # revealed: Literal[26]
    reveal_type(a3)  # revealed: Literal[-4]
    reveal_type(a4)  # revealed: Literal["hello world"]
    reveal_type(a5)  # revealed: Literal[b"hello world"]
    reveal_type(a6)  # revealed: Literal[True]
    reveal_type(a7)  # revealed: None
    reveal_type(a8)  # revealed: Literal[1]
    # TODO: This should be Color.RED
    reveal_type(b1)  # revealed: Unknown | Literal[0]

# error: [invalid-type-form]
invalid1: Literal[3 + 4]
# error: [invalid-type-form]
invalid2: Literal[4 + 3j]
# error: [invalid-type-form]
invalid3: Literal[(3, 4)]

hello = "hello"
invalid4: Literal[
    1 + 2,  # error: [invalid-type-form]
    "foo",
    hello,  # error: [invalid-type-form]
    (1, 2, 3),  # error: [invalid-type-form]
]
```

## Shortening unions of literals

When `Literal` is parameterized with more than one value, it's treated as exactly equivalent to
the union of those types.

```py
from typing import Literal

def x(
    a1: Literal[Literal[Literal[1, 2, 3], "foo"], 5, None],
    a2: Literal["w"] | Literal["r"],
    a3: Literal[Literal["w"], Literal["r"], Literal[Literal["w+"]]],
    a4: Literal[True] | Literal[1, 2] | Literal["foo"],
):
    reveal_type(a1)  # revealed: Literal[1, 2, 3, "foo", 5] | None
    reveal_type(a2)  # revealed: Literal["w", "r"]
    reveal_type(a3)  # revealed: Literal["w", "r", "w+"]
    reveal_type(a4)  # revealed: Literal[True, 1, 2, "foo"]
```
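In practice this means a multi-value `Literal` parameter accepts exactly those values and nothing else. A brief illustrative sketch (the function name is hypothetical, and the rejection comment describes expected checker behavior rather than an mdtest assertion):

```py
from typing import Literal

def set_mode(mode: Literal["w", "r"]) -> None: ...

set_mode("r")  # accepted: "r" is one of the allowed literals
set_mode("a")  # a type checker rejects this call: "a" is neither "w" nor "r"
```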
## Display of heterogeneous unions of literals

```py
from typing import Literal, Union

def foo(x: int) -> int:
    return x + 1

def bar(s: str) -> str:
    return s

class A: ...
class B: ...

def union_example(
    x: Union[
        # unknown type
        # error: [unresolved-reference]
        y,
        Literal[-1],
        Literal["A"],
        Literal[b"A"],
        Literal[b"\x00"],
        Literal[b"\x07"],
        Literal[0],
        Literal[1],
        Literal["B"],
        Literal["foo"],
        Literal["bar"],
        Literal["B"],
        Literal[True],
        None,
    ],
):
    reveal_type(x)  # revealed: Unknown | Literal[-1, "A", b"A", b"\x00", b"\x07", 0, 1, "B", "foo", "bar", True] | None
```

## Detecting `Literal` outside `typing` and `typing_extensions`

Only the `Literal` defined in the `typing` and `typing_extensions` modules is detected as the
special `Literal` form.

`other.pyi`:

```pyi
from typing import _SpecialForm

Literal: _SpecialForm
```

```py
from other import Literal

a1: Literal[26]

def f():
    reveal_type(a1)  # revealed: @Todo(generics)
```

## Detecting `typing_extensions.Literal`

```py
from typing_extensions import Literal

a1: Literal[26]

def f():
    reveal_type(a1)  # revealed: Literal[26]
```

## Invalid

```py
from typing import Literal

# error: [invalid-type-form] "`Literal` requires at least one argument when used in a type expression"
def _(x: Literal):
    reveal_type(x)  # revealed: Unknown
```
@@ -1,150 +0,0 @@
# `LiteralString`

`LiteralString` represents a string that is either defined directly within the source code or is
made up of such components.

Parts of the test cases defined here were adapted from [the specification's examples][1].
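As background, the motivating use case from PEP 675 (which introduced `LiteralString`) is rejecting dynamically built strings in injection-prone APIs. A sketch adapted from that motivation, not part of the original test file:

```py
from typing_extensions import LiteralString

def run_query(sql: LiteralString) -> None:
    """Accepts only queries assembled from string literals."""

def lookup(user_id: str) -> None:
    run_query("SELECT * FROM users")       # fine: a literal
    run_query("SELECT * FROM " + "users")  # fine: composed of literals
    # rejected by a type checker: an f-string interpolating a `str` is `str`,
    # which is not assignable to LiteralString
    run_query(f"SELECT * FROM users WHERE id = {user_id}")
```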
## Usages

### Valid places

It can be used anywhere a type is accepted:

```py
from typing_extensions import LiteralString

x: LiteralString

def f():
    reveal_type(x)  # revealed: LiteralString
```

### Within `Literal`

`LiteralString` cannot be used within `Literal`:

```py
from typing_extensions import Literal, LiteralString

bad_union: Literal["hello", LiteralString]  # error: [invalid-type-form]
bad_nesting: Literal[LiteralString]  # error: [invalid-type-form]
```

### Parameterized

`LiteralString` cannot be parameterized.

```py
from typing_extensions import LiteralString

a: LiteralString[str]  # error: [invalid-type-form]
b: LiteralString["foo"]  # error: [invalid-type-form]
```

### As a base class

Subclassing `LiteralString` leads to a runtime error.

```py
from typing_extensions import LiteralString

class C(LiteralString): ...  # error: [invalid-base]
```

## Inference

### Common operations

```py
from typing_extensions import LiteralString

foo: LiteralString = "foo"
reveal_type(foo)  # revealed: Literal["foo"]

bar: LiteralString = "bar"
reveal_type(foo + bar)  # revealed: Literal["foobar"]

baz: LiteralString = "baz"
baz += foo
reveal_type(baz)  # revealed: Literal["bazfoo"]

qux = (foo, bar)
reveal_type(qux)  # revealed: tuple[Literal["foo"], Literal["bar"]]

# TODO: Infer "LiteralString"
reveal_type(foo.join(qux))  # revealed: @Todo(Attribute access on `StringLiteral` types)

template: LiteralString = "{}, {}"
reveal_type(template)  # revealed: Literal["{}, {}"]
# TODO: Infer `LiteralString`
reveal_type(template.format(foo, bar))  # revealed: @Todo(Attribute access on `StringLiteral` types)
```

### Assignability

`Literal[""]` is assignable to `LiteralString`, and `LiteralString` is assignable to `str`, but not
vice versa.

```py
from typing_extensions import Literal, LiteralString

def _(flag: bool):
    foo_1: Literal["foo"] = "foo"
    bar_1: LiteralString = foo_1  # fine

    foo_2 = "foo" if flag else "bar"
    reveal_type(foo_2)  # revealed: Literal["foo", "bar"]
    bar_2: LiteralString = foo_2  # fine

    foo_3: LiteralString = "foo" * 1_000_000_000
    bar_3: str = foo_2  # fine

    baz_1: str = repr(object())
    qux_1: LiteralString = baz_1  # error: [invalid-assignment]

    baz_2: LiteralString = "baz" * 1_000_000_000
    qux_2: Literal["qux"] = baz_2  # error: [invalid-assignment]

    baz_3 = "foo" if flag else 1
    reveal_type(baz_3)  # revealed: Literal["foo", 1]
    qux_3: LiteralString = baz_3  # error: [invalid-assignment]
```

### Narrowing

```py
from typing_extensions import LiteralString

lorem: LiteralString = "lorem" * 1_000_000_000

reveal_type(lorem)  # revealed: LiteralString

if lorem == "ipsum":
    reveal_type(lorem)  # revealed: Literal["ipsum"]

reveal_type(lorem)  # revealed: LiteralString

if "" < lorem == "ipsum":
    reveal_type(lorem)  # revealed: Literal["ipsum"]
```

## `typing.LiteralString`

`typing.LiteralString` is only available in Python 3.11 and later:

```toml
[environment]
python-version = "3.11"
```

```py
from typing import LiteralString

x: LiteralString = "foo"

def f():
    reveal_type(x)  # revealed: LiteralString
```

[1]: https://typing.readthedocs.io/en/latest/spec/literal.html#literalstring
@@ -1,75 +0,0 @@
# NoReturn & Never

`NoReturn` is used to annotate the return type for functions that never return. `Never` is the
bottom type, representing the empty set of Python objects. These two annotations can be used
interchangeably.

## Function Return Type Annotation

```py
from typing import NoReturn

def stop() -> NoReturn:
    raise RuntimeError("no way")

# revealed: Never
reveal_type(stop())
```
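Because `Never` is the bottom type, it also powers exhaustiveness checking: once every member of a union has been handled, the narrowed type of the scrutinee is `Never`. A sketch using `typing_extensions.assert_never`, not part of the original test file:

```py
from typing_extensions import Literal, assert_never

def describe(mode: Literal["r", "w"]) -> str:
    if mode == "r":
        return "read"
    elif mode == "w":
        return "write"
    else:
        # Here `mode` has been narrowed to Never; assert_never only accepts
        # Never, so adding a new mode without handling it becomes a type error.
        assert_never(mode)
```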
## Assignment

```py
from typing_extensions import NoReturn, Never, Any

# error: [invalid-type-form] "Type `typing.Never` expected no type parameter"
x: Never[int]
a1: NoReturn
a2: Never
b1: Any
b2: int

def f():
    # revealed: Never
    reveal_type(a1)
    # revealed: Never
    reveal_type(a2)

    # Never is assignable to all types.
    v1: int = a1
    v2: str = a1
    # Other types are not assignable to Never except for Never (and Any).
    v3: Never = b1
    v4: Never = a2
    v5: Any = b2
    # error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to `Never`"
    v6: Never = 1
```

## `typing.Never`

`typing.Never` is only available in Python 3.11 and later.

### Python 3.11

```toml
[environment]
python-version = "3.11"
```

```py
from typing import Never

reveal_type(Never)  # revealed: typing.Never
```

### Python 3.10

```toml
[environment]
python-version = "3.10"
```

```py
# error: [unresolved-import]
from typing import Never
```