Compare commits


1 commit

Author: Alex Waygood
SHA1: 09e8599e91
Message: [red-knot] Explicitly test that no duplicate editable search paths are ever added
Date: 2024-07-19 14:30:08 +01:00
6049 changed files with 66522 additions and 254569 deletions


@@ -8,7 +8,3 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]
[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']


@@ -6,10 +6,3 @@ failure-output = "immediate-final"
fail-fast = false
status-level = "skip"
# Mark tests that take longer than 1s as slow.
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "1s", terminate-after = 60 }
# Show slow jobs in the final summary
final-status-level = "slow"
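This block appears to tune nextest's ci profile: the workflows below select it via NEXTEST_PROFILE: "ci". A quick sketch of exercising it locally, assuming cargo-nextest is installed:

  # either select the profile explicitly...
  cargo nextest run --profile ci
  # ...or through the environment, as the CI jobs do
  NEXTEST_PROFILE=ci cargo nextest run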


@@ -20,7 +20,7 @@
"extensions": [
"ms-python.python",
"rust-lang.rust-analyzer",
"fill-labs.dependi",
"serayuzgur.crates",
"tamasfe.even-better-toml",
"Swellaby.vscode-rust-test-adapter",
"charliermarsh.ruff"


@@ -17,7 +17,4 @@ indent_size = 4
trim_trailing_whitespace = false
[*.md]
max_line_length = 100
[*.toml]
indent_size = 4
max_line_length = 100

.gitattributes (vendored, 4 changes)

@@ -14,7 +14,5 @@ crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text
crates/ruff_python_parser/resources/inline linguist-generated=true
ruff.schema.json -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_ast/src/generated.rs -diff linguist-generated=true text=auto eol=lf
crates/ruff_python_formatter/src/generated.rs -diff linguist-generated=true text=auto eol=lf
ruff.schema.json linguist-generated=true text=auto eol=lf
*.md.snap linguist-language=Markdown

.github/CODEOWNERS (vendored, 10 changes)

@@ -9,15 +9,13 @@
/crates/ruff_formatter/ @MichaReiser
/crates/ruff_python_formatter/ @MichaReiser
/crates/ruff_python_parser/ @MichaReiser @dhruvmanila
/crates/ruff_annotate_snippets/ @BurntSushi
# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
# Script for fuzzing the parser/red-knot etc.
/python/py-fuzzer/ @AlexWaygood
# Script for fuzzing the parser
/scripts/fuzz-parser/ @AlexWaygood
# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/red_knot* @carljm @MichaReiser @AlexWaygood
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood

.github/ISSUE_TEMPLATE.md (vendored, new file, 12 changes)

@@ -0,0 +1,12 @@
<!--
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
If you're filing a bug report, please consider including the following information:
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
* The current Ruff version (`ruff --version`).
-->


@@ -1,2 +0,0 @@
# This file cannot use the extension `.yaml`.
blank_issues_enabled: false


@@ -1,22 +0,0 @@
name: New issue
description: A generic issue
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
If you're filing a bug report, please consider including the following information:
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
* The current Ruff version (`ruff --version`).
- type: textarea
attributes:
label: Description
description: A description of the issue


@@ -1,10 +0,0 @@
# Configuration for the actionlint tool, which we run via pre-commit
# to verify the correctness of the syntax in our GitHub Actions workflows.
self-hosted-runner:
# Various runners we use that aren't recognized out-of-the-box by actionlint:
labels:
- depot-ubuntu-latest-8
- depot-ubuntu-22.04-16
- github-windows-2025-x86_64-8
- github-windows-2025-x86_64-16
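A sketch of exercising this config locally, assuming actionlint is installed; the hook id follows rhysd/actionlint's published pre-commit hooks and is an assumption here:

  # actionlint reads .github/actionlint.yaml automatically and checks .github/workflows/
  actionlint
  # or via pre-commit, as the comment above describes
  pre-commit run actionlint --all-files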


@@ -8,32 +8,15 @@
semanticCommits: "disabled",
separateMajorMinor: false,
prHourlyLimit: 10,
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
cargo: {
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
rangeStrategy: "update-lockfile",
},
pep621: {
// The default for this package manager is to only search for `pyproject.toml` files
// found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
},
pip_requirements: {
// The default for this package manager is to run on all requirements.txt files:
// https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
// `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
// outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
// which takes a regex string, `ignorePaths` takes a glob string, so we have to use
// a "negative glob pattern".
// See:
// - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
// - https://docs.renovatebot.com/configuration-options/#ignorepaths
// - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
ignorePaths: ["!docs/requirements*.txt"]
},
npm: {
// The default for this package manager is to only search for `package.json` files
// found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
fileMatch: ["^playground/.*package\\.json$"],
},
"pre-commit": {
@@ -45,7 +28,7 @@
groupName: "Artifact GitHub Actions dependencies",
matchManagers: ["github-actions"],
matchDatasources: ["gitea-tags", "github-tags"],
matchPackageNames: ["actions/.*-artifact"],
matchPackagePatterns: ["actions/.*-artifact"],
description: "Weekly update of artifact-related GitHub Actions dependencies",
},
{
@@ -61,18 +44,10 @@
{
// Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
// See: https://github.com/astral-sh/uv/issues/3642
matchPackageNames: ["zip"],
matchPackagePatterns: ["zip"],
matchManagers: ["cargo"],
enabled: false,
},
{
// `mkdocs-material` requires a manual update to keep the version in sync
// with `mkdocs-material-insider`.
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
matchManagers: ["pip_requirements"],
matchPackageNames: ["mkdocs-material"],
enabled: false,
},
{
groupName: "pre-commit dependencies",
matchManagers: ["pre-commit"],
@@ -87,13 +62,13 @@
{
groupName: "Monaco",
matchManagers: ["npm"],
matchPackageNames: ["monaco"],
matchPackagePatterns: ["monaco"],
description: "Weekly update of the Monaco editor",
},
{
groupName: "strum",
matchManagers: ["cargo"],
matchPackageNames: ["strum"],
matchPackagePatterns: ["strum"],
description: "Weekly update of strum dependencies",
},
{


@@ -23,8 +23,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
env:
PACKAGE_NAME: ruff
MODULE_NAME: ruff
@@ -42,7 +40,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -55,9 +52,9 @@ jobs:
args: --out dist
- name: "Test sdist"
run: |
pip install dist/"${PACKAGE_NAME}"-*.tar.gz --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload sdist"
uses: actions/upload-artifact@v4
with:
@@ -66,12 +63,11 @@ jobs:
macos-x86_64:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
runs-on: macos-12
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -113,7 +109,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -127,7 +122,7 @@ jobs:
args: --release --locked --out dist
- name: "Test wheel - aarch64"
run: |
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
ruff --help
python -m ruff --help
- name: "Upload wheels"
@@ -169,7 +164,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -188,9 +182,9 @@ jobs:
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
run: |
python -m pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
@@ -222,7 +216,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -238,9 +231,9 @@ jobs:
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
"${MODULE_NAME}" --help
python -m "${MODULE_NAME}" --help
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v4
with:
@@ -297,7 +290,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -362,7 +354,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -428,7 +419,6 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
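A recurring change in this workflow swaps inline ${{ env.PACKAGE_NAME }} interpolation inside run scripts for quoted shell variables. A minimal sketch of the hardened pattern; the exports stand in for the workflow's env: block:

  export PACKAGE_NAME=ruff MODULE_NAME=ruff
  pip install dist/"${PACKAGE_NAME}"-*.whl --force-reinstall
  "${MODULE_NAME}" --help
  python -m "${MODULE_NAME}" --help

Quoting the expansion keeps word-splitting and injection out of the script regardless of what GitHub Actions substitutes in.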


@@ -17,26 +17,16 @@ on:
paths:
- .github/workflows/build-docker.yml
env:
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff
jobs:
docker-build:
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
docker-publish:
name: Build Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
persist-credentials: false
- uses: docker/setup-buildx-action@v3
@@ -46,254 +36,33 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff
- name: Check tag consistency
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
env:
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
run: |
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${TAG}" != "${version}" ]; then
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${TAG}" >&2
echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2
echo "${version}" >&2
exit 1
else
echo "Releasing ${version}"
fi
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
tags: |
type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
- name: Normalize Platform Pair (replace / with -)
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_TUPLE=${platform//\//-}" >> "$GITHUB_ENV"
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: .
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
- name: Export digests
env:
digest: ${{ steps.build.outputs.digest }}
run: |
mkdir -p /tmp/digests
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digests
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_TUPLE }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
docker-publish:
name: Publish Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-build
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
# shellcheck disable=SC2046
docker buildx imagetools create \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)
docker-publish-extra:
name: Publish additional Docker image based on ${{ matrix.image-mapping }}
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
strategy:
fail-fast: false
matrix:
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
image-mapping:
- alpine:3.20,alpine3.20,alpine
- debian:bookworm-slim,bookworm-slim,debian-slim
- buildpack-deps:bookworm,bookworm,debian
steps:
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate Dynamic Dockerfile Tags
shell: bash
env:
TAG_VALUE: ${{ fromJson(inputs.plan).announcement_tag }}
run: |
set -euo pipefail
# Extract the image and tags from the matrix variable
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"
# Generate Dockerfile content
cat <<EOF > Dockerfile
FROM ${BASE_IMAGE}
COPY --from=${RUFF_BASE_IMG}:latest /ruff /usr/local/bin/ruff
ENTRYPOINT []
CMD ["/usr/local/bin/ruff"]
EOF
# Initialize a variable to store all tag docker metadata patterns
TAG_PATTERNS=""
# Loop through all base tags and append its docker metadata pattern to the list
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
IFS=','; for TAG in ${BASE_TAGS}; do
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${TAG_VALUE}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${TAG_VALUE}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
done
# Remove the trailing newline from the pattern list
TAG_PATTERNS="${TAG_PATTERNS%\\n}"
# Export image cache name
echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> "$GITHUB_ENV"
# Export tag patterns using the multiline env var syntax
{
echo "TAG_PATTERNS<<EOF"
echo -e "${TAG_PATTERNS}"
echo EOF
} >> "$GITHUB_ENV"
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
flavor: |
latest=false
tags: |
${{ env.TAG_PATTERNS }}
- name: Build and push
- name: "Build and push Docker image"
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64
# We do not really need to cache here as the Dockerfile is tiny
#cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
#cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
push: true
tags: ${{ steps.meta.outputs.tags }}
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}
annotations: ${{ steps.meta.outputs.annotations }}
# This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
# show the ruff base image first since GitHub always shows the last updated image digests
# This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
docker-republish:
name: Annotate Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish-extra
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
# shellcheck disable=SC2046
docker buildx imagetools create \
"${annotations[@]}" \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "${RUFF_BASE_IMG}@sha256:%s " *)


@@ -1,7 +1,5 @@
name: CI
permissions: {}
on:
push:
branches: [main]
@@ -18,7 +16,7 @@ env:
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.12"
PYTHON_VERSION: "3.11"
jobs:
determine_changes:
@@ -34,15 +32,12 @@ jobs:
# Flag that is raised when any code is changed
# This is superset of the linter and formatter
code: ${{ steps.changed.outputs.code_any_changed }}
# Flag that is raised when any code that affects the fuzzer is changed
fuzz: ${{ steps.changed.outputs.fuzz_any_changed }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- uses: tj-actions/changed-files@v45
- uses: tj-actions/changed-files@v44
id: changed
with:
files_yaml: |
@@ -54,14 +49,13 @@ jobs:
- crates/ruff_text_size/**
- crates/ruff_python_ast/**
- crates/ruff_python_parser/**
- python/py-fuzzer/**
- scripts/fuzz-parser/**
- .github/workflows/ci.yaml
linter:
- Cargo.toml
- Cargo.lock
- crates/**
- "!crates/red_knot*/**"
- "!crates/ruff_python_formatter/**"
- "!crates/ruff_formatter/**"
- "!crates/ruff_dev/**"
@@ -85,15 +79,9 @@ jobs:
- python/**
- .github/workflows/ci.yaml
fuzz:
- fuzz/Cargo.toml
- fuzz/Cargo.lock
- fuzz/fuzz_targets/**
code:
- "**/*"
- "!**/*.md"
- "crates/red_knot_python_semantic/resources/mdtest/**/*.md"
- "!docs/**"
- "!assets/**"
@@ -103,8 +91,6 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- run: cargo fmt --all --check
@@ -117,29 +103,24 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: |
rustup component add clippy
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
- name: "Clippy (wasm)"
run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
cargo-test-linux:
name: "cargo test (linux)"
runs-on: depot-ubuntu-22.04-16
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -152,6 +133,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -160,13 +142,6 @@ jobs:
# Check for broken links in the documentation.
- run: cargo doc --all --no-deps
env:
RUSTDOCFLAGS: "-D warnings"
# Use --document-private-items so that all our doc comments are kept in
# sync, not just public items. Eventually we should do this for all
# crates; for now add crates here as they are warning-clean to prevent
# regression.
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
@@ -175,56 +150,24 @@ jobs:
name: ruff
path: target/debug/ruff
cargo-test-linux-release:
name: "cargo test (linux, release)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest
cargo-test-windows:
name: "cargo test (windows)"
runs-on: github-windows-2025-x86_64-16
runs-on: windows-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
run: |
@@ -235,69 +178,55 @@ jobs:
name: "cargo test (wasm)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- name: "Test ruff_wasm"
- uses: Swatinem/rust-cache@v2
- name: "Run wasm-pack"
run: |
cd crates/ruff_wasm
wasm-pack test --node
- name: "Test red_knot_wasm"
run: |
cd crates/red_knot_wasm
wasm-pack test --node
cargo-build-release:
name: "cargo build (release)"
runs-on: macos-latest
if: ${{ github.ref == 'refs/heads/main' }}
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: "Build"
run: cargo build --release --locked
cargo-build-msrv:
name: "cargo build (msrv)"
runs-on: depot-ubuntu-latest-8
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: SebRollen/toml-action@v1.2.0
id: msrv
with:
file: "Cargo.toml"
field: "workspace.package.rust-version"
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
env:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
run: rustup default ${{ steps.msrv.outputs.value }}
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
@@ -308,28 +237,26 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
MSRV: ${{ steps.msrv.outputs.value }}
run: cargo "+${MSRV}" insta test --all-features --unreferenced reject --test-runner nextest
run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest
cargo-fuzz-build:
name: "cargo fuzz build"
cargo-fuzz:
name: "cargo fuzz"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' || needs.determine_changes.outputs.code == 'true' }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@main
with:
@@ -340,20 +267,24 @@ jobs:
- run: cargo fuzz build -s none
fuzz-parser:
name: "fuzz parser"
name: "Fuzz the parser"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
timeout-minutes: 20
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
persist-credentials: false
- uses: astral-sh/setup-uv@v5
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- uses: actions/download-artifact@v4
name: Download Ruff binary to test
id: download-cached-binary
@@ -361,42 +292,24 @@ jobs:
name: ruff
path: ruff-to-test
- name: Fuzz
env:
DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x "${DOWNLOAD_PATH}/ruff"
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff
(
uvx \
--python="${PYTHON_VERSION}" \
--from=./python/py-fuzzer \
fuzz \
--test-executable="${DOWNLOAD_PATH}/ruff" \
--bin=ruff \
0-500
)
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
scripts:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup component add rustfmt
# Run all code generation scripts, and verify that the current output is
# already checked into git.
- run: python crates/ruff_python_ast/generate.py
- run: python crates/ruff_python_formatter/generate.py
- run: test -z "$(git status --porcelain)"
# Verify that adding a plugin or rule produces clean code.
- run: ./scripts/add_rule.py --name DoTheThing --prefix F --code 999 --linter pyflakes
- uses: Swatinem/rust-cache@v2
- run: ./scripts/add_rule.py --name DoTheThing --prefix PL --code C0999 --linter pylint
- run: cargo check
- run: cargo fmt --all --check
- run: |
@@ -407,18 +320,16 @@ jobs:
ecosystem:
name: "ecosystem"
runs-on: depot-ubuntu-latest-8
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -430,7 +341,7 @@ jobs:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v6
name: Download baseline Ruff binary
with:
name: ruff
@@ -444,72 +355,64 @@ jobs:
- name: Run `ruff check` stable ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY"
cat ecosystem-result-check-stable > $GITHUB_STEP_SUMMARY
echo "### Linter (stable)" > ecosystem-result
cat ecosystem-result-check-stable >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff check` preview ecosystem check
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
ruff-ecosystem check ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY"
cat ecosystem-result-check-preview > $GITHUB_STEP_SUMMARY
echo "### Linter (preview)" >> ecosystem-result
cat ecosystem-result-check-preview >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff format` stable ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
cat ecosystem-result-format-stable > "$GITHUB_STEP_SUMMARY"
cat ecosystem-result-format-stable > $GITHUB_STEP_SUMMARY
echo "### Formatter (stable)" >> ecosystem-result
cat ecosystem-result-format-stable >> ecosystem-result
echo "" >> ecosystem-result
- name: Run `ruff format` preview ecosystem check
if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Make executable, since artifact download doesn't preserve this
chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
chmod +x ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff
# Set pipefail to avoid hiding errors with tee
set -eo pipefail
ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
ruff-ecosystem format ./ruff ${{ steps.ruff-target.outputs.download-path }}/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY"
cat ecosystem-result-format-preview > $GITHUB_STEP_SUMMARY
echo "### Formatter (preview)" >> ecosystem-result
cat ecosystem-result-format-preview >> ecosystem-result
echo "" >> ecosystem-result
@@ -537,8 +440,6 @@ jobs:
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@main
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -547,11 +448,8 @@ jobs:
name: "python package"
runs-on: ubuntu-latest
timeout-minutes: 20
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -565,7 +463,7 @@ jobs:
args: --out dist
- name: "Test wheel"
run: |
pip install --force-reinstall --find-links dist "${PACKAGE_NAME}"
pip install --force-reinstall --find-links dist ${{ env.PACKAGE_NAME }}
ruff --help
python -m ruff --help
- name: "Remove wheels from cache"
@@ -577,14 +475,12 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install pre-commit"
run: pip install pre-commit
- name: "Cache pre-commit"
@@ -594,14 +490,13 @@ jobs:
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: "Run pre-commit"
run: |
echo '```console' > "$GITHUB_STEP_SUMMARY"
echo '```console' > $GITHUB_STEP_SUMMARY
# Enable color output for pre-commit and remove it for the summary
# Use --hook-stage=manual to enable slower pre-commit hooks that are skipped by default
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always --hook-stage=manual | \
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> "$GITHUB_STEP_SUMMARY") >&1
exit_code="${PIPESTATUS[0]}"
echo '```' >> "$GITHUB_STEP_SUMMARY"
exit "$exit_code"
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always | \
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> $GITHUB_STEP_SUMMARY) >&1
exit_code=${PIPESTATUS[0]}
echo '```' >> $GITHUB_STEP_SUMMARY
exit $exit_code
docs:
name: "mkdocs"
@@ -611,12 +506,7 @@ jobs:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- uses: Swatinem/rust-cache@v2
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
@@ -624,14 +514,13 @@ jobs:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@v5
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: uv pip install -r docs/requirements.txt --system
run: pip install -r docs/requirements.txt
- name: "Update README File"
run: python scripts/transform_readme.py --target mkdocs
- name: "Generate docs"
@@ -649,21 +538,20 @@ jobs:
name: "formatter instabilities and black similarity"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Run checks"
- name: "Cache rust"
uses: Swatinem/rust-cache@v2
- name: "Formatter progress"
run: scripts/formatter_ecosystem_checks.sh
- name: "Github step summary"
run: cat target/formatter-ecosystem/stats.txt > "$GITHUB_STEP_SUMMARY"
run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
- name: "Remove checkouts from cache"
run: rm -r target/formatter-ecosystem
run: rm -r target/progress_projects
check-ruff-lsp:
name: "test ruff-lsp"
@@ -672,7 +560,7 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@v2
env:
@@ -681,7 +569,6 @@ jobs:
- uses: actions/checkout@v4
name: "Download ruff-lsp source"
with:
persist-credentials: false
repository: "astral-sh/ruff-lsp"
- uses: actions/setup-python@v5
@@ -700,29 +587,23 @@ jobs:
just install
- name: Run ruff-lsp tests
env:
DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
run: |
# Setup development binary
pip uninstall --yes ruff
chmod +x "${DOWNLOAD_PATH}/ruff"
export PATH="${DOWNLOAD_PATH}:${PATH}"
chmod +x ${{ steps.ruff-target.outputs.download-path }}/ruff
export PATH=${{ steps.ruff-target.outputs.download-path }}:$PATH
ruff version
just test
benchmarks:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
@@ -732,11 +613,13 @@ jobs:
with:
tool: cargo-codspeed
- uses: Swatinem/rust-cache@v2
- name: "Build benchmarks"
run: cargo codspeed build --features codspeed -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@v3
uses: CodSpeedHQ/action@v2
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}


@@ -32,9 +32,13 @@ jobs:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
persist-credentials: false
- uses: astral-sh/setup-uv@v5
python-version: "3.12"
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -45,17 +49,7 @@ jobs:
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
run: cargo build --locked
- name: Fuzz
run: |
# shellcheck disable=SC2046
(
uvx \
--python=3.12 \
--from=./python/py-fuzzer \
fuzz \
--test-executable=target/debug/ruff \
--bin=ruff \
$(shuf -i 0-9999999999999999999 -n 1000)
)
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
create-issue-on-failure:
name: Create an issue if the daily fuzz surfaced any bugs
@@ -73,6 +67,6 @@ jobs:
owner: "astral-sh",
repo: "ruff",
title: `Daily parser fuzz failed on ${new Date().toDateString()}`,
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
body: "Runs listed here: https://github.com/astral-sh/ruff/actions/workflows/daily_fuzz.yml",
labels: ["bug", "parser", "fuzzer"],
})
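For the seed selection above: shuf -i draws distinct random integers from the given range, one per line, so each daily run fuzzes 1000 fresh seeds. A quick sketch:

  # prints five distinct random integers from the range, one per line
  shuf -i 0-9999999999999999999 -n 5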


@@ -1,71 +0,0 @@
name: Daily property test run
on:
workflow_dispatch:
schedule:
- cron: "0 12 * * *"
pull_request:
paths:
- ".github/workflows/daily_property_tests.yaml"
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
FORCE_COLOR: 1
jobs:
property_tests:
name: Property tests
runs-on: ubuntu-latest
timeout-minutes: 20
# Don't run the cron job on forks:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: Build Red Knot
# A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
# mode (1.5 min vs 14 min), so the overall time is shorter with a release build.
run: cargo build --locked --release --package red_knot_python_semantic --tests
- name: Run property tests
shell: bash
run: |
export QUICKCHECK_TESTS=100000
for _ in {1..5}; do
cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
done
create-issue-on-failure:
name: Create an issue if the daily property test run surfaced any bugs
runs-on: ubuntu-latest
needs: property_tests
if: ${{ github.repository == 'astral-sh/ruff' && always() && github.event_name == 'schedule' && needs.property_tests.result == 'failure' }}
permissions:
issues: write
steps:
- uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
await github.rest.issues.create({
owner: "astral-sh",
repo: "ruff",
title: `Daily property test run failed on ${new Date().toDateString()}`,
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
labels: ["bug", "red-knot", "testing"],
})
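The property-test step relies on the quickcheck crate reading QUICKCHECK_TESTS from the environment to raise the per-property case count, then repeats the randomized suite to widen coverage. A smaller-budget sketch of the same pattern:

  export QUICKCHECK_TESTS=1000   # cases per property per run
  for round in {1..5}; do
    echo "round ${round}"
    cargo test --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
  done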


@@ -10,29 +10,29 @@ on:
description: The ecosystem workflow that triggers the workflow run
required: true
permissions:
pull-requests: write
jobs:
comment:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v6
name: Download pull request number
with:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true
- name: Parse pull request number
id: pr-number
run: |
if [[ -f pr-number ]]
then
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi
- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v6
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number
@@ -43,7 +43,6 @@ jobs:
path: pr/ecosystem
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true
- name: Generate comment content
id: generate-comment
@@ -65,9 +64,9 @@ jobs:
cat pr/ecosystem/ecosystem-result >> comment.txt
echo "" >> comment.txt
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
cat comment.txt >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"
echo 'comment<<EOF' >> $GITHUB_OUTPUT
cat comment.txt >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT
- name: Find existing comment
uses: peter-evans/find-comment@v3
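The comment<<EOF lines above use GitHub Actions' heredoc syntax for multiline step outputs. A minimal sketch, assuming comment.txt was assembled earlier in the step:

  {
    echo 'comment<<EOF'
    cat comment.txt
    echo 'EOF'
  } >> "$GITHUB_OUTPUT"

Everything between the delimiter lines becomes the value of steps.<id>.outputs.comment.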


@@ -26,38 +26,38 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
persist-credentials: true
- uses: actions/setup-python@v5
with:
python-version: 3.12
- name: "Set docs version"
env:
version: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
run: |
# if version is missing, use 'latest'
if [ -z "$version" ]; then
echo "Using 'latest' as version"
version="latest"
version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
# if version is missing, exit with error
if [[ -z "$version" ]]; then
echo "Can't build docs without a version."
exit 1
fi
# Use version as display name for now
display_name="$version"
echo "version=$version" >> "$GITHUB_ENV"
echo "display_name=$display_name" >> "$GITHUB_ENV"
echo "version=$version" >> $GITHUB_ENV
echo "display_name=$display_name" >> $GITHUB_ENV
- name: "Set branch name"
run: |
version="${{ env.version }}"
display_name="${{ env.display_name }}"
timestamp="$(date +%s)"
# create branch_display_name from display_name by replacing all
# characters disallowed in git branch names with hyphens
branch_display_name="$(echo "${display_name}" | tr -c '[:alnum:]._' '-' | tr -s '-')"
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
echo "timestamp=$timestamp" >> "$GITHUB_ENV"
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
echo "timestamp=$timestamp" >> $GITHUB_ENV
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
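The branch-name step above collapses anything that is not alphanumeric, a dot, or an underscore into single hyphens. A quick illustration with a hypothetical display name:

  printf 'v0.5.3 (preview)' | tr -c '[:alnum:]._' '-' | tr -s '-'
  # prints: v0.5.3-preview-   (space and parentheses collapse to hyphens)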
@@ -92,7 +92,9 @@ jobs:
run: mkdocs build --strict -f mkdocs.public.yml
- name: "Clone docs repo"
run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
run: |
version="${{ env.version }}"
git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
- name: "Copy docs"
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
@@ -100,10 +102,12 @@ jobs:
- name: "Commit docs"
working-directory: astral-docs
run: |
git config user.name "astral-docs-bot"
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
branch_name="${{ env.branch_name }}"
git checkout -b "${branch_name}"
git config user.name "$GITHUB_ACTOR"
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
git checkout -b $branch_name
git add site/ruff
git commit -m "Update ruff documentation for $version"
@@ -112,8 +116,12 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
version="${{ env.version }}"
display_name="${{ env.display_name }}"
branch_name="${{ env.branch_name }}"
# set the PR title
pull_request_title="Update ruff documentation for ${display_name}"
pull_request_title="Update ruff documentation for $display_name"
# Delete any existing pull requests that are open for this version
# by checking against pull_request_title because the new PR will
@@ -122,15 +130,13 @@ jobs:
xargs -I {} gh pr close {}
# push the branch to GitHub
git push origin "${branch_name}"
git push origin $branch_name
# create the PR
gh pr create \
--base=main \
--head="${branch_name}" \
--title="${pull_request_title}" \
--body="Automated documentation update for ${display_name}" \
--label="documentation"
gh pr create --base main --head $branch_name \
--title "$pull_request_title" \
--body "Automated documentation update for $display_name" \
--label "documentation"
- name: "Merge Pull Request"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
@@ -138,7 +144,8 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
branch_name="${{ env.branch_name }}"
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
# give the PR a few seconds to be created before trying to auto-merge it
sleep 10
gh pr merge --squash "${branch_name}"
gh pr merge --squash $branch_name


@@ -25,13 +25,11 @@ jobs:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
@@ -49,7 +47,7 @@ jobs:
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.13.1
uses: cloudflare/wrangler-action@v3.7.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}


@@ -21,12 +21,14 @@ jobs:
# For PyPI's trusted publishing.
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@v5
- uses: actions/download-artifact@v4
with:
pattern: wheels-*
path: wheels
merge-multiple: true
- name: Publish to PyPi
run: uv publish -v wheels/*
uses: pypa/gh-action-pypi-publish@release/v1
with:
skip-existing: true
packages-dir: wheels
verbose: true


@@ -30,8 +30,6 @@ jobs:
fail-fast: false
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@v0.4.0
@@ -45,7 +43,7 @@ jobs:
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}


@@ -1,12 +1,10 @@
# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with dist (archives, installers, hashes)
# * builds artifacts with cargo-dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
@@ -24,10 +22,10 @@ permissions:
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't dist-able).
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (dist-able) packages in the workspace with that version (this mode is
# (cargo-dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
@@ -48,7 +46,7 @@ on:
type: string
jobs:
# Run 'dist plan' (or host) to determine what tasks we need to do
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
plan:
runs-on: "ubuntu-20.04"
outputs:
@@ -62,16 +60,16 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install dist
- name: Install cargo-dist
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
- name: Cache dist
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
- name: Cache cargo-dist
uses: actions/upload-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/dist
path: ~/.cargo/bin/cargo-dist
# sure would be cool if github gave us proper conditionals...
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
# functionality based on whether this is a pull_request, and whether it's from a fork.
@@ -79,8 +77,8 @@ jobs:
# but also really annoying to build CI around when it needs secrets to work right.)
- id: plan
run: |
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "dist ran successfully"
cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "cargo dist ran successfully"
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
@@ -124,12 +122,12 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached dist
- name: Install cached cargo-dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
- run: chmod +x ~/.cargo/bin/cargo-dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@v4
@@ -140,8 +138,8 @@ jobs:
- id: cargo-dist
shell: bash
run: |
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "dist ran successfully"
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "cargo dist ran successfully"
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
@@ -174,12 +172,12 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached dist
- name: Install cached cargo-dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
- run: chmod +x ~/.cargo/bin/cargo-dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@v4
@@ -191,7 +189,7 @@ jobs:
- id: host
shell: bash
run: |
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
echo "artifacts uploaded and released successfully"
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"

View File

@@ -25,13 +25,11 @@ jobs:
name: Checkout Ruff
with:
path: ruff
persist-credentials: true
- uses: actions/checkout@v4
name: Checkout typeshed
with:
repository: python/typeshed
path: typeshed
persist-credentials: false
- name: Setup git
run: |
git config --global user.name typeshedbot
@@ -39,13 +37,13 @@ jobs:
- name: Sync typeshed
id: sync
run: |
rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
mkdir ruff/crates/red_knot_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}
@@ -59,7 +57,7 @@ jobs:
run: |
cd ruff
git push --force origin typeshedbot/sync-typeshed
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr list --repo $GITHUB_REPOSITORY --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
create-issue-on-failure:
@@ -78,6 +76,5 @@ jobs:
owner: "astral-sh",
repo: "ruff",
title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
labels: ["bug", "red-knot"],
body: "Runs are listed here: https://github.com/astral-sh/ruff/actions/workflows/sync_typeshed.yaml",
})

19
.github/zizmor.yml vendored
View File

@@ -1,19 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
#
# TODO: can we remove the ignores here so that our workflows are more secure?
rules:
dangerous-triggers:
ignore:
- pr-comment.yaml
cache-poisoning:
ignore:
- build-docker.yml
- publish-playground.yml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...
ignore:
- build-docker.yml
- publish-playground.yml
- publish-docs.yml

12
.gitignore vendored
View File

@@ -21,18 +21,6 @@ flamegraph.svg
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*
# samply profiles
profile.json
# tracing-flame traces
tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg
# insta
*.rs.pending-snap
###
# Rust.gitignore
###

View File

@@ -14,18 +14,7 @@ MD041: false
# MD013/line-length
MD013: false
# MD014/commands-show-output
MD014: false
# MD024/no-duplicate-heading
MD024:
# Allow when nested under different parents e.g. CHANGELOG.md
siblings_only: true
# MD046/code-block-style
#
# Ignore this because it conflicts with the code block style used in content
# tabs of mkdocs-material which is to add a blank line after the content title.
#
# Ref: https://github.com/astral-sh/ruff/pull/15011#issuecomment-2544790854
MD046: false

View File

@@ -1,15 +1,10 @@
fail_fast: false
fail_fast: true
exclude: |
(?x)^(
.github/workflows/release.yml|
crates/red_knot_vendored/vendor/.*|
crates/red_knot_project/resources/.*|
crates/ruff_benchmark/resources/.*|
crates/red_knot_module_resolver/vendor/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff_notebook/resources/.*|
crates/ruff_server/resources/.*|
crates/ruff/resources/.*|
crates/ruff_python_formatter/resources/.*|
crates/ruff_python_formatter/tests/snapshots/.*|
@@ -19,17 +14,17 @@ exclude: |
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.23
rev: v0.18
hooks:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.22
rev: 0.7.17
hooks:
- id: mdformat
additional_dependencies:
- mdformat-mkdocs==4.0.0
- mdformat-footnote==0.1.1
- mdformat-mkdocs
- mdformat-admon
exclude: |
(?x)^(
docs/formatter/black\.md
@@ -37,7 +32,7 @@ repos:
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.44.0
rev: v0.41.0
hooks:
- id: markdownlint-fix
exclude: |
@@ -46,21 +41,8 @@ repos:
| docs/\w+\.md
)$
- repo: https://github.com/adamchainz/blacken-docs
rev: 1.19.1
hooks:
- id: blacken-docs
args: ["--pyi", "--line-length", "130"]
files: '^crates/.*/resources/mdtest/.*\.md'
exclude: |
(?x)^(
.*?invalid(_.+)*_syntax\.md
)$
additional_dependencies:
- black==25.1.0
- repo: https://github.com/crate-ci/typos
rev: v1.29.5
rev: v1.23.2
hooks:
- id: typos
@@ -74,52 +56,25 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.4
rev: v0.5.2
hooks:
- id: ruff-format
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi]
require_serial: true
exclude: |
(?x)^(
crates/ruff_linter/resources/.*|
crates/ruff_python_formatter/resources/.*
)$
# Prettier
- repo: https://github.com/rbubley/mirrors-prettier
rev: v3.4.2
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
hooks:
- id: prettier
types: [yaml]
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.3.0
hooks:
- id: zizmor
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.31.1
hooks:
- id: check-github-workflows
# `actionlint` hook, for verifying correct syntax in GitHub Actions workflows.
# Some additional configuration for `actionlint` can be found in `.github/actionlint.yaml`.
- repo: https://github.com/rhysd/actionlint
rev: v1.7.7
hooks:
- id: actionlint
stages:
# This hook is disabled by default, since it's quite slow.
# To run all hooks *including* this hook, use `uvx pre-commit run -a --hook-stage=manual`.
# To run *just* this hook, use `uvx pre-commit run -a actionlint --hook-stage=manual`.
- manual
args:
- "-ignore=SC2129" # ignorable stylistic lint from shellcheck
- "-ignore=SC2016" # another shellcheck lint: seems to have false positives?
additional_dependencies:
# actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions
# and checks these with shellcheck. This is arguably its most useful feature,
# but the integration only works if shellcheck is installed
- "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.10.0"
ci:
skip: [cargo-fmt, dev-generate-all]

View File

@@ -1,85 +1,5 @@
# Breaking Changes
## 0.9.0
Ruff now formats your code according to the 2025 style guide. As a result, your code might now get formatted differently. See the [changelog](./CHANGELOG.md#090) for a detailed list of changes.
## 0.8.0
- **Default to Python 3.9**
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
- **Changed location of `pydoclint` diagnostics**
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first line of the problematic docstring. Previously, this was not the case.
If you've opted into these preview rules but have them suppressed using
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
some places, this change may mean that you need to move the `noqa` suppression
comments. Most users should be unaffected by this change.
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
- **Changes to the line width calculation**
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
## 0.7.0
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
[`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
instead.
## 0.6.0
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):
```toml
[tool.ruff.lint.per-file-ignores]
"*.ipynb" = ["E501"] # disable line-too-long in notebooks
```
If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
section-specific `exclude` option to do so. For example, the following would only lint Jupyter
Notebook files and not format them:
```toml
[tool.ruff.format]
exclude = ["*.ipynb"]
```
And, conversely, the following would only format Jupyter Notebook files and not lint them:
```toml
[tool.ruff.lint]
exclude = ["*.ipynb"]
```
You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:
```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```
## 0.5.0
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
@@ -196,7 +116,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.
See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,

File diff suppressed because it is too large.

View File

@@ -2,6 +2,35 @@
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
- [The Basics](#the-basics)
- [Prerequisites](#prerequisites)
- [Development](#development)
- [Project Structure](#project-structure)
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
- [Rule naming convention](#rule-naming-convention)
- [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- [MkDocs](#mkdocs)
- [Release Process](#release-process)
- [Creating a new release](#creating-a-new-release)
- [Ecosystem CI](#ecosystem-ci)
- [Benchmarking and Profiling](#benchmarking-and-profiling)
- [CPython Benchmark](#cpython-benchmark)
- [Microbenchmarks](#microbenchmarks)
- [Benchmark-driven Development](#benchmark-driven-development)
- [PR Summary](#pr-summary)
- [Tips](#tips)
- [Profiling Projects](#profiling-projects)
- [Linux](#linux)
- [Mac](#mac)
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
- [Compilation Pipeline](#compilation-pipeline)
- [Import Categorization](#import-categorization)
- [Project root](#project-root)
- [Package root](#package-root)
- [Import categorization](#import-categorization-1)
## The Basics
Ruff welcomes contributions in the form of pull requests.
@@ -29,14 +58,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```
You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
run Python utility commands.
And you'll need pre-commit to run some validation checks:
```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
```
You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:
```shell
uv tool install pre-commit
pre-commit install
```
@@ -64,7 +95,7 @@ and that it passes both the lint and test validation checks:
```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```
These checks will run on GitHub Actions when you open your pull request, but running them locally
@@ -139,7 +170,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).
1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
`#[derive(ViolationMetadata)]` to see examples.
`#[violation]` to see examples.
1. In that file, define a function that adds the violation to the diagnostic list as appropriate
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
@@ -265,20 +296,26 @@ To preview any changes to the documentation locally:
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
1. Install MkDocs and Material for MkDocs with:
```shell
pip install -r docs/requirements.txt
```
1. Generate the MkDocs site with:
```shell
uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
python scripts/generate_mkdocs.py
```
1. Run the development server with:
```shell
# For contributors.
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
mkdocs serve -f mkdocs.public.yml
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
mkdocs serve -f mkdocs.insiders.yml
```
The documentation should then be available locally at
@@ -296,34 +333,22 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
### Creating a new release
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release.sh`; this command will:
- Generate a temporary virtual environment with `rooster`
- Generate a changelog entry in `CHANGELOG.md`
- Update versions in `pyproject.toml` and `Cargo.toml`
- Update references to versions in the `README.md` and documentation
- Display contributors for the release
1. The changelog should then be editorialized for consistency
- Often labels will be missing from pull requests, so they will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
- The new version number (without starting `v`)
1. The release workflow will do the following:
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
uploaded anything, so you can restart after pushing a fix. If you just need to rerun the build,
make sure you're [re-running all the failed
@@ -334,25 +359,14 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
1. Attach artifacts to draft GitHub release
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
downstream jobs manually if needed.
1. Verify the GitHub release:
1. The Changelog should match the content of `CHANGELOG.md`
1. Append the contributors from the `scripts/release.sh` script
1. Append the contributors from the `bump.sh` script
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
1. One can determine if an update is needed when
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
1. Once run successfully, you should follow the link in the output to create a PR.
1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
[`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
the release instructions in those repositories. `ruff-lsp` should always be updated
before `ruff-vscode`.
This step is generally not required for a patch release, but should always be done
for a minor release.
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
## Ecosystem CI
@@ -360,8 +374,9 @@ GitHub Actions will run your changes against a number of real-world projects fro
report on any linter or formatter differences. You can also run those checks locally via:
```shell
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
```
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
@@ -374,7 +389,7 @@ We have several ways of benchmarking and profiling Ruff:
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects
> **Note**
> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.
@@ -388,18 +403,12 @@ which makes it a good target for benchmarking.
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```
Install `hyperfine`:
```shell
cargo install hyperfine
```
To benchmark the release build:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
@@ -418,7 +427,7 @@ To benchmark against the ecosystem's existing tools:
```shell
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"pyflakes crates/ruff_linter/resources/test/cpython" \
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
"pycodestyle crates/ruff_linter/resources/test/cpython" \
@@ -464,10 +473,10 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```
You can run `uv venv --project ./scripts/benchmarks`, activate the venv and then run `uv sync --project ./scripts/benchmarks` to create a working environment for the
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
above. All reported benchmarks were computed using the versions specified by
`./scripts/benchmarks/pyproject.toml` on Python 3.11.
@@ -521,8 +530,6 @@ You can run the benchmarks with
cargo benchmark
```
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
#### Benchmark-driven Development
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
@@ -561,7 +568,7 @@ cargo install critcmp
#### Tips
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
@@ -863,7 +870,7 @@ each configuration file.
The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
(inclusive of the root itself). The module path can be thought of as "the path you would use to
import the module" (e.g., `import foo.bar.baz`).
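
To make that computation concrete, here is a minimal sketch of the relative-path-to-module-path mapping described above (illustrative only; the function name and details are assumptions, not Ruff's actual implementation):

```rust
use std::path::Path;

/// Illustrative sketch: derive a dotted module path from a package root and a
/// file inside it, per the description above. Not Ruff's real implementation.
fn module_path(package_root: &Path, file: &Path) -> Option<String> {
    // Take the path relative to the package root's *parent*, so that the root
    // itself is included ("inclusive of the root itself").
    let relative = file.strip_prefix(package_root.parent()?).ok()?;
    let mut parts: Vec<String> = relative
        .components()
        .map(|part| part.as_os_str().to_string_lossy().into_owned())
        .collect();
    // Drop the `.py` extension from the final component.
    if let Some(last) = parts.last_mut() {
        *last = last.trim_end_matches(".py").to_owned();
    }
    Some(parts.join("."))
}

fn main() {
    let root = Path::new("my_project/src/foo");
    let file = Path::new("my_project/src/foo/bar/baz.py");
    assert_eq!(module_path(root, file).as_deref(), Some("foo.bar.baz"));
}
```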
@@ -898,11 +905,15 @@ There are three ways in which an import can be categorized as "first-party":
package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
automatically. This check is as simple as comparing the first segment of the current file's
module path to the first segment of the import.
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
sets the directories to scan when identifying first-party imports. The algorithm is
straightforward: given an import, like `import foo`, iterate over the directories enumerated in
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
file `foo.py` (see the sketch below).
By default, `src` is set to the project root, along with the `"src"` subdirectory in the project root.
This ensures that Ruff supports both flat and "src" layouts out of the box.
By default, `src` is set to the project root. In the above example, we'd want to set
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
explicitly.
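
Here is a minimal sketch of that source-root check (a hypothetical helper written for illustration; not Ruff's actual code):

```rust
use std::path::PathBuf;

/// Illustrative sketch of the source-root heuristic described above: given the
/// first segment of an import (e.g. `foo` for `import foo.bar`), scan the
/// configured `src` directories for a package directory or a module file.
fn is_first_party(src_roots: &[PathBuf], import_head: &str) -> bool {
    src_roots.iter().any(|root| {
        // Check for a subdirectory `foo` or a file `foo.py`.
        root.join(import_head).is_dir() || root.join(format!("{import_head}.py")).is_file()
    })
}

fn main() {
    let src_roots = vec![PathBuf::from("./src"), PathBuf::from(".")];
    println!("`import foo` first-party? {}", is_first_party(&src_roots, "foo"));
}
```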

2434
Cargo.lock generated

File diff suppressed because it is too large.

View File

@@ -4,7 +4,7 @@ resolver = "2"
[workspace.package]
edition = "2021"
rust-version = "1.80"
rust-version = "1.75"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -13,12 +13,10 @@ license = "MIT"
[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_annotate_snippets = { path = "crates/ruff_annotate_snippets" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db", default-features = false }
ruff_db = { path = "crates/ruff_db" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
@@ -35,19 +33,15 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }
ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
red_knot_vendored = { path = "crates/red_knot_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }
red_knot = { path = "crates/red_knot" }
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_test = { path = "crates/red_knot_test" }
red_knot_project = { path = "crates/red_knot_project", default-features = false }
aho-corasick = { version = "1.1.3" }
anstream = { version = "0.6.18" }
anstyle = { version = "1.0.10" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
@@ -57,9 +51,9 @@ camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "4.0.0" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
colored = { version = "3.0.0" }
colored = { version = "2.1.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
@@ -67,33 +61,24 @@ compact_str = "0.8.0"
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.8.0" }
fern = { version = "0.7.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = { version = "0.15.0", default-features = false, features = [
"raw-entry",
"equivalent",
"inline-more",
] }
hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
insta-cmd = { version = "0.6.0" }
is-macro = { version = "0.3.5" }
is-wsl = { version = "0.4.0" }
itertools = { version = "0.14.0" }
itertools = { version = "0.13.0" }
js-sys = { version = "0.3.69" }
jod-thread = { version = "0.1.2" }
libc = { version = "0.2.153" }
@@ -101,29 +86,29 @@ libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
"proposed",
"proposed",
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "8.0.0" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.7.1" }
pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.13.4" }
quick-junit = { version = "0.5.0" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
quote = { version = "1.0.23" }
rand = { version = "0.9.0" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "88a1d7774d78f048fbd77d40abca9ebd729fd1f0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -131,65 +116,47 @@ serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
"macros",
"macros",
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
"diff",
"term-svg",
"cmd",
"examples",
] }
static_assertions = "1.1.0"
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
"env-filter",
"fmt",
] }
tracing-tree = { version = "0.4.0" }
tryfn = { version = "0.2.1" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.2.0" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
"js",
"v4",
"fast-rng",
"macro-diagnostics",
"js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
[workspace.metadata.cargo-shear]
ignored = ["getrandom"]
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = [
"cfg(fuzzing)",
"cfg(codspeed)",
] }
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -205,9 +172,8 @@ missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
@@ -220,13 +186,6 @@ get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
# nursery rules
redundant_clone = "warn"
debug_assert_with_mut_call = "warn"
unused_peekable = "warn"
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"
[profile.release]
# Note that we set these explicitly, and these values
@@ -266,12 +225,12 @@ debug = 1
[profile.dist]
inherits = "release"
# Config for 'dist'
# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.25.2-prerelease.3"
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.18.0"
# CI backends to support
ci = "github"
ci = ["github"]
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
@@ -280,33 +239,33 @@ windows-archive = ".zip"
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
"aarch64-apple-darwin",
"aarch64-pc-windows-msvc",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"arm-unknown-linux-musleabihf",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"i686-unknown-linux-musl",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"aarch64-apple-darwin",
"aarch64-pc-windows-msvc",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"arm-unknown-linux-musleabihf",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"i686-unknown-linux-musl",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether dist should create a Github Release or use an existing draft
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
# Which actions to run on pull requests
# Publish jobs to run in CI
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase dist should use to create the GitHub release
# The stage during which the GitHub Release should be created
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
@@ -314,15 +273,9 @@ build-local-artifacts = false
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
post-announce-jobs = [
"./notify-dependents",
"./publish-docs",
"./publish-playground",
]
# Announcement jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]

View File

@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM ubuntu AS build
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME

25
LICENSE
View File

@@ -1371,28 +1371,3 @@ are:
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- pydoclint, licensed as follows:
"""
MIT License
Copyright (c) 2023 jsh9
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

View File

@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -110,28 +110,15 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)
## Getting Started<a id="getting-started"></a>
## Getting Started
For more, see the [documentation](https://docs.astral.sh/ruff/).
### Installation
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI.
Invoke Ruff directly with [`uvx`](https://docs.astral.sh/uv/):
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
```shell
uvx ruff check # Lint all files in the current directory.
uvx ruff format # Format all files in the current directory.
```
Or install Ruff with `uv` (recommended), `pip`, or `pipx`:
```shell
# With uv.
uv tool install ruff@latest # Install Ruff globally.
uv add --dev ruff # Or add Ruff to your project.
# With pip.
pip install ruff
@@ -149,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.9.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.9.5/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.5.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.3/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -183,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.9.5
rev: v0.5.3
hooks:
# Run the linter.
- id: ruff
@@ -192,10 +179,11 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
- id: ruff-format
```
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/astral-sh/ruff-action):
[`ruff-action`](https://github.com/chartboost/ruff-action):
```yaml
name: Ruff
@@ -205,10 +193,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: astral-sh/ruff-action@v3
- uses: chartboost/ruff-action@v1
```
### Configuration<a id="configuration"></a>
### Configuration
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -251,8 +239,8 @@ exclude = [
line-length = 88
indent-width = 4
# Assume Python 3.9
target-version = "py39"
# Assume Python 3.8
target-version = "py38"
[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
@@ -304,7 +292,7 @@ features that may change prior to stabilization.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
## Rules<a id="rules"></a>
## Rules
<!-- Begin section: Rules -->
@@ -380,21 +368,21 @@ quality tools, including:
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
## Contributing<a id="contributing"></a>
## Contributing
Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
## Support<a id="support"></a>
## Support
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
## Acknowledgements<a id="acknowledgements"></a>
## Acknowledgements
Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -418,7 +406,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
Ruff is released under the MIT license.
## Who's Using Ruff?<a id="whos-using-ruff"></a>
## Who's Using Ruff?
Ruff is used by a number of major open-source projects and companies, including:
@@ -430,14 +418,12 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
@@ -448,11 +434,9 @@ Ruff is used by a number of major open-source projects and companies, including:
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
[Datasets](https://github.com/huggingface/datasets),
[Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [JAX](https://github.com/jax-ml/jax)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)
@@ -539,7 +523,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```
## License<a id="license"></a>
## License
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)

View File

@@ -1,10 +1,6 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = [
"crates/red_knot_vendored/vendor/**/*",
"**/resources/**/*",
"**/snapshots/**/*",
]
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
@@ -12,18 +8,14 @@ hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
pn = "pn" # `import panel as pd` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"
[default]
extend-ignore-re = [
# Line ignore with trailing "spellchecker:disable-line"
"(?Rm)^.*#\\s*spellchecker:disable-line$",
"LICENSEs",
# Line ignore with trailing "spellchecker:disable-line"
"(?Rm)^.*#\\s*spellchecker:disable-line$",
"LICENSEs",
]
[default.extend-identifiers]
"FrIeNdLy" = "FrIeNdLy"

View File

@@ -1,25 +1,13 @@
doc-valid-idents = [
"..",
"CodeQL",
"FastAPI",
"IPython",
"LangChain",
"LibCST",
"McCabe",
"NumPy",
"SCREAMING_SNAKE_CASE",
"SQLAlchemy",
"StackOverflow",
"PyCharm",
"SNMPv1",
"SNMPv2",
"SNMPv3",
"PyFlakes"
]
ignore-interior-mutability = [
# Interned is read-only. The wrapped `Rc` never gets updated.
"ruff_formatter::format_element::Interned",
# The expression is read-only.
"ruff_python_ast::hashable::HashableExpr",
"..",
"CodeQL",
"FastAPI",
"IPython",
"LangChain",
"LibCST",
"McCabe",
"NumPy",
"SCREAMING_SNAKE_CASE",
"SQLAlchemy",
"StackOverflow",
]

View File

@@ -12,35 +12,25 @@ license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
red_knot_module_resolver = { workspace = true }
red_knot_python_semantic = { workspace = true }
red_knot_project = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }
anyhow = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
notify = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-tree = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_trivia = { workspace = true }
insta = { workspace = true, features = ["filters"] }
insta-cmd = { workspace = true }
filetime = { workspace = true }
regex = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }
[lints]
workspace = true

View File

@@ -1,104 +0,0 @@
use std::{
fs,
path::{Path, PathBuf},
process::Command,
};
fn main() {
// The workspace root directory is not available without walking up the tree
// https://github.com/rust-lang/cargo/issues/3946
let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
.join("..")
.join("..");
commit_info(&workspace_root);
#[allow(clippy::disallowed_methods)]
let target = std::env::var("TARGET").unwrap();
println!("cargo::rustc-env=RUST_HOST_TARGET={target}");
}
fn commit_info(workspace_root: &Path) {
// If not in a git repository, do not attempt to retrieve commit information
let git_dir = workspace_root.join(".git");
if !git_dir.exists() {
return;
}
if let Some(git_head_path) = git_head(&git_dir) {
println!("cargo:rerun-if-changed={}", git_head_path.display());
let git_head_contents = fs::read_to_string(git_head_path);
if let Ok(git_head_contents) = git_head_contents {
// The contents are either a commit or a reference in the following formats
// - "<commit>" when the head is detached
// - "ref <ref>" when working on a branch
// If a commit, checking if the HEAD file has changed is sufficient
// If a ref, we need to add the head file for that ref to rebuild on commit
let mut git_ref_parts = git_head_contents.split_whitespace();
git_ref_parts.next();
if let Some(git_ref) = git_ref_parts.next() {
let git_ref_path = git_dir.join(git_ref);
println!("cargo:rerun-if-changed={}", git_ref_path.display());
}
}
}
let output = match Command::new("git")
.arg("log")
.arg("-1")
.arg("--date=short")
.arg("--abbrev=9")
.arg("--format=%H %h %cd %(describe)")
.output()
{
Ok(output) if output.status.success() => output,
_ => return,
};
let stdout = String::from_utf8(output.stdout).unwrap();
let mut parts = stdout.split_whitespace();
let mut next = || parts.next().unwrap();
let _commit_hash = next();
println!("cargo::rustc-env=RED_KNOT_COMMIT_SHORT_HASH={}", next());
println!("cargo::rustc-env=RED_KNOT_COMMIT_DATE={}", next());
// Describe can fail for some commits
// https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
if let Some(describe) = parts.next() {
let mut describe_parts = describe.split('-');
let _last_tag = describe_parts.next().unwrap();
// If this is the tagged commit, this component will be missing
println!(
"cargo::rustc-env=RED_KNOT_LAST_TAG_DISTANCE={}",
describe_parts.next().unwrap_or("0")
);
}
}
fn git_head(git_dir: &Path) -> Option<PathBuf> {
// The typical case is a standard git repository.
let git_head_path = git_dir.join("HEAD");
if git_head_path.exists() {
return Some(git_head_path);
}
if !git_dir.is_file() {
return None;
}
// If `.git/HEAD` doesn't exist and `.git` is actually a file,
// then let's try to attempt to read it as a worktree. If it's
// a worktree, then its contents will look like this, e.g.:
//
// gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
//
// And the HEAD file we want to watch will be at:
//
// /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
let contents = fs::read_to_string(git_dir).ok()?;
let (label, worktree_path) = contents.split_once(':')?;
if label != "gitdir" {
return None;
}
let worktree_path = worktree_path.trim();
Some(PathBuf::from(worktree_path))
}

Binary file not shown.


View File

@@ -1,128 +0,0 @@
# Tracing
Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing. They should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.
## Verbosity levels
The CLI supports different verbosity levels.
- default: Only show errors and warnings.
- `-v` activates `info!`: Show generally useful information, such as the paths of configuration files, the detected platform, etc. It's not a lot of messages; it's something you'd activate in CI by default (`cargo build`, for example, shows you which packages are fresh at this level).
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of this output is wasted and it's really slow; we dump, for example, the entire resolution graph. It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.
## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).
The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confusing log output where messages from different threads are intertwined.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
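For example, a single-threaded run with debug-level logging (the filter directive and invocation here are illustrative):
```bash
RAYON_NUM_THREADS=1 RED_KNOT_LOG=red_knot=debug red_knot -- --current-directory=../test
```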
### Examples
#### Show all debug messages
Shows debug messages from all crates.
```bash
RED_KNOT_LOG=debug
```
#### Show salsa query execution messages
Show the salsa `execute: my_query` messages in addition to all red knot messages.
```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```
#### Show typing traces
Only show traces for the `red_knot_python_semantic::types` module.
```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```
Note: Ensure that you use `-vvv` to see tracing spans.
#### Show messages for a single file
Shows all messages that are inside of a span for a specific file.
```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```
**Note**: Tracing still shows all spans because tracing can't know, at the time of entering the span,
whether one of its children has the file `x.py`.
**Note**: Salsa currently logs entire memoized values (in our case, the source text and the parsed AST).
This very quickly leads to extremely long output.
## Tracing and Salsa
Be mindful about using `tracing` in Salsa queries, especially at the `warn` or `error` level, because a query
isn't guaranteed to execute again after restoring from a persistent cache, in which case the user won't see the message.
For example, don't use `tracing` to notify the user that generating a lint violation failed:
the message would only be shown the first time the file is linted, not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result, or use a Salsa accumulator.
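As a minimal sketch (the `LintResult` type and `try_lint` helper are hypothetical, not the actual Red Knot API), returning the failure as part of the query result could look like this:
```rust
/// Hypothetical result type: diagnostics on success, an error message on failure.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LintResult {
    Success(Vec<String>),
    Failed(String),
}

#[salsa::tracked(return_ref)]
fn lint_file(db: &dyn Db, file: File) -> LintResult {
    match try_lint(db, file) {
        Ok(diagnostics) => LintResult::Success(diagnostics),
        // The failure is part of the memoized result, so it resurfaces on every
        // analysis run and after restoring from a persistent cache, instead of
        // being logged once and then lost.
        Err(error) => LintResult::Failed(error.to_string()),
    }
}
```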
## Tracing in tests
You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.
```rust
use ruff_db::testing::setup_logging;
#[test]
fn test() {
let _logging = setup_logging();
tracing::info!("This message will be printed to stderr");
}
```
Note: Most test runners capture stderr and only show its output when a test fails.
Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
called **once**.
## Release builds
`trace!` events are removed in release builds.
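One common way to achieve this is the `tracing` crate's static max-level feature flags (a sketch; the exact mechanism used in this repository may differ):
```toml
# Cargo.toml: statically compile out `trace!` events in release builds.
[dependencies]
tracing = { version = "0.1", features = ["release_max_level_debug"] }
```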
## Profiling
Red Knot writes a folded stack trace to a file named `tracing.folded` in the current directory when the environment variable `RED_KNOT_LOG_PROFILE` is set to `1` or `true`.
```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```
You can convert the textual representation into a visual one using `inferno`.
```shell
cargo install inferno
```
```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg
# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```
![Example flamegraph](./tracing-flamegraph.png)
See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.

View File

@@ -1,201 +0,0 @@
use crate::logging::Verbosity;
use crate::python_version::PythonVersion;
use clap::{ArgAction, ArgMatches, Error, Parser};
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
use red_knot_python_semantic::lint;
use ruff_db::system::SystemPathBuf;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
)]
#[command(version)]
pub(crate) struct Args {
#[command(subcommand)]
pub(crate) command: Command,
}
#[derive(Debug, clap::Subcommand)]
pub(crate) enum Command {
/// Check a project for type errors.
Check(CheckCommand),
/// Start the language server
Server,
/// Display Red Knot's version
Version,
}
#[derive(Debug, Parser)]
pub(crate) struct CheckCommand {
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
///
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
#[arg(long, value_name = "PROJECT")]
pub(crate) project: Option<SystemPathBuf>,
/// Path to the virtual environment the project uses.
///
/// If provided, red-knot will use the `site-packages` directory of this virtual environment
/// to resolve type information for the project's third-party dependencies.
#[arg(long, value_name = "PATH")]
pub(crate) venv_path: Option<SystemPathBuf>,
/// Custom directory to use for stdlib typeshed stubs.
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
pub(crate) typeshed: Option<SystemPathBuf>,
/// Additional path to use as a module-resolution source (can be passed multiple times).
#[arg(long, value_name = "PATH")]
pub(crate) extra_search_path: Option<Vec<SystemPathBuf>>,
/// Python version to assume when resolving types.
#[arg(long, value_name = "VERSION", alias = "target-version")]
pub(crate) python_version: Option<PythonVersion>,
#[clap(flatten)]
pub(crate) verbosity: Verbosity,
#[clap(flatten)]
pub(crate) rules: RulesArg,
/// Use exit code 1 if there are any warning-level diagnostics.
#[arg(long, conflicts_with = "exit_zero")]
pub(crate) error_on_warning: bool,
/// Always use exit code 0, even when there are error-level diagnostics.
#[arg(long)]
pub(crate) exit_zero: bool,
/// Run in watch mode by re-running whenever files change.
#[arg(long, short = 'W')]
pub(crate) watch: bool,
}
impl CheckCommand {
pub(crate) fn into_options(self) -> Options {
let rules = if self.rules.is_empty() {
None
} else {
Some(
self.rules
.into_iter()
.map(|(rule, level)| (RangedValue::cli(rule), RangedValue::cli(level)))
.collect(),
)
};
Options {
environment: Some(EnvironmentOptions {
python_version: self
.python_version
.map(|version| RangedValue::cli(version.into())),
venv_path: self.venv_path.map(RelativePathBuf::cli),
typeshed: self.typeshed.map(RelativePathBuf::cli),
extra_paths: self.extra_search_path.map(|extra_search_paths| {
extra_search_paths
.into_iter()
.map(RelativePathBuf::cli)
.collect()
}),
..EnvironmentOptions::default()
}),
rules,
..Default::default()
}
}
}
/// A list of rules to enable or disable with a given severity.
///
/// This type is used to parse the `--error`, `--warn`, and `--ignore` arguments
/// while preserving the order in which they were specified (arguments last override previous severities).
#[derive(Debug)]
pub(crate) struct RulesArg(Vec<(String, lint::Level)>);
impl RulesArg {
fn is_empty(&self) -> bool {
self.0.is_empty()
}
fn into_iter(self) -> impl Iterator<Item = (String, lint::Level)> {
self.0.into_iter()
}
}
impl clap::FromArgMatches for RulesArg {
fn from_arg_matches(matches: &ArgMatches) -> Result<Self, Error> {
let mut rules = Vec::new();
for (level, arg_id) in [
(lint::Level::Ignore, "ignore"),
(lint::Level::Warn, "warn"),
(lint::Level::Error, "error"),
] {
let indices = matches.indices_of(arg_id).into_iter().flatten();
let levels = matches.get_many::<String>(arg_id).into_iter().flatten();
rules.extend(
indices
.zip(levels)
.map(|(index, rule)| (index, rule, level)),
);
}
// Sort by their index so that values specified later override earlier ones.
rules.sort_by_key(|(index, _, _)| *index);
Ok(Self(
rules
.into_iter()
.map(|(_, rule, level)| (rule.to_owned(), level))
.collect(),
))
}
fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), Error> {
self.0 = Self::from_arg_matches(matches)?.0;
Ok(())
}
}
impl clap::Args for RulesArg {
fn augment_args(cmd: clap::Command) -> clap::Command {
const HELP_HEADING: &str = "Enabling / disabling rules";
cmd.arg(
clap::Arg::new("error")
.long("error")
.action(ArgAction::Append)
.help("Treat the given rule as having severity 'error'. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
.arg(
clap::Arg::new("warn")
.long("warn")
.action(ArgAction::Append)
.help("Treat the given rule as having severity 'warn'. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
.arg(
clap::Arg::new("ignore")
.long("ignore")
.action(ArgAction::Append)
.help("Disables the rule. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
}
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
Self::augment_args(cmd)
}
}

View File

@@ -0,0 +1,2 @@
pub(crate) mod target_version;
pub(crate) mod verbosity;

View File

@@ -0,0 +1,34 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
Py37,
#[default]
Py38,
Py39,
Py310,
Py311,
Py312,
Py313,
}
impl std::fmt::Display for TargetVersion {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
ruff_db::program::TargetVersion::from(*self).fmt(f)
}
}
impl From<TargetVersion> for ruff_db::program::TargetVersion {
fn from(value: TargetVersion) -> Self {
match value {
TargetVersion::Py37 => Self::Py37,
TargetVersion::Py38 => Self::Py38,
TargetVersion::Py39 => Self::Py39,
TargetVersion::Py310 => Self::Py310,
TargetVersion::Py311 => Self::Py311,
TargetVersion::Py312 => Self::Py312,
TargetVersion::Py313 => Self::Py313,
}
}
}

View File

@@ -0,0 +1,34 @@
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
Info,
Debug,
Trace,
}
/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
#[arg(
long,
short = 'v',
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
action = clap::ArgAction::Count,
global = true,
)]
verbose: u8,
}
impl Verbosity {
/// Returns the verbosity level based on the number of `-v` flags.
///
/// Returns `None` if the user did not specify any verbosity flags.
pub(crate) fn level(&self) -> Option<VerbosityLevel> {
match self.verbose {
0 => None,
1 => Some(VerbosityLevel::Info),
2 => Some(VerbosityLevel::Debug),
_ => Some(VerbosityLevel::Trace),
}
}
}

200
crates/red_knot/src/db.rs Normal file
View File

@@ -0,0 +1,200 @@
use std::panic::{AssertUnwindSafe, RefUnwindSafe};
use std::sync::Arc;
use salsa::{Cancelled, Database, DbWithJar};
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::program::{Program, ProgramSettings};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
use crate::watch::{FileChangeKind, FileWatcherChange};
use crate::workspace::{check_file, Package, Workspace, WorkspaceMetadata};
pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}
#[salsa::jar(db=Db)]
pub struct Jar(
Workspace,
Package,
lint_syntax,
lint_semantic,
unwind_if_cancelled,
);
#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
pub struct RootDatabase {
workspace: Option<Workspace>,
storage: salsa::Storage<RootDatabase>,
files: Files,
system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
}
impl RootDatabase {
pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
where
S: System + 'static + Send + Sync + RefUnwindSafe,
{
let mut db = Self {
workspace: None,
storage: salsa::Storage::default(),
files: Files::default(),
system: Arc::new(system),
};
let workspace = Workspace::from_metadata(&db, workspace);
// Initialize the `Program` singleton
Program::from_settings(&db, settings);
db.workspace = Some(workspace);
db
}
pub fn workspace(&self) -> Workspace {
// SAFETY: The workspace is always initialized in `new`.
self.workspace.unwrap()
}
#[tracing::instrument(level = "debug", skip(self, changes))]
pub fn apply_changes(&mut self, changes: Vec<FileWatcherChange>) {
let workspace = self.workspace();
let workspace_path = workspace.root(self).to_path_buf();
// TODO: Optimize change tracking by only reloading a package if a file that is part of the package was changed.
let mut structural_change = false;
for change in changes {
if matches!(
change.path.file_name(),
Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
) {
// Changes to ignore files or settings can change the workspace structure or add/remove files
// from packages.
structural_change = true;
} else {
match change.kind {
FileChangeKind::Created => {
// Reload the package when a new file was added. This is necessary because the file might be excluded
// by a gitignore.
if workspace.package(self, &change.path).is_some() {
structural_change = true;
}
}
FileChangeKind::Modified => {}
FileChangeKind::Deleted => {
if let Some(package) = workspace.package(self, &change.path) {
if let Some(file) = system_path_to_file(self, &change.path) {
package.remove_file(self, file);
}
}
}
}
}
File::touch_path(self, &change.path);
}
if structural_change {
match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
Ok(metadata) => {
tracing::debug!("Reload workspace after structural change.");
// TODO: Handle changes in the program settings.
workspace.reload(self, metadata);
}
Err(error) => {
tracing::error!("Failed to load workspace, keep old workspace: {error}");
}
}
}
}
/// Checks all open files in the workspace and its dependencies.
pub fn check(&self) -> Result<Vec<String>, Cancelled> {
self.with_db(|db| db.workspace().check(db))
}
pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
self.with_db(|db| check_file(db, file))
}
pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
where
F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
{
// The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
// Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
// storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
// unwind safe.
//
// Having to use `AssertUnwindSafe` isn't as big a deal as it might seem because
// the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
// They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
// On top of that, `Cancelled` only catches specific Salsa-panics and propagates all other panics.
//
// That still leaves us with possible logical bugs in two sources:
// * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
// Reviewing Salsa code specifically around unwind safety seems doable.
// * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
// and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
// certainly makes it harder to catch these issues in our user code.
//
// For now, this is the only solution at hand unless Salsa decides to change its design.
// [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
let db = &AssertUnwindSafe(self);
Cancelled::catch(|| f(db))
}
}
impl Upcast<dyn SemanticDb> for RootDatabase {
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
self
}
}
impl Upcast<dyn SourceDb> for RootDatabase {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
}
impl Upcast<dyn ResolverDb> for RootDatabase {
fn upcast(&self) -> &(dyn ResolverDb + 'static) {
self
}
}
impl ResolverDb for RootDatabase {}
impl SemanticDb for RootDatabase {}
impl SourceDb for RootDatabase {
fn vendored(&self) -> &VendoredFileSystem {
vendored_typeshed_stubs()
}
fn system(&self) -> &dyn System {
&*self.system
}
fn files(&self) -> &Files {
&self.files
}
}
impl Database for RootDatabase {}
impl Db for RootDatabase {}
impl salsa::ParallelDatabase for RootDatabase {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
workspace: self.workspace,
storage: self.storage.snapshot(),
files: self.files.snapshot(),
system: self.system.clone(),
})
}
}

View File

@@ -0,0 +1,6 @@
use crate::db::Jar;
pub mod db;
pub mod lint;
pub mod watch;
pub mod workspace;

274
crates/red_knot/src/lint.rs Normal file
View File

@@ -0,0 +1,274 @@
use std::cell::RefCell;
use std::ops::Deref;
use std::time::Duration;
use tracing::trace_span;
use red_knot_module_resolver::ModuleName;
use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasTy, SemanticModel};
use ruff_db::files::File;
use ruff_db::parsed::{parsed_module, ParsedModule};
use ruff_db::source::{source_text, SourceText};
use ruff_python_ast as ast;
use ruff_python_ast::visitor::{walk_stmt, Visitor};
use crate::db::Db;
/// Workaround query to test whether the computation should be cancelled.
/// Ideally, push for Salsa to expose an API for testing if cancellation was requested.
#[salsa::tracked]
#[allow(unused_variables)]
pub(crate) fn unwind_if_cancelled(db: &dyn Db) {}
#[salsa::tracked(return_ref)]
pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
#[allow(clippy::print_stdout)]
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
for i in 0..10 {
unwind_if_cancelled(db);
println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
std::thread::sleep(Duration::from_secs(1));
}
}
let mut diagnostics = Vec::new();
let source = source_text(db.upcast(), file_id);
lint_lines(&source, &mut diagnostics);
let parsed = parsed_module(db.upcast(), file_id);
if parsed.errors().is_empty() {
let ast = parsed.syntax();
let mut visitor = SyntaxLintVisitor {
diagnostics,
source: &source,
};
visitor.visit_body(&ast.body);
diagnostics = visitor.diagnostics;
} else {
diagnostics.extend(parsed.errors().iter().map(ToString::to_string));
}
Diagnostics::from(diagnostics)
}
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
for (line_number, line) in source.lines().enumerate() {
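// Fast path: fewer than 88 bytes implies fewer than 88 characters.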
if line.len() < 88 {
continue;
}
let char_count = line.chars().count();
if char_count > 88 {
diagnostics.push(format!(
"Line {} is too long ({} characters)",
line_number + 1,
char_count
));
}
}
}
#[salsa::tracked(return_ref)]
pub(crate) fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
let _span = trace_span!("lint_semantic", ?file_id).entered();
let source = source_text(db.upcast(), file_id);
let parsed = parsed_module(db.upcast(), file_id);
let semantic = SemanticModel::new(db.upcast(), file_id);
if !parsed.is_valid() {
return Diagnostics::Empty;
}
let context = SemanticLintContext {
source,
parsed,
semantic,
diagnostics: RefCell::new(Vec::new()),
};
SemanticVisitor { context: &context }.visit_body(parsed.suite());
Diagnostics::from(context.diagnostics.take())
}
fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
match import {
AnyImportRef::Import(import) => {
for alias in &import.names {
let ty = alias.ty(&context.semantic);
if ty.is_unbound() {
context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
}
}
}
AnyImportRef::ImportFrom(import) => {
for alias in &import.names {
let ty = alias.ty(&context.semantic);
if ty.is_unbound() {
context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
}
}
}
}
}
fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
let semantic = &context.semantic;
// TODO we should have a special marker on the real typing module (from typeshed) so if you
// have your own "typing" module in your project, we don't consider it THE typing module (and
// same for other stdlib modules that our lint rules care about)
let Some(typing) = semantic.resolve_module(ModuleName::new("typing").unwrap()) else {
return;
};
let override_ty = semantic.global_symbol_ty(&typing, "override");
let Type::Class(class_ty) = class.ty(semantic) else {
return;
};
for function in class
.body
.iter()
.filter_map(|stmt| stmt.as_function_def_stmt())
{
let Type::Function(ty) = function.ty(semantic) else {
return;
};
// TODO this shouldn't make direct use of the Db; see comment on SemanticModel::db
let db = semantic.db();
if ty.has_decorator(db, override_ty) {
let method_name = ty.name(db);
if class_ty
.inherited_class_member(db, &method_name)
.is_unbound()
{
// TODO should have a qualname() method to support nested classes
context.push_diagnostic(
format!(
"Method {}.{} is decorated with `typing.override` but does not override any base class method",
class_ty.name(db),
method_name,
));
}
}
}
}
pub(crate) struct SemanticLintContext<'a> {
source: SourceText,
parsed: &'a ParsedModule,
semantic: SemanticModel<'a>,
diagnostics: RefCell<Vec<String>>,
}
impl<'db> SemanticLintContext<'db> {
#[allow(unused)]
pub(crate) fn source_text(&self) -> &str {
self.source.as_str()
}
#[allow(unused)]
pub(crate) fn ast(&self) -> &'db ast::ModModule {
self.parsed.syntax()
}
pub(crate) fn push_diagnostic(&self, diagnostic: String) {
self.diagnostics.borrow_mut().push(diagnostic);
}
#[allow(unused)]
pub(crate) fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
self.diagnostics.get_mut().extend(diagnostics);
}
}
#[derive(Debug)]
struct SyntaxLintVisitor<'a> {
diagnostics: Vec<String>,
source: &'a str,
}
impl Visitor<'_> for SyntaxLintVisitor<'_> {
fn visit_string_literal(&mut self, string_literal: &'_ ast::StringLiteral) {
// A very naive implementation of a "use double quotes" rule
let text = &self.source[string_literal.range];
if text.starts_with('\'') {
self.diagnostics
.push("Use double quotes for strings".to_string());
}
}
}
struct SemanticVisitor<'a> {
context: &'a SemanticLintContext<'a>,
}
impl Visitor<'_> for SemanticVisitor<'_> {
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
match stmt {
ast::Stmt::ClassDef(class) => {
lint_bad_override(self.context, class);
}
ast::Stmt::Import(import) => {
lint_unresolved_imports(self.context, AnyImportRef::Import(import));
}
ast::Stmt::ImportFrom(import) => {
lint_unresolved_imports(self.context, AnyImportRef::ImportFrom(import));
}
_ => {}
}
walk_stmt(self, stmt);
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Diagnostics {
Empty,
List(Vec<String>),
}
impl Diagnostics {
pub fn as_slice(&self) -> &[String] {
match self {
Diagnostics::Empty => &[],
Diagnostics::List(list) => list.as_slice(),
}
}
}
impl Deref for Diagnostics {
type Target = [String];
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl From<Vec<String>> for Diagnostics {
fn from(value: Vec<String>) -> Self {
if value.is_empty() {
Diagnostics::Empty
} else {
Diagnostics::List(value)
}
}
}
#[derive(Copy, Clone, Debug)]
enum AnyImportRef<'a> {
Import(&'a ast::StmtImport),
ImportFrom(&'a ast::StmtImportFrom),
}

View File

@@ -1,254 +0,0 @@
//! Sets up logging for Red Knot
use anyhow::Context;
use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;
/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
#[arg(
long,
short = 'v',
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
action = clap::ArgAction::Count,
global = true,
)]
verbose: u8,
}
impl Verbosity {
/// Returns the verbosity level based on the number of `-v` flags.
///
/// Returns `None` if the user did not specify any verbosity flags.
pub(crate) fn level(&self) -> VerbosityLevel {
match self.verbose {
0 => VerbosityLevel::Default,
1 => VerbosityLevel::Verbose,
2 => VerbosityLevel::ExtraVerbose,
_ => VerbosityLevel::Trace,
}
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
/// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN).
Default,
/// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO).
/// Corresponds to `-v`.
Verbose,
/// Enables a more verbose tracing format and emits Ruff and Red Knot events up to [`DEBUG`](tracing::Level::DEBUG).
/// Corresponds to `-vv`
ExtraVerbose,
/// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
Trace,
}
impl VerbosityLevel {
const fn level_filter(self) -> LevelFilter {
match self {
VerbosityLevel::Default => LevelFilter::WARN,
VerbosityLevel::Verbose => LevelFilter::INFO,
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
VerbosityLevel::Trace => LevelFilter::TRACE,
}
}
pub(crate) const fn is_trace(self) -> bool {
matches!(self, VerbosityLevel::Trace)
}
pub(crate) const fn is_extra_verbose(self) -> bool {
matches!(self, VerbosityLevel::ExtraVerbose)
}
}
pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
use tracing_subscriber::prelude::*;
// The `RED_KNOT_LOG` environment variable overrides the default log level.
let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
EnvFilter::builder()
.parse(log_env_variable)
.context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
} else {
match level {
VerbosityLevel::Default => {
// Show warning traces
EnvFilter::default().add_directive(LevelFilter::WARN.into())
}
level => {
let level_filter = level.level_filter();
// Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
let filter = EnvFilter::default().add_directive(
format!("red_knot={level_filter}")
.parse()
.expect("Hardcoded directive to be valid"),
);
filter.add_directive(
format!("ruff={level_filter}")
.parse()
.expect("Hardcoded directive to be valid"),
)
}
}
};
let (profiling_layer, guard) = setup_profile();
let registry = tracing_subscriber::registry()
.with(filter)
.with(profiling_layer);
if level.is_trace() {
let subscriber = registry.with(
tracing_tree::HierarchicalLayer::default()
.with_indent_lines(true)
.with_indent_amount(2)
.with_bracketed_fields(true)
.with_thread_ids(true)
.with_targets(true)
.with_writer(std::io::stderr)
.with_timer(tracing_tree::time::Uptime::default()),
);
subscriber.init();
} else {
let subscriber = registry.with(
tracing_subscriber::fmt::layer()
.event_format(RedKnotFormat {
display_level: true,
display_timestamp: level.is_extra_verbose(),
show_spans: false,
})
.with_writer(std::io::stderr),
);
subscriber.init();
}
Ok(TracingGuard {
_flame_guard: guard,
})
}
#[allow(clippy::type_complexity)]
fn setup_profile<S>() -> (
Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
Option<tracing_flame::FlushGuard<BufWriter<File>>>,
)
where
S: Subscriber + for<'span> LookupSpan<'span>,
{
if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
.expect("Flame layer to be created");
(Some(layer), Some(guard))
} else {
(None, None)
}
}
pub(crate) struct TracingGuard {
_flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
}
struct RedKnotFormat {
display_timestamp: bool,
display_level: bool,
show_spans: bool,
}
/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
impl<S, N> FormatEvent<S, N> for RedKnotFormat
where
S: Subscriber + for<'a> LookupSpan<'a>,
N: for<'a> FormatFields<'a> + 'static,
{
fn format_event(
&self,
ctx: &FmtContext<'_, S, N>,
mut writer: Writer<'_>,
event: &Event<'_>,
) -> fmt::Result {
let meta = event.metadata();
let ansi = writer.has_ansi_escapes();
if self.display_timestamp {
let timestamp = chrono::Local::now()
.format("%Y-%m-%d %H:%M:%S.%f")
.to_string();
if ansi {
write!(writer, "{} ", timestamp.dimmed())?;
} else {
write!(
writer,
"{} ",
chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
)?;
}
}
if self.display_level {
let level = meta.level();
// Same colors as tracing
if ansi {
let formatted_level = level.to_string();
match *level {
tracing::Level::TRACE => {
write!(writer, "{} ", formatted_level.purple().bold())?;
}
tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
}
} else {
write!(writer, "{level} ")?;
}
}
if self.show_spans {
let span = event.parent();
let mut seen = false;
let span = span
.and_then(|id| ctx.span(id))
.or_else(|| ctx.lookup_current());
let scope = span.into_iter().flat_map(|span| span.scope().from_root());
for span in scope {
seen = true;
if ansi {
write!(writer, "{}:", span.metadata().name().bold())?;
} else {
write!(writer, "{}:", span.metadata().name())?;
}
}
if seen {
writer.write_char(' ')?;
}
}
ctx.field_format().format_fields(writer.by_ref(), event)?;
writeln!(writer)
}
}

View File

@@ -1,115 +1,111 @@
use std::io::{self, BufWriter, Write};
use std::process::{ExitCode, Termination};
use anyhow::Result;
use std::sync::Mutex;
use crate::args::{Args, CheckCommand, Command};
use crate::logging::setup_tracing;
use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use red_knot_project::metadata::options::Options;
use red_knot_project::watch;
use red_knot_project::watch::ProjectWatcher;
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_server::run_server;
use ruff_db::diagnostic::{Diagnostic, Severity};
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;
use salsa::ParallelDatabase;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;
mod args;
mod logging;
mod python_version;
mod verbosity;
mod version;
use red_knot::db::RootDatabase;
use red_knot::watch::FileWatcher;
use red_knot::watch::FileWatcherChange;
use red_knot::workspace::WorkspaceMetadata;
use ruff_db::program::{ProgramSettings, SearchPathSettings};
use ruff_db::system::{OsSystem, System, SystemPathBuf};
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
run().unwrap_or_else(|error| {
use std::io::Write;
use cli::target_version::TargetVersion;
use cli::verbosity::{Verbosity, VerbosityLevel};
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
let mut stderr = std::io::stderr().lock();
mod cli;
// This communicates that this isn't a linter error but Red Knot itself hard-errored for
// some reason (e.g. failed to resolve the configuration)
writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
// Currently we generally only see one error, but e.g. with io errors when resolving
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
// "failed to read file: subdir/pyproject.toml")
for cause in error.chain() {
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
}
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An experimental multifile analysis backend for Ruff"
)]
#[command(version)]
struct Args {
#[arg(
long,
help = "Changes the current working directory.",
long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
value_name = "PATH"
)]
current_directory: Option<SystemPathBuf>,
ExitStatus::Error
})
#[arg(
long,
value_name = "DIRECTORY",
help = "Custom directory to use for stdlib typeshed stubs"
)]
custom_typeshed_dir: Option<SystemPathBuf>,
#[arg(
long,
value_name = "PATH",
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
)]
extra_search_path: Vec<SystemPathBuf>,
#[arg(long, help = "Python version to assume when resolving types", default_value_t = TargetVersion::default(), value_name="VERSION")]
target_version: TargetVersion,
#[clap(flatten)]
verbosity: Verbosity,
}
fn run() -> anyhow::Result<ExitStatus> {
let args = Args::parse_from(std::env::args());
#[allow(
clippy::print_stdout,
clippy::unnecessary_wraps,
clippy::print_stderr,
clippy::dbg_macro
)]
pub fn main() -> anyhow::Result<()> {
let Args {
current_directory,
custom_typeshed_dir,
extra_search_path: extra_paths,
target_version,
verbosity,
} = Args::parse_from(std::env::args().collect::<Vec<_>>());
match args.command {
Command::Server => run_server().map(|()| ExitStatus::Success),
Command::Check(check_args) => run_check(check_args),
Command::Version => version().map(|()| ExitStatus::Success),
}
}
let verbosity = verbosity.level();
countme::enable(verbosity == Some(VerbosityLevel::Trace));
setup_tracing(verbosity);
pub(crate) fn version() -> Result<()> {
let mut stdout = BufWriter::new(io::stdout().lock());
let version_info = crate::version::version();
writeln!(stdout, "red knot {}", &version_info)?;
Ok(())
}
fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
let verbosity = args.verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
// The base path to which all CLI arguments are relative to.
let cli_base_path = {
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
SystemPathBuf::from_path_buf(cwd)
.map_err(|path| {
anyhow!(
"The current working directory `{}` contains non-Unicode characters. Red Knot only supports Unicode paths.",
path.display()
)
})?
};
let cwd = args
.project
.as_ref()
.map(|cwd| {
if cwd.as_std_path().is_dir() {
Ok(SystemPath::absolute(cwd, &cli_base_path))
} else {
Err(anyhow!("Provided project path `{cwd}` is not a directory"))
}
})
.transpose()?
.unwrap_or_else(|| cli_base_path.clone());
let system = OsSystem::new(cwd);
let watch = args.watch;
let exit_zero = args.exit_zero;
let min_error_severity = if args.error_on_warning {
Severity::Warning
let cwd = if let Some(cwd) = current_directory {
let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
SystemPathBuf::from_utf8_path_buf(canonicalized)
} else {
Severity::Error
let cwd = std::env::current_dir().unwrap();
SystemPathBuf::from_path_buf(cwd).unwrap()
};
let cli_options = args.into_options();
let mut workspace_metadata = ProjectMetadata::discover(system.current_directory(), &system)?;
workspace_metadata.apply_cli_options(cli_options.clone());
let system = OsSystem::new(cwd.clone());
let workspace_metadata =
WorkspaceMetadata::from_path(system.current_directory(), &system).unwrap();
let mut db = ProjectDatabase::new(workspace_metadata, system)?;
// TODO: Respect the settings from the workspace metadata when resolving the program settings.
let program_settings = ProgramSettings {
target_version: target_version.into(),
search_paths: SearchPathSettings {
extra_paths,
workspace_root: workspace_metadata.root().to_path_buf(),
custom_typeshed: custom_typeshed_dir,
site_packages: None,
},
};
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options, min_error_severity);
// TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists.
let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);
// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -121,178 +117,125 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
}
})?;
let exit_status = if watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)
};
let file_changes_notifier = main_loop.file_changes_notifier();
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
// Watch for file changes and re-trigger the analysis.
let mut file_watcher = FileWatcher::new(move |changes| {
file_changes_notifier.notify(changes);
})?;
std::mem::forget(db);
file_watcher.watch_folder(db.workspace().root(&db).as_std_path())?;
if exit_zero {
Ok(ExitStatus::Success)
} else {
Ok(exit_status)
}
}
main_loop.run(&mut db);
#[derive(Copy, Clone)]
pub enum ExitStatus {
/// Checking was successful and there were no errors.
Success = 0,
println!("{}", countme::get_all());
/// Checking was successful but there were errors.
Failure = 1,
/// Checking failed.
Error = 2,
}
impl Termination for ExitStatus {
fn report(self) -> ExitCode {
ExitCode::from(self as u8)
}
Ok(())
}
struct MainLoop {
/// Sender that can be used to send messages to the main loop.
sender: crossbeam_channel::Sender<MainLoopMessage>,
/// Receiver for the messages sent **to** the main loop.
verbosity: Option<VerbosityLevel>,
orchestrator: crossbeam_channel::Sender<OrchestratorMessage>,
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
/// The file system watcher, if running in watch mode.
watcher: Option<ProjectWatcher>,
cli_options: Options,
/// The minimum severity to consider an error when deciding the exit status.
///
/// TODO(micha): Get from the terminal settings.
min_error_severity: Severity,
}
impl MainLoop {
fn new(
cli_options: Options,
min_error_severity: Severity,
) -> (Self, MainLoopCancellationToken) {
let (sender, receiver) = crossbeam_channel::bounded(10);
fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
let mut orchestrator = Orchestrator {
receiver: orchestrator_receiver,
main_loop: main_loop_sender.clone(),
revision: 0,
};
std::thread::spawn(move || {
orchestrator.run();
});
(
Self {
sender: sender.clone(),
receiver,
watcher: None,
cli_options,
min_error_severity,
verbosity,
orchestrator: orchestrator_sender,
receiver: main_loop_receiver,
},
MainLoopCancellationToken {
sender: main_loop_sender,
},
MainLoopCancellationToken { sender },
)
}
fn watch(mut self, db: &mut ProjectDatabase) -> anyhow::Result<ExitStatus> {
tracing::debug!("Starting watch mode");
let sender = self.sender.clone();
let watcher = watch::directory_watcher(move |event| {
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
})?;
self.watcher = Some(ProjectWatcher::new(watcher, db));
self.run(db);
Ok(ExitStatus::Success)
fn file_changes_notifier(&self) -> FileChangesNotifier {
FileChangesNotifier {
sender: self.orchestrator.clone(),
}
}
fn run(mut self, db: &mut ProjectDatabase) -> ExitStatus {
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
#[allow(clippy::print_stderr)]
fn run(self, db: &mut RootDatabase) {
self.orchestrator.send(OrchestratorMessage::Run).unwrap();
let result = self.main_loop(db);
for message in &self.receiver {
tracing::trace!("Main Loop: Tick");
tracing::debug!("Exiting main loop");
result
}
fn main_loop(&mut self, db: &mut ProjectDatabase) -> ExitStatus {
// Schedule the first check.
tracing::debug!("Starting main loop");
let mut revision = 0u64;
while let Ok(message) = self.receiver.recv() {
match message {
MainLoopMessage::CheckWorkspace => {
let db = db.clone();
let sender = self.sender.clone();
MainLoopMessage::CheckWorkspace { revision } => {
let db = db.snapshot();
let orchestrator = self.orchestrator.clone();
// Spawn a new task that checks the project. This needs to be done in a separate thread
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
// to prevent blocking the main loop here.
rayon::spawn(move || {
if let Ok(result) = db.check() {
// Send the result back to the main loop for printing.
sender
.send(MainLoopMessage::CheckCompleted { result, revision })
orchestrator
.send(OrchestratorMessage::CheckCompleted {
diagnostics: result,
revision,
})
.unwrap();
}
});
}
MainLoopMessage::CheckCompleted {
result,
revision: check_revision,
} => {
let failed = result
.iter()
.any(|diagnostic| diagnostic.severity() >= self.min_error_severity);
if check_revision == revision {
#[allow(clippy::print_stdout)]
for diagnostic in result {
println!("{}", diagnostic.display(db));
}
} else {
tracing::debug!(
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
);
}
if self.watcher.is_none() {
return if failed {
ExitStatus::Failure
} else {
ExitStatus::Success
};
}
tracing::trace!("Counts after last check:\n{}", countme::get_all());
}
MainLoopMessage::ApplyChanges(changes) => {
revision += 1;
// Automatically cancels any pending queries and waits for them to complete.
db.apply_changes(changes, Some(&self.cli_options));
if let Some(watcher) = self.watcher.as_mut() {
watcher.update(db);
db.apply_changes(changes);
}
MainLoopMessage::CheckCompleted(diagnostics) => {
eprintln!("{}", diagnostics.join("\n"));
if self.verbosity == Some(VerbosityLevel::Trace) {
eprintln!("{}", countme::get_all());
}
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
}
MainLoopMessage::Exit => {
// Cancel any pending queries and wait for them to complete.
// TODO: Don't use Salsa internal APIs
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
let _ = db.zalsa_mut();
return ExitStatus::Success;
if self.verbosity == Some(VerbosityLevel::Trace) {
eprintln!("{}", countme::get_all());
}
return;
}
}
tracing::debug!("Waiting for next main loop message.");
}
}
}
ExitStatus::Success
impl Drop for MainLoop {
fn drop(&mut self) {
self.orchestrator
.send(OrchestratorMessage::Shutdown)
.unwrap();
}
}
#[derive(Debug, Clone)]
struct FileChangesNotifier {
sender: crossbeam_channel::Sender<OrchestratorMessage>,
}
impl FileChangesNotifier {
fn notify(&self, changes: Vec<FileWatcherChange>) {
self.sender
.send(OrchestratorMessage::FileChanges(changes))
.unwrap();
}
}
@@ -307,14 +250,170 @@ impl MainLoopCancellationToken {
}
}
struct Orchestrator {
/// Sends messages to the main loop.
main_loop: crossbeam_channel::Sender<MainLoopMessage>,
/// Receives messages from the main loop.
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
revision: usize,
}
impl Orchestrator {
#[allow(clippy::print_stderr)]
fn run(&mut self) {
while let Ok(message) = self.receiver.recv() {
match message {
OrchestratorMessage::Run => {
self.main_loop
.send(MainLoopMessage::CheckWorkspace {
revision: self.revision,
})
.unwrap();
}
OrchestratorMessage::CheckCompleted {
diagnostics,
revision,
} => {
// Only take the diagnostics if they are for the latest revision.
if self.revision == revision {
self.main_loop
.send(MainLoopMessage::CheckCompleted(diagnostics))
.unwrap();
} else {
tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
}
}
OrchestratorMessage::FileChanges(changes) => {
// Request cancellation, but wait until all analysis tasks have completed to
// avoid stale messages in the next main loop.
self.revision += 1;
self.debounce_changes(changes);
}
OrchestratorMessage::Shutdown => {
return self.shutdown();
}
}
}
}
fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
loop {
// Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 100ms.
crossbeam_channel::select! {
recv(self.receiver) -> message => {
match message {
Ok(OrchestratorMessage::Shutdown) => {
return self.shutdown();
}
Ok(OrchestratorMessage::FileChanges(file_changes)) => {
changes.extend(file_changes);
}
Ok(OrchestratorMessage::CheckCompleted { .. })=> {
// disregard any outdated completion message.
}
Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
Err(_) => {
// There are no more senders, no point in waiting for more messages
return;
}
}
},
default(std::time::Duration::from_millis(10)) => {
// No more file changes after 10 ms, send the changes and schedule a new analysis
self.main_loop.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
self.main_loop.send(MainLoopMessage::CheckWorkspace { revision: self.revision}).unwrap();
return;
}
}
}
}
#[allow(clippy::unused_self)]
fn shutdown(&self) {
tracing::trace!("Shutting down orchestrator.");
}
}
/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
CheckWorkspace,
CheckCompleted {
result: Vec<Box<dyn Diagnostic>>,
revision: u64,
},
ApplyChanges(Vec<watch::ChangeEvent>),
CheckWorkspace { revision: usize },
CheckCompleted(Vec<String>),
ApplyChanges(Vec<FileWatcherChange>),
Exit,
}
#[derive(Debug)]
enum OrchestratorMessage {
Run,
Shutdown,
CheckCompleted {
diagnostics: Vec<String>,
revision: usize,
},
FileChanges(Vec<FileWatcherChange>),
}
fn setup_tracing(verbosity: Option<VerbosityLevel>) {
let trace_level = match verbosity {
None => Level::WARN,
Some(VerbosityLevel::Info) => Level::INFO,
Some(VerbosityLevel::Debug) => Level::DEBUG,
Some(VerbosityLevel::Trace) => Level::TRACE,
};
let subscriber = Registry::default().with(
tracing_tree::HierarchicalLayer::default()
.with_indent_lines(true)
.with_indent_amount(2)
.with_bracketed_fields(true)
.with_thread_ids(true)
.with_targets(true)
.with_writer(|| Box::new(std::io::stderr()))
.with_timer(Uptime::default())
.with_filter(LoggingFilter { trace_level }),
);
tracing::subscriber::set_global_default(subscriber).unwrap();
}
struct LoggingFilter {
trace_level: Level,
}
impl LoggingFilter {
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
self.trace_level
} else {
Level::INFO
};
meta.level() <= &filter
}
}
impl<S> Filter<S> for LoggingFilter {
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
self.is_enabled(meta)
}
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
if self.is_enabled(meta) {
Interest::always()
} else {
Interest::never()
}
}
fn max_level_hint(&self) -> Option<LevelFilter> {
Some(LevelFilter::from_level(self.trace_level))
}
}

View File

@@ -1,68 +0,0 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum PythonVersion {
#[value(name = "3.7")]
Py37,
#[value(name = "3.8")]
Py38,
#[default]
#[value(name = "3.9")]
Py39,
#[value(name = "3.10")]
Py310,
#[value(name = "3.11")]
Py311,
#[value(name = "3.12")]
Py312,
#[value(name = "3.13")]
Py313,
}
impl PythonVersion {
const fn as_str(self) -> &'static str {
match self {
Self::Py37 => "3.7",
Self::Py38 => "3.8",
Self::Py39 => "3.9",
Self::Py310 => "3.10",
Self::Py311 => "3.11",
Self::Py312 => "3.12",
Self::Py313 => "3.13",
}
}
}
impl std::fmt::Display for PythonVersion {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.as_str())
}
}
impl From<PythonVersion> for red_knot_python_semantic::PythonVersion {
fn from(value: PythonVersion) -> Self {
match value {
PythonVersion::Py37 => Self::PY37,
PythonVersion::Py38 => Self::PY38,
PythonVersion::Py39 => Self::PY39,
PythonVersion::Py310 => Self::PY310,
PythonVersion::Py311 => Self::PY311,
PythonVersion::Py312 => Self::PY312,
PythonVersion::Py313 => Self::PY313,
}
}
}
#[cfg(test)]
mod tests {
use crate::python_version::PythonVersion;
#[test]
fn same_default_as_python_version() {
assert_eq!(
red_knot_python_semantic::PythonVersion::from(PythonVersion::default()),
red_knot_python_semantic::PythonVersion::default()
);
}
}

View File

@@ -1 +0,0 @@

View File

@@ -1,105 +0,0 @@
//! Code for representing Red Knot's release version number.
use std::fmt;
/// Information about the git repository where Red Knot was built from.
pub(crate) struct CommitInfo {
short_commit_hash: String,
commit_date: String,
commits_since_last_tag: u32,
}
/// Red Knot's version.
pub(crate) struct VersionInfo {
/// Red Knot's version, such as "0.5.1"
version: String,
/// Information about the git commit we may have been built from.
///
/// `None` if not built from a git repo or if retrieval failed.
commit_info: Option<CommitInfo>,
}
impl fmt::Display for VersionInfo {
/// Formatted version information: `<version>[+<commits>] (<commit> <date>)`
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.version)?;
if let Some(ref ci) = self.commit_info {
if ci.commits_since_last_tag > 0 {
write!(f, "+{}", ci.commits_since_last_tag)?;
}
write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
}
Ok(())
}
}
/// Returns information about Red Knot's version.
pub(crate) fn version() -> VersionInfo {
// Environment variables are only read at compile-time
macro_rules! option_env_str {
($name:expr) => {
option_env!($name).map(|s| s.to_string())
};
}
// This version is pulled from Cargo.toml and set by Cargo
let version = option_env_str!("CARGO_PKG_VERSION").unwrap();
// Commit info is pulled from git and set by `build.rs`
let commit_info =
option_env_str!("RED_KNOT_COMMIT_SHORT_HASH").map(|short_commit_hash| CommitInfo {
short_commit_hash,
commit_date: option_env_str!("RED_KNOT_COMMIT_DATE").unwrap(),
commits_since_last_tag: option_env_str!("RED_KNOT_LAST_TAG_DISTANCE")
.as_deref()
.map_or(0, |value| value.parse::<u32>().unwrap_or(0)),
});
VersionInfo {
version,
commit_info,
}
}
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use super::{CommitInfo, VersionInfo};
#[test]
fn version_formatting() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: None,
};
assert_snapshot!(version, @"0.0.0");
}
#[test]
fn version_formatting_with_commit_info() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 0,
}),
};
assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
}
#[test]
fn version_formatting_with_commits_since_last_tag() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 24,
}),
};
assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
}
}

View File

@@ -0,0 +1,111 @@
use std::path::Path;
use anyhow::Context;
use notify::event::{CreateKind, ModifyKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
use ruff_db::system::{SystemPath, SystemPathBuf};
pub struct FileWatcher {
watcher: RecommendedWatcher,
}
pub trait EventHandler: Send + 'static {
fn handle(&self, changes: Vec<FileWatcherChange>);
}
impl<F> EventHandler for F
where
F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
fn handle(&self, changes: Vec<FileWatcherChange>) {
let f = self;
f(changes);
}
}
impl FileWatcher {
pub fn new<E>(handler: E) -> anyhow::Result<Self>
where
E: EventHandler,
{
Self::from_handler(Box::new(handler))
}
fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
let watcher = recommended_watcher(move |event: notify::Result<Event>| {
match event {
Ok(event) => {
// TODO verify that this handles all events correctly
let change_kind = match event.kind {
EventKind::Create(CreateKind::File) => FileChangeKind::Created,
EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::From)) => {
FileChangeKind::Deleted
}
EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::To)) => {
FileChangeKind::Created
}
EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Any)) => {
// TODO Introduce a better catch all event for cases that we don't understand.
FileChangeKind::Created
}
EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Both)) => {
todo!("Handle both create and delete event.");
}
EventKind::Modify(_) => FileChangeKind::Modified,
EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
_ => {
return;
}
};
let mut changes = Vec::new();
for path in event.paths {
if let Some(fs_path) = SystemPath::from_std_path(&path) {
changes
.push(FileWatcherChange::new(fs_path.to_path_buf(), change_kind));
}
}
if !changes.is_empty() {
handler.handle(changes);
}
}
// TODO proper error handling
Err(err) => {
panic!("Error: {err}");
}
}
})
.context("Failed to create file watcher.")?;
Ok(Self { watcher })
}
pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
self.watcher.watch(path, RecursiveMode::Recursive)?;
Ok(())
}
}
#[derive(Clone, Debug)]
pub struct FileWatcherChange {
pub path: SystemPathBuf,
#[allow(unused)]
pub kind: FileChangeKind,
}
impl FileWatcherChange {
pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
Self { path, kind }
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
Created,
Modified,
Deleted,
}

View File

@@ -0,0 +1,344 @@
// TODO: Fix clippy warnings created by salsa macros
#![allow(clippy::used_underscore_binding)]
use std::{collections::BTreeMap, sync::Arc};
use rustc_hash::{FxBuildHasher, FxHashSet};
pub use metadata::{PackageMetadata, WorkspaceMetadata};
use ruff_db::{
files::{system_path_to_file, File},
system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
};
use ruff_python_ast::{name::Name, PySourceType};
use crate::{
db::Db,
lint::{lint_semantic, lint_syntax, Diagnostics},
};
mod metadata;
/// The project workspace as a Salsa ingredient.
///
/// A workspace consists of one or multiple packages. Packages can be nested. A file in a workspace
/// belongs to either no package or exactly one package (files can't belong to multiple packages).
///
/// How workspaces and packages are discovered is TBD. For now, a workspace can be any directory,
/// and it always contains a single package which has the same root as the workspace.
///
/// ## Examples
///
/// ```text
/// app-1/
/// pyproject.toml
/// src/
/// ... python files
///
/// app-2/
/// pyproject.toml
/// src/
/// ... python files
///
/// shared/
/// pyproject.toml
/// src/
/// ... python files
///
/// pyproject.toml
/// ```
///
/// The above project structure has three packages: `app-1`, `app-2`, and `shared`.
/// Each of the packages can define their own settings in their `pyproject.toml` file, but
/// they must be compatible. For example, each package can define a different `requires-python` range,
/// but the ranges must overlap.
///
/// ## How is a workspace different from a program?
/// There are two (related) motivations:
///
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
/// without introducing a cyclic dependency. The workspace is defined in a higher level crate
/// where it can reference these setting types.
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
/// it remains the same workspace. That's why a program is a narrowed view of the workspace, only
/// holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Workspace {
#[id]
#[return_ref]
root_buf: SystemPathBuf,
/// The files that are open in the workspace.
///
/// Setting the open files to a non-`None` value changes `check` to only check the
/// open files rather than all files in the workspace.
#[return_ref]
open_file_set: Option<Arc<FxHashSet<File>>>,
/// The (first-party) packages in this workspace.
#[return_ref]
package_tree: BTreeMap<SystemPathBuf, Package>,
}
/// A first-party package in a workspace.
#[salsa::input]
pub struct Package {
#[return_ref]
pub name: Name,
/// The path to the root directory of the package.
#[id]
#[return_ref]
root_buf: SystemPathBuf,
/// The files that are part of this package.
#[return_ref]
file_set: Arc<FxHashSet<File>>,
// TODO: Add the loaded settings.
}
impl Workspace {
/// Discovers the closest workspace at `path` and returns its metadata.
pub fn from_metadata(db: &dyn Db, metadata: WorkspaceMetadata) -> Self {
let mut packages = BTreeMap::new();
for package in metadata.packages {
packages.insert(package.root.clone(), Package::from_metadata(db, package));
}
Workspace::new(db, metadata.root, None, packages)
}
pub fn root(self, db: &dyn Db) -> &SystemPath {
self.root_buf(db)
}
pub fn packages(self, db: &dyn Db) -> impl Iterator<Item = Package> + '_ {
self.package_tree(db).values().copied()
}
pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
assert_eq!(self.root(db), metadata.root());
let mut old_packages = self.package_tree(db).clone();
let mut new_packages = BTreeMap::new();
for package_metadata in metadata.packages {
let path = package_metadata.root().to_path_buf();
let package = if let Some(old_package) = old_packages.remove(&path) {
old_package.update(db, package_metadata);
old_package
} else {
Package::from_metadata(db, package_metadata)
};
new_packages.insert(path, package);
}
self.set_package_tree(db).to(new_packages);
}
pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
let path = metadata.root().to_path_buf();
if let Some(package) = self.package_tree(db).get(&path).copied() {
package.update(db, metadata);
Ok(())
} else {
Err(anyhow::anyhow!("Package {path} not found"))
}
}
/// Returns the closest package to which the first-party `path` belongs.
///
/// Returns `None` if `path` is outside of any package or isn't a first-party file
/// (e.g. a third-party dependency or an excluded file).
pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
let packages = self.package_tree(db);
let (package_path, package) = packages.range(..path.to_path_buf()).next_back()?;
if path.starts_with(package_path) {
Some(*package)
} else {
None
}
}
/// Checks the workspace: only the open files if any are set, otherwise all files in all packages.
#[tracing::instrument(level = "debug", skip_all)]
pub fn check(self, db: &dyn Db) -> Vec<String> {
let mut result = Vec::new();
if let Some(open_files) = self.open_files(db) {
for file in open_files {
result.extend_from_slice(&check_file(db, *file));
}
} else {
for package in self.packages(db) {
result.extend(package.check(db));
}
}
result
}
/// Opens a file in the workspace.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
#[tracing::instrument(level = "debug", skip(self, db))]
pub fn open_file(self, db: &mut dyn Db, file: File) {
let mut open_files = self.take_open_files(db);
open_files.insert(file);
self.set_open_files(db, open_files);
}
/// Closes a file in the workspace.
#[tracing::instrument(level = "debug", skip(self, db))]
pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
let mut open_files = self.take_open_files(db);
let removed = open_files.remove(&file);
if removed {
self.set_open_files(db, open_files);
}
removed
}
/// Returns the open files in the workspace or `None` if the entire workspace should be checked.
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
self.open_file_set(db).as_deref()
}
/// Sets the open files in the workspace.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
#[tracing::instrument(level = "debug", skip(self, db))]
pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
self.set_open_file_set(db).to(Some(Arc::new(open_files)));
}
/// Takes the open files from the workspace and returns them.
///
/// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
let open_files = self.open_file_set(db).clone();
if let Some(open_files) = open_files {
// Salsa will cancel any pending queries and drop its own reference to `open_files`,
// so that the reference count of `open_files` drops to 1.
self.set_open_file_set(db).to(None);
Arc::try_unwrap(open_files).unwrap()
} else {
FxHashSet::default()
}
}
}
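// A minimal standalone sketch (not part of the original file) of the longest-prefix
// lookup in `Workspace::package` above: `BTreeMap::range(..path)` yields all keys
// sorting strictly before `path`, `next_back()` picks the closest one, and
// `starts_with` rejects mere lexicographic neighbors. Illustrated with `String`
// keys to stay self-contained:
#[allow(dead_code)]
fn closest_root_sketch(roots: &std::collections::BTreeMap<String, u32>, path: &str) -> Option<u32> {
    let (root, id) = roots.range(..path.to_string()).next_back()?;
    path.starts_with(root.as_str()).then_some(*id)
}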
impl Package {
pub fn root(self, db: &dyn Db) -> &SystemPath {
self.root_buf(db)
}
/// Returns `true` if `file` is a first-party file that is part of this package.
pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
self.files(db).contains(&file)
}
pub fn files(self, db: &dyn Db) -> &FxHashSet<File> {
self.file_set(db)
}
pub fn remove_file(self, db: &mut dyn Db, file: File) -> bool {
let mut files_arc = self.file_set(db).clone();
// Set a dummy value. Salsa will cancel any pending queries and drop its own reference to `files`,
// so that the reference count of `files` drops to 1.
self.set_file_set(db).to(Arc::new(FxHashSet::default()));
let files = Arc::get_mut(&mut files_arc).unwrap();
let removed = files.remove(&file);
self.set_file_set(db).to(files_arc);
removed
}
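    // Sketch (an assumption, shown as a comment because salsa owns the storage) of
    // the swap-and-mutate idiom used in `remove_file` above, with plain `Arc`s:
    //
    //     let mut taken = shared.clone();          // 2 strong refs
    //     shared = Arc::new(FxHashSet::default()); // back to 1 strong ref
    //     Arc::get_mut(&mut taken).unwrap().remove(&file);
    //     shared = taken;                          // write the mutated set back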
pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
let mut result = Vec::new();
for file in self.files(db) {
let diagnostics = check_file(db, *file);
result.extend_from_slice(&diagnostics);
}
result
}
fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
let files = discover_package_files(db, metadata.root());
Self::new(db, metadata.name, metadata.root, Arc::new(files))
}
fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
let root = self.root(db);
assert_eq!(root, metadata.root());
let files = discover_package_files(db, root);
self.set_name(db).to(metadata.name);
self.set_file_set(db).to(Arc::new(files));
}
}
pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
let mut diagnostics = Vec::new();
diagnostics.extend_from_slice(lint_syntax(db, file));
diagnostics.extend_from_slice(lint_semantic(db, file));
Diagnostics::from(diagnostics)
}
fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
let paths = std::sync::Mutex::new(Vec::new());
db.system().walk_directory(path).run(|| {
Box::new(|entry| {
match entry {
Ok(entry) => {
// Skip over any non-Python files to avoid creating too many entries in `Files`.
if entry.file_type().is_file()
&& entry
.path()
.extension()
.and_then(PySourceType::try_from_extension)
.is_some()
{
let mut paths = paths.lock().unwrap();
paths.push(entry.into_path());
}
}
Err(error) => {
// TODO Handle error
tracing::error!("Failed to walk path: {error}");
}
}
WalkState::Continue
})
});
let paths = paths.into_inner().unwrap();
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
for path in paths {
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
// We can ignore this.
if let Some(file) = system_path_to_file(db.upcast(), &path) {
files.insert(file);
}
}
files
}
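// Standalone sketch (an assumption for illustration, not in the original file) of
// the two-phase pattern above: collect paths behind a `Mutex` while the walk may
// run on multiple threads, then do the `Db`-dependent work on a single thread.
#[allow(dead_code)]
fn two_phase_walk_sketch(walk: impl Fn(&dyn Fn(String))) -> Vec<String> {
    let paths = std::sync::Mutex::new(Vec::new());
    // Phase 1: the walker invokes this callback, potentially in parallel.
    walk(&|path| paths.lock().unwrap().push(path));
    // Phase 2: the caller now owns the collected paths and can resolve them
    // sequentially (e.g. into `File`s) without sharing the database.
    paths.into_inner().unwrap()
}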

View File

@@ -0,0 +1,68 @@
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
#[derive(Debug)]
pub struct WorkspaceMetadata {
pub(super) root: SystemPathBuf,
/// The (first-party) packages in this workspace.
pub(super) packages: Vec<PackageMetadata>,
}
/// A first-party package in a workspace.
#[derive(Debug)]
pub struct PackageMetadata {
pub(super) name: Name,
/// The path to the root directory of the package.
pub(super) root: SystemPathBuf,
// TODO: Add the loaded package configuration (not the nested ruff settings)
}
impl WorkspaceMetadata {
/// Discovers the closest workspace at `path` and returns its metadata.
pub fn from_path(path: &SystemPath, system: &dyn System) -> anyhow::Result<WorkspaceMetadata> {
let root = if system.is_file(path) {
path.parent().unwrap().to_path_buf()
} else {
path.to_path_buf()
};
if !system.is_directory(&root) {
anyhow::bail!("no workspace found at {:?}", root);
}
// TODO: Discover package name from `pyproject.toml`.
let package_name: Name = path.file_name().unwrap_or("<root>").into();
let package = PackageMetadata {
name: package_name,
root: root.clone(),
};
let workspace = WorkspaceMetadata {
root,
packages: vec![package],
};
Ok(workspace)
}
pub fn root(&self) -> &SystemPath {
&self.root
}
pub fn packages(&self) -> &[PackageMetadata] {
&self.packages
}
}
impl PackageMetadata {
pub fn name(&self) -> &Name {
&self.name
}
pub fn root(&self) -> &SystemPath {
&self.root
}
}
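// Hedged usage sketch (the paths are made up): discovery falls back to the parent
// directory when handed a file, so both calls below would yield the same root.
//
//     let a = WorkspaceMetadata::from_path(SystemPath::new("/app/src"), system)?;
//     let b = WorkspaceMetadata::from_path(SystemPath::new("/app/src/main.py"), system)?;
//     assert_eq!(a.root(), b.root());
//     assert_eq!(a.packages().len(), 1); // a single package rooted at the workspace root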

View File

@@ -1,769 +0,0 @@
use anyhow::Context;
use insta::internals::SettingsBindDropGuard;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use std::path::{Path, PathBuf};
use std::process::Command;
use tempfile::TempDir;
/// Specifying an option on the CLI should take precedence over the same setting in the
/// project's configuration.
#[test]
fn config_override() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
),
(
"test.py",
r#"
import sys
# Access `sys.last_exc` that was only added in Python 3.12
print(sys.last_exc)
"#,
),
])?;
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:unresolved-attribute
--> <temp_dir>/test.py:5:7
|
4 | # Access `sys.last_exc` that was only added in Python 3.12
5 | print(sys.last_exc)
| ^^^^^^^^^^^^ Type `<module 'sys'>` has no attribute `last_exc`
|
----- stderr -----
"###);
assert_cmd_snapshot!(case.command().arg("--python-version").arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
}
/// Paths specified on the CLI are relative to the current working directory and not the project root.
///
/// We test this by adding an extra search path from the CLI to the libs directory when
/// running the CLI from the child directory (using relative paths).
///
/// Project layout:
/// ```
/// - libs
/// |- utils.py
/// - child
/// | - test.py
/// - pyproject.toml
/// ```
///
/// And the command is run in the `child` directory.
#[test]
fn cli_arguments_are_relative_to_the_current_directory() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
),
(
"libs/utils.py",
r#"
def add(a: int, b: int) -> int:
a + b
"#,
),
(
"child/test.py",
r#"
from utils import add
stat = add(10, 15)
"#,
),
])?;
// Make sure that the CLI fails when the `libs` directory is not in the search path.
assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:unresolved-import
--> <temp_dir>/child/test.py:2:6
|
2 | from utils import add
| ^^^^^ Cannot resolve import `utils`
3 |
4 | stat = add(10, 15)
|
----- stderr -----
"###);
assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")).arg("--extra-search-path").arg("../libs"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
}
/// Paths specified in a configuration file are relative to the project root.
///
/// We test this by adding `libs` (as a relative path) to the extra search path in the configuration and run
/// the CLI from a subdirectory.
///
/// Project layout:
/// ```
/// - libs
/// |- utils.py
/// - child
/// | - test.py
/// - pyproject.toml
/// ```
#[test]
fn paths_in_configuration_files_are_relative_to_the_project_root() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
extra-paths = ["libs"]
"#,
),
(
"libs/utils.py",
r#"
def add(a: int, b: int) -> int:
a + b
"#,
),
(
"child/test.py",
r#"
from utils import add
stat = add(10, 15)
"#,
),
])?;
assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
}
/// The rule severity can be changed in the configuration file
#[test]
fn configuration_rule_severity() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:division-by-zero
--> <temp_dir>/test.py:2:5
|
2 | y = 4 / 0
| ^^^^^ Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
warning: lint:possibly-unresolved-reference
--> <temp_dir>/test.py:7:7
|
5 | x = a
6 |
7 | print(x) # possibly-unresolved-reference
| - Name `x` used when possibly not defined
|
----- stderr -----
"###);
case.write_file(
"pyproject.toml",
r#"
[tool.knot.rules]
division-by-zero = "warn" # demote to warn
possibly-unresolved-reference = "ignore"
"#,
)?;
assert_cmd_snapshot!(case.command(), @r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:division-by-zero
--> <temp_dir>/test.py:2:5
|
2 | y = 4 / 0
| ----- Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
----- stderr -----
"###);
Ok(())
}
/// The rule severity can be changed using `--ignore`, `--warn`, and `--error`
#[test]
fn cli_rule_severity() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
import does_not_exit
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:unresolved-import
--> <temp_dir>/test.py:2:8
|
2 | import does_not_exit
| ^^^^^^^^^^^^^ Cannot resolve import `does_not_exit`
3 |
4 | y = 4 / 0
|
error: lint:division-by-zero
--> <temp_dir>/test.py:4:5
|
2 | import does_not_exit
3 |
4 | y = 4 / 0
| ^^^^^ Cannot divide object of type `Literal[4]` by zero
5 |
6 | for a in range(0, y):
|
warning: lint:possibly-unresolved-reference
--> <temp_dir>/test.py:9:7
|
7 | x = a
8 |
9 | print(x) # possibly-unresolved-reference
| - Name `x` used when possibly not defined
|
----- stderr -----
"###);
assert_cmd_snapshot!(
case
.command()
.arg("--ignore")
.arg("possibly-unresolved-reference")
.arg("--warn")
.arg("division-by-zero")
.arg("--warn")
.arg("unresolved-import"),
@r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:unresolved-import
--> <temp_dir>/test.py:2:8
|
2 | import does_not_exit
| ------------- Cannot resolve import `does_not_exit`
3 |
4 | y = 4 / 0
|
warning: lint:division-by-zero
--> <temp_dir>/test.py:4:5
|
2 | import does_not_exit
3 |
4 | y = 4 / 0
| ----- Cannot divide object of type `Literal[4]` by zero
5 |
6 | for a in range(0, y):
|
----- stderr -----
"###
);
Ok(())
}
/// The rule severity can be changed using `--ignore`, `--warn`, and `--error` and
/// values specified last override previous severities.
#[test]
fn cli_rule_severity_precedence() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:division-by-zero
--> <temp_dir>/test.py:2:5
|
2 | y = 4 / 0
| ^^^^^ Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
warning: lint:possibly-unresolved-reference
--> <temp_dir>/test.py:7:7
|
5 | x = a
6 |
7 | print(x) # possibly-unresolved-reference
| - Name `x` used when possibly not defined
|
----- stderr -----
"###);
assert_cmd_snapshot!(
case
.command()
.arg("--error")
.arg("possibly-unresolved-reference")
.arg("--warn")
.arg("division-by-zero")
// Override the error severity with `--ignore`
.arg("--ignore")
.arg("possibly-unresolved-reference"),
@r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:division-by-zero
--> <temp_dir>/test.py:2:5
|
2 | y = 4 / 0
| ----- Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
----- stderr -----
"###
);
Ok(())
}
/// Red Knot warns about unknown rules specified in a configuration file
#[test]
fn configuration_unknown_rules() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.rules]
division-by-zer = "warn" # incorrect rule name
"#,
),
("test.py", "print(10)"),
])?;
assert_cmd_snapshot!(case.command(), @r###"
success: true
exit_code: 0
----- stdout -----
warning: unknown-rule
--> <temp_dir>/pyproject.toml:3:1
|
2 | [tool.knot.rules]
3 | division-by-zer = "warn" # incorrect rule name
| --------------- Unknown lint rule `division-by-zer`
|
----- stderr -----
"###);
Ok(())
}
/// Red Knot warns about unknown rules specified in a CLI argument
#[test]
fn cli_unknown_rules() -> anyhow::Result<()> {
let case = TestCase::with_file("test.py", "print(10)")?;
assert_cmd_snapshot!(case.command().arg("--ignore").arg("division-by-zer"), @r###"
success: true
exit_code: 0
----- stdout -----
warning: unknown-rule: Unknown lint rule `division-by-zer`
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_only_warnings() -> anyhow::Result<()> {
let case = TestCase::with_file("test.py", r"print(x) # [unresolved-reference]")?;
assert_cmd_snapshot!(case.command(), @r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:1:7
|
1 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_only_info() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
from typing_extensions import reveal_type
reveal_type(1)
"#,
)?;
assert_cmd_snapshot!(case.command(), @r###"
success: true
exit_code: 0
----- stdout -----
info: revealed-type
--> <temp_dir>/test.py:3:1
|
2 | from typing_extensions import reveal_type
3 | reveal_type(1)
| -------------- info: Revealed type is `Literal[1]`
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_only_info_and_error_on_warning_is_true() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
from typing_extensions import reveal_type
reveal_type(1)
"#,
)?;
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
success: true
exit_code: 0
----- stdout -----
info: revealed-type
--> <temp_dir>/test.py:3:1
|
2 | from typing_extensions import reveal_type
3 | reveal_type(1)
| -------------- info: Revealed type is `Literal[1]`
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_no_errors_but_error_on_warning_is_true() -> anyhow::Result<()> {
let case = TestCase::with_file("test.py", r"print(x) # [unresolved-reference]")?;
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
success: false
exit_code: 1
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:1:7
|
1 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_both_warnings_and_errors() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
print(x) # [unresolved-reference]
print(4[1]) # [non-subscriptable]
"#,
)?;
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:2:7
|
2 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
3 | print(4[1]) # [non-subscriptable]
|
error: lint:non-subscriptable
--> <temp_dir>/test.py:3:7
|
2 | print(x) # [unresolved-reference]
3 | print(4[1]) # [non-subscriptable]
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_both_warnings_and_errors_and_error_on_warning_is_true() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r###"
print(x) # [unresolved-reference]
print(4[1]) # [non-subscriptable]
"###,
)?;
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
success: false
exit_code: 1
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:2:7
|
2 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
3 | print(4[1]) # [non-subscriptable]
|
error: lint:non-subscriptable
--> <temp_dir>/test.py:3:7
|
2 | print(x) # [unresolved-reference]
3 | print(4[1]) # [non-subscriptable]
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_exit_zero_is_true() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
print(x) # [unresolved-reference]
print(4[1]) # [non-subscriptable]
"#,
)?;
assert_cmd_snapshot!(case.command().arg("--exit-zero"), @r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:2:7
|
2 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
3 | print(4[1]) # [non-subscriptable]
|
error: lint:non-subscriptable
--> <temp_dir>/test.py:3:7
|
2 | print(x) # [unresolved-reference]
3 | print(4[1]) # [non-subscriptable]
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
----- stderr -----
"###);
Ok(())
}
struct TestCase {
_temp_dir: TempDir,
_settings_scope: SettingsBindDropGuard,
project_dir: PathBuf,
}
impl TestCase {
fn new() -> anyhow::Result<Self> {
let temp_dir = TempDir::new()?;
// Canonicalize the tempdir path because macOS uses symlinks for tempdirs
// and that doesn't play well with our snapshot filtering.
let project_dir = temp_dir
.path()
.canonicalize()
.context("Failed to canonicalize project path")?;
let mut settings = insta::Settings::clone_current();
settings.add_filter(&tempdir_filter(&project_dir), "<temp_dir>/");
settings.add_filter(r#"\\(\w\w|\s|\.|")"#, "/$1");
let settings_scope = settings.bind_to_scope();
Ok(Self {
project_dir,
_temp_dir: temp_dir,
_settings_scope: settings_scope,
})
}
fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
let case = Self::new()?;
case.write_files(files)?;
Ok(case)
}
fn with_file(path: impl AsRef<Path>, content: &str) -> anyhow::Result<Self> {
let case = Self::new()?;
case.write_file(path, content)?;
Ok(case)
}
fn write_files<'a>(
&self,
files: impl IntoIterator<Item = (&'a str, &'a str)>,
) -> anyhow::Result<()> {
for (path, content) in files {
self.write_file(path, content)?;
}
Ok(())
}
fn write_file(&self, path: impl AsRef<Path>, content: &str) -> anyhow::Result<()> {
let path = path.as_ref();
let path = self.project_dir.join(path);
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)
.with_context(|| format!("Failed to create directory `{}`", parent.display()))?;
}
std::fs::write(&path, &*ruff_python_trivia::textwrap::dedent(content))
.with_context(|| format!("Failed to write file `{path}`", path = path.display()))?;
Ok(())
}
fn project_dir(&self) -> &Path {
&self.project_dir
}
fn command(&self) -> Command {
let mut command = Command::new(get_cargo_bin("red_knot"));
command.current_dir(&self.project_dir).arg("check");
command
}
}
fn tempdir_filter(path: &Path) -> String {
format!(r"{}\\?/?", regex::escape(path.to_str().unwrap()))
}

File diff suppressed because it is too large

View File

@@ -1,5 +1,5 @@
[package]
name = "red_knot_vendored"
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
@@ -12,20 +12,28 @@ license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }
compact_str = { workspace = true }
camino = { workspace = true }
once_cell = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }
[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }
zip = { workspace = true }
[dev-dependencies]
walkdir = { workspace = true }
ruff_db = { workspace = true, features = ["os"] }
[features]
zstd = ["zip/zstd"]
deflate = ["zip/deflate"]
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }
[lints]
workspace = true

View File

@@ -1,5 +1,9 @@
# Vendored types for the stdlib
# Red Knot
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_vendored/vendor/typeshed`. The file `crates/red_knot_vendored/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
A work-in-progress multi-file module resolver for Ruff.
## Vendored types for the stdlib
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).

View File

@@ -3,10 +3,9 @@
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_vendored/vendor/typeshed` change.
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.
use std::fs::File;
use std::io::Write;
use std::path::Path;
use path_slash::PathExt;
@@ -15,41 +14,24 @@ use zip::write::{FileOptions, ZipWriter};
use zip::CompressionMethod;
const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
const KNOT_EXTENSIONS_STUBS: &str = "knot_extensions/knot_extensions.pyi";
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
/// Recursively zip the contents of the entire typeshed directory and patch typeshed
/// on the fly to include the `knot_extensions` module.
/// Recursively zip the contents of an entire directory.
///
/// This routine is adapted from a recipe at
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
fn write_zipped_typeshed_to(writer: File) -> ZipResult<File> {
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
let mut zip = ZipWriter::new(writer);
// Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
// [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
// by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
// (WASM itself is already slower than a native build for a specific platform).
// We can't use `#[cfg(target_arch = ...)]` here because in a build script it reflects the
// architecture of the machine running the build script, not the architecture of the build target.
// That's why the compression method is selected via the `zstd`/`deflate` crate features instead.
let method = if cfg!(feature = "zstd") {
CompressionMethod::Zstd
} else if cfg!(feature = "deflate") {
CompressionMethod::Deflated
} else {
CompressionMethod::Stored
};
let options = FileOptions::default()
.compression_method(method)
.compression_method(CompressionMethod::Zstd)
.unix_permissions(0o644);
for entry in walkdir::WalkDir::new(TYPESHED_SOURCE_DIR) {
for entry in walkdir::WalkDir::new(directory_path) {
let dir_entry = entry.unwrap();
let absolute_path = dir_entry.path();
let normalized_relative_path = absolute_path
.strip_prefix(Path::new(TYPESHED_SOURCE_DIR))
.strip_prefix(Path::new(directory_path))
.unwrap()
.to_slash()
.expect("Unexpected non-utf8 typeshed path!");
@@ -58,14 +40,9 @@ fn write_zipped_typeshed_to(writer: File) -> ZipResult<File> {
// Some unzip tools unzip files with directory paths correctly, some do not!
if absolute_path.is_file() {
println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
zip.start_file(&*normalized_relative_path, options)?;
zip.start_file(normalized_relative_path, options)?;
let mut f = File::open(absolute_path)?;
std::io::copy(&mut f, &mut zip).unwrap();
// Patch the VERSIONS file to make `knot_extensions` available
if normalized_relative_path == "stdlib/VERSIONS" {
writeln!(&mut zip, "knot_extensions: 3.0-")?;
}
} else if !normalized_relative_path.is_empty() {
// Only if not root! Avoids path spec / warning
// and mapname conversion failed error on unzip
@@ -73,17 +50,11 @@ fn write_zipped_typeshed_to(writer: File) -> ZipResult<File> {
zip.add_directory(normalized_relative_path, options)?;
}
}
// Patch typeshed and add the stubs for the `knot_extensions` module
println!("adding file {KNOT_EXTENSIONS_STUBS} as stdlib/knot_extensions.pyi ...");
zip.start_file("stdlib/knot_extensions.pyi", options)?;
let mut f = File::open(KNOT_EXTENSIONS_STUBS)?;
std::io::copy(&mut f, &mut zip).unwrap();
zip.finish()
}
fn main() {
println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
assert!(
Path::new(TYPESHED_SOURCE_DIR).is_dir(),
"Where is typeshed?"
@@ -98,6 +69,6 @@ fn main() {
// which can't be done at compile time.)
let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");
let zipped_typeshed_file = File::create(zipped_typeshed_location).unwrap();
write_zipped_typeshed_to(zipped_typeshed_file).unwrap();
let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
}

View File

@@ -0,0 +1,126 @@
use ruff_db::Upcast;
use crate::resolver::{
editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
module_resolution_settings, resolve_module_query,
};
use crate::typeshed::parse_typeshed_versions;
#[salsa::jar(db=Db)]
pub struct Jar(
ModuleNameIngredient<'_>,
module_resolution_settings,
editable_install_resolution_paths,
resolve_module_query,
file_to_module,
parse_typeshed_versions,
);
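// Hedged note (an assumption about the salsa-2022 API in use here, not from the
// original source): each tracked function listed in the jar is associated with it
// so salsa can allocate its memoization storage in this crate, roughly:
//
//     #[salsa::tracked(jar = Jar)]
//     fn resolve_module_query(db: &dyn Db, /* ... */) { /* ... */ }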
pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}
#[cfg(test)]
pub(crate) mod tests {
use std::sync;
use salsa::DebugWithDb;
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use crate::vendored_typeshed_stubs;
use super::*;
#[salsa::db(Jar, ruff_db::Jar)]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
system: TestSystem,
vendored: VendoredFileSystem,
files: Files,
events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
}
impl TestDb {
pub(crate) fn new() -> Self {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: vendored_typeshed_stubs().snapshot(),
events: sync::Arc::default(),
files: Files::default(),
}
}
/// Takes the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");
let events = inner.get_mut().unwrap();
std::mem::take(&mut *events)
}
/// Clears the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
pub(crate) fn clear_salsa_events(&mut self) {
self.take_salsa_events();
}
}
impl Upcast<dyn ruff_db::Db> for TestDb {
fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
self
}
}
impl ruff_db::Db for TestDb {
fn vendored(&self) -> &VendoredFileSystem {
&self.vendored
}
fn system(&self) -> &dyn ruff_db::system::System {
&self.system
}
fn files(&self) -> &Files {
&self.files
}
}
impl Db for TestDb {}
impl DbWithTestSystem for TestDb {
fn test_system(&self) -> &TestSystem {
&self.system
}
fn test_system_mut(&mut self) -> &mut TestSystem {
&mut self.system
}
}
impl salsa::Database for TestDb {
fn salsa_event(&self, event: salsa::Event) {
tracing::trace!("event: {:?}", event.debug(self));
let mut events = self.events.lock().unwrap();
events.push(event);
}
}
impl salsa::ParallelDatabase for TestDb {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
system: self.system.snapshot(),
vendored: self.vendored.snapshot(),
files: self.files.snapshot(),
events: self.events.clone(),
})
}
}
}

View File

@@ -0,0 +1,18 @@
mod db;
mod module;
mod module_name;
mod path;
mod resolver;
mod state;
mod typeshed;
#[cfg(test)]
mod testing;
pub use db::{Db, Jar};
pub use module::{Module, ModuleKind};
pub use module_name::ModuleName;
pub use resolver::resolve_module;
pub use typeshed::{
vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
};

View File

@@ -0,0 +1,91 @@
use std::fmt::Formatter;
use std::sync::Arc;
use salsa::DebugWithDb;
use ruff_db::files::File;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::path::{ModuleResolutionPathBuf, ModuleResolutionPathRef};
/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
pub struct Module {
inner: Arc<ModuleInner>,
}
impl Module {
pub(crate) fn new(
name: ModuleName,
kind: ModuleKind,
search_path: Arc<ModuleResolutionPathBuf>,
file: File,
) -> Self {
Self {
inner: Arc::new(ModuleInner {
name,
kind,
search_path,
file,
}),
}
}
/// The absolute name of the module (e.g. `foo.bar`)
pub fn name(&self) -> &ModuleName {
&self.inner.name
}
/// The file containing the source code that defines this module
pub fn file(&self) -> File {
self.inner.file
}
/// The search path from which the module was resolved.
pub(crate) fn search_path(&self) -> ModuleResolutionPathRef {
ModuleResolutionPathRef::from(&*self.inner.search_path)
}
/// Determine whether this module is a single-file module or a package
pub fn kind(&self) -> ModuleKind {
self.inner.kind
}
}
impl std::fmt::Debug for Module {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Module")
.field("name", &self.name())
.field("kind", &self.kind())
.field("file", &self.file())
.field("search_path", &self.search_path())
.finish()
}
}
impl salsa::DebugWithDb<dyn Db> for Module {
fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
f.debug_struct("Module")
.field("name", &self.name())
.field("kind", &self.kind())
.field("file", &self.file().debug(db.upcast()))
.field("search_path", &self.search_path())
.finish()
}
}
#[derive(PartialEq, Eq)]
struct ModuleInner {
name: ModuleName,
kind: ModuleKind,
search_path: Arc<ModuleResolutionPathBuf>,
file: File,
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleKind {
/// A single-file module (e.g. `foo.py` or `foo.pyi`)
Module,
/// A Python package (`foo/__init__.py` or `foo/__init__.pyi`)
Package,
}
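// Minimal consumption sketch (not part of the original file):
#[allow(dead_code)]
fn describe(kind: ModuleKind) -> &'static str {
    match kind {
        // `foo.py` / `foo.pyi`
        ModuleKind::Module => "single-file module",
        // `foo/__init__.py` / `foo/__init__.pyi`
        ModuleKind::Package => "package",
    }
}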

View File

@@ -42,7 +42,7 @@ impl ModuleName {
/// ## Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
/// assert_eq!(ModuleName::new_static(""), None);
@@ -68,7 +68,7 @@ impl ModuleName {
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
/// ```
@@ -82,7 +82,7 @@ impl ModuleName {
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
@@ -101,7 +101,7 @@ impl ModuleName {
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
///
@@ -133,7 +133,7 @@ impl ModuleName {
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
/// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
@@ -168,44 +168,6 @@ impl ModuleName {
};
Some(Self(name))
}
/// Extend `self` with the components of `other`
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// let mut module_name = ModuleName::new_static("foo").unwrap();
/// module_name.extend(&ModuleName::new_static("bar").unwrap());
/// assert_eq!(&module_name, "foo.bar");
/// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
/// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
/// ```
pub fn extend(&mut self, other: &ModuleName) {
self.0.push('.');
self.0.push_str(other);
}
/// Returns an iterator of this module name and all of its parent modules.
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// assert_eq!(
/// ModuleName::new_static("foo.bar.baz").unwrap().ancestors().collect::<Vec<_>>(),
/// vec![
/// ModuleName::new_static("foo.bar.baz").unwrap(),
/// ModuleName::new_static("foo.bar").unwrap(),
/// ModuleName::new_static("foo").unwrap(),
/// ],
/// );
/// ```
pub fn ancestors(&self) -> impl Iterator<Item = Self> {
std::iter::successors(Some(self.clone()), Self::parent)
}
}
impl Deref for ModuleName {

File diff suppressed because it is too large

View File

@@ -0,0 +1,30 @@
use ruff_db::program::TargetVersion;
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use crate::db::Db;
use crate::typeshed::LazyTypeshedVersions;
pub(crate) struct ResolverState<'db> {
pub(crate) db: &'db dyn Db,
pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
pub(crate) target_version: TargetVersion,
}
impl<'db> ResolverState<'db> {
pub(crate) fn new(db: &'db dyn Db, target_version: TargetVersion) -> Self {
Self {
db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version,
}
}
pub(crate) fn system(&self) -> &dyn System {
self.db.system()
}
pub(crate) fn vendored(&self) -> &VendoredFileSystem {
self.db.vendored()
}
}

View File

@@ -1,10 +1,8 @@
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::{ProgramSettings, PythonPlatform, SitePackages};
/// A test case for the module resolver.
///
@@ -14,11 +12,8 @@ pub(crate) struct TestCase<T> {
pub(crate) db: TestDb,
pub(crate) src: SystemPathBuf,
pub(crate) stdlib: T,
// Most test cases only ever need a single `site-packages` directory,
// so this is a single directory instead of a `Vec` of directories,
// like it is in `ruff_db::Program`.
pub(crate) site_packages: SystemPathBuf,
pub(crate) python_version: PythonVersion,
pub(crate) target_version: TargetVersion,
}
/// A `(file_name, file_contents)` tuple
@@ -67,13 +62,13 @@ pub(crate) struct UnspecifiedTypeshed;
/// ```rs
/// let test_case = TestCaseBuilder::new()
/// .with_src_files(...)
/// .with_python_version(...)
/// .with_target_version(...)
/// .build();
/// ```
///
/// For tests checking that standard-library module resolution is working
/// correctly, you should usually create a [`MockedTypeshed`] instance
/// and pass it to the [`TestCaseBuilder::with_mocked_typeshed`] method.
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
/// If you need to check something that involves the vendored typeshed stubs
/// we include as part of the binary, you can instead use the
/// [`TestCaseBuilder::with_vendored_typeshed`] method.
@@ -85,13 +80,13 @@ pub(crate) struct UnspecifiedTypeshed;
/// const TYPESHED = MockedTypeshed { ... };
///
/// let test_case = resolver_test_case()
/// .with_mocked_typeshed(TYPESHED)
/// .with_python_version(...)
/// .with_custom_typeshed(TYPESHED)
/// .with_target_version(...)
/// .build();
///
/// let test_case2 = resolver_test_case()
/// .with_vendored_typeshed()
/// .with_python_version(...)
/// .with_target_version(...)
/// .build();
/// ```
///
@@ -100,8 +95,7 @@ pub(crate) struct UnspecifiedTypeshed;
/// to `()`.
pub(crate) struct TestCaseBuilder<T> {
typeshed_option: T,
python_version: PythonVersion,
python_platform: PythonPlatform,
target_version: TargetVersion,
first_party_files: Vec<FileSpec>,
site_packages_files: Vec<FileSpec>,
}
@@ -119,9 +113,9 @@ impl<T> TestCaseBuilder<T> {
self
}
/// Specify the Python version the module resolver should assume
pub(crate) fn with_python_version(mut self, python_version: PythonVersion) -> Self {
self.python_version = python_version;
/// Specify the target Python version the module resolver should assume
pub(crate) fn with_target_version(mut self, target_version: TargetVersion) -> Self {
self.target_version = target_version;
self
}
@@ -131,8 +125,6 @@ impl<T> TestCaseBuilder<T> {
files: impl IntoIterator<Item = FileSpec>,
) -> SystemPathBuf {
let root = location.as_ref().to_path_buf();
// Make sure to create the directory even if the list of files is empty:
db.memory_file_system().create_directory_all(&root).unwrap();
db.write_files(
files
.into_iter()
@@ -147,8 +139,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
Self {
typeshed_option: UnspecifiedTypeshed,
python_version: PythonVersion::default(),
python_platform: PythonPlatform::default(),
target_version: TargetVersion::default(),
first_party_files: vec![],
site_packages_files: vec![],
}
@@ -158,37 +149,32 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
pub(crate) fn with_vendored_typeshed(self) -> TestCaseBuilder<VendoredTypeshed> {
let TestCaseBuilder {
typeshed_option: _,
python_version,
python_platform,
target_version,
first_party_files,
site_packages_files,
} = self;
TestCaseBuilder {
typeshed_option: VendoredTypeshed,
python_version,
python_platform,
target_version,
first_party_files,
site_packages_files,
}
}
/// Use a mock typeshed directory for this test case
pub(crate) fn with_mocked_typeshed(
pub(crate) fn with_custom_typeshed(
self,
typeshed: MockedTypeshed,
) -> TestCaseBuilder<MockedTypeshed> {
let TestCaseBuilder {
typeshed_option: _,
python_version,
python_platform,
target_version,
first_party_files,
site_packages_files,
} = self;
TestCaseBuilder {
typeshed_option: typeshed,
python_version,
python_platform,
target_version,
first_party_files,
site_packages_files,
}
@@ -200,15 +186,14 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
src,
stdlib: _,
site_packages,
python_version,
} = self.with_mocked_typeshed(MockedTypeshed::default()).build();
target_version,
} = self.with_custom_typeshed(MockedTypeshed::default()).build();
TestCase {
db,
src,
stdlib: (),
site_packages,
python_version,
target_version,
}
}
}
@@ -217,8 +202,7 @@ impl TestCaseBuilder<MockedTypeshed> {
pub(crate) fn build(self) -> TestCase<SystemPathBuf> {
let TestCaseBuilder {
typeshed_option,
python_version,
python_platform,
target_version,
first_party_files,
site_packages_files,
} = self;
@@ -230,27 +214,23 @@ impl TestCaseBuilder<MockedTypeshed> {
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);
Program::from_settings(
Program::new(
&db,
ProgramSettings {
python_version,
python_platform,
search_paths: SearchPathSettings {
extra_paths: vec![],
src_roots: vec![src.clone()],
custom_typeshed: Some(typeshed.clone()),
site_packages: SitePackages::Known(vec![site_packages.clone()]),
},
target_version,
SearchPathSettings {
extra_paths: vec![],
workspace_root: src.clone(),
custom_typeshed: Some(typeshed.clone()),
site_packages: Some(site_packages.clone()),
},
)
.expect("Valid program settings");
);
TestCase {
db,
src,
stdlib: typeshed.join("stdlib"),
site_packages,
python_version,
target_version,
}
}
@@ -276,8 +256,7 @@ impl TestCaseBuilder<VendoredTypeshed> {
pub(crate) fn build(self) -> TestCase<VendoredPathBuf> {
let TestCaseBuilder {
typeshed_option: VendoredTypeshed,
python_version,
python_platform,
target_version,
first_party_files,
site_packages_files,
} = self;
@@ -288,25 +267,23 @@ impl TestCaseBuilder<VendoredTypeshed> {
Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
Program::from_settings(
Program::new(
&db,
ProgramSettings {
python_version,
python_platform,
search_paths: SearchPathSettings {
site_packages: SitePackages::Known(vec![site_packages.clone()]),
..SearchPathSettings::new(vec![src.clone()])
},
target_version,
SearchPathSettings {
extra_paths: vec![],
workspace_root: src.clone(),
custom_typeshed: None,
site_packages: Some(site_packages.clone()),
},
)
.expect("Valid search path settings");
);
TestCase {
db,
src,
stdlib: VendoredPathBuf::from("stdlib"),
site_packages,
python_version,
target_version,
}
}
}

View File

@@ -0,0 +1,8 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(crate) use self::versions::{
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsQueryResult,
};
pub use self::versions::{TypeshedVersionsParseError, TypeshedVersionsParseErrorKind};
mod vendored;
mod versions;

View File

@@ -1,13 +1,14 @@
use once_cell::sync::Lazy;
use ruff_db::vendored::VendoredFileSystem;
use std::sync::LazyLock;
// The file path here is hardcoded in this crate's `build.rs` script.
// Luckily this crate will fail to build if this file isn't available at build time.
static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
pub fn file_system() -> &'static VendoredFileSystem {
static VENDORED_TYPESHED_STUBS: LazyLock<VendoredFileSystem> =
LazyLock::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
pub fn vendored_typeshed_stubs() -> &'static VendoredFileSystem {
static VENDORED_TYPESHED_STUBS: Lazy<VendoredFileSystem> =
Lazy::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
&VENDORED_TYPESHED_STUBS
}
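// Side-by-side sketch (illustrative, not part of the diff) of the two
// lazy-initialization idioms this hunk swaps between; `LazyLock` is std
// (stable since Rust 1.80), while `Lazy` assumes the `once_cell` crate:
//
//     static A: std::sync::LazyLock<u32> = std::sync::LazyLock::new(|| 1);
//     static B: once_cell::sync::Lazy<u32> = once_cell::sync::Lazy::new(|| 1);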
@@ -41,7 +42,7 @@ mod tests {
#[test]
fn typeshed_vfs_consistent_with_vendored_stubs() {
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
let vendored_typeshed_stubs = file_system();
let vendored_typeshed_stubs = vendored_typeshed_stubs();
let mut empty_iterator = true;
for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {

View File

@@ -1,30 +1,96 @@
use std::cell::OnceCell;
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;
use once_cell::sync::Lazy;
use ruff_db::program::TargetVersion;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;
use ruff_db::files::{system_path_to_file, File};
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, PythonVersion};
pub(in crate::module_resolver) fn vendored_typeshed_versions(db: &dyn Db) -> TypeshedVersions {
use super::vendored::vendored_typeshed_stubs;
#[derive(Debug)]
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
impl<'db> LazyTypeshedVersions<'db> {
#[must_use]
pub(crate) fn new() -> Self {
Self(OnceCell::new())
}
/// Query whether a module exists at runtime in the stdlib on a certain Python version.
///
/// Simply probing whether a file exists in typeshed is insufficient for this question,
/// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
/// will still be available (either in a custom typeshed dir or in our vendored copy)
/// even if the user specified Python 3.8 as the target version.
///
/// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
/// as to whether the module exists on the specified target version. However, VERSIONS does not
/// provide comprehensive information on all submodules, meaning that this method sometimes
/// returns [`TypeshedVersionsQueryResult::MaybeExists`].
/// See [`TypeshedVersionsQueryResult`] for more details.
#[must_use]
pub(crate) fn query_module(
&self,
db: &'db dyn Db,
module: &ModuleName,
stdlib_root: Option<&SystemPath>,
target_version: TargetVersion,
) -> TypeshedVersionsQueryResult {
let versions = self.0.get_or_init(|| {
let versions_path = if let Some(system_path) = stdlib_root {
system_path.join("VERSIONS")
} else {
return &VENDORED_VERSIONS;
};
let Some(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
todo!(
"Still need to figure out how to handle VERSIONS files being deleted \
from custom typeshed directories! Expected a file to exist at {versions_path}"
)
};
// TODO(Alex/Micha): If VERSIONS is invalid,
// this should invalidate not just the specific module resolution we're currently attempting,
// but all type inference that depends on any standard-library types.
// Unwrapping here is not correct...
parse_typeshed_versions(db, versions_file).as_ref().unwrap()
});
versions.query_module(module, PyVersion::from(target_version))
}
}
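// Standalone sketch (an assumption, illustrative only) of the `OnceCell` caching
// used by `query_module` above: the first `get_or_init` runs its closure and
// stores the result; later calls return the cached value without running it.
#[allow(dead_code)]
fn once_cell_sketch() {
    let cell: std::cell::OnceCell<u32> = std::cell::OnceCell::new();
    assert_eq!(*cell.get_or_init(|| 42), 42); // closure runs
    assert_eq!(*cell.get_or_init(|| 7), 42); // cached; closure ignored
}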
#[salsa::tracked(return_ref)]
pub(crate) fn parse_typeshed_versions(
db: &dyn Db,
versions_file: File,
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
// TODO: Handle IO errors
let file_content = versions_file
.read_to_string(db.upcast())
.unwrap_or_default();
file_content.parse()
}
static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
TypeshedVersions::from_str(
&db.vendored()
&vendored_typeshed_stubs()
.read_to_string("stdlib/VERSIONS")
.expect("The vendored typeshed stubs should contain a VERSIONS file"),
.unwrap(),
)
.expect("The VERSIONS file in the vendored typeshed stubs should be well-formed")
}
pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
Program::get(db).search_paths(db).typeshed_versions()
}
.unwrap()
});
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct TypeshedVersionsParseError {
pub struct TypeshedVersionsParseError {
line_number: Option<NonZeroU16>,
reason: TypeshedVersionsParseErrorKind,
}
@@ -57,7 +123,7 @@ impl std::error::Error for TypeshedVersionsParseError {
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum TypeshedVersionsParseErrorKind {
pub enum TypeshedVersionsParseErrorKind {
TooManyLines(NonZeroUsize),
UnexpectedNumberOfColons,
InvalidModuleName(String),
@@ -109,13 +175,13 @@ impl TypeshedVersions {
}
#[must_use]
pub(in crate::module_resolver) fn query_module(
fn query_module(
&self,
module: &ModuleName,
python_version: PythonVersion,
target_version: PyVersion,
) -> TypeshedVersionsQueryResult {
if let Some(range) = self.exact(module) {
if range.contains(python_version) {
if range.contains(target_version) {
TypeshedVersionsQueryResult::Exists
} else {
TypeshedVersionsQueryResult::DoesNotExist
@@ -125,7 +191,7 @@ impl TypeshedVersions {
while let Some(module_to_try) = module {
if let Some(range) = self.exact(&module_to_try) {
return {
if range.contains(python_version) {
if range.contains(target_version) {
TypeshedVersionsQueryResult::MaybeExists
} else {
TypeshedVersionsQueryResult::DoesNotExist
@@ -139,7 +205,7 @@ impl TypeshedVersions {
}
}
/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
/// "Does this module exist in the stdlib at runtime on a certain target version?"
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
pub(crate) enum TypeshedVersionsQueryResult {
@@ -257,13 +323,13 @@ impl fmt::Display for TypeshedVersions {
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum PyVersionRange {
AvailableFrom(RangeFrom<PythonVersion>),
AvailableWithin(RangeInclusive<PythonVersion>),
AvailableFrom(RangeFrom<PyVersion>),
AvailableWithin(RangeInclusive<PyVersion>),
}
impl PyVersionRange {
#[must_use]
fn contains(&self, version: PythonVersion) -> bool {
fn contains(&self, version: PyVersion) -> bool {
match self {
Self::AvailableFrom(inner) => inner.contains(&version),
Self::AvailableWithin(inner) => inner.contains(&version),
@@ -277,14 +343,9 @@ impl FromStr for PyVersionRange {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('-').map(str::trim);
match (parts.next(), parts.next(), parts.next()) {
(Some(lower), Some(""), None) => {
let lower = PythonVersion::from_versions_file_string(lower)?;
Ok(Self::AvailableFrom(lower..))
}
(Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
(Some(lower), Some(upper), None) => {
let lower = PythonVersion::from_versions_file_string(lower)?;
let upper = PythonVersion::from_versions_file_string(upper)?;
Ok(Self::AvailableWithin(lower..=upper))
Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
}
_ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
}
@@ -302,20 +363,74 @@ impl fmt::Display for PyVersionRange {
}
}
impl PythonVersion {
fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
struct PyVersion {
major: u8,
minor: u8,
}
impl FromStr for PyVersion {
type Err = TypeshedVersionsParseErrorKind;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('.').map(str::trim);
let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
s.to_string(),
));
};
PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
TypeshedVersionsParseErrorKind::IntegerParsingFailure {
version: s.to_string(),
err: int_parse_error,
let major = match u8::from_str(major) {
Ok(major) => major,
Err(err) => {
return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
version: s.to_string(),
err,
})
}
})
};
let minor = match u8::from_str(minor) {
Ok(minor) => minor,
Err(err) => {
return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
version: s.to_string(),
err,
})
}
};
Ok(Self { major, minor })
}
}
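// Parse sketch (not in the original diff) for version strings as they appear in
// typeshed's VERSIONS file:
#[allow(dead_code)]
fn pyversion_parse_sketch() {
    let v: PyVersion = "3.10".parse().unwrap();
    assert_eq!((v.major, v.minor), (3, 10));
    assert!("3.x".parse::<PyVersion>().is_err()); // IntegerParsingFailure
    assert!("3.1.2".parse::<PyVersion>().is_err()); // UnexpectedNumberOfPeriods
}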
impl fmt::Display for PyVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let PyVersion { major, minor } = self;
write!(f, "{major}.{minor}")
}
}
impl From<TargetVersion> for PyVersion {
fn from(value: TargetVersion) -> Self {
match value {
TargetVersion::Py37 => PyVersion { major: 3, minor: 7 },
TargetVersion::Py38 => PyVersion { major: 3, minor: 8 },
TargetVersion::Py39 => PyVersion { major: 3, minor: 9 },
TargetVersion::Py310 => PyVersion {
major: 3,
minor: 10,
},
TargetVersion::Py311 => PyVersion {
major: 3,
minor: 11,
},
TargetVersion::Py312 => PyVersion {
major: 3,
minor: 12,
},
TargetVersion::Py313 => PyVersion {
major: 3,
minor: 13,
},
}
}
}
@@ -325,8 +440,7 @@ mod tests {
use std::path::Path;
use insta::assert_snapshot;
use crate::db::tests::TestDb;
use ruff_db::program::TargetVersion;
use super::*;
@@ -349,9 +463,12 @@ mod tests {
#[test]
fn can_parse_vendored_versions_file() {
let db = TestDb::new();
let versions_data = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/vendor/typeshed/stdlib/VERSIONS"
));
let versions = vendored_typeshed_versions(&db);
let versions = TypeshedVersions::from_str(versions_data).unwrap();
assert!(versions.len() > 100);
assert!(versions.len() < 1000);
@@ -361,37 +478,36 @@ mod tests {
assert!(versions.contains_exact(&asyncio));
assert_eq!(
versions.query_module(&asyncio, PythonVersion::PY310),
versions.query_module(&asyncio, TargetVersion::Py310.into()),
TypeshedVersionsQueryResult::Exists
);
assert!(versions.contains_exact(&asyncio_staggered));
assert_eq!(
versions.query_module(&asyncio_staggered, PythonVersion::PY38),
versions.query_module(&asyncio_staggered, TargetVersion::Py38.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
versions.query_module(&asyncio_staggered, PythonVersion::PY37),
versions.query_module(&asyncio_staggered, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
assert!(versions.contains_exact(&audioop));
assert_eq!(
versions.query_module(&audioop, PythonVersion::PY312),
versions.query_module(&audioop, TargetVersion::Py312.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
versions.query_module(&audioop, PythonVersion::PY313),
versions.query_module(&audioop, TargetVersion::Py313.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
#[test]
fn typeshed_versions_consistent_with_vendored_stubs() {
let db = TestDb::new();
let vendored_typeshed_versions = vendored_typeshed_versions(&db);
let vendored_typeshed_dir =
Path::new(env!("CARGO_MANIFEST_DIR")).join("../red_knot_vendored/vendor/typeshed");
const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
let mut empty_iterator = true;
@@ -459,11 +575,11 @@ foo: 3.8- # trailing comment
";
let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
assert_eq!(parsed_versions.len(), 3);
assert_snapshot!(parsed_versions.to_string(), @r"
assert_snapshot!(parsed_versions.to_string(), @r###"
bar: 2.7-3.10
bar.baz: 3.1-3.9
foo: 3.8-
"
"###
);
}
@@ -474,15 +590,15 @@ foo: 3.8- # trailing comment
assert!(parsed_versions.contains_exact(&bar));
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY37),
parsed_versions.query_module(&bar, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY310),
parsed_versions.query_module(&bar, TargetVersion::Py310.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY311),
parsed_versions.query_module(&bar, TargetVersion::Py311.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
@@ -494,15 +610,15 @@ foo: 3.8- # trailing comment
assert!(parsed_versions.contains_exact(&foo));
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY37),
parsed_versions.query_module(&foo, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY38),
parsed_versions.query_module(&foo, TargetVersion::Py38.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY311),
parsed_versions.query_module(&foo, TargetVersion::Py311.into()),
TypeshedVersionsQueryResult::Exists
);
}
@@ -514,15 +630,15 @@ foo: 3.8- # trailing comment
assert!(parsed_versions.contains_exact(&bar_baz));
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
parsed_versions.query_module(&bar_baz, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
parsed_versions.query_module(&bar_baz, TargetVersion::Py39.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
parsed_versions.query_module(&bar_baz, TargetVersion::Py310.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
@@ -534,15 +650,15 @@ foo: 3.8- # trailing comment
assert!(!parsed_versions.contains_exact(&bar_eggs));
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
parsed_versions.query_module(&bar_eggs, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::MaybeExists
);
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
parsed_versions.query_module(&bar_eggs, TargetVersion::Py310.into()),
TypeshedVersionsQueryResult::MaybeExists
);
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
parsed_versions.query_module(&bar_eggs, TargetVersion::Py311.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
@@ -554,11 +670,11 @@ foo: 3.8- # trailing comment
assert!(!parsed_versions.contains_exact(&spam));
assert_eq!(
parsed_versions.query_module(&spam, PythonVersion::PY37),
parsed_versions.query_module(&spam, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
assert_eq!(
parsed_versions.query_module(&spam, PythonVersion::PY313),
parsed_versions.query_module(&spam, TargetVersion::Py313.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
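
The tests above pin down a three-way query semantics for typeshed's `VERSIONS` file: a module with an exact entry either `Exists` or `DoesNotExist` depending on whether the target version falls in its range, while an unlisted submodule of a listed package can only ever be `MaybeExists` inside the parent's range. A minimal Python sketch of those semantics, assuming ranges are inclusive on both ends; the `parse`/`query` names are illustrative and not the crate's API:

```python
# Illustrative sketch (not the Rust implementation above): models the
# three-state typeshed VERSIONS query that the tests exercise.
VERSIONS = """\
bar: 2.7-3.10
bar.baz: 3.1-3.9
foo: 3.8-
"""

def parse(text):
    table = {}
    for line in text.splitlines():
        line = line.split("#")[0].strip()  # drop trailing comments
        if not line:
            continue
        module, _, spec = line.partition(":")
        lo, _, hi = spec.strip().partition("-")
        low = tuple(map(int, lo.split(".")))
        high = tuple(map(int, hi.split("."))) if hi else None  # open-ended range
        table[module.strip()] = (low, high)
    return table

def query(table, module, version):
    # Walk from the module itself up through its parent packages.
    parts = module.split(".")
    for end in range(len(parts), 0, -1):
        name = ".".join(parts[:end])
        if name in table:
            low, high = table[name]
            if version < low or (high is not None and version > high):
                return "DoesNotExist"
            # An exact entry is definitive; a parent entry only bounds it.
            return "Exists" if end == len(parts) else "MaybeExists"
    return "DoesNotExist"

table = parse(VERSIONS)
assert query(table, "bar", (3, 7)) == "Exists"
assert query(table, "bar.eggs", (3, 10)) == "MaybeExists"   # parent exists, submodule unlisted
assert query(table, "bar.eggs", (3, 11)) == "DoesNotExist"  # outside the parent's range
assert query(table, "spam", (3, 13)) == "DoesNotExist"
```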

View File

@@ -21,7 +21,7 @@ the project the stubs are for, but instead report them here to typeshed.**
Further documentation on stub files, typeshed, and Python's typing system in
general, can also be found at https://typing.readthedocs.io/en/latest/.
Typeshed supports Python versions 3.9 to 3.13.
Typeshed supports Python versions 3.8 and up.
## Using

View File

@@ -0,0 +1 @@
f863db6bc5242348ceaa6a3bca4e59aa9e62faaa

View File

@@ -20,55 +20,37 @@
__future__: 3.0-
__main__: 3.0-
_ast: 3.0-
_asyncio: 3.0-
_bisect: 3.0-
_blake2: 3.6-
_bootlocale: 3.4-3.9
_bz2: 3.3-
_codecs: 3.0-
_collections_abc: 3.3-
_compat_pickle: 3.1-
_compression: 3.5-
_contextvars: 3.7-
_csv: 3.0-
_ctypes: 3.0-
_curses: 3.0-
_curses_panel: 3.0-
_dbm: 3.0-
_decimal: 3.3-
_dummy_thread: 3.0-3.8
_dummy_threading: 3.0-3.8
_frozen_importlib: 3.0-
_frozen_importlib_external: 3.5-
_gdbm: 3.0-
_hashlib: 3.0-
_heapq: 3.0-
_imp: 3.0-
_interpchannels: 3.13-
_interpqueues: 3.13-
_interpreters: 3.13-
_io: 3.0-
_json: 3.0-
_locale: 3.0-
_lsprof: 3.0-
_lzma: 3.3-
_markupbase: 3.0-
_msi: 3.0-3.12
_multibytecodec: 3.0-
_msi: 3.0-
_operator: 3.4-
_osx_support: 3.0-
_pickle: 3.0-
_posixsubprocess: 3.2-
_py_abc: 3.7-
_pydecimal: 3.5-
_queue: 3.7-
_random: 3.0-
_sitebuiltins: 3.4-
_socket: 3.0- # present in 3.0 at runtime, but not in typeshed
_sqlite3: 3.0-
_ssl: 3.0-
_stat: 3.4-
_struct: 3.0-
_thread: 3.0-
_threading_local: 3.0-
_tkinter: 3.0-
@@ -143,12 +125,6 @@ doctest: 3.0-
dummy_threading: 3.0-3.8
email: 3.0-
encodings: 3.0-
encodings.cp1125: 3.4-
encodings.cp273: 3.4-
encodings.cp858: 3.2-
encodings.koi8_t: 3.5-
encodings.kz1048: 3.5-
encodings.mac_centeuro: 3.0-3.8
ensurepip: 3.0-
enum: 3.4-
errno: 3.0-
@@ -180,15 +156,11 @@ imghdr: 3.0-3.12
imp: 3.0-3.11
importlib: 3.0-
importlib._abc: 3.10-
importlib._bootstrap: 3.0-
importlib._bootstrap_external: 3.5-
importlib.metadata: 3.8-
importlib.metadata._meta: 3.10-
importlib.metadata.diagnose: 3.13-
importlib.readers: 3.10-
importlib.resources: 3.7-
importlib.resources._common: 3.11-
importlib.resources._functional: 3.13-
importlib.resources.abc: 3.11-
importlib.resources.readers: 3.11-
importlib.resources.simple: 3.11-

View File

@@ -2,13 +2,10 @@ import codecs
import sys
from _typeshed import ReadableBuffer
from collections.abc import Callable
from typing import Literal, final, overload, type_check_only
from typing import Literal, overload
from typing_extensions import TypeAlias
# This type is not exposed; it is defined in unicodeobject.c
# At runtime it calls itself builtins.EncodingMap
@final
@type_check_only
class _EncodingMap:
def size(self) -> int: ...

View File

@@ -1,14 +1,14 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import ( # noqa: Y022,Y038
from typing import ( # noqa: Y022,Y038,Y057
AbstractSet as Set,
AsyncGenerator as AsyncGenerator,
AsyncIterable as AsyncIterable,
AsyncIterator as AsyncIterator,
Awaitable as Awaitable,
ByteString as ByteString,
Callable as Callable,
ClassVar,
Collection as Collection,
Container as Container,
Coroutine as Coroutine,
@@ -59,12 +59,8 @@ __all__ = [
"ValuesView",
"Sequence",
"MutableSequence",
"ByteString",
]
if sys.version_info < (3, 14):
from typing import ByteString as ByteString # noqa: Y057
__all__ += ["ByteString"]
if sys.version_info >= (3, 12):
__all__ += ["Buffer"]
@@ -74,8 +70,6 @@ _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers.
@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented
def __eq__(self, value: object, /) -> bool: ...
def __reversed__(self) -> Iterator[_KT_co]: ...
__hash__: ClassVar[None] # type: ignore[assignment]
if sys.version_info >= (3, 13):
def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
if sys.version_info >= (3, 10):
@@ -84,7 +78,6 @@ class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented
@final
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented
def __reversed__(self) -> Iterator[_VT_co]: ...
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
@@ -92,8 +85,6 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented
@final
class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented
def __eq__(self, value: object, /) -> bool: ...
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
__hash__: ClassVar[None] # type: ignore[assignment]
if sys.version_info >= (3, 13):
def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
if sys.version_info >= (3, 10):
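
The `mapping` members gated behind `sys.version_info >= (3, 10)` in this hunk correspond to a real runtime attribute: since Python 3.10, dict views expose a read-only proxy of their backing dict. A quick demonstration:

```python
# Demonstrates the 3.10+ `.mapping` attribute the stub gates on
# `sys.version_info >= (3, 10)`: a read-only view of the backing dict.
import sys

d = {"a": 1, "b": 2}
if sys.version_info >= (3, 10):
    proxy = d.keys().mapping          # a types.MappingProxyType
    assert proxy["a"] == 1
    d["c"] = 3                        # the proxy tracks the dict...
    assert "c" in proxy
    try:
        proxy["d"] = 4                # ...but cannot be written through
    except TypeError:
        pass
```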

View File

@@ -0,0 +1,90 @@
import sys
from _typeshed import SupportsWrite
from collections.abc import Iterable, Iterator
from typing import Any, Final, Literal
from typing_extensions import TypeAlias
__version__: Final[str]
QUOTE_ALL: Literal[1]
QUOTE_MINIMAL: Literal[0]
QUOTE_NONE: Literal[3]
QUOTE_NONNUMERIC: Literal[2]
if sys.version_info >= (3, 12):
QUOTE_STRINGS: Literal[4]
QUOTE_NOTNULL: Literal[5]
# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
# However, using literals in situations like these can cause false-positives (see #7258)
_QuotingType: TypeAlias = int
class Error(Exception): ...
class Dialect:
delimiter: str
quotechar: str | None
escapechar: str | None
doublequote: bool
skipinitialspace: bool
lineterminator: str
quoting: _QuotingType
strict: bool
def __init__(self) -> None: ...
_DialectLike: TypeAlias = str | Dialect | type[Dialect]
class _reader(Iterator[list[str]]):
@property
def dialect(self) -> Dialect: ...
line_num: int
def __next__(self) -> list[str]: ...
class _writer:
@property
def dialect(self) -> Dialect: ...
def writerow(self, row: Iterable[Any]) -> Any: ...
def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ...
def writer(
csvfile: SupportsWrite[str],
dialect: _DialectLike = "excel",
*,
delimiter: str = ",",
quotechar: str | None = '"',
escapechar: str | None = None,
doublequote: bool = True,
skipinitialspace: bool = False,
lineterminator: str = "\r\n",
quoting: _QuotingType = 0,
strict: bool = False,
) -> _writer: ...
def reader(
csvfile: Iterable[str],
dialect: _DialectLike = "excel",
*,
delimiter: str = ",",
quotechar: str | None = '"',
escapechar: str | None = None,
doublequote: bool = True,
skipinitialspace: bool = False,
lineterminator: str = "\r\n",
quoting: _QuotingType = 0,
strict: bool = False,
) -> _reader: ...
def register_dialect(
name: str,
dialect: type[Dialect] = ...,
*,
delimiter: str = ",",
quotechar: str | None = '"',
escapechar: str | None = None,
doublequote: bool = True,
skipinitialspace: bool = False,
lineterminator: str = "\r\n",
quoting: _QuotingType = 0,
strict: bool = False,
) -> None: ...
def unregister_dialect(name: str) -> None: ...
def get_dialect(name: str) -> Dialect: ...
def list_dialects() -> list[str]: ...
def field_size_limit(new_limit: int = ...) -> int: ...
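
For reference, the `QUOTE_*` constants declared here are re-exported by the public `csv` module, and the `quoting` argument affects both writing and reading. A small round-trip using only documented stdlib API:

```python
# QUOTE_NONNUMERIC quotes string fields on write and, on read,
# converts unquoted fields back to floats.
import csv
import io

buf = io.StringIO()
writer = csv.writer(buf, quoting=csv.QUOTE_NONNUMERIC)
writer.writerow(["name", 3.5])
assert buf.getvalue() == '"name",3.5\r\n'

rows = list(csv.reader(io.StringIO(buf.getvalue()), quoting=csv.QUOTE_NONNUMERIC))
assert rows == [["name", 3.5]]   # unquoted fields come back as floats
```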

View File

@@ -0,0 +1,210 @@
import sys
from _typeshed import ReadableBuffer, WriteableBuffer
from abc import abstractmethod
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
from ctypes import CDLL, ArgumentError as ArgumentError
from typing import Any, ClassVar, Generic, TypeVar, overload
from typing_extensions import Self, TypeAlias
if sys.version_info >= (3, 9):
from types import GenericAlias
_T = TypeVar("_T")
_CT = TypeVar("_CT", bound=_CData)
FUNCFLAG_CDECL: int
FUNCFLAG_PYTHONAPI: int
FUNCFLAG_USE_ERRNO: int
FUNCFLAG_USE_LASTERROR: int
RTLD_GLOBAL: int
RTLD_LOCAL: int
if sys.version_info >= (3, 11):
CTYPES_MAX_ARGCOUNT: int
if sys.version_info >= (3, 12):
SIZEOF_TIME_T: int
if sys.platform == "win32":
# Description, Source, HelpFile, HelpContext, scode
_COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None]
class COMError(Exception):
hresult: int
text: str | None
details: _COMError_Details
def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ...
def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ...
FUNCFLAG_HRESULT: int
FUNCFLAG_STDCALL: int
def FormatError(code: int = ...) -> str: ...
def get_last_error() -> int: ...
def set_last_error(value: int) -> int: ...
def LoadLibrary(name: str, load_flags: int = 0, /) -> int: ...
def FreeLibrary(handle: int, /) -> None: ...
class _CDataMeta(type):
# By default mypy complains about the following two methods, because strictly speaking cls
# might not be a Type[_CT]. However this can never actually happen, because the only class that
# uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
class _CData(metaclass=_CDataMeta):
_b_base_: int
_b_needsfree_: bool
_objects: Mapping[Any, int] | None
# At runtime the following classmethods are available only on classes, not
# on instances. This can't be reflected properly in the type system:
#
# Structure.from_buffer(...) # valid at runtime
# Structure(...).from_buffer(...) # invalid at runtime
#
@classmethod
def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ...
@classmethod
def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ...
@classmethod
def from_address(cls, address: int) -> Self: ...
@classmethod
def from_param(cls, obj: Any) -> Self | _CArgObject: ...
@classmethod
def in_dll(cls, library: CDLL, name: str) -> Self: ...
def __buffer__(self, flags: int, /) -> memoryview: ...
def __release_buffer__(self, buffer: memoryview, /) -> None: ...
class _SimpleCData(_CData, Generic[_T]):
value: _T
# The TypeVar can be unsolved here,
# but we can't use overloads without creating many, many mypy false-positive errors
def __init__(self, value: _T = ...) -> None: ... # pyright: ignore[reportInvalidTypeVarUse]
class _CanCastTo(_CData): ...
class _PointerLike(_CanCastTo): ...
class _Pointer(_PointerLike, _CData, Generic[_CT]):
_type_: type[_CT]
contents: _CT
@overload
def __init__(self) -> None: ...
@overload
def __init__(self, arg: _CT) -> None: ...
@overload
def __getitem__(self, key: int, /) -> Any: ...
@overload
def __getitem__(self, key: slice, /) -> list[Any]: ...
def __setitem__(self, key: int, value: Any, /) -> None: ...
def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ...
def pointer(obj: _CT, /) -> _Pointer[_CT]: ...
class _CArgObject: ...
def byref(obj: _CData, offset: int = ...) -> _CArgObject: ...
_ECT: TypeAlias = Callable[[_CData | None, CFuncPtr, tuple[_CData, ...]], _CData]
_PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any]
class CFuncPtr(_PointerLike, _CData):
restype: type[_CData] | Callable[[int], Any] | None
argtypes: Sequence[type[_CData]]
errcheck: _ECT
# Abstract attribute that must be defined on subclasses
_flags_: ClassVar[int]
@overload
def __init__(self) -> None: ...
@overload
def __init__(self, address: int, /) -> None: ...
@overload
def __init__(self, callable: Callable[..., Any], /) -> None: ...
@overload
def __init__(self, func_spec: tuple[str | int, CDLL], paramflags: tuple[_PF, ...] | None = ..., /) -> None: ...
if sys.platform == "win32":
@overload
def __init__(
self, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | None = ..., /
) -> None: ...
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
_GetT = TypeVar("_GetT")
_SetT = TypeVar("_SetT")
class _CField(Generic[_CT, _GetT, _SetT]):
offset: int
size: int
@overload
def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ...
@overload
def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ...
def __set__(self, instance: Any, value: _SetT, /) -> None: ...
class _StructUnionMeta(_CDataMeta):
_fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]]
_pack_: int
_anonymous_: Sequence[str]
def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ...
class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
def __init__(self, *args: Any, **kw: Any) -> None: ...
def __getattr__(self, name: str) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...
class Union(_StructUnionBase): ...
class Structure(_StructUnionBase): ...
class Array(_CData, Generic[_CT]):
@property
@abstractmethod
def _length_(self) -> int: ...
@_length_.setter
def _length_(self, value: int) -> None: ...
@property
@abstractmethod
def _type_(self) -> type[_CT]: ...
@_type_.setter
def _type_(self, value: type[_CT]) -> None: ...
# Note: only available if _CT == c_char
@property
def raw(self) -> bytes: ...
@raw.setter
def raw(self, value: ReadableBuffer) -> None: ...
value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
# TODO These methods cannot be annotated correctly at the moment.
# All of these "Any"s stand for the array's element type, but it's not possible to use _CT
# here, because of a special feature of ctypes.
# By default, when accessing an element of an Array[_CT], the returned object has type _CT.
# However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object
# and converts it to the corresponding Python primitive. For example, when accessing an element
# of an Array[c_int], a Python int object is returned, not a c_int.
# This behavior does *not* apply to subclasses of "simple types".
# If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns
# a MyInt, not an int.
# This special behavior is not easy to model in a stub, so for now all places where
# the array element type would belong are annotated with Any instead.
def __init__(self, *args: Any) -> None: ...
@overload
def __getitem__(self, key: int, /) -> Any: ...
@overload
def __getitem__(self, key: slice, /) -> list[Any]: ...
@overload
def __setitem__(self, key: int, value: Any, /) -> None: ...
@overload
def __setitem__(self, key: slice, value: Iterable[Any], /) -> None: ...
def __iter__(self) -> Iterator[Any]: ...
# Can't inherit from Sized because the metaclass conflict between
# Sized and _CData prevents using _CDataMeta.
def __len__(self) -> int: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
def addressof(obj: _CData, /) -> int: ...
def alignment(obj_or_type: _CData | type[_CData], /) -> int: ...
def get_errno() -> int: ...
def resize(obj: _CData, size: int, /) -> None: ...
def set_errno(value: int, /) -> int: ...
def sizeof(obj_or_type: _CData | type[_CData], /) -> int: ...
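
The long TODO comment in this stub describes the "unboxing" behaviour that makes `Array.__getitem__` hard to annotate. It is easy to observe at runtime: elements of an array of a simple type come back as Python primitives, while elements of an array of a *subclass* of a simple type do not.

```python
# Concrete demonstration of the behaviour the TODO comment above
# describes: simple types are unboxed, their subclasses are not.
import ctypes

arr = (ctypes.c_int * 3)(1, 2, 3)
assert type(arr[0]) is int            # c_int elements unbox to int

class MyInt(ctypes.c_int):
    pass

arr2 = (MyInt * 3)(1, 2, 3)
assert type(arr2[0]) is MyInt         # subclass elements are returned as-is
```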

View File

@@ -1,7 +1,6 @@
import sys
from _typeshed import ReadOnlyBuffer, SupportsRead, SupportsWrite
from curses import _ncurses_version
from typing import Any, final, overload
from _typeshed import ReadOnlyBuffer, SupportsRead
from typing import IO, Any, NamedTuple, final, overload
from typing_extensions import TypeAlias
# NOTE: This module is ordinarily only available on Unix, but the windows-curses
@@ -299,7 +298,7 @@ if sys.version_info >= (3, 9):
def getmouse() -> tuple[int, int, int, int, int]: ...
def getsyx() -> tuple[int, int]: ...
def getwin(file: SupportsRead[bytes], /) -> window: ...
def getwin(file: SupportsRead[bytes], /) -> _CursesWindow: ...
def halfdelay(tenths: int, /) -> None: ...
def has_colors() -> bool: ...
@@ -311,7 +310,7 @@ def has_il() -> bool: ...
def has_key(key: int, /) -> bool: ...
def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ...
def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ...
def initscr() -> window: ...
def initscr() -> _CursesWindow: ...
def intrflush(flag: bool, /) -> None: ...
def is_term_resized(nlines: int, ncols: int, /) -> bool: ...
def isendwin() -> bool: ...
@@ -322,8 +321,8 @@ def meta(yes: bool, /) -> None: ...
def mouseinterval(interval: int, /) -> None: ...
def mousemask(newmask: int, /) -> tuple[int, int]: ...
def napms(ms: int, /) -> int: ...
def newpad(nlines: int, ncols: int, /) -> window: ...
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> window: ...
def newpad(nlines: int, ncols: int, /) -> _CursesWindow: ...
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> _CursesWindow: ...
def nl(flag: bool = True, /) -> None: ...
def nocbreak() -> None: ...
def noecho() -> None: ...
@@ -369,7 +368,11 @@ def tparm(
) -> bytes: ...
def typeahead(fd: int, /) -> None: ...
def unctrl(ch: _ChType, /) -> bytes: ...
def unget_wch(ch: int | str, /) -> None: ...
if sys.version_info < (3, 12) or sys.platform != "darwin":
# The support for macos was dropped in 3.12
def unget_wch(ch: int | str, /) -> None: ...
def ungetch(ch: _ChType, /) -> None: ...
def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ...
def update_lines_cols() -> None: ...
@@ -379,7 +382,7 @@ def use_env(flag: bool, /) -> None: ...
class error(Exception): ...
@final
class window: # undocumented
class _CursesWindow:
encoding: str
@overload
def addch(self, ch: _ChType, attr: int = ...) -> None: ...
@@ -432,9 +435,9 @@ class window: # undocumented
def delch(self, y: int, x: int) -> None: ...
def deleteln(self) -> None: ...
@overload
def derwin(self, begin_y: int, begin_x: int) -> window: ...
def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
@overload
def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
def echochar(self, ch: _ChType, attr: int = ..., /) -> None: ...
def enclose(self, y: int, x: int, /) -> bool: ...
def erase(self) -> None: ...
@@ -444,10 +447,13 @@ class window: # undocumented
def getch(self) -> int: ...
@overload
def getch(self, y: int, x: int) -> int: ...
@overload
def get_wch(self) -> int | str: ...
@overload
def get_wch(self, y: int, x: int) -> int | str: ...
if sys.version_info < (3, 12) or sys.platform != "darwin":
# The support for macos was dropped in 3.12
@overload
def get_wch(self) -> int | str: ...
@overload
def get_wch(self, y: int, x: int) -> int | str: ...
@overload
def getkey(self) -> str: ...
@overload
@@ -494,7 +500,7 @@ class window: # undocumented
def instr(self, y: int, x: int, n: int = ...) -> bytes: ...
def is_linetouched(self, line: int, /) -> bool: ...
def is_wintouched(self) -> bool: ...
def keypad(self, yes: bool, /) -> None: ...
def keypad(self, yes: bool) -> None: ...
def leaveok(self, yes: bool) -> None: ...
def move(self, new_y: int, new_x: int) -> None: ...
def mvderwin(self, y: int, x: int) -> None: ...
@@ -506,18 +512,18 @@ class window: # undocumented
@overload
def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ...
@overload
def overlay(self, destwin: window) -> None: ...
def overlay(self, destwin: _CursesWindow) -> None: ...
@overload
def overlay(
self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
) -> None: ...
@overload
def overwrite(self, destwin: window) -> None: ...
def overwrite(self, destwin: _CursesWindow) -> None: ...
@overload
def overwrite(
self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
) -> None: ...
def putwin(self, file: SupportsWrite[bytes], /) -> None: ...
def putwin(self, file: IO[Any], /) -> None: ...
def redrawln(self, beg: int, num: int, /) -> None: ...
def redrawwin(self) -> None: ...
@overload
@@ -531,13 +537,13 @@ class window: # undocumented
def standend(self) -> None: ...
def standout(self) -> None: ...
@overload
def subpad(self, begin_y: int, begin_x: int) -> window: ...
def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
@overload
def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
@overload
def subwin(self, begin_y: int, begin_x: int) -> window: ...
def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
@overload
def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
def syncdown(self) -> None: ...
def syncok(self, flag: bool) -> None: ...
def syncup(self) -> None: ...
@@ -550,4 +556,10 @@ class window: # undocumented
@overload
def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ...
class _ncurses_version(NamedTuple):
major: int
minor: int
patch: int
ncurses_version: _ncurses_version
window = _CursesWindow # undocumented
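
The `window = _CursesWindow` alias at the end of this stub keeps `curses.window` usable in annotations even though the class itself is renamed. A minimal sketch, assuming a Unix-like terminal where `curses` is available:

```python
# `curses.window` works as an annotation thanks to the alias above.
from __future__ import annotations
import curses

def draw_banner(win: curses.window, text: str) -> None:
    win.addstr(0, 0, text)
    win.refresh()

def main(stdscr: curses.window) -> None:
    draw_banner(stdscr, "hello")
    stdscr.getch()

if __name__ == "__main__":
    curses.wrapper(main)   # wrapper handles initscr()/endwin() cleanup
```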

View File

@@ -1,54 +1,37 @@
import numbers
from _decimal import (
HAVE_CONTEXTVAR as HAVE_CONTEXTVAR,
HAVE_THREADS as HAVE_THREADS,
MAX_EMAX as MAX_EMAX,
MAX_PREC as MAX_PREC,
MIN_EMIN as MIN_EMIN,
MIN_ETINY as MIN_ETINY,
ROUND_05UP as ROUND_05UP,
ROUND_CEILING as ROUND_CEILING,
ROUND_DOWN as ROUND_DOWN,
ROUND_FLOOR as ROUND_FLOOR,
ROUND_HALF_DOWN as ROUND_HALF_DOWN,
ROUND_HALF_EVEN as ROUND_HALF_EVEN,
ROUND_HALF_UP as ROUND_HALF_UP,
ROUND_UP as ROUND_UP,
BasicContext as BasicContext,
DefaultContext as DefaultContext,
ExtendedContext as ExtendedContext,
__libmpdec_version__ as __libmpdec_version__,
__version__ as __version__,
getcontext as getcontext,
localcontext as localcontext,
setcontext as setcontext,
)
import sys
from collections.abc import Container, Sequence
from types import TracebackType
from typing import Any, ClassVar, Literal, NamedTuple, final, overload, type_check_only
from typing import Any, ClassVar, Final, Literal, NamedTuple, overload
from typing_extensions import Self, TypeAlias
_Decimal: TypeAlias = Decimal | int
_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int]
_ComparableNum: TypeAlias = Decimal | float | numbers.Rational
_TrapType: TypeAlias = type[DecimalException]
# At runtime, these classes are implemented in C as part of "_decimal".
# However, they consider themselves to live in "decimal", so we'll put them here.
# This type isn't exposed at runtime. It calls itself decimal.ContextManager
@final
@type_check_only
class _ContextManager:
def __init__(self, new_context: Context) -> None: ...
def __enter__(self) -> Context: ...
def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...
__version__: Final[str]
__libmpdec_version__: Final[str]
class DecimalTuple(NamedTuple):
sign: int
digits: tuple[int, ...]
exponent: int | Literal["n", "N", "F"]
ROUND_DOWN: str
ROUND_HALF_UP: str
ROUND_HALF_EVEN: str
ROUND_CEILING: str
ROUND_FLOOR: str
ROUND_UP: str
ROUND_HALF_DOWN: str
ROUND_05UP: str
HAVE_CONTEXTVAR: bool
HAVE_THREADS: bool
MAX_EMAX: int
MAX_PREC: int
MIN_EMIN: int
MIN_ETINY: int
class DecimalException(ArithmeticError): ...
class Clamped(DecimalException): ...
class InvalidOperation(DecimalException): ...
@@ -64,8 +47,28 @@ class Overflow(Inexact, Rounded): ...
class Underflow(Inexact, Rounded, Subnormal): ...
class FloatOperation(DecimalException, TypeError): ...
def setcontext(context: Context, /) -> None: ...
def getcontext() -> Context: ...
if sys.version_info >= (3, 11):
def localcontext(
ctx: Context | None = None,
*,
prec: int | None = ...,
rounding: str | None = ...,
Emin: int | None = ...,
Emax: int | None = ...,
capitals: int | None = ...,
clamp: int | None = ...,
traps: dict[_TrapType, bool] | None = ...,
flags: dict[_TrapType, bool] | None = ...,
) -> _ContextManager: ...
else:
def localcontext(ctx: Context | None = None) -> _ContextManager: ...
class Decimal:
def __new__(cls, value: _DecimalNew = "0", context: Context | None = None) -> Self: ...
def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ...
@classmethod
def from_float(cls, f: float, /) -> Self: ...
def __bool__(self) -> bool: ...
@@ -163,12 +166,21 @@ class Decimal:
def __reduce__(self) -> tuple[type[Self], tuple[str]]: ...
def __copy__(self) -> Self: ...
def __deepcopy__(self, memo: Any, /) -> Self: ...
def __format__(self, specifier: str, context: Context | None = None, /) -> str: ...
def __format__(self, specifier: str, context: Context | None = ..., /) -> str: ...
class _ContextManager:
new_context: Context
saved_context: Context
def __init__(self, new_context: Context) -> None: ...
def __enter__(self) -> Context: ...
def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...
_TrapType: TypeAlias = type[DecimalException]
class Context:
# TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime,
# even settable attributes like `prec` and `rounding`,
# but that's inexpressible in the stub.
# but that's inexpressable in the stub.
# Type checkers either ignore it or misinterpret it
# if you add a `def __delattr__(self, name: str, /) -> NoReturn` method to the stub
prec: int
@@ -181,14 +193,15 @@ class Context:
flags: dict[_TrapType, bool]
def __init__(
self,
prec: int | None = None,
rounding: str | None = None,
Emin: int | None = None,
Emax: int | None = None,
capitals: int | None = None,
clamp: int | None = None,
flags: dict[_TrapType, bool] | Container[_TrapType] | None = None,
traps: dict[_TrapType, bool] | Container[_TrapType] | None = None,
prec: int | None = ...,
rounding: str | None = ...,
Emin: int | None = ...,
Emax: int | None = ...,
capitals: int | None = ...,
clamp: int | None = ...,
flags: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
traps: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
_ignored_flags: list[_TrapType] | None = ...,
) -> None: ...
def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ...
def clear_flags(self) -> None: ...
@@ -262,3 +275,7 @@ class Context:
def to_integral_exact(self, x: _Decimal, /) -> Decimal: ...
def to_integral_value(self, x: _Decimal, /) -> Decimal: ...
def to_integral(self, x: _Decimal, /) -> Decimal: ...
DefaultContext: Context
BasicContext: Context
ExtendedContext: Context
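
The `_ContextManager` declared in this hunk is what backs `decimal.localcontext()`: entering it installs a copy of the current context and restores the original on exit. A short demonstration using only documented `decimal` API:

```python
# Entering localcontext() yields a temporary Context; changes to it
# do not leak once the `with` block exits.
import decimal
from decimal import Decimal

with decimal.localcontext() as ctx:   # plain form works on all versions
    ctx.prec = 6
    assert Decimal(1) / Decimal(7) == Decimal("0.142857")

# On 3.11+ the same thing can be spelled with keyword arguments:
#     with decimal.localcontext(prec=6) as ctx: ...
assert Decimal(1) / Decimal(7) != Decimal("0.142857")  # default 28-digit precision again
```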

View File

@@ -0,0 +1,164 @@
import sys
from _thread import _excepthook, _ExceptHookArgs
from _typeshed import ProfileFunction, TraceFunction
from collections.abc import Callable, Iterable, Mapping
from types import TracebackType
from typing import Any, TypeVar
_T = TypeVar("_T")
__all__ = [
"get_ident",
"active_count",
"Condition",
"current_thread",
"enumerate",
"main_thread",
"TIMEOUT_MAX",
"Event",
"Lock",
"RLock",
"Semaphore",
"BoundedSemaphore",
"Thread",
"Barrier",
"BrokenBarrierError",
"Timer",
"ThreadError",
"setprofile",
"settrace",
"local",
"stack_size",
"ExceptHookArgs",
"excepthook",
]
def active_count() -> int: ...
def current_thread() -> Thread: ...
def currentThread() -> Thread: ...
def get_ident() -> int: ...
def enumerate() -> list[Thread]: ...
def main_thread() -> Thread: ...
def settrace(func: TraceFunction) -> None: ...
def setprofile(func: ProfileFunction | None) -> None: ...
def stack_size(size: int | None = None) -> int: ...
TIMEOUT_MAX: float
class ThreadError(Exception): ...
class local:
def __getattribute__(self, name: str) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...
def __delattr__(self, name: str) -> None: ...
class Thread:
name: str
daemon: bool
@property
def ident(self) -> int | None: ...
def __init__(
self,
group: None = None,
target: Callable[..., object] | None = None,
name: str | None = None,
args: Iterable[Any] = (),
kwargs: Mapping[str, Any] | None = None,
*,
daemon: bool | None = None,
) -> None: ...
def start(self) -> None: ...
def run(self) -> None: ...
def join(self, timeout: float | None = None) -> None: ...
def getName(self) -> str: ...
def setName(self, name: str) -> None: ...
@property
def native_id(self) -> int | None: ... # only available on some platforms
def is_alive(self) -> bool: ...
if sys.version_info < (3, 9):
def isAlive(self) -> bool: ...
def isDaemon(self) -> bool: ...
def setDaemon(self, daemonic: bool) -> None: ...
class _DummyThread(Thread): ...
class Lock:
def __enter__(self) -> bool: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> bool | None: ...
def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
def release(self) -> None: ...
def locked(self) -> bool: ...
class _RLock:
def __enter__(self) -> bool: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> bool | None: ...
def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
def release(self) -> None: ...
RLock = _RLock
class Condition:
def __init__(self, lock: Lock | _RLock | None = None) -> None: ...
def __enter__(self) -> bool: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> bool | None: ...
def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
def release(self) -> None: ...
def wait(self, timeout: float | None = None) -> bool: ...
def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ...
def notify(self, n: int = 1) -> None: ...
def notify_all(self) -> None: ...
def notifyAll(self) -> None: ...
class Semaphore:
def __init__(self, value: int = 1) -> None: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> bool | None: ...
def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ...
def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ...
if sys.version_info >= (3, 9):
def release(self, n: int = ...) -> None: ...
else:
def release(self) -> None: ...
class BoundedSemaphore(Semaphore): ...
class Event:
def is_set(self) -> bool: ...
def set(self) -> None: ...
def clear(self) -> None: ...
def wait(self, timeout: float | None = None) -> bool: ...
excepthook = _excepthook
ExceptHookArgs = _ExceptHookArgs
class Timer(Thread):
def __init__(
self,
interval: float,
function: Callable[..., object],
args: Iterable[Any] | None = None,
kwargs: Mapping[str, Any] | None = None,
) -> None: ...
def cancel(self) -> None: ...
class Barrier:
@property
def parties(self) -> int: ...
@property
def n_waiting(self) -> int: ...
@property
def broken(self) -> bool: ...
def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ...
def wait(self, timeout: float | None = None) -> int: ...
def reset(self) -> None: ...
def abort(self) -> None: ...
class BrokenBarrierError(RuntimeError): ...
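
As a quick sanity check of the `Thread`/`Event` signatures declared above:

```python
# Exercises Thread(target=..., args=..., daemon=...), Event.set(),
# and Event.wait(timeout=...) exactly as the stub declares them.
import threading

done = threading.Event()
results = []

def worker(n):
    results.append(n * n)
    done.set()

t = threading.Thread(target=worker, args=(7,), daemon=True)
t.start()
assert done.wait(timeout=5.0)   # wait() returns True once set() was called
t.join()
assert results == [49]
```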

View File

@@ -1,5 +1,5 @@
from _typeshed import structseq
from typing import Any, Final, Literal, SupportsIndex, final
from typing import Final, Literal, SupportsIndex, final
from typing_extensions import Buffer, Self
class ChannelError(RuntimeError): ...
@@ -72,15 +72,13 @@ class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, in
@property
def send_released(self) -> bool: ...
def create(unboundop: Literal[1, 2, 3]) -> ChannelID: ...
def create() -> ChannelID: ...
def destroy(cid: SupportsIndex) -> None: ...
def list_all() -> list[ChannelID]: ...
def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ...
def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: ...
def recv(cid: SupportsIndex, default: object = ...) -> object: ...
def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ...
def get_count(cid: SupportsIndex) -> int: ...
def get_info(cid: SupportsIndex) -> ChannelInfo: ...
def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: ...
def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ...

View File

@@ -0,0 +1,16 @@
from typing import Any, SupportsIndex
class QueueError(RuntimeError): ...
class QueueNotFoundError(QueueError): ...
def bind(qid: SupportsIndex) -> None: ...
def create(maxsize: SupportsIndex, fmt: SupportsIndex) -> int: ...
def destroy(qid: SupportsIndex) -> None: ...
def get(qid: SupportsIndex) -> tuple[Any, int]: ...
def get_count(qid: SupportsIndex) -> int: ...
def get_maxsize(qid: SupportsIndex) -> int: ...
def get_queue_defaults(qid: SupportsIndex) -> tuple[int]: ...
def is_full(qid: SupportsIndex) -> bool: ...
def list_all() -> list[tuple[int, int]]: ...
def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex) -> None: ...
def release(qid: SupportsIndex) -> None: ...

View File

@@ -7,7 +7,7 @@ _Configs: TypeAlias = Literal["default", "isolated", "legacy", "empty", ""]
class InterpreterError(Exception): ...
class InterpreterNotFoundError(InterpreterError): ...
class NotShareableError(ValueError): ...
class NotShareableError(Exception): ...
class CrossInterpreterBufferView:
def __buffer__(self, flags: int, /) -> memoryview: ...
@@ -21,9 +21,7 @@ def get_main() -> tuple[int, int]: ...
def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: ...
def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: ...
def whence(id: SupportsIndex) -> int: ...
def exec(
id: SupportsIndex, code: str | types.CodeType | Callable[[], object], shared: bool | None = None, *, restrict: bool = False
) -> None | types.SimpleNamespace: ...
def exec(id: SupportsIndex, code: str, shared: bool | None = None, *, restrict: bool = False) -> None: ...
def call(
id: SupportsIndex,
callable: Callable[..., object],

View File

@@ -1,6 +1,5 @@
from collections.abc import Callable
from typing import Any, final
from typing_extensions import Self
@final
class make_encoder:
@@ -20,8 +19,8 @@ class make_encoder:
def encoder(self) -> Callable[[str], str]: ...
@property
def item_separator(self) -> str: ...
def __new__(
cls,
def __init__(
self,
markers: dict[int, Any] | None,
default: Callable[[Any], Any],
encoder: Callable[[str], str],
@@ -31,7 +30,7 @@ class make_encoder:
sort_keys: bool,
skipkeys: bool,
allow_nan: bool,
) -> Self: ...
) -> None: ...
def __call__(self, obj: object, _current_indent_level: int) -> Any: ...
@final
@@ -43,9 +42,8 @@ class make_scanner:
parse_float: Any
strict: bool
# TODO: 'context' needs the attrs above (ducktype), but not __call__.
def __new__(cls, context: make_scanner) -> Self: ...
def __init__(self, context: make_scanner) -> None: ...
def __call__(self, string: str, index: int) -> tuple[Any, int]: ...
def encode_basestring(s: str, /) -> str: ...
def encode_basestring_ascii(s: str, /) -> str: ...
def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ...
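
The change from `__init__` to `__new__(cls, ...) -> Self` in this hunk follows the typeshed convention for `@final` extension types that construct their instances in `tp_new`: annotating `__new__` lets type checkers infer the constructed type directly from the call. A stub-style sketch of the same shape; `make_thing` is a hypothetical stand-in purely to illustrate the pattern, and `Self` comes from `typing_extensions` on older Pythons:

```python
# Hypothetical stub fragment mirroring the make_scanner/make_encoder shape.
from typing import final
from typing_extensions import Self

@final
class make_thing:
    def __new__(cls, source: str) -> Self: ...
    def __call__(self) -> str: ...
```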

View File

@@ -0,0 +1,100 @@
import sys
from _typeshed import StrPath
from collections.abc import Mapping
LC_CTYPE: int
LC_COLLATE: int
LC_TIME: int
LC_MONETARY: int
LC_NUMERIC: int
LC_ALL: int
CHAR_MAX: int
def setlocale(category: int, locale: str | None = None, /) -> str: ...
def localeconv() -> Mapping[str, int | str | list[int]]: ...
if sys.version_info >= (3, 11):
def getencoding() -> str: ...
def strcoll(os1: str, os2: str, /) -> int: ...
def strxfrm(string: str, /) -> str: ...
# native gettext functions
# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
if sys.platform != "win32":
LC_MESSAGES: int
ABDAY_1: int
ABDAY_2: int
ABDAY_3: int
ABDAY_4: int
ABDAY_5: int
ABDAY_6: int
ABDAY_7: int
ABMON_1: int
ABMON_2: int
ABMON_3: int
ABMON_4: int
ABMON_5: int
ABMON_6: int
ABMON_7: int
ABMON_8: int
ABMON_9: int
ABMON_10: int
ABMON_11: int
ABMON_12: int
DAY_1: int
DAY_2: int
DAY_3: int
DAY_4: int
DAY_5: int
DAY_6: int
DAY_7: int
ERA: int
ERA_D_T_FMT: int
ERA_D_FMT: int
ERA_T_FMT: int
MON_1: int
MON_2: int
MON_3: int
MON_4: int
MON_5: int
MON_6: int
MON_7: int
MON_8: int
MON_9: int
MON_10: int
MON_11: int
MON_12: int
CODESET: int
D_T_FMT: int
D_FMT: int
T_FMT: int
T_FMT_AMPM: int
AM_STR: int
PM_STR: int
RADIXCHAR: int
THOUSEP: int
YESEXPR: int
NOEXPR: int
CRNCYSTR: int
ALT_DIGITS: int
def nl_langinfo(key: int, /) -> str: ...
# This is dependent on `libintl.h` which is a part of `gettext`
# system dependency. These functions might be missing.
# But, we always say that they are present.
def gettext(msg: str, /) -> str: ...
def dgettext(domain: str | None, msg: str, /) -> str: ...
def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ...
def textdomain(domain: str | None, /) -> str: ...
def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ...
def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ...
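
Closing out the `_locale` declarations, a few of these functions can be exercised through the public `locale` module:

```python
# setlocale/localeconv/strcoll as declared above, via the public module.
import locale

locale.setlocale(locale.LC_ALL, "C")       # the portable baseline locale
conv = locale.localeconv()                 # Mapping[str, int | str | list[int]]
assert conv["decimal_point"] == "."
assert locale.strcoll("a", "b") < 0        # "a" sorts before "b" in the C locale
```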

Some files were not shown because too many files have changed in this diff.