Compare commits


6 Commits

Author         SHA1        Message                                      Date
Charlie Marsh  b974d1a746  Move gate                                    2024-07-08 19:30:59 -07:00
Charlie Marsh  b9438eb57a  Merge branch 'main' into 20240708-1515       2024-07-08 19:21:48 -07:00
epenet         75e65254e5  Update snap                                  2024-07-08 16:10:14 +02:00
epenet         225a4169c4  Remove duplicate code                        2024-07-08 16:10:08 +02:00
epenet         dc3773b878  Update snap                                  2024-07-08 15:57:49 +02:00
epenet         60f18bd0ef  [flake8-return] Exempt properties (RET501)   2024-07-08 13:43:41 +00:00
5199 changed files with 40581 additions and 145068 deletions


@@ -20,7 +20,7 @@
"extensions": [
"ms-python.python",
"rust-lang.rust-analyzer",
"fill-labs.dependi",
"serayuzgur.crates",
"tamasfe.even-better-toml",
"Swellaby.vscode-rust-test-adapter",
"charliermarsh.ruff"


@@ -17,7 +17,4 @@ indent_size = 4
trim_trailing_whitespace = false
[*.md]
max_line_length = 100
[*.toml]
indent_size = 4
max_line_length = 100

.github/CODEOWNERS

@@ -17,5 +17,5 @@
/scripts/fuzz-parser/ @AlexWaygood
# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/red_knot* @carljm @MichaReiser @AlexWaygood
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood


@@ -8,32 +8,15 @@
semanticCommits: "disabled",
separateMajorMinor: false,
prHourlyLimit: 10,
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
cargo: {
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
rangeStrategy: "update-lockfile",
},
pep621: {
// The default for this package manager is to only search for `pyproject.toml` files
// found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
},
pip_requirements: {
// The default for this package manager is to run on all requirements.txt files:
// https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
// `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
// outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
// which takes a regex string, `ignorePaths` takes a glob string, so we have to use
// a "negative glob pattern".
// See:
// - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
// - https://docs.renovatebot.com/configuration-options/#ignorepaths
// - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
ignorePaths: ["!docs/requirements*.txt"]
},
npm: {
// The default for this package manager is to only search for `package.json` files
// found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
fileMatch: ["^playground/.*package\\.json$"],
},
"pre-commit": {
@@ -65,14 +48,6 @@
matchManagers: ["cargo"],
enabled: false,
},
{
// `mkdocs-material` requires a manual update to keep the version in sync
// with `mkdocs-material-insider`.
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
matchManagers: ["pip_requirements"],
matchPackagePatterns: ["mkdocs-material"],
enabled: false,
},
{
groupName: "pre-commit dependencies",
matchManagers: ["pre-commit"],


@@ -63,7 +63,7 @@ jobs:
macos-x86_64:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
runs-on: macos-12
steps:
- uses: actions/checkout@v4
with:


@@ -17,21 +17,12 @@ on:
paths:
- .github/workflows/build-docker.yml
env:
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff
jobs:
docker-build:
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
docker-publish:
name: Build Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@v4
with:
@@ -45,6 +36,12 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff
- name: Check tag consistency
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
run: |
@@ -58,233 +55,14 @@ jobs:
echo "Releasing ${version}"
fi
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
tags: |
type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
- name: Normalize Platform Pair (replace / with -)
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_TUPLE=${platform//\//-}" >> $GITHUB_ENV
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: .
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
- name: Export digests
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digests
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_TUPLE }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
docker-publish:
name: Publish Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-build
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
docker buildx imagetools create \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.RUFF_BASE_IMG }}@sha256:%s ' *)
docker-publish-extra:
name: Publish additional Docker image based on ${{ matrix.image-mapping }}
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
strategy:
fail-fast: false
matrix:
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
image-mapping:
- alpine:3.20,alpine3.20,alpine
- debian:bookworm-slim,bookworm-slim,debian-slim
- buildpack-deps:bookworm,bookworm,debian
steps:
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate Dynamic Dockerfile Tags
shell: bash
run: |
set -euo pipefail
# Extract the image and tags from the matrix variable
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"
# Generate Dockerfile content
cat <<EOF > Dockerfile
FROM ${BASE_IMAGE}
COPY --from=${{ env.RUFF_BASE_IMG }}:latest /ruff /usr/local/bin/ruff
ENTRYPOINT []
CMD ["/usr/local/bin/ruff"]
EOF
# Initialize a variable to store all tag docker metadata patterns
TAG_PATTERNS=""
# Loop through all base tags and append its docker metadata pattern to the list
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
IFS=','; for TAG in ${BASE_TAGS}; do
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
done
# Remove the trailing newline from the pattern list
TAG_PATTERNS="${TAG_PATTERNS%\\n}"
# Export image cache name
echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> $GITHUB_ENV
# Export tag patterns using the multiline env var syntax
{
echo "TAG_PATTERNS<<EOF"
echo -e "${TAG_PATTERNS}"
echo EOF
} >> $GITHUB_ENV
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
flavor: |
latest=false
tags: |
${{ env.TAG_PATTERNS }}
- name: Build and push
- name: "Build and push Docker image"
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64
# We do not really need to cache here as the Dockerfile is tiny
#cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
#cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
push: true
tags: ${{ steps.meta.outputs.tags }}
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}
annotations: ${{ steps.meta.outputs.annotations }}
# This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
# show the ruff base image first since GitHub always shows the last updated image digests
# This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
docker-republish:
name: Annotate Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish-extra
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
docker buildx imagetools create \
"${annotations[@]}" \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.RUFF_BASE_IMG }}@sha256:%s ' *)
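
The comments in the manifest-push steps above describe how a single `docker buildx imagetools create` call is assembled: `jq` expands the metadata action's `tags` array into repeated `-t` flags, and `printf` expands the per-platform digest files into `<RUFF_BASE_IMG>@sha256:<digest>` references. Below is a minimal sketch of that expansion with made-up tag and digest values (not real workflow output), assuming `jq` is installed.

```bash
#!/usr/bin/env bash
# Sketch of the jq/printf expansion described in the workflow comments above.
# Tags and digest file names below are illustrative placeholders.
set -euo pipefail

RUFF_BASE_IMG="ghcr.io/astral-sh/ruff"
DOCKER_METADATA_OUTPUT_JSON='{"tags":["ghcr.io/astral-sh/ruff:0.5.1","ghcr.io/astral-sh/ruff:0.5"]}'

# Pretend /tmp/digests holds one (empty) file per per-platform image digest.
mkdir -p /tmp/digests
touch /tmp/digests/aaaa1111 /tmp/digests/bbbb2222
cd /tmp/digests

# jq turns the "tags" array into "-t tag1 -t tag2 ...".
tag_flags=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON")

# printf turns each digest file name into "<RUFF_BASE_IMG>@sha256:<digest> ".
digest_refs=$(printf "${RUFF_BASE_IMG}@sha256:%s " *)

# The workflow would then run this command to create and push the manifest list:
echo docker buildx imagetools create $tag_flags $digest_refs
```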


@@ -16,7 +16,7 @@ env:
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.12"
PYTHON_VERSION: "3.11"
jobs:
determine_changes:
@@ -37,7 +37,7 @@ jobs:
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v45
- uses: tj-actions/changed-files@v44
id: changed
with:
files_yaml: |
@@ -111,11 +111,11 @@ jobs:
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
- name: "Clippy (wasm)"
run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
cargo-test-linux:
name: "cargo test (linux)"
runs-on: depot-ubuntu-22.04-16
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
@@ -142,13 +142,6 @@ jobs:
# Check for broken links in the documentation.
- run: cargo doc --all --no-deps
env:
RUSTDOCFLAGS: "-D warnings"
# Use --document-private-items so that all our doc comments are kept in
# sync, not just public items. Eventually we should do this for all
# crates; for now add crates here as they are warning-clean to prevent
# regression.
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
@@ -157,36 +150,9 @@ jobs:
name: ruff
path: target/debug/ruff
cargo-test-linux-release:
name: "cargo test (linux, release)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest
cargo-test-windows:
name: "cargo test (windows)"
runs-on: windows-latest-xlarge
runs-on: windows-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
@@ -220,26 +186,21 @@ jobs:
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- uses: Swatinem/rust-cache@v2
- name: "Test ruff_wasm"
- name: "Run wasm-pack"
run: |
cd crates/ruff_wasm
wasm-pack test --node
- name: "Test red_knot_wasm"
run: |
cd crates/red_knot_wasm
wasm-pack test --node
cargo-build-release:
name: "cargo build (release)"
runs-on: macos-latest
if: ${{ github.ref == 'refs/heads/main' }}
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
@@ -283,11 +244,11 @@ jobs:
NEXTEST_PROFILE: "ci"
run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest
cargo-fuzz-build:
name: "cargo fuzz build"
cargo-fuzz:
name: "cargo fuzz"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.ref == 'refs/heads/main' }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
@@ -306,7 +267,7 @@ jobs:
- run: cargo fuzz build -s none
fuzz-parser:
name: "fuzz parser"
name: "Fuzz the parser"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
@@ -335,7 +296,7 @@ jobs:
# Make executable, since artifact download doesn't preserve this
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff
python scripts/fuzz-parser/fuzz.py --bin ruff 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
scripts:
name: "test scripts"
@@ -359,7 +320,7 @@ jobs:
ecosystem:
name: "ecosystem"
runs-on: depot-ubuntu-latest-8
runs-on: ubuntu-latest
needs:
- cargo-test-linux
- determine_changes
@@ -546,8 +507,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
@@ -555,15 +514,13 @@ jobs:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@v3
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: uv pip install -r docs/requirements.txt --system
run: pip install -r docs/requirements.txt
- name: "Update README File"
run: python scripts/transform_readme.py --target mkdocs
- name: "Generate docs"
@@ -589,12 +546,12 @@ jobs:
run: rustup show
- name: "Cache rust"
uses: Swatinem/rust-cache@v2
- name: "Run checks"
- name: "Formatter progress"
run: scripts/formatter_ecosystem_checks.sh
- name: "Github step summary"
run: cat target/formatter-ecosystem/stats.txt > $GITHUB_STEP_SUMMARY
run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
- name: "Remove checkouts from cache"
run: rm -r target/formatter-ecosystem
run: rm -r target/progress_projects
check-ruff-lsp:
name: "test ruff-lsp"
@@ -640,7 +597,7 @@ jobs:
just test
benchmarks:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
@@ -662,7 +619,7 @@ jobs:
run: cargo codspeed build --features codspeed -p ruff_benchmark
- name: "Run benchmarks"
uses: CodSpeedHQ/action@v3
uses: CodSpeedHQ/action@v2
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
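
The documentation steps above rely on `RUSTDOCFLAGS` because `cargo doc --check` is not implemented yet (rust-lang/cargo#10025): promoting rustdoc warnings to errors makes broken links in the documentation fail the job. A small local equivalent of that check:

```bash
#!/usr/bin/env bash
# Local equivalent of the documentation check above: rustdoc warnings
# (including broken links) become hard errors.
set -euo pipefail

export RUSTDOCFLAGS="-D warnings"
cargo doc --all --no-deps
```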


@@ -49,7 +49,7 @@ jobs:
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
run: cargo build --locked
- name: Fuzz
run: python scripts/fuzz-parser/fuzz.py --bin ruff $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
create-issue-on-failure:
name: Create an issue if the daily fuzz surfaced any bugs
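
The Fuzz step above feeds the parser-fuzzing script a batch of random seeds generated with `shuf`. A small sketch of that seed selection follows; the script and binary paths mirror the workflow, and the exact flag set differs slightly between the two sides of the diff.

```bash
#!/usr/bin/env bash
# Sketch of the seed selection in the daily fuzz job: draw 1000 random seeds
# from a very large range and pass them to fuzz.py as positional arguments.
set -euo pipefail

seeds=$(shuf -i 0-9999999999999999999 -n 1000)
echo "selected $(wc -w <<< "$seeds") seeds"
python scripts/fuzz-parser/fuzz.py $seeds --test-executable target/debug/ruff
```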


@@ -23,7 +23,6 @@ jobs:
name: pr-number
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
if_no_artifact_found: ignore
allow_forks: true
- name: Parse pull request number
id: pr-number
@@ -44,7 +43,6 @@ jobs:
path: pr/ecosystem
workflow_conclusion: completed
if_no_artifact_found: ignore
allow_forks: true
- name: Generate comment content
id: generate-comment


@@ -21,132 +21,42 @@ jobs:
mkdocs:
runs-on: ubuntu-latest
env:
CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- uses: actions/setup-python@v5
with:
python-version: 3.12
- name: "Set docs version"
run: |
version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
# if version is missing, use 'latest'
if [ -z "$version" ]; then
echo "Using 'latest' as version"
version="latest"
fi
# Use version as display name for now
display_name="$version"
echo "version=$version" >> $GITHUB_ENV
echo "display_name=$display_name" >> $GITHUB_ENV
- name: "Set branch name"
run: |
version="${{ env.version }}"
display_name="${{ env.display_name }}"
timestamp="$(date +%s)"
# create branch_display_name from display_name by replacing all
# characters disallowed in git branch names with hyphens
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
echo "timestamp=$timestamp" >> $GITHUB_ENV
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: pip install -r docs/requirements.txt
- name: "Copy README File"
run: |
python scripts/transform_readme.py --target mkdocs
python scripts/generate_mkdocs.py
- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
- name: "Clone docs repo"
run: |
version="${{ env.version }}"
git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
- name: "Copy docs"
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
- name: "Commit docs"
working-directory: astral-docs
run: |
branch_name="${{ env.branch_name }}"
git config user.name "astral-docs-bot"
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
git checkout -b $branch_name
git add site/ruff
git commit -m "Update ruff documentation for $version"
- name: "Create Pull Request"
working-directory: astral-docs
env:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
version="${{ env.version }}"
display_name="${{ env.display_name }}"
branch_name="${{ env.branch_name }}"
# set the PR title
pull_request_title="Update ruff documentation for $display_name"
# Delete any existing pull requests that are open for this version
# by checking against pull_request_title because the new PR will
# supersede the old one.
gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
xargs -I {} gh pr close {}
# push the branch to GitHub
git push origin $branch_name
# create the PR
gh pr create --base main --head $branch_name \
--title "$pull_request_title" \
--body "Automated documentation update for $display_name" \
--label "documentation"
- name: "Merge Pull Request"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
working-directory: astral-docs
env:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
branch_name="${{ env.branch_name }}"
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
# give the PR a few seconds to be created before trying to auto-merge it
sleep 10
gh pr merge --squash $branch_name
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.7.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
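
The "Set branch name" step above sanitizes the display name with `tr` so it can be embedded in a git branch name. A sketch of that transformation with an illustrative input value:

```bash
#!/usr/bin/env bash
# Sketch of the branch-name sanitization in the "Set branch name" step above.
# The display_name value is an illustrative placeholder.
set -euo pipefail

display_name="v0.5.1 (rc1)"
timestamp="$(date +%s)"

# Replace every character that is not alphanumeric, '.', or '_' with '-',
# then squeeze runs of '-' into a single '-'.
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"

# Prints something like "update-docs-v0.5.1-rc1--1720500000"; the doubled
# hyphen before the timestamp comes from echo's trailing newline, which tr
# also maps to '-'.
echo "update-docs-$branch_display_name-$timestamp"
```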


@@ -29,7 +29,7 @@ jobs:
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
@@ -47,7 +47,7 @@ jobs:
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.12.1
uses: cloudflare/wrangler-action@v3.7.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}


@@ -21,12 +21,14 @@ jobs:
# For PyPI's trusted publishing.
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@v3
- uses: actions/download-artifact@v4
with:
pattern: wheels-*
path: wheels
merge-multiple: true
- name: Publish to PyPi
run: uv publish -v wheels/*
uses: pypa/gh-action-pypi-publish@release/v1
with:
skip-existing: true
packages-dir: wheels
verbose: true


@@ -1,55 +0,0 @@
# Build and publish ruff-api for wasm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
# job within `cargo-dist`.
name: "Build and publish wasm"
on:
workflow_dispatch:
workflow_call:
inputs:
plan:
required: true
type: string
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
jobs:
ruff_wasm:
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
strategy:
matrix:
target: [web, bundler, nodejs]
fail-fast: false
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
- name: "Rename generated package"
run: | # Replace the package name w/ jq
jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
mv /tmp/package.json crates/ruff_wasm/pkg
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@v4
with:
node-version: 20
registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --dry-run crates/ruff_wasm/pkg
- name: "Publish"
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
run: npm publish --provenance --access public crates/ruff_wasm/pkg
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
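
The "Rename generated package" step above uses `jq` to rewrite the `name` field that `wasm-pack` writes into `package.json`, then swaps the rewritten file back into place. A minimal sketch; the target value and paths mirror the workflow but are illustrative here.

```bash
#!/usr/bin/env bash
# Sketch of the package rename: jq rewrites the "name" field and the result
# replaces the original package.json produced by wasm-pack.
set -euo pipefail

target="nodejs"                 # one of: web, bundler, nodejs
pkg_dir="crates/ruff_wasm/pkg"

jq --arg name "@astral-sh/ruff-wasm-${target}" '.name = $name' "$pkg_dir/package.json" > /tmp/package.json
mv /tmp/package.json "$pkg_dir/package.json"
```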


@@ -1,12 +1,10 @@
# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with dist (archives, installers, hashes)
# * builds artifacts with cargo-dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
@@ -24,10 +22,10 @@ permissions:
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't dist-able).
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (dist-able) packages in the workspace with that version (this mode is
# (cargo-dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
@@ -48,7 +46,7 @@ on:
type: string
jobs:
# Run 'dist plan' (or host) to determine what tasks we need to do
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
plan:
runs-on: "ubuntu-20.04"
outputs:
@@ -62,16 +60,16 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install dist
- name: Install cargo-dist
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
- name: Cache dist
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
- name: Cache cargo-dist
uses: actions/upload-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/dist
path: ~/.cargo/bin/cargo-dist
# sure would be cool if github gave us proper conditionals...
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
# functionality based on whether this is a pull_request, and whether it's from a fork.
@@ -79,8 +77,8 @@ jobs:
# but also really annoying to build CI around when it needs secrets to work right.)
- id: plan
run: |
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "dist ran successfully"
cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "cargo dist ran successfully"
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
@@ -124,12 +122,12 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached dist
- name: Install cached cargo-dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
- run: chmod +x ~/.cargo/bin/cargo-dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@v4
@@ -140,8 +138,8 @@ jobs:
- id: cargo-dist
shell: bash
run: |
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "dist ran successfully"
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "cargo dist ran successfully"
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
@@ -174,12 +172,12 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached dist
- name: Install cached cargo-dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
- run: chmod +x ~/.cargo/bin/cargo-dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@v4
@@ -191,7 +189,7 @@ jobs:
- id: host
shell: bash
run: |
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
echo "artifacts uploaded and released successfully"
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
@@ -216,32 +214,16 @@ jobs:
"id-token": "write"
"packages": "write"
custom-publish-wasm:
needs:
- plan
- host
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
uses: ./.github/workflows/publish-wasm.yml
with:
plan: ${{ needs.plan.outputs.val }}
secrets: inherit
# publish jobs get escalated permissions
permissions:
"contents": "read"
"id-token": "write"
"packages": "write"
# Create a GitHub Release while uploading all files to it
announce:
needs:
- plan
- host
- custom-publish-pypi
- custom-publish-wasm
# use "always() && ..." to allow us to wait for all publish jobs while
# still allowing individual publish jobs to skip themselves (for prereleases).
# "host" however must run to completion, no skipping allowed!
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
runs-on: "ubuntu-20.04"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
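
The installer step in the plan job above explains why it pins `shell: bash`: bash's `pipefail` keeps a failing `curl` in `curl ... | sh` from being masked by the exit status of `sh`. A tiny sketch of that behavior, with `false` standing in for a failed download:

```bash
#!/usr/bin/env bash
# Demonstrates why the plan job requests bash: with pipefail, a failure on the
# left side of a pipe fails the whole pipeline instead of being hidden.
set -o pipefail

# `false` stands in for a failing curl; `cat` stands in for `sh` reading empty input.
if ! false | cat; then
  echo "download failure propagated through the pipe (caught thanks to pipefail)"
fi
```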


@@ -37,13 +37,13 @@ jobs:
- name: Sync typeshed
id: sync
run: |
rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
mkdir ruff/crates/red_knot_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}

.gitignore

@@ -21,14 +21,6 @@ flamegraph.svg
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*
# samply profiles
profile.json
# tracing-flame traces
tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg
###
# Rust.gitignore
###


@@ -14,9 +14,6 @@ MD041: false
# MD013/line-length
MD013: false
# MD014/commands-show-output
MD014: false
# MD024/no-duplicate-heading
MD024:
# Allow when nested under different parents e.g. CHANGELOG.md


@@ -1,13 +1,10 @@
fail_fast: false
fail_fast: true
exclude: |
(?x)^(
crates/red_knot_vendored/vendor/.*|
crates/red_knot_workspace/resources/.*|
crates/red_knot_module_resolver/vendor/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff_notebook/resources/.*|
crates/ruff_server/resources/.*|
crates/ruff/resources/.*|
crates/ruff_python_formatter/resources/.*|
crates/ruff_python_formatter/tests/snapshots/.*|
@@ -17,18 +14,17 @@ exclude: |
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.23
rev: v0.18
hooks:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.18
rev: 0.7.17
hooks:
- id: mdformat
additional_dependencies:
- mdformat-mkdocs
- mdformat-admon
- mdformat-footnote
exclude: |
(?x)^(
docs/formatter/black\.md
@@ -36,7 +32,7 @@ repos:
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.42.0
rev: v0.41.0
hooks:
- id: markdownlint-fix
exclude: |
@@ -45,21 +41,8 @@ repos:
| docs/\w+\.md
)$
- repo: https://github.com/adamchainz/blacken-docs
rev: 1.19.1
hooks:
- id: blacken-docs
args: ["--pyi", "--line-length", "130"]
files: '^crates/.*/resources/mdtest/.*\.md'
exclude: |
(?x)^(
.*?invalid(_.+)*_syntax\.md
)$
additional_dependencies:
- black==24.10.0
- repo: https://github.com/crate-ci/typos
rev: v1.27.3
rev: v1.23.1
hooks:
- id: typos
@@ -73,17 +56,22 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.4
rev: v0.5.1
hooks:
- id: ruff-format
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi]
require_serial: true
exclude: |
(?x)^(
crates/ruff_linter/resources/.*|
crates/ruff_python_formatter/resources/.*
)$
# Prettier
- repo: https://github.com/rbubley/mirrors-prettier
rev: v3.3.3
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
hooks:
- id: prettier
types: [yaml]


@@ -1,81 +1,5 @@
# Breaking Changes
## 0.8.0
- **Default to Python 3.9**
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
- **Changed location of `pydoclint` diagnostics**
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first line of the problematic docstring. Previously, this was not the case.
If you've opted into these preview rules but have them suppressed using
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
some places, this change may mean that you need to move the `noqa` suppression
comments. Most users should be unaffected by this change.
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
- **Changes to the line width calculation**
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
## 0.7.0
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
[`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
instead.
## 0.6.0
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):
```toml
[tool.ruff.lint.per-file-ignores]
"*.ipynb" = ["E501"] # disable line-too-long in notebooks
```
If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
section-specific `exclude` option to do so. For example, the following would only lint Jupyter
Notebook files and not format them:
```toml
[tool.ruff.format]
exclude = ["*.ipynb"]
```
And, conversely, the following would only format Jupyter Notebook files and not lint them:
```toml
[tool.ruff.lint]
exclude = ["*.ipynb"]
```
You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:
```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```
## 0.5.0
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)


@@ -1,846 +1,5 @@
# Changelog
## 0.8.0
Check out the [blog post](https://astral.sh/blog/ruff-v0.8.0) for a migration guide and overview of the changes!
### Breaking changes
See also, the "Remapped rules" section which may result in disabled rules.
- **Default to Python 3.9**
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
- **Changed location of `pydoclint` diagnostics**
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first line of the problematic docstring. Previously, this was not the case.
If you've opted into these preview rules but have them suppressed using
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
some places, this change may mean that you need to move the `noqa` suppression
comments. Most users should be unaffected by this change.
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
- **Changes to the line width calculation**
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
### Removed Rules
The following deprecated rules have been removed:
- [`missing-type-self`](https://docs.astral.sh/ruff/rules/missing-type-self/) (`ANN101`)
- [`missing-type-cls`](https://docs.astral.sh/ruff/rules/missing-type-cls/) (`ANN102`)
- [`syntax-error`](https://docs.astral.sh/ruff/rules/syntax-error/) (`E999`)
- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)
### Remapped rules
The following rules have been remapped to new rule codes:
- [`flake8-type-checking`](https://docs.astral.sh/ruff/rules/#flake8-type-checking-tc): `TCH` to `TC`
### Stabilization
The following rules have been stabilized and are no longer in preview:
- [`builtin-import-shadowing`](https://docs.astral.sh/ruff/rules/builtin-import-shadowing/) (`A004`)
- [`mutable-contextvar-default`](https://docs.astral.sh/ruff/rules/mutable-contextvar-default/) (`B039`)
- [`fast-api-redundant-response-model`](https://docs.astral.sh/ruff/rules/fast-api-redundant-response-model/) (`FAST001`)
- [`fast-api-non-annotated-dependency`](https://docs.astral.sh/ruff/rules/fast-api-non-annotated-dependency/) (`FAST002`)
- [`dict-index-missing-items`](https://docs.astral.sh/ruff/rules/dict-index-missing-items/) (`PLC0206`)
- [`pep484-style-positional-only-parameter`](https://docs.astral.sh/ruff/rules/pep484-style-positional-only-parameter/) (`PYI063`)
- [`redundant-final-literal`](https://docs.astral.sh/ruff/rules/redundant-final-literal/) (`PYI064`)
- [`bad-version-info-order`](https://docs.astral.sh/ruff/rules/bad-version-info-order/) (`PYI066`)
- [`parenthesize-chained-operators`](https://docs.astral.sh/ruff/rules/parenthesize-chained-operators/) (`RUF021`)
- [`unsorted-dunder-all`](https://docs.astral.sh/ruff/rules/unsorted-dunder-all/) (`RUF022`)
- [`unsorted-dunder-slots`](https://docs.astral.sh/ruff/rules/unsorted-dunder-slots/) (`RUF023`)
- [`assert-with-print-message`](https://docs.astral.sh/ruff/rules/assert-with-print-message/) (`RUF030`)
- [`unnecessary-default-type-args`](https://docs.astral.sh/ruff/rules/unnecessary-default-type-args/) (`UP043`)
The following behaviors have been stabilized:
- [`ambiguous-variable-name`](https://docs.astral.sh/ruff/rules/ambiguous-variable-name/) (`E741`): Violations in stub files are now ignored. Stub authors typically don't control variable names.
- [`printf-string-formatting`](https://docs.astral.sh/ruff/rules/printf-string-formatting/) (`UP031`): Report all `printf`-like usages even if no autofix is available
The following fixes have been stabilized:
- [`zip-instead-of-pairwise`](https://docs.astral.sh/ruff/rules/zip-instead-of-pairwise/) (`RUF007`)
### Preview features
- \[`flake8-datetimez`\] Exempt `min.time()` and `max.time()` (`DTZ901`) ([#14394](https://github.com/astral-sh/ruff/pull/14394))
- \[`flake8-pie`\] Mark fix as unsafe if the following statement is a string literal (`PIE790`) ([#14393](https://github.com/astral-sh/ruff/pull/14393))
- \[`flake8-pyi`\] New rule `redundant-none-literal` (`PYI061`) ([#14316](https://github.com/astral-sh/ruff/pull/14316))
- \[`flake8-pyi`\] Add autofix for `redundant-numeric-union` (`PYI041`) ([#14273](https://github.com/astral-sh/ruff/pull/14273))
- \[`ruff`\] New rule `map-int-version-parsing` (`RUF048`) ([#14373](https://github.com/astral-sh/ruff/pull/14373))
- \[`ruff`\] New rule `redundant-bool-literal` (`RUF038`) ([#14319](https://github.com/astral-sh/ruff/pull/14319))
- \[`ruff`\] New rule `unraw-re-pattern` (`RUF039`) ([#14446](https://github.com/astral-sh/ruff/pull/14446))
- \[`pycodestyle`\] Exempt `pytest.importorskip()` calls (`E402`) ([#14474](https://github.com/astral-sh/ruff/pull/14474))
- \[`pylint`\] Autofix suggests using sets when possible (`PLR1714`) ([#14372](https://github.com/astral-sh/ruff/pull/14372))
### Rule changes
- [`invalid-pyproject-toml`](https://docs.astral.sh/ruff/rules/invalid-pyproject-toml/) (`RUF200`): Updated to reflect the provisionally accepted [PEP 639](https://peps.python.org/pep-0639/).
- \[`flake8-pyi`\] Avoid panic in unfixable case (`PYI041`) ([#14402](https://github.com/astral-sh/ruff/pull/14402))
- \[`flake8-type-checking`\] Correctly handle quotes in subscript expression when generating an autofix ([#14371](https://github.com/astral-sh/ruff/pull/14371))
- \[`pylint`\] Suggest correct autofix for `__contains__` (`PLC2801`) ([#14424](https://github.com/astral-sh/ruff/pull/14424))
### Configuration
- Ruff now emits a warning instead of an error when a configuration [`ignore`](https://docs.astral.sh/ruff/settings/#lint_ignore)s a rule that has been removed ([#14435](https://github.com/astral-sh/ruff/pull/14435))
- Ruff now validates that `lint.flake8-import-conventions.aliases` only uses valid module names and aliases ([#14477](https://github.com/astral-sh/ruff/pull/14477))
## 0.7.4
### Preview features
- \[`flake8-datetimez`\] Detect usages of `datetime.max`/`datetime.min` (`DTZ901`) ([#14288](https://github.com/astral-sh/ruff/pull/14288))
- \[`flake8-logging`\] Implement `root-logger-calls` (`LOG015`) ([#14302](https://github.com/astral-sh/ruff/pull/14302))
- \[`flake8-no-pep420`\] Detect empty implicit namespace packages (`INP001`) ([#14236](https://github.com/astral-sh/ruff/pull/14236))
- \[`flake8-pyi`\] Add "replace with `Self`" fix (`PYI019`) ([#14238](https://github.com/astral-sh/ruff/pull/14238))
- \[`perflint`\] Implement quick-fix for `manual-list-comprehension` (`PERF401`) ([#13919](https://github.com/astral-sh/ruff/pull/13919))
- \[`pylint`\] Implement `shallow-copy-environ` (`W1507`) ([#14241](https://github.com/astral-sh/ruff/pull/14241))
- \[`ruff`\] Implement `none-not-at-end-of-union` (`RUF036`) ([#14314](https://github.com/astral-sh/ruff/pull/14314))
- \[`ruff`\] Implement `unsafe-markup-call` from the `flake8-markupsafe` plugin (`RUF035`) ([#14224](https://github.com/astral-sh/ruff/pull/14224))
- \[`ruff`\] Report problems for `attrs` dataclasses (`RUF008`, `RUF009`) ([#14327](https://github.com/astral-sh/ruff/pull/14327))
### Rule changes
- \[`flake8-boolean-trap`\] Exclude dunder methods that define operators (`FBT001`) ([#14203](https://github.com/astral-sh/ruff/pull/14203))
- \[`flake8-pyi`\] Add "replace with `Self`" fix (`PYI034`) ([#14217](https://github.com/astral-sh/ruff/pull/14217))
- \[`flake8-pyi`\] Always autofix `duplicate-union-members` (`PYI016`) ([#14270](https://github.com/astral-sh/ruff/pull/14270))
- \[`flake8-pyi`\] Improve autofix for nested and mixed type unions for `unnecessary-type-union` (`PYI055`) ([#14272](https://github.com/astral-sh/ruff/pull/14272))
- \[`flake8-pyi`\] Mark fix as unsafe when type annotation contains comments for `duplicate-literal-member` (`PYI062`) ([#14268](https://github.com/astral-sh/ruff/pull/14268))
### Server
- Use the current working directory to resolve settings from `ruff.configuration` ([#14352](https://github.com/astral-sh/ruff/pull/14352))
### Bug fixes
- Avoid conflicts between `PLC014` (`useless-import-alias`) and `I002` (`missing-required-import`) by considering `lint.isort.required-imports` for `PLC014` ([#14287](https://github.com/astral-sh/ruff/pull/14287))
- \[`flake8-type-checking`\] Skip quoting annotation if it becomes invalid syntax (`TCH001`)
- \[`flake8-pyi`\] Avoid using `typing.Self` in stub files pre-Python 3.11 (`PYI034`) ([#14230](https://github.com/astral-sh/ruff/pull/14230))
- \[`flake8-pytest-style`\] Flag `pytest.raises` call with keyword argument `expected_exception` (`PT011`) ([#14298](https://github.com/astral-sh/ruff/pull/14298))
- \[`flake8-simplify`\] Infer "unknown" truthiness for literal iterables whose items are all unpacks (`SIM222`) ([#14263](https://github.com/astral-sh/ruff/pull/14263))
- \[`flake8-type-checking`\] Fix false positives for `typing.Annotated` (`TCH001`) ([#14311](https://github.com/astral-sh/ruff/pull/14311))
- \[`pylint`\] Allow `await` at the top-level scope of a notebook (`PLE1142`) ([#14225](https://github.com/astral-sh/ruff/pull/14225))
- \[`pylint`\] Fix miscellaneous issues in `await-outside-async` detection (`PLE1142`) ([#14218](https://github.com/astral-sh/ruff/pull/14218))
- \[`pyupgrade`\] Avoid applying PEP 646 rewrites in invalid contexts (`UP044`) ([#14234](https://github.com/astral-sh/ruff/pull/14234))
- \[`pyupgrade`\] Detect permutations in redundant open modes (`UP015`) ([#14255](https://github.com/astral-sh/ruff/pull/14255))
- \[`refurb`\] Avoid triggering `hardcoded-string-charset` for reordered sets (`FURB156`) ([#14233](https://github.com/astral-sh/ruff/pull/14233))
- \[`refurb`\] Further special cases added to `verbose-decimal-constructor` (`FURB157`) ([#14216](https://github.com/astral-sh/ruff/pull/14216))
- \[`refurb`\] Use `UserString` instead of non-existent `UserStr` (`FURB189`) ([#14209](https://github.com/astral-sh/ruff/pull/14209))
- \[`ruff`\] Avoid treating lowercase letters as `# noqa` codes (`RUF100`) ([#14229](https://github.com/astral-sh/ruff/pull/14229))
- \[`ruff`\] Do not report when `Optional` has no type arguments (`RUF013`) ([#14181](https://github.com/astral-sh/ruff/pull/14181))
### Documentation
- Add "Notebook behavior" section for `F704`, `PLE1142` ([#14266](https://github.com/astral-sh/ruff/pull/14266))
- Document comment policy around fix safety ([#14300](https://github.com/astral-sh/ruff/pull/14300))
## 0.7.3
### Preview features
- Formatter: Disallow single-line implicit concatenated strings ([#13928](https://github.com/astral-sh/ruff/pull/13928))
- \[`flake8-pyi`\] Include all Python file types for `PYI006` and `PYI066` ([#14059](https://github.com/astral-sh/ruff/pull/14059))
- \[`flake8-simplify`\] Implement `split-of-static-string` (`SIM905`) ([#14008](https://github.com/astral-sh/ruff/pull/14008))
- \[`refurb`\] Implement `subclass-builtin` (`FURB189`) ([#14105](https://github.com/astral-sh/ruff/pull/14105))
- \[`ruff`\] Improve diagnostic messages and docs (`RUF031`, `RUF032`, `RUF034`) ([#14068](https://github.com/astral-sh/ruff/pull/14068))
### Rule changes
- Detect items that hash to same value in duplicate sets (`B033`, `PLC0208`) ([#14064](https://github.com/astral-sh/ruff/pull/14064))
- \[`eradicate`\] Better detection of IntelliJ language injection comments (`ERA001`) ([#14094](https://github.com/astral-sh/ruff/pull/14094))
- \[`flake8-pyi`\] Add autofix for `docstring-in-stub` (`PYI021`) ([#14150](https://github.com/astral-sh/ruff/pull/14150))
- \[`flake8-pyi`\] Update `duplicate-literal-member` (`PYI062`) to always provide an autofix ([#14188](https://github.com/astral-sh/ruff/pull/14188))
- \[`pyflakes`\] Detect items that hash to same value in duplicate dictionaries (`F601`) ([#14065](https://github.com/astral-sh/ruff/pull/14065))
- \[`ruff`\] Fix false positive for decorators (`RUF028`) ([#14061](https://github.com/astral-sh/ruff/pull/14061))
### Bug fixes
- Avoid parsing joint rule codes as distinct codes in `# noqa` ([#12809](https://github.com/astral-sh/ruff/pull/12809))
- \[`eradicate`\] ignore `# language=` in commented-out-code rule (ERA001) ([#14069](https://github.com/astral-sh/ruff/pull/14069))
- \[`flake8-bugbear`\] - do not run `mutable-argument-default` on stubs (`B006`) ([#14058](https://github.com/astral-sh/ruff/pull/14058))
- \[`flake8-builtins`\] Skip lambda expressions in `builtin-argument-shadowing (A002)` ([#14144](https://github.com/astral-sh/ruff/pull/14144))
- \[`flake8-comprehension`\] Also remove trailing comma while fixing `C409` and `C419` ([#14097](https://github.com/astral-sh/ruff/pull/14097))
- \[`flake8-simplify`\] Allow `open` without context manager in `return` statement (`SIM115`) ([#14066](https://github.com/astral-sh/ruff/pull/14066))
- \[`pylint`\] Respect hash-equivalent literals in `iteration-over-set` (`PLC0208`) ([#14063](https://github.com/astral-sh/ruff/pull/14063))
- \[`pylint`\] Update known dunder methods for Python 3.13 (`PLW3201`) ([#14146](https://github.com/astral-sh/ruff/pull/14146))
- \[`pyupgrade`\] - ignore kwarg unpacking for `UP044` ([#14053](https://github.com/astral-sh/ruff/pull/14053))
- \[`refurb`\] Parse more exotic decimal strings in `verbose-decimal-constructor` (`FURB157`) ([#14098](https://github.com/astral-sh/ruff/pull/14098))
### Documentation
- Add links to missing related options within rule documentations ([#13971](https://github.com/astral-sh/ruff/pull/13971))
- Add rule short code to mkdocs tags to allow searching via rule codes ([#14040](https://github.com/astral-sh/ruff/pull/14040))
## 0.7.2
### Preview features
- Fix formatting of single with-item with trailing comment ([#14005](https://github.com/astral-sh/ruff/pull/14005))
- \[`pyupgrade`\] Add PEP 646 `Unpack` conversion to `*` with fix (`UP044`) ([#13988](https://github.com/astral-sh/ruff/pull/13988))
### Rule changes
- Regenerate `known_stdlibs.rs` with stdlibs 2024.10.25 ([#13963](https://github.com/astral-sh/ruff/pull/13963))
- \[`flake8-no-pep420`\] Skip namespace package enforcement for PEP 723 scripts (`INP001`) ([#13974](https://github.com/astral-sh/ruff/pull/13974))
### Server
- Fix server panic when undoing an edit ([#14010](https://github.com/astral-sh/ruff/pull/14010))
### Bug fixes
- Fix issues in discovering ruff in pip build environments ([#13881](https://github.com/astral-sh/ruff/pull/13881))
- \[`flake8-type-checking`\] Fix false positive for `singledispatchmethod` (`TCH003`) ([#13941](https://github.com/astral-sh/ruff/pull/13941))
- \[`flake8-type-checking`\] Treat return type of `singledispatch` as runtime-required (`TCH003`) ([#13957](https://github.com/astral-sh/ruff/pull/13957))
### Documentation
- \[`flake8-simplify`\] Include caveats of enabling `if-else-block-instead-of-if-exp` (`SIM108`) ([#14019](https://github.com/astral-sh/ruff/pull/14019))
## 0.7.1
### Preview features
- Fix `E221` and `E222` to flag missing or extra whitespace around `==` operator ([#13890](https://github.com/astral-sh/ruff/pull/13890))
- Formatter: Alternate quotes for strings inside f-strings in preview ([#13860](https://github.com/astral-sh/ruff/pull/13860))
- Formatter: Join implicit concatenated strings when they fit on a line ([#13663](https://github.com/astral-sh/ruff/pull/13663))
- \[`pylint`\] Restrict `iteration-over-set` to only work on sets of literals (`PLC0208`) ([#13731](https://github.com/astral-sh/ruff/pull/13731))
### Rule changes
- \[`flake8-type-checking`\] Support auto-quoting when annotations contain quotes ([#11811](https://github.com/astral-sh/ruff/pull/11811))
### Server
- Avoid indexing the workspace for single-file mode ([#13770](https://github.com/astral-sh/ruff/pull/13770))
### Bug fixes
- Make `ARG002` compatible with `EM101` when raising `NotImplementedError` ([#13714](https://github.com/astral-sh/ruff/pull/13714))
### Other changes
- Introduce more Docker tags for Ruff (similar to uv) ([#13274](https://github.com/astral-sh/ruff/pull/13274))
## 0.7.0
Check out the [blog post](https://astral.sh/blog/ruff-v0.7.0) for a migration guide and overview of the changes!
### Breaking changes
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
  [`lint.pyflakes.allowed-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
  instead (see the sketch below).
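A minimal before/after sketch of that migration, assuming the configuration lives in `pyproject.toml` (the module name is purely illustrative):

```toml
# pyproject.toml
# Before 0.7 (removed):
#
#   [tool.ruff.lint]
#   allow-unused-imports = ["hvplot.pandas"]
#
# From 0.7 onwards, the setting lives under the `pyflakes` table:
[tool.ruff.lint.pyflakes]
allowed-unused-imports = ["hvplot.pandas"]
```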
### Formatter preview style
- Normalize implicit concatenated f-string quotes per part ([#13539](https://github.com/astral-sh/ruff/pull/13539))
### Preview linter features
- \[`refurb`\] implement `hardcoded-string-charset` (FURB156) ([#13530](https://github.com/astral-sh/ruff/pull/13530))
- \[`refurb`\] Count code points, not bytes, for `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13631](https://github.com/astral-sh/ruff/pull/13631))
### Rule changes
- \[`pylint`\] Mark `PLE1141` fix as unsafe ([#13629](https://github.com/astral-sh/ruff/pull/13629))
- \[`flake8-async`\] Consider async generators to be "checkpoints" for `cancel-scope-no-checkpoint` (`ASYNC100`) ([#13639](https://github.com/astral-sh/ruff/pull/13639))
- \[`flake8-bugbear`\] Do not suggest setting parameter `strict=` to `False` in `B905` diagnostic message ([#13656](https://github.com/astral-sh/ruff/pull/13656))
- \[`flake8-todos`\] Only flag the word "TODO", not words starting with "todo" (`TD006`) ([#13640](https://github.com/astral-sh/ruff/pull/13640))
- \[`pycodestyle`\] Fix whitespace-related false positives and false negatives inside type-parameter lists (`E231`, `E251`) ([#13704](https://github.com/astral-sh/ruff/pull/13704))
- \[`flake8-simplify`\] Stabilize preview behavior for `SIM115` so that the rule can detect files
being opened from a wider range of standard-library functions ([#12959](https://github.com/astral-sh/ruff/pull/12959)).
### CLI
- Add explanation of fixable in `--statistics` command ([#13774](https://github.com/astral-sh/ruff/pull/13774))
### Bug fixes
- \[`pyflakes`\] Allow `ipytest` cell magic (`F401`) ([#13745](https://github.com/astral-sh/ruff/pull/13745))
- \[`flake8-use-pathlib`\] Fix `PTH123` false positive when `open` is passed a file descriptor ([#13616](https://github.com/astral-sh/ruff/pull/13616))
- \[`flake8-bandit`\] Detect patterns from multi line SQL statements (`S608`) ([#13574](https://github.com/astral-sh/ruff/pull/13574))
- \[`flake8-pyi`\] Fix dropped expressions in `PYI030` autofix ([#13727](https://github.com/astral-sh/ruff/pull/13727))
## 0.6.9
### Preview features
- Fix codeblock dynamic line length calculation for indented docstring examples ([#13523](https://github.com/astral-sh/ruff/pull/13523))
- \[`refurb`\] Mark `FURB118` fix as unsafe ([#13613](https://github.com/astral-sh/ruff/pull/13613))
### Rule changes
- \[`pydocstyle`\] Don't raise `D208` when last line is non-empty ([#13372](https://github.com/astral-sh/ruff/pull/13372))
- \[`pylint`\] Preserve trivia (i.e. comments) in `PLR5501` autofix ([#13573](https://github.com/astral-sh/ruff/pull/13573))
### Configuration
- \[`pyflakes`\] Add `allow-unused-imports` setting for `unused-import` rule (`F401`) ([#13601](https://github.com/astral-sh/ruff/pull/13601))
### Bug fixes
- Support ruff discovery in pip build environments ([#13591](https://github.com/astral-sh/ruff/pull/13591))
- \[`flake8-bugbear`\] Avoid short circuiting `B017` for multiple context managers ([#13609](https://github.com/astral-sh/ruff/pull/13609))
- \[`pylint`\] Do not offer an invalid fix for `PLR1716` when the comparisons contain parenthesis ([#13527](https://github.com/astral-sh/ruff/pull/13527))
- \[`pyupgrade`\] Fix `UP043` to apply to `collections.abc.Generator` and `collections.abc.AsyncGenerator` ([#13611](https://github.com/astral-sh/ruff/pull/13611))
- \[`refurb`\] Fix handling of slices in tuples for `FURB118`, e.g., `x[:, 1]` ([#13518](https://github.com/astral-sh/ruff/pull/13518))
### Documentation
- Update GitHub Action link to `astral-sh/ruff-action` ([#13551](https://github.com/astral-sh/ruff/pull/13551))
## 0.6.8
### Preview features
- Remove unnecessary parentheses around `match case` clauses ([#13510](https://github.com/astral-sh/ruff/pull/13510))
- Parenthesize overlong `if` guards in `match..case` clauses ([#13513](https://github.com/astral-sh/ruff/pull/13513))
- Detect basic wildcard imports in `ruff analyze graph` ([#13486](https://github.com/astral-sh/ruff/pull/13486))
- \[`pylint`\] Implement `boolean-chained-comparison` (`R1716`) ([#13435](https://github.com/astral-sh/ruff/pull/13435))
### Rule changes
- \[`flake8-simplify`\] Detect `SIM910` when using variadic keyword arguments, i.e., `**kwargs` ([#13503](https://github.com/astral-sh/ruff/pull/13503))
- \[`pyupgrade`\] Avoid false negatives with non-reference shadowed bindings of loop variables (`UP028`) ([#13504](https://github.com/astral-sh/ruff/pull/13504))
### Bug fixes
- Detect tuples bound to variadic positional arguments i.e. `*args` ([#13512](https://github.com/astral-sh/ruff/pull/13512))
- Exit gracefully on broken pipe errors ([#13485](https://github.com/astral-sh/ruff/pull/13485))
- Avoid panic when analyze graph hits broken pipe ([#13484](https://github.com/astral-sh/ruff/pull/13484))
### Performance
- Reuse `BTreeSets` in module resolver ([#13440](https://github.com/astral-sh/ruff/pull/13440))
- Skip traversal for non-compound statements ([#13441](https://github.com/astral-sh/ruff/pull/13441))
## 0.6.7
### Preview features
- Add Python version support to ruff analyze CLI ([#13426](https://github.com/astral-sh/ruff/pull/13426))
- Add `exclude` support to `ruff analyze` ([#13425](https://github.com/astral-sh/ruff/pull/13425))
- Fix parentheses around return type annotations ([#13381](https://github.com/astral-sh/ruff/pull/13381))
### Rule changes
- \[`pydocstyle`\] Fix: Don't autofix if the first line ends in a question mark (`D400`) ([#13399](https://github.com/astral-sh/ruff/pull/13399))
### Bug fixes
- Respect `lint.exclude` in ruff check `--add-noqa` ([#13427](https://github.com/astral-sh/ruff/pull/13427))
### Performance
- Avoid tracking module resolver files in Salsa ([#13437](https://github.com/astral-sh/ruff/pull/13437))
- Use `forget` for module resolver database ([#13438](https://github.com/astral-sh/ruff/pull/13438))
## 0.6.6
### Preview features
- \[`refurb`\] Skip `slice-to-remove-prefix-or-suffix` (`FURB188`) when non-trivial slice steps are present ([#13405](https://github.com/astral-sh/ruff/pull/13405))
- Add a subcommand to generate dependency graphs ([#13402](https://github.com/astral-sh/ruff/pull/13402))
### Formatter
- Fix placement of inline parameter comments ([#13379](https://github.com/astral-sh/ruff/pull/13379))
### Server
- Fix off-by one error in the `LineIndex::offset` calculation ([#13407](https://github.com/astral-sh/ruff/pull/13407))
### Bug fixes
- \[`fastapi`\] Respect FastAPI aliases in route definitions ([#13394](https://github.com/astral-sh/ruff/pull/13394))
- \[`pydocstyle`\] Respect word boundaries when detecting function signature in docs ([#13388](https://github.com/astral-sh/ruff/pull/13388))
### Documentation
- Add backlinks to rule overview linter ([#13368](https://github.com/astral-sh/ruff/pull/13368))
- Fix documentation for editor vim plugin ALE ([#13348](https://github.com/astral-sh/ruff/pull/13348))
- Fix rendering of `FURB188` docs ([#13406](https://github.com/astral-sh/ruff/pull/13406))
## 0.6.5
### Preview features
- \[`pydoclint`\] Ignore `DOC201` when function name is `__new__` ([#13300](https://github.com/astral-sh/ruff/pull/13300))
- \[`refurb`\] Implement `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13256](https://github.com/astral-sh/ruff/pull/13256))
### Rule changes
- \[`eradicate`\] Ignore script-comments with multiple end-tags (`ERA001`) ([#13283](https://github.com/astral-sh/ruff/pull/13283))
- \[`pyflakes`\] Improve error message for `UndefinedName` when a builtin was added in a newer version than specified in Ruff config (`F821`) ([#13293](https://github.com/astral-sh/ruff/pull/13293))
### Server
- Add support for extensionless Python files for server ([#13326](https://github.com/astral-sh/ruff/pull/13326))
- Fix configuration inheritance for configurations specified in the LSP settings ([#13285](https://github.com/astral-sh/ruff/pull/13285))
### Bug fixes
- \[`ruff`\] Handle unary operators in `decimal-from-float-literal` (`RUF032`) ([#13275](https://github.com/astral-sh/ruff/pull/13275))
### CLI
- Only include rules with diagnostics in SARIF metadata ([#13268](https://github.com/astral-sh/ruff/pull/13268))
### Playground
- Add "Copy as pyproject.toml/ruff.toml" and "Paste from TOML" ([#13328](https://github.com/astral-sh/ruff/pull/13328))
- Fix errors not shown for restored snippet on page load ([#13262](https://github.com/astral-sh/ruff/pull/13262))
## 0.6.4
### Preview features
- \[`flake8-builtins`\] Use dynamic builtins list based on Python version ([#13172](https://github.com/astral-sh/ruff/pull/13172))
- \[`pydoclint`\] Permit yielding `None` in `DOC402` and `DOC403` ([#13148](https://github.com/astral-sh/ruff/pull/13148))
- \[`pylint`\] Update diagnostic message for `PLW3201` ([#13194](https://github.com/astral-sh/ruff/pull/13194))
- \[`ruff`\] Implement `post-init-default` (`RUF033`) ([#13192](https://github.com/astral-sh/ruff/pull/13192))
- \[`ruff`\] Implement useless if-else (`RUF034`) ([#13218](https://github.com/astral-sh/ruff/pull/13218))
### Rule changes
- \[`flake8-pyi`\] Respect `pep8_naming.classmethod-decorators` settings when determining if a method is a classmethod in `custom-type-var-return-type` (`PYI019`) ([#13162](https://github.com/astral-sh/ruff/pull/13162))
- \[`flake8-pyi`\] Teach various rules that annotations might be stringized ([#12951](https://github.com/astral-sh/ruff/pull/12951))
- \[`pylint`\] Avoid `no-self-use` for `attrs`-style validators ([#13166](https://github.com/astral-sh/ruff/pull/13166))
- \[`pylint`\] Recurse into subscript subexpressions when searching for list/dict lookups (`PLR1733`, `PLR1736`) ([#13186](https://github.com/astral-sh/ruff/pull/13186))
- \[`pyupgrade`\] Detect `aiofiles.open` calls in `UP015` ([#13173](https://github.com/astral-sh/ruff/pull/13173))
- \[`pyupgrade`\] Mark `sys.version_info[0] < 3` and similar comparisons as outdated (`UP036`) ([#13175](https://github.com/astral-sh/ruff/pull/13175))
### CLI
- Enrich messages of SARIF results ([#13180](https://github.com/astral-sh/ruff/pull/13180))
- Handle singular case for incompatible rules warning in `ruff format` output ([#13212](https://github.com/astral-sh/ruff/pull/13212))
### Bug fixes
- \[`pydocstyle`\] Improve heuristics for detecting Google-style docstrings ([#13142](https://github.com/astral-sh/ruff/pull/13142))
- \[`refurb`\] Treat `sep` arguments with effects as unsafe removals (`FURB105`) ([#13165](https://github.com/astral-sh/ruff/pull/13165))
## 0.6.3
### Preview features
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))
### Rule changes
- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))
### Bug fixes
- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://github.com/astral-sh/ruff/pull/13105))
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))
## 0.6.2
### Preview features
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for FastAPI paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
- \[`ruff`\] Implement check for `Decimal` called with a float literal (`RUF032`) ([#12909](https://github.com/astral-sh/ruff/pull/12909))
### Rule changes
- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))
### Server
- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))
### Bug fixes
- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
- \[`pylint`\] Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))
### Documentation
- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))
## 0.6.1
This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
Ruff changed its behavior to lint and format Jupyter notebooks by default;
however, due to an oversight, these files were still excluded by default if
Ruff was run via pre-commit, leading to inconsistent behavior.
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).
### Preview features
- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))
### Rule changes
- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))
### Server
- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))
### Other changes
- \[`pep8-naming`\] Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))
## 0.6.0
Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!
### Breaking changes
See also, the "Remapped rules" section which may result in disabled rules.
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
### Deprecations
The following rules are now deprecated:
- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)
### Remapped rules
The following rules have been remapped to new rule codes:
- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`
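If the remapped rule is selected explicitly in a configuration, the selection needs the same update; a minimal sketch, assuming an explicit `extend-select` in `pyproject.toml`:

```toml
# pyproject.toml
[tool.ruff.lint]
# RUF025 has been remapped to the flake8-comprehensions code C420,
# so select the new code instead of the old one.
extend-select = ["C420"]
```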
### Stabilization
The following rules have been stabilized and are no longer in preview:
- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)
The following behaviors have been stabilized:
- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.
The following fixes have been stabilized:
- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)
### Preview features
- \[`flake8-simplify`\] Further simplify to binary operators in preview (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))
### Rule changes
- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))
### Server
- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))
### Bug fixes
- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
- \[`flake8-return`\] Only add return `None` at end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
- \[`ruff`\] Skip tuples with slice expressions (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))
### Other changes
- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))
## 0.5.7
### Preview features
- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (space and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))
### Rule changes
- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))
### Server
- Ignore non-file workspace URL; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))
### CLI
- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))
### Bug fixes
- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
- \[`flake8-bandit`\] Avoid false-positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
- \[`flake8-comprehensions`\] Don't flag set comprehensions passed to `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
- \[`pydoclint`\] Try both 'Raises' section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))
## 0.5.6
Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
You can opt-out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.
```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```
### Preview features
- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))
### Rule changes
- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))
### Server
- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))
### Configuration
- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))
### Bug fixes
- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than just `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))
### Other changes
- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
- Improve consistency between linter rules in determining whether a function is a property ([#12581](https://github.com/astral-sh/ruff/pull/12581))
## 0.5.5
### Preview features
- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency` (`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))
### Rule changes
- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))
### Formatter
- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))
### Server
- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))
### Bug fixes
- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))
### Documentation
- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))
## 0.5.4
### Rule changes
- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))
### Bug fixes
- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))
### Documentation
- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))
## 0.5.3
**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
[documentation](https://docs.astral.sh/ruff/editors), including [setup guides for your editor of
choice](https://docs.astral.sh/ruff/editors/setup) and [the language server
itself](https://docs.astral.sh/ruff/editors/settings)**.
### Preview features
- Formatter: Insert empty line between suite and alternative branch after function/class definition ([#12294](https://github.com/astral-sh/ruff/pull/12294))
- \[`pyupgrade`\] Implement `unnecessary-default-type-args` (`UP043`) ([#12371](https://github.com/astral-sh/ruff/pull/12371))
### Rule changes
- \[`flake8-bugbear`\] Detect enumerate iterations in `loop-iterator-mutation` (`B909`) ([#12366](https://github.com/astral-sh/ruff/pull/12366))
- \[`flake8-bugbear`\] Remove `discard`, `remove`, and `pop` allowance for `loop-iterator-mutation` (`B909`) ([#12365](https://github.com/astral-sh/ruff/pull/12365))
- \[`pylint`\] Allow `repeated-equality-comparison` for mixed operations (`PLR1714`) ([#12369](https://github.com/astral-sh/ruff/pull/12369))
- \[`pylint`\] Ignore `self` and `cls` when counting arguments (`PLR0913`) ([#12367](https://github.com/astral-sh/ruff/pull/12367))
- \[`pylint`\] Use UTF-8 as default encoding in `unspecified-encoding` fix (`PLW1514`) ([#12370](https://github.com/astral-sh/ruff/pull/12370))
### Server
- Build settings index in parallel for the native server ([#12299](https://github.com/astral-sh/ruff/pull/12299))
- Use fallback settings when indexing the project ([#12362](https://github.com/astral-sh/ruff/pull/12362))
- Consider `--preview` flag for `server` subcommand for the linter and formatter ([#12208](https://github.com/astral-sh/ruff/pull/12208))
### Bug fixes
- \[`flake8-comprehensions`\] Allow additional arguments for `sum` and `max` comprehensions (`C419`) ([#12364](https://github.com/astral-sh/ruff/pull/12364))
- \[`pylint`\] Avoid dropping extra boolean operations in `repeated-equality-comparison` (`PLR1714`) ([#12368](https://github.com/astral-sh/ruff/pull/12368))
- \[`pylint`\] Consider expression before statement when determining binding kind (`PLR1704`) ([#12346](https://github.com/astral-sh/ruff/pull/12346))
### Documentation
- Add docs for Ruff language server ([#12344](https://github.com/astral-sh/ruff/pull/12344))
- Migrate to standalone docs repo ([#12341](https://github.com/astral-sh/ruff/pull/12341))
- Update versioning policy for editor integration ([#12375](https://github.com/astral-sh/ruff/pull/12375))
### Other changes
- Publish Wasm API to npm ([#12317](https://github.com/astral-sh/ruff/pull/12317))
## 0.5.2
### Preview features
- Use `space` separator before parenthesized expressions in comprehensions with leading comments ([#12282](https://github.com/astral-sh/ruff/pull/12282))
- \[`flake8-async`\] Update `ASYNC100` to include `anyio` and `asyncio` ([#12221](https://github.com/astral-sh/ruff/pull/12221))
- \[`flake8-async`\] Update `ASYNC109` to include `anyio` and `asyncio` ([#12236](https://github.com/astral-sh/ruff/pull/12236))
- \[`flake8-async`\] Update `ASYNC110` to include `anyio` and `asyncio` ([#12261](https://github.com/astral-sh/ruff/pull/12261))
- \[`flake8-async`\] Update `ASYNC115` to include `anyio` and `asyncio` ([#12262](https://github.com/astral-sh/ruff/pull/12262))
- \[`flake8-async`\] Update `ASYNC116` to include `anyio` and `asyncio` ([#12266](https://github.com/astral-sh/ruff/pull/12266))
### Rule changes
- \[`flake8-return`\] Exempt properties from explicit return rule (`RET501`) ([#12243](https://github.com/astral-sh/ruff/pull/12243))
- \[`numpy`\] Add `np.NAN`-to-`np.nan` diagnostic ([#12292](https://github.com/astral-sh/ruff/pull/12292))
- \[`refurb`\] Make `list-reverse-copy` an unsafe fix ([#12303](https://github.com/astral-sh/ruff/pull/12303))
### Server
- Consider `include` and `extend-include` settings in native server ([#12252](https://github.com/astral-sh/ruff/pull/12252))
- Include nested configurations in settings reloading ([#12253](https://github.com/astral-sh/ruff/pull/12253))
### CLI
- Omit code frames for fixes with empty ranges ([#12304](https://github.com/astral-sh/ruff/pull/12304))
- Warn about formatter incompatibility for `D203` ([#12238](https://github.com/astral-sh/ruff/pull/12238))
### Bug fixes
- Make cache-write failures non-fatal on Windows ([#12302](https://github.com/astral-sh/ruff/pull/12302))
- Treat `not` operations as boolean tests ([#12301](https://github.com/astral-sh/ruff/pull/12301))
- \[`flake8-bandit`\] Avoid `S310` violations for HTTP-safe f-strings ([#12305](https://github.com/astral-sh/ruff/pull/12305))
- \[`flake8-bandit`\] Support explicit string concatenations in S310 HTTP detection ([#12315](https://github.com/astral-sh/ruff/pull/12315))
- \[`flake8-bandit`\] Fix `S113` false positive for httpx without `timeout` argument ([#12213](https://github.com/astral-sh/ruff/pull/12213))
- \[`pycodestyle`\] Remove "non-obvious" allowance for E721 ([#12300](https://github.com/astral-sh/ruff/pull/12300))
- \[`pyflakes`\] Consider `with` blocks as single-item branches for redefinition analysis ([#12311](https://github.com/astral-sh/ruff/pull/12311))
- \[`refurb`\] Restrict forwarding for `newline` argument in `open()` calls to Python versions >= 3.10 ([#12244](https://github.com/astral-sh/ruff/pull/12244))
### Documentation
- Update help and documentation to reflect `--output-format full` default ([#12248](https://github.com/astral-sh/ruff/pull/12248))
### Performance
- Use more threads when discovering Python files ([#12258](https://github.com/astral-sh/ruff/pull/12258))
## 0.5.1
### Preview features
@@ -1077,7 +236,7 @@ The following deprecated CLI commands have been removed:
### Preview features
- \[`flake8-bugbear`\] Implement `return-in-generator` (`B901`) ([#11644](https://github.com/astral-sh/ruff/pull/11644))
- \[`flake8-pyi`\] Implement `pep484-style-positional-only-parameter` (`PYI063`) ([#11699](https://github.com/astral-sh/ruff/pull/11699))
- \[`flake8-pyi`\] Implement `PYI063` ([#11699](https://github.com/astral-sh/ruff/pull/11699))
- \[`pygrep_hooks`\] Check blanket ignores via file-level pragmas (`PGH004`) ([#11540](https://github.com/astral-sh/ruff/pull/11540))
### Rule changes

View File

@@ -2,6 +2,35 @@
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
- [The Basics](#the-basics)
- [Prerequisites](#prerequisites)
- [Development](#development)
- [Project Structure](#project-structure)
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
- [Rule naming convention](#rule-naming-convention)
- [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- [MkDocs](#mkdocs)
- [Release Process](#release-process)
- [Creating a new release](#creating-a-new-release)
- [Ecosystem CI](#ecosystem-ci)
- [Benchmarking and Profiling](#benchmarking-and-profiling)
- [CPython Benchmark](#cpython-benchmark)
- [Microbenchmarks](#microbenchmarks)
- [Benchmark-driven Development](#benchmark-driven-development)
- [PR Summary](#pr-summary)
- [Tips](#tips)
- [Profiling Projects](#profiling-projects)
- [Linux](#linux)
- [Mac](#mac)
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
- [Compilation Pipeline](#compilation-pipeline)
- [Import Categorization](#import-categorization)
- [Project root](#project-root)
- [Package root](#package-root)
- [Import categorization](#import-categorization-1)
## The Basics
Ruff welcomes contributions in the form of pull requests.
@@ -29,14 +58,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```
You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
run Python utility commands.
And you'll need pre-commit to run some validation checks:
```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
```
You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:
```shell
uv tool install pre-commit
pre-commit install
```
@@ -64,7 +95,7 @@ and that it passes both the lint and test validation checks:
```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```
These checks will run on GitHub Actions when you open your pull request, but running them locally
@@ -265,20 +296,26 @@ To preview any changes to the documentation locally:
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
1. Install MkDocs and Material for MkDocs with:
```shell
pip install -r docs/requirements.txt
```
1. Generate the MkDocs site with:
```shell
uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
python scripts/generate_mkdocs.py
```
1. Run the development server with:
```shell
# For contributors.
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
mkdocs serve -f mkdocs.public.yml
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
mkdocs serve -f mkdocs.insiders.yml
```
The documentation should then be available locally at
@@ -296,34 +333,22 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
### Creating a new release
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release.sh`; this command will:
- Generate a temporary virtual environment with `rooster`
- Generate a changelog entry in `CHANGELOG.md`
- Update versions in `pyproject.toml` and `Cargo.toml`
- Update references to versions in the `README.md` and documentation
- Display contributors for the release
1. The changelog should then be editorialized for consistency
- Often labels will be missing from pull requests; they will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
- The new version number (without starting `v`)
1. The release workflow will do the following:
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
uploaded anything, so you can restart after pushing a fix. If you just need to rerun the build,
make sure you're [re-running all the failed
@@ -334,25 +359,14 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
1. Attach artifacts to draft GitHub release
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
downstream jobs manually if needed.
1. Verify the GitHub release:
1. The Changelog should match the content of `CHANGELOG.md`
1. Append the contributors from the `scripts/release.sh` script
1. Append the contributors from the `bump.sh` script
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
1. One can determine if an update is needed when
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
1. Once run successfully, you should follow the link in the output to create a PR.
1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
[`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
the release instructions in those repositories. `ruff-lsp` should always be updated
before `ruff-vscode`.
This step is generally not required for a patch release, but should always be done
for a minor release.
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
## Ecosystem CI
@@ -360,8 +374,9 @@ GitHub Actions will run your changes against a number of real-world projects fro
report on any linter or formatter differences. You can also run those checks locally via:
```shell
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
```
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
@@ -374,7 +389,7 @@ We have several ways of benchmarking and profiling Ruff:
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects
> **Note**
> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.
@@ -388,18 +403,12 @@ which makes it a good target for benchmarking.
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```
Install `hyperfine`:
```shell
cargo install hyperfine
```
To benchmark the release build:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
@@ -418,7 +427,7 @@ To benchmark against the ecosystem's existing tools:
```shell
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"pyflakes crates/ruff_linter/resources/test/cpython" \
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
"pycodestyle crates/ruff_linter/resources/test/cpython" \
@@ -464,7 +473,7 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
@@ -521,8 +530,6 @@ You can run the benchmarks with
cargo benchmark
```
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
#### Benchmark-driven Development
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
@@ -561,7 +568,7 @@ cargo install critcmp
#### Tips
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
@@ -898,11 +905,15 @@ There are three ways in which an import can be categorized as "first-party":
package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
automatically. This check is as simple as comparing the first segment of the current file's
module path to the first segment of the import.
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
sets the directories to scan when identifying first-party imports. The algorithm is
straightforward: given an import, like `import foo`, iterate over the directories enumerated in
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
file `foo.py`.
By default, `src` is set to the project root, along with `"src"` subdirectory in the project root.
This ensures that Ruff supports both flat and "src" layouts out of the box.
By default, `src` is set to the project root. In the above example, we'd want to set
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
explicitly.
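For the `src`-layout example described above, a minimal sketch of the corresponding configuration (assuming it lives in `./my_project/pyproject.toml`):

```toml
# ./my_project/pyproject.toml
[tool.ruff]
# Scan ./src when resolving first-party imports, so that `import foo`
# resolves to ./src/foo and is categorized as first-party.
src = ["./src"]
```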

1492 Cargo.lock (generated)
File diff suppressed because it is too large

View File

@@ -4,7 +4,7 @@ resolver = "2"
[workspace.package]
edition = "2021"
rust-version = "1.80"
rust-version = "1.75"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -14,10 +14,9 @@ license = "MIT"
[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db", default-features = false }
ruff_db = { path = "crates/ruff_db" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
@@ -34,18 +33,15 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }
ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
red_knot_vendored = { path = "crates/red_knot_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }
red_knot = { path = "crates/red_knot" }
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_test = { path = "crates/red_knot_test" }
red_knot_workspace = { path = "crates/red_knot_workspace", default-features = false }
aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
@@ -54,35 +50,28 @@ cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
colored = { version = "2.1.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.8.0"
compact_str = "0.7.1"
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.8.0" }
fern = { version = "0.7.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = { version = "0.15.0", default-features = false, features = [
"raw-entry",
"inline-more",
] }
hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
@@ -97,28 +86,29 @@ libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
"proposed",
"proposed",
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "7.0.0" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.7.1" }
pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.13.4" }
quick-junit = { version = "0.5.0" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.4.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "254c749b02cde2fd29852a7463a33e800b771758" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -126,7 +116,7 @@ serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
"macros",
"macros",
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
@@ -137,44 +127,36 @@ strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "2.0.0" }
tikv-jemallocator = { version = "0.6.0" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.5.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
"env-filter",
"fmt",
] }
tracing-tree = { version = "0.4.0" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.2.0" }
unicode-width = { version = "0.1.11" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
"js",
"v4",
"fast-rng",
"macro-diagnostics",
"js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = [
"cfg(fuzzing)",
"cfg(codspeed)",
] }
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -190,9 +172,8 @@ missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
@@ -205,10 +186,6 @@ get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
# nursery rules
redundant_clone = "warn"
debug_assert_with_mut_call = "warn"
unused_peekable = "warn"
[profile.release]
# Note that we set these explicitly, and these values
@@ -248,12 +225,12 @@ debug = 1
[profile.dist]
inherits = "release"
# Config for 'dist'
# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.25.2-prerelease.3"
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.18.0"
# CI backends to support
ci = "github"
ci = ["github"]
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
@@ -262,45 +239,43 @@ windows-archive = ".zip"
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
"aarch64-apple-darwin",
"aarch64-pc-windows-msvc",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"arm-unknown-linux-musleabihf",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"i686-unknown-linux-musl",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"aarch64-apple-darwin",
"aarch64-pc-windows-msvc",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"arm-unknown-linux-musleabihf",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"i686-unknown-linux-musl",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether dist should create a Github Release or use an existing draft
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
# Which actions to run on pull requests
# Publish jobs to run in CI
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase dist should use to create the GitHub release
# The stage during which the GitHub Release should be created
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
# Local artifacts jobs to run in CI
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
publish-jobs = ["./publish-pypi"]
# Announcement jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]

View File

@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM ubuntu AS build
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME

25
LICENSE
View File

@@ -1371,28 +1371,3 @@ are:
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
- pydoclint, licensed as follows:
"""
MIT License
Copyright (c) 2023 jsh9
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

View File

@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)
## Getting Started<a id="getting-started"></a>
## Getting Started
For more, see the [documentation](https://docs.astral.sh/ruff/).
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.8.0/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.8.0/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.5.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.1/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.8.0
rev: v0.5.1
hooks:
# Run the linter.
- id: ruff
@@ -179,10 +179,11 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
- id: ruff-format
```
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/astral-sh/ruff-action):
[`ruff-action`](https://github.com/chartboost/ruff-action):
```yaml
name: Ruff
@@ -192,10 +193,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: astral-sh/ruff-action@v1
- uses: chartboost/ruff-action@v1
```
### Configuration<a id="configuration"></a>
### Configuration
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -238,8 +239,8 @@ exclude = [
line-length = 88
indent-width = 4
# Assume Python 3.9
target-version = "py39"
# Assume Python 3.8
target-version = "py38"
[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
@@ -291,7 +292,7 @@ features that may change prior to stabilization.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
## Rules<a id="rules"></a>
## Rules
<!-- Begin section: Rules -->
@@ -367,21 +368,21 @@ quality tools, including:
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
## Contributing<a id="contributing"></a>
## Contributing
Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
## Support<a id="support"></a>
## Support
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
## Acknowledgements<a id="acknowledgements"></a>
## Acknowledgements
Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -405,7 +406,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
Ruff is released under the MIT license.
## Who's Using Ruff?<a id="whos-using-ruff"></a>
## Who's Using Ruff?
Ruff is used by a number of major open-source projects and companies, including:
@@ -417,14 +418,12 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
@@ -435,7 +434,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
[Datasets](https://github.com/huggingface/datasets),
[Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
@@ -525,7 +523,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```
## License<a id="license"></a>
## License
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)

View File

@@ -1,11 +1,6 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = [
"crates/red_knot_vendored/vendor/**/*",
"**/resources/**/*",
"**/snapshots/**/*",
"crates/red_knot_workspace/src/workspace/pyproject/package_name.rs"
]
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
@@ -13,11 +8,10 @@ hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
pn = "pn" # `import panel as pd` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"
[default]
extend-ignore-re = [

View File

@@ -10,12 +10,4 @@ doc-valid-idents = [
"SCREAMING_SNAKE_CASE",
"SQLAlchemy",
"StackOverflow",
"PyCharm",
]
ignore-interior-mutability = [
# Interned is read-only. The wrapped `Rc` never gets updated.
"ruff_formatter::format_element::Interned",
# The expression is read-only.
"ruff_python_ast::hashable::HashableExpr",
]

View File

@@ -12,29 +12,24 @@ license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
red_knot_module_resolver = { workspace = true }
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_db = { workspace = true }
ruff_python_ast = { workspace = true }
anyhow = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
notify = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-tree = { workspace = true }
[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }
ruff_db = { workspace = true, features = ["testing"] }
[lints]
workspace = true

Binary file not shown.


View File

@@ -1,128 +0,0 @@
# Tracing
Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing; they should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.
## Verbosity levels
The CLI supports different verbosity levels.
- default: Only show errors and warnings.
- `-v` activates `info!`: Show generally useful information such as paths of configuration files, detected platform, etc. It isn't a lot of messages; it's something you might enable in CI by default. `cargo build`, for example, shows you which packages are fresh at this level.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of this is wasted; it's really slow, and we dump e.g. the entire resolution graph. It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating (see the example invocations below).
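For example, a typical set of invocations might look like this (a sketch; the project path is illustrative):

```bash
# Default: warnings and errors only.
red_knot --current-directory=../test

# -v: add high-level info! messages, e.g. for CI.
red_knot --current-directory=../test -v

# -vv: add debug! messages and timestamps when chasing a bug report.
red_knot --current-directory=../test -vv

# -vvv: everything, including trace! events and tracing spans (debug builds only).
red_knot --current-directory=../test -vvv
```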
## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).
The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confusing logging output where messages from different threads are intertwined.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
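For instance, to combine single-threaded execution with debug-level messages from the red-knot crates (a sketch; adjust the path and filter directives to your setup):

```bash
RAYON_NUM_THREADS=1 RED_KNOT_LOG=red_knot=debug,ruff=debug red_knot --current-directory=../test
```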
### Examples
#### Show all debug messages
Shows debug messages from all crates.
```bash
RED_KNOT_LOG=debug
```
#### Show salsa query execution messages
Show the salsa `execute: my_query` messages in addition to all red knot messages.
```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```
#### Show typing traces
Only show traces for the `red_knot_python_semantic::types` module.
```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```
Note: Ensure that you use `-vvv` to see tracing spans.
#### Show messages for a single file
Shows all messages that are inside of a span for a specific file.
```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```
**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
whether one of its children has the file `x.py`.
**Note**: Salsa currently logs entire memoized values, which in our case include the source text and the parsed AST.
This very quickly leads to extremely long outputs.
## Tracing and Salsa
Be mindful about using `tracing` in Salsa queries, especially `warn` or `error`, because it isn't guaranteed that the query will execute after restoring from a persistent cache, in which case the user won't see the message.
For example, don't use `tracing` to show the user a message when generating a lint violation failed
because the message would only be shown when linting the file the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result or use a Salsa accumulator.
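A minimal sketch of that pattern, modeled on the lint queries in `crates/red_knot/src/lint.rs` (the query name and the `try_check_file` helper are hypothetical):

```rust
use ruff_db::vfs::VfsFile;

use crate::db::Db;
use crate::lint::Diagnostics;

/// Hypothetical lint query: failures become diagnostics instead of log messages,
/// so they stay visible on cached and repeated runs.
#[salsa::tracked(return_ref)]
pub(crate) fn lint_example(db: &dyn Db, file: VfsFile) -> Diagnostics {
    let mut diagnostics = Vec::new();

    // `try_check_file` stands in for whatever fallible analysis the lint performs.
    match try_check_file(db, file) {
        Ok(findings) => diagnostics.extend(findings),
        // Don't `tracing::warn!` here: if this result is later served from a persistent
        // cache, the query body never re-runs and the message would be silently lost.
        Err(error) => diagnostics.push(format!("checking failed: {error}")),
    }

    Diagnostics::from(diagnostics)
}
```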
## Tracing in tests
You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.
```rust
use ruff_db::testing::setup_logging;
#[test]
fn test() {
let _logging = setup_logging();
tracing::info!("This message will be printed to stderr");
}
```
Note: Most test runners capture stderr and only show its output when a test fails.
Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
called **once**.
## Release builds
`trace!` events are removed in release builds.
## Profiling
Red Knot writes a folded stack trace named `tracing.folded` to the current directory when the environment variable `RED_KNOT_LOG_PROFILE` is set to `1` or `true`.
```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```
You can convert the textual representation into a visual one using `inferno`.
```shell
cargo install inferno
```
```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg
# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```
![Example flamegraph](./tracing-flamegraph.png)
See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.

10
crates/red_knot/src/db.rs Normal file
View File

@@ -0,0 +1,10 @@
use red_knot_python_semantic::Db as SemanticDb;
use ruff_db::Upcast;
use salsa::DbWithJar;
use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled};
pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}
#[salsa::jar(db=Db)]
pub struct Jar(lint_syntax, lint_semantic, unwind_if_cancelled);

View File

@@ -0,0 +1,52 @@
use rustc_hash::FxHashSet;
use ruff_db::file_system::{FileSystemPath, FileSystemPathBuf};
use ruff_db::vfs::VfsFile;
use crate::db::Jar;
pub mod db;
pub mod lint;
pub mod program;
pub mod watch;
#[derive(Debug, Clone)]
pub struct Workspace {
root: FileSystemPathBuf,
/// The files that are open in the workspace.
///
/// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
/// * CLI: The resolved files passed as arguments to the CLI.
open_files: FxHashSet<VfsFile>,
}
impl Workspace {
pub fn new(root: FileSystemPathBuf) -> Self {
Self {
root,
open_files: FxHashSet::default(),
}
}
pub fn root(&self) -> &FileSystemPath {
self.root.as_path()
}
// TODO having the content in workspace feels wrong.
pub fn open_file(&mut self, file_id: VfsFile) {
self.open_files.insert(file_id);
}
pub fn close_file(&mut self, file_id: VfsFile) {
self.open_files.remove(&file_id);
}
// TODO introduce an `OpenFile` type instead of using an anonymous tuple.
pub fn open_files(&self) -> impl Iterator<Item = VfsFile> + '_ {
self.open_files.iter().copied()
}
pub fn is_file_open(&self, file_id: VfsFile) -> bool {
self.open_files.contains(&file_id)
}
}

275
crates/red_knot/src/lint.rs Normal file
View File

@@ -0,0 +1,275 @@
use std::cell::RefCell;
use std::ops::Deref;
use std::time::Duration;
use tracing::trace_span;
use red_knot_module_resolver::ModuleName;
use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasTy, SemanticModel};
use ruff_db::parsed::{parsed_module, ParsedModule};
use ruff_db::source::{source_text, SourceText};
use ruff_db::vfs::VfsFile;
use ruff_python_ast as ast;
use ruff_python_ast::visitor::{walk_stmt, Visitor};
use crate::db::Db;
/// Workaround query to test whether the computation should be cancelled.
/// Ideally, push for Salsa to expose an API for testing if cancellation was requested.
#[salsa::tracked]
#[allow(unused_variables)]
pub(crate) fn unwind_if_cancelled(db: &dyn Db) {}
#[salsa::tracked(return_ref)]
pub(crate) fn lint_syntax(db: &dyn Db, file_id: VfsFile) -> Diagnostics {
#[allow(clippy::print_stdout)]
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
for i in 0..10 {
unwind_if_cancelled(db);
println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
std::thread::sleep(Duration::from_secs(1));
}
}
let mut diagnostics = Vec::new();
let source = source_text(db.upcast(), file_id);
lint_lines(&source, &mut diagnostics);
let parsed = parsed_module(db.upcast(), file_id);
if parsed.errors().is_empty() {
let ast = parsed.syntax();
let mut visitor = SyntaxLintVisitor {
diagnostics,
source: &source,
};
visitor.visit_body(&ast.body);
diagnostics = visitor.diagnostics;
} else {
diagnostics.extend(parsed.errors().iter().map(ToString::to_string));
}
Diagnostics::from(diagnostics)
}
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
for (line_number, line) in source.lines().enumerate() {
if line.len() < 88 {
continue;
}
let char_count = line.chars().count();
if char_count > 88 {
diagnostics.push(format!(
"Line {} is too long ({} characters)",
line_number + 1,
char_count
));
}
}
}
#[salsa::tracked(return_ref)]
pub(crate) fn lint_semantic(db: &dyn Db, file_id: VfsFile) -> Diagnostics {
let _span = trace_span!("lint_semantic", ?file_id).entered();
let source = source_text(db.upcast(), file_id);
let parsed = parsed_module(db.upcast(), file_id);
let semantic = SemanticModel::new(db.upcast(), file_id);
if !parsed.is_valid() {
return Diagnostics::Empty;
}
let context = SemanticLintContext {
source,
parsed,
semantic,
diagnostics: RefCell::new(Vec::new()),
};
SemanticVisitor { context: &context }.visit_body(parsed.suite());
Diagnostics::from(context.diagnostics.take())
}
fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
match import {
AnyImportRef::Import(import) => {
for alias in &import.names {
let ty = alias.ty(&context.semantic);
if ty.is_unknown() {
context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
}
}
}
AnyImportRef::ImportFrom(import) => {
for alias in &import.names {
let ty = alias.ty(&context.semantic);
if ty.is_unknown() {
context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
}
}
}
}
}
fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
let semantic = &context.semantic;
// TODO we should have a special marker on the real typing module (from typeshed) so if you
// have your own "typing" module in your project, we don't consider it THE typing module (and
// same for other stdlib modules that our lint rules care about)
let Some(typing) = semantic.resolve_module(ModuleName::new("typing").unwrap()) else {
return;
};
let Some(typing_override) = semantic.public_symbol(&typing, "override") else {
return;
};
let override_ty = semantic.public_symbol_ty(typing_override);
let Type::Class(class_ty) = class.ty(semantic) else {
return;
};
for function in class
.body
.iter()
.filter_map(|stmt| stmt.as_function_def_stmt())
{
let Type::Function(ty) = function.ty(semantic) else {
return;
};
// TODO this shouldn't make direct use of the Db; see comment on SemanticModel::db
let db = semantic.db();
if ty.has_decorator(db, override_ty) {
let method_name = ty.name(db);
if class_ty.inherited_class_member(db, &method_name).is_none() {
// TODO should have a qualname() method to support nested classes
context.push_diagnostic(
format!(
"Method {}.{} is decorated with `typing.override` but does not override any base class method",
class_ty.name(db),
method_name,
));
}
}
}
}
pub(crate) struct SemanticLintContext<'a> {
source: SourceText,
parsed: &'a ParsedModule,
semantic: SemanticModel<'a>,
diagnostics: RefCell<Vec<String>>,
}
impl<'db> SemanticLintContext<'db> {
#[allow(unused)]
pub(crate) fn source_text(&self) -> &str {
self.source.as_str()
}
#[allow(unused)]
pub(crate) fn ast(&self) -> &'db ast::ModModule {
self.parsed.syntax()
}
pub(crate) fn push_diagnostic(&self, diagnostic: String) {
self.diagnostics.borrow_mut().push(diagnostic);
}
#[allow(unused)]
pub(crate) fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
self.diagnostics.get_mut().extend(diagnostics);
}
}
#[derive(Debug)]
struct SyntaxLintVisitor<'a> {
diagnostics: Vec<String>,
source: &'a str,
}
impl Visitor<'_> for SyntaxLintVisitor<'_> {
fn visit_string_literal(&mut self, string_literal: &'_ ast::StringLiteral) {
// A very naive implementation of use double quotes
let text = &self.source[string_literal.range];
if text.starts_with('\'') {
self.diagnostics
.push("Use double quotes for strings".to_string());
}
}
}
struct SemanticVisitor<'a> {
context: &'a SemanticLintContext<'a>,
}
impl Visitor<'_> for SemanticVisitor<'_> {
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
match stmt {
ast::Stmt::ClassDef(class) => {
lint_bad_override(self.context, class);
}
ast::Stmt::Import(import) => {
lint_unresolved_imports(self.context, AnyImportRef::Import(import));
}
ast::Stmt::ImportFrom(import) => {
lint_unresolved_imports(self.context, AnyImportRef::ImportFrom(import));
}
_ => {}
}
walk_stmt(self, stmt);
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Diagnostics {
Empty,
List(Vec<String>),
}
impl Diagnostics {
pub fn as_slice(&self) -> &[String] {
match self {
Diagnostics::Empty => &[],
Diagnostics::List(list) => list.as_slice(),
}
}
}
impl Deref for Diagnostics {
type Target = [String];
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl From<Vec<String>> for Diagnostics {
fn from(value: Vec<String>) -> Self {
if value.is_empty() {
Diagnostics::Empty
} else {
Diagnostics::List(value)
}
}
}
#[derive(Copy, Clone, Debug)]
enum AnyImportRef<'a> {
Import(&'a ast::StmtImport),
ImportFrom(&'a ast::StmtImportFrom),
}

View File

@@ -1,254 +0,0 @@
//! Sets up logging for Red Knot
use anyhow::Context;
use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;
/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
#[arg(
long,
short = 'v',
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
action = clap::ArgAction::Count,
global = true,
)]
verbose: u8,
}
impl Verbosity {
/// Returns the verbosity level based on the number of `-v` flags.
///
/// Returns `None` if the user did not specify any verbosity flags.
pub(crate) fn level(&self) -> VerbosityLevel {
match self.verbose {
0 => VerbosityLevel::Default,
1 => VerbosityLevel::Verbose,
2 => VerbosityLevel::ExtraVerbose,
_ => VerbosityLevel::Trace,
}
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
/// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN).
Default,
/// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO).
/// Corresponds to `-v`.
Verbose,
/// Enables a more verbose tracing format and emits Ruff and Red Knot events up to [`DEBUG`](tracing::Level::DEBUG).
/// Corresponds to `-vv`
ExtraVerbose,
/// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
Trace,
}
impl VerbosityLevel {
const fn level_filter(self) -> LevelFilter {
match self {
VerbosityLevel::Default => LevelFilter::WARN,
VerbosityLevel::Verbose => LevelFilter::INFO,
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
VerbosityLevel::Trace => LevelFilter::TRACE,
}
}
pub(crate) const fn is_trace(self) -> bool {
matches!(self, VerbosityLevel::Trace)
}
pub(crate) const fn is_extra_verbose(self) -> bool {
matches!(self, VerbosityLevel::ExtraVerbose)
}
}
pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
use tracing_subscriber::prelude::*;
// The `RED_KNOT_LOG` environment variable overrides the default log level.
let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
EnvFilter::builder()
.parse(log_env_variable)
.context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
} else {
match level {
VerbosityLevel::Default => {
// Show warning traces
EnvFilter::default().add_directive(LevelFilter::WARN.into())
}
level => {
let level_filter = level.level_filter();
// Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
let filter = EnvFilter::default().add_directive(
format!("red_knot={level_filter}")
.parse()
.expect("Hardcoded directive to be valid"),
);
filter.add_directive(
format!("ruff={level_filter}")
.parse()
.expect("Hardcoded directive to be valid"),
)
}
}
};
let (profiling_layer, guard) = setup_profile();
let registry = tracing_subscriber::registry()
.with(filter)
.with(profiling_layer);
if level.is_trace() {
let subscriber = registry.with(
tracing_tree::HierarchicalLayer::default()
.with_indent_lines(true)
.with_indent_amount(2)
.with_bracketed_fields(true)
.with_thread_ids(true)
.with_targets(true)
.with_writer(std::io::stderr)
.with_timer(tracing_tree::time::Uptime::default()),
);
subscriber.init();
} else {
let subscriber = registry.with(
tracing_subscriber::fmt::layer()
.event_format(RedKnotFormat {
display_level: true,
display_timestamp: level.is_extra_verbose(),
show_spans: false,
})
.with_writer(std::io::stderr),
);
subscriber.init();
}
Ok(TracingGuard {
_flame_guard: guard,
})
}
#[allow(clippy::type_complexity)]
fn setup_profile<S>() -> (
Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
Option<tracing_flame::FlushGuard<BufWriter<File>>>,
)
where
S: Subscriber + for<'span> LookupSpan<'span>,
{
if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
.expect("Flame layer to be created");
(Some(layer), Some(guard))
} else {
(None, None)
}
}
pub(crate) struct TracingGuard {
_flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
}
struct RedKnotFormat {
display_timestamp: bool,
display_level: bool,
show_spans: bool,
}
/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
impl<S, N> FormatEvent<S, N> for RedKnotFormat
where
S: Subscriber + for<'a> LookupSpan<'a>,
N: for<'a> FormatFields<'a> + 'static,
{
fn format_event(
&self,
ctx: &FmtContext<'_, S, N>,
mut writer: Writer<'_>,
event: &Event<'_>,
) -> fmt::Result {
let meta = event.metadata();
let ansi = writer.has_ansi_escapes();
if self.display_timestamp {
let timestamp = chrono::Local::now()
.format("%Y-%m-%d %H:%M:%S.%f")
.to_string();
if ansi {
write!(writer, "{} ", timestamp.dimmed())?;
} else {
write!(
writer,
"{} ",
chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
)?;
}
}
if self.display_level {
let level = meta.level();
// Same colors as tracing
if ansi {
let formatted_level = level.to_string();
match *level {
tracing::Level::TRACE => {
write!(writer, "{} ", formatted_level.purple().bold())?;
}
tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
}
} else {
write!(writer, "{level} ")?;
}
}
if self.show_spans {
let span = event.parent();
let mut seen = false;
let span = span
.and_then(|id| ctx.span(id))
.or_else(|| ctx.lookup_current());
let scope = span.into_iter().flat_map(|span| span.scope().from_root());
for span in scope {
seen = true;
if ansi {
write!(writer, "{}:", span.metadata().name().bold())?;
} else {
write!(writer, "{}:", span.metadata().name())?;
}
}
if seen {
writer.write_char(' ')?;
}
}
ctx.field_format().format_fields(writer.by_ref(), event)?;
writeln!(writer)
}
}

View File

@@ -1,199 +1,77 @@
use std::process::{ExitCode, Termination};
use std::sync::Mutex;
use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;
use target_version::TargetVersion;
use salsa::ParallelDatabase;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;
use crate::logging::{setup_tracing, Verbosity};
use red_knot::program::{FileWatcherChange, Program};
use red_knot::watch::FileWatcher;
use red_knot::Workspace;
use red_knot_module_resolver::{
set_module_resolution_settings, RawModuleResolutionSettings, TargetVersion,
};
use ruff_db::file_system::{FileSystem, FileSystemPath, OsFileSystem};
use ruff_db::vfs::system_path_to_file;
mod logging;
mod target_version;
mod verbosity;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
#[allow(
clippy::print_stdout,
clippy::unnecessary_wraps,
clippy::print_stderr,
clippy::dbg_macro
)]
#[command(version)]
struct Args {
#[command(subcommand)]
pub(crate) command: Option<Command>,
pub fn main() -> anyhow::Result<()> {
countme::enable(true);
setup_tracing();
#[arg(
long,
help = "Changes the current working directory.",
long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
value_name = "PATH"
)]
current_directory: Option<SystemPathBuf>,
let arguments: Vec<_> = std::env::args().collect();
#[arg(
long,
help = "Path to the virtual environment the project uses",
long_help = "\
Path to the virtual environment the project uses. \
If provided, red-knot will use the `site-packages` directory of this virtual environment \
to resolve type information for the project's third-party dependencies.",
value_name = "PATH"
)]
venv_path: Option<SystemPathBuf>,
#[arg(
long,
value_name = "DIRECTORY",
help = "Custom directory to use for stdlib typeshed stubs"
)]
custom_typeshed_dir: Option<SystemPathBuf>,
#[arg(
long,
value_name = "PATH",
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
)]
extra_search_path: Option<Vec<SystemPathBuf>>,
#[arg(
long,
help = "Python version to assume when resolving types",
value_name = "VERSION"
)]
target_version: Option<TargetVersion>,
#[clap(flatten)]
verbosity: Verbosity,
#[arg(
long,
help = "Run in watch mode by re-running whenever files change",
short = 'W'
)]
watch: bool,
}
impl Args {
fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
let mut configuration = Configuration::default();
if let Some(target_version) = self.target_version {
configuration.target_version = Some(target_version.into());
}
if let Some(venv_path) = &self.venv_path {
configuration.search_paths.site_packages = Some(SitePackages::Derived {
venv_path: SystemPath::absolute(venv_path, cli_cwd),
});
}
if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
configuration.search_paths.custom_typeshed =
Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
}
if let Some(extra_search_paths) = &self.extra_search_path {
configuration.search_paths.extra_paths = extra_search_paths
.iter()
.map(|path| Some(SystemPath::absolute(path, cli_cwd)))
.collect();
}
configuration
}
}
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Start the language server
Server,
}
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
run().unwrap_or_else(|error| {
use std::io::Write;
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
let mut stderr = std::io::stderr().lock();
// This communicates that this isn't a linter error but Red Knot itself hard-errored for
// some reason (e.g. failed to resolve the configuration)
writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
// Currently we generally only see one error, but e.g. with io errors when resolving
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
// "failed to read file: subdir/pyproject.toml")
for cause in error.chain() {
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
}
ExitStatus::Error
})
}
fn run() -> anyhow::Result<ExitStatus> {
let args = Args::parse_from(std::env::args());
if matches!(args.command, Some(Command::Server)) {
return run_server().map(|()| ExitStatus::Success);
if arguments.len() < 2 {
eprintln!("Usage: red_knot <path>");
return Err(anyhow::anyhow!("Invalid arguments"));
}
let verbosity = args.verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
let fs = OsFileSystem;
let entry_point = FileSystemPath::new(&arguments[1]);
// The base path to which all CLI arguments are relative to.
let cli_base_path = {
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
SystemPathBuf::from_path_buf(cwd)
.map_err(|path| {
anyhow!(
"The current working directory `{}` contains non-Unicode characters. Red Knot only supports Unicode paths.",
path.display()
)
})?
};
if !fs.exists(entry_point) {
eprintln!("The entry point does not exist.");
return Err(anyhow::anyhow!("Invalid arguments"));
}
let cwd = args
.current_directory
.as_ref()
.map(|cwd| {
if cwd.as_std_path().is_dir() {
Ok(SystemPath::absolute(cwd, &cli_base_path))
} else {
Err(anyhow!(
"Provided current-directory path `{cwd}` is not a directory"
))
}
})
.transpose()?
.unwrap_or_else(|| cli_base_path.clone());
if !fs.is_file(entry_point) {
eprintln!("The entry point is not a file.");
return Err(anyhow::anyhow!("Invalid arguments"));
}
let system = OsSystem::new(cwd.clone());
let cli_configuration = args.to_configuration(&cwd);
let workspace_metadata = WorkspaceMetadata::discover(
system.current_directory(),
&system,
Some(&cli_configuration),
)?;
let entry_point = entry_point.to_path_buf();
// TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists.
let mut db = RootDatabase::new(workspace_metadata, system)?;
let workspace_folder = entry_point.parent().unwrap();
let workspace = Workspace::new(workspace_folder.to_path_buf());
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
let workspace_search_path = workspace.root().to_path_buf();
let mut program = Program::new(workspace, fs);
set_module_resolution_settings(
&mut program,
RawModuleResolutionSettings {
extra_paths: vec![],
workspace_root: workspace_search_path,
site_packages: None,
custom_typeshed: None,
target_version: TargetVersion::Py38,
},
);
let entry_id = system_path_to_file(&program, entry_point.clone()).unwrap();
program.workspace_mut().open_file(entry_id);
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -205,162 +83,121 @@ fn run() -> anyhow::Result<ExitStatus> {
}
})?;
let exit_status = if args.watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)
};
let file_changes_notifier = main_loop.file_changes_notifier();
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
// Watch for file changes and re-trigger the analysis.
let mut file_watcher = FileWatcher::new(move |changes| {
file_changes_notifier.notify(changes);
})?;
std::mem::forget(db);
file_watcher.watch_folder(workspace_folder.as_std_path())?;
Ok(exit_status)
}
main_loop.run(&mut program);
#[derive(Copy, Clone)]
pub enum ExitStatus {
/// Checking was successful and there were no errors.
Success = 0,
println!("{}", countme::get_all());
/// Checking was successful but there were errors.
Failure = 1,
/// Checking failed.
Error = 2,
}
impl Termination for ExitStatus {
fn report(self) -> ExitCode {
ExitCode::from(self as u8)
}
Ok(())
}
struct MainLoop {
/// Sender that can be used to send messages to the main loop.
sender: crossbeam_channel::Sender<MainLoopMessage>,
/// Receiver for the messages sent **to** the main loop.
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
/// The file system watcher, if running in watch mode.
watcher: Option<WorkspaceWatcher>,
cli_configuration: Configuration,
orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
}
impl MainLoop {
fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
let (sender, receiver) = crossbeam_channel::bounded(10);
fn new() -> (Self, MainLoopCancellationToken) {
let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
let mut orchestrator = Orchestrator {
receiver: orchestrator_receiver,
sender: main_loop_sender.clone(),
revision: 0,
};
std::thread::spawn(move || {
orchestrator.run();
});
(
Self {
sender: sender.clone(),
receiver,
watcher: None,
cli_configuration,
orchestrator_sender,
main_loop_receiver,
},
MainLoopCancellationToken {
sender: main_loop_sender,
},
MainLoopCancellationToken { sender },
)
}
fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
tracing::debug!("Starting watch mode");
let sender = self.sender.clone();
let watcher = watch::directory_watcher(move |event| {
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
})?;
self.watcher = Some(WorkspaceWatcher::new(watcher, db));
self.run(db);
Ok(ExitStatus::Success)
fn file_changes_notifier(&self) -> FileChangesNotifier {
FileChangesNotifier {
sender: self.orchestrator_sender.clone(),
}
}
fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
#[allow(clippy::print_stderr)]
fn run(self, program: &mut Program) {
self.orchestrator_sender
.send(OrchestratorMessage::Run)
.unwrap();
let result = self.main_loop(db);
for message in &self.main_loop_receiver {
tracing::trace!("Main Loop: Tick");
tracing::debug!("Exiting main loop");
result
}
fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
// Schedule the first check.
tracing::debug!("Starting main loop");
let mut revision = 0u64;
while let Ok(message) = self.receiver.recv() {
match message {
MainLoopMessage::CheckWorkspace => {
let db = db.snapshot();
let sender = self.sender.clone();
MainLoopMessage::CheckProgram { revision } => {
let program = program.snapshot();
let sender = self.orchestrator_sender.clone();
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
// Spawn a new task that checks the program. This needs to be done in a separate thread
// to prevent blocking the main loop here.
rayon::spawn(move || {
if let Ok(result) = db.check() {
// Send the result back to the main loop for printing.
if let Ok(result) = program.check() {
sender
.send(MainLoopMessage::CheckCompleted { result, revision })
.send(OrchestratorMessage::CheckProgramCompleted {
diagnostics: result,
revision,
})
.unwrap();
}
});
}
MainLoopMessage::CheckCompleted {
result,
revision: check_revision,
} => {
let has_diagnostics = !result.is_empty();
if check_revision == revision {
#[allow(clippy::print_stdout)]
for diagnostic in result {
println!("{}", diagnostic.display(db));
}
} else {
tracing::debug!(
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
);
}
if self.watcher.is_none() {
return if has_diagnostics {
ExitStatus::Failure
} else {
ExitStatus::Success
};
}
tracing::trace!("Counts after last check:\n{}", countme::get_all());
}
MainLoopMessage::ApplyChanges(changes) => {
revision += 1;
// Automatically cancels any pending queries and waits for them to complete.
db.apply_changes(changes, Some(&self.cli_configuration));
if let Some(watcher) = self.watcher.as_mut() {
watcher.update(db);
}
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
program.apply_changes(changes);
}
MainLoopMessage::CheckCompleted(diagnostics) => {
eprintln!("{}", diagnostics.join("\n"));
eprintln!("{}", countme::get_all());
}
MainLoopMessage::Exit => {
// Cancel any pending queries and wait for them to complete.
// TODO: Don't use Salsa internal APIs
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
let _ = db.zalsa_mut();
return ExitStatus::Success;
eprintln!("{}", countme::get_all());
return;
}
}
tracing::debug!("Waiting for next main loop message.");
}
}
}
ExitStatus::Success
impl Drop for MainLoop {
fn drop(&mut self) {
self.orchestrator_sender
.send(OrchestratorMessage::Shutdown)
.unwrap();
}
}
#[derive(Debug, Clone)]
struct FileChangesNotifier {
sender: crossbeam_channel::Sender<OrchestratorMessage>,
}
impl FileChangesNotifier {
fn notify(&self, changes: Vec<FileWatcherChange>) {
self.sender
.send(OrchestratorMessage::FileChanges(changes))
.unwrap();
}
}
@@ -375,14 +212,165 @@ impl MainLoopCancellationToken {
}
}
struct Orchestrator {
/// Sends messages to the main loop.
sender: crossbeam_channel::Sender<MainLoopMessage>,
/// Receives messages from the main loop.
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
revision: usize,
}
impl Orchestrator {
#[allow(clippy::print_stderr)]
fn run(&mut self) {
while let Ok(message) = self.receiver.recv() {
match message {
OrchestratorMessage::Run => {
self.sender
.send(MainLoopMessage::CheckProgram {
revision: self.revision,
})
.unwrap();
}
OrchestratorMessage::CheckProgramCompleted {
diagnostics,
revision,
} => {
// Only take the diagnostics if they are for the latest revision.
if self.revision == revision {
self.sender
.send(MainLoopMessage::CheckCompleted(diagnostics))
.unwrap();
} else {
tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
}
}
OrchestratorMessage::FileChanges(changes) => {
// Request cancellation, but wait until all analysis tasks have completed to
// avoid stale messages in the next main loop.
self.revision += 1;
self.debounce_changes(changes);
}
OrchestratorMessage::Shutdown => {
return self.shutdown();
}
}
}
}
fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
loop {
// Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 100ms.
crossbeam_channel::select! {
recv(self.receiver) -> message => {
match message {
Ok(OrchestratorMessage::Shutdown) => {
return self.shutdown();
}
Ok(OrchestratorMessage::FileChanges(file_changes)) => {
changes.extend(file_changes);
}
Ok(OrchestratorMessage::CheckProgramCompleted { .. })=> {
// disregard any outdated completion message.
}
Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
Err(_) => {
// There are no more senders, no point in waiting for more messages
return;
}
}
},
default(std::time::Duration::from_millis(10)) => {
// No more file changes after 10 ms, send the changes and schedule a new analysis
self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision}).unwrap();
return;
}
}
}
}
#[allow(clippy::unused_self)]
fn shutdown(&self) {
tracing::trace!("Shutting down orchestrator.");
}
}
/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
CheckWorkspace,
CheckCompleted {
result: Vec<Box<dyn Diagnostic>>,
revision: u64,
},
ApplyChanges(Vec<watch::ChangeEvent>),
CheckProgram { revision: usize },
CheckCompleted(Vec<String>),
ApplyChanges(Vec<FileWatcherChange>),
Exit,
}
#[derive(Debug)]
enum OrchestratorMessage {
Run,
Shutdown,
CheckProgramCompleted {
diagnostics: Vec<String>,
revision: usize,
},
FileChanges(Vec<FileWatcherChange>),
}
fn setup_tracing() {
let subscriber = Registry::default().with(
tracing_tree::HierarchicalLayer::default()
.with_indent_lines(true)
.with_indent_amount(2)
.with_bracketed_fields(true)
.with_thread_ids(true)
.with_targets(true)
.with_writer(|| Box::new(std::io::stderr()))
.with_timer(Uptime::default())
.with_filter(LoggingFilter {
trace_level: Level::TRACE,
}),
);
tracing::subscriber::set_global_default(subscriber).unwrap();
}
struct LoggingFilter {
trace_level: Level,
}
impl LoggingFilter {
fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
self.trace_level
} else {
Level::INFO
};
meta.level() <= &filter
}
}
impl<S> Filter<S> for LoggingFilter {
fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
self.is_enabled(meta)
}
fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
if self.is_enabled(meta) {
Interest::always()
} else {
Interest::never()
}
}
fn max_level_hint(&self) -> Option<LevelFilter> {
Some(LevelFilter::from_level(self.trace_level))
}
}

View File

@@ -0,0 +1,32 @@
use ruff_db::vfs::VfsFile;
use salsa::Cancelled;
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
use crate::program::Program;
impl Program {
/// Checks all open files in the workspace and its dependencies.
#[tracing::instrument(level = "debug", skip_all)]
pub fn check(&self) -> Result<Vec<String>, Cancelled> {
self.with_db(|db| {
let mut result = Vec::new();
for open_file in db.workspace.open_files() {
result.extend_from_slice(&db.check_file_impl(open_file));
}
result
})
}
#[tracing::instrument(level = "debug", skip(self))]
pub fn check_file(&self, file: VfsFile) -> Result<Diagnostics, Cancelled> {
self.with_db(|db| db.check_file_impl(file))
}
fn check_file_impl(&self, file: VfsFile) -> Diagnostics {
let mut diagnostics = Vec::new();
diagnostics.extend_from_slice(lint_syntax(self, file));
diagnostics.extend_from_slice(lint_semantic(self, file));
Diagnostics::from(diagnostics)
}
}

View File

@@ -0,0 +1,131 @@
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::sync::Arc;
use salsa::{Cancelled, Database};
use red_knot_module_resolver::{Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use ruff_db::file_system::{FileSystem, FileSystemPathBuf};
use ruff_db::vfs::{Vfs, VfsFile, VfsPath};
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
use crate::db::{Db, Jar};
use crate::Workspace;
mod check;
#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
pub struct Program {
storage: salsa::Storage<Program>,
vfs: Vfs,
fs: Arc<dyn FileSystem + Send + Sync + RefUnwindSafe>,
workspace: Workspace,
}
impl Program {
pub fn new<Fs>(workspace: Workspace, file_system: Fs) -> Self
where
Fs: FileSystem + 'static + Send + Sync + RefUnwindSafe,
{
Self {
storage: salsa::Storage::default(),
vfs: Vfs::default(),
fs: Arc::new(file_system),
workspace,
}
}
pub fn apply_changes<I>(&mut self, changes: I)
where
I: IntoIterator<Item = FileWatcherChange>,
{
for change in changes {
VfsFile::touch_path(self, &VfsPath::file_system(change.path));
}
}
pub fn workspace(&self) -> &Workspace {
&self.workspace
}
pub fn workspace_mut(&mut self) -> &mut Workspace {
&mut self.workspace
}
#[allow(clippy::unnecessary_wraps)]
fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
where
F: FnOnce(&Program) -> T + UnwindSafe,
{
// TODO: Catch in `Cancelled::catch`
// See https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60
Ok(f(self))
}
}
impl Upcast<dyn SemanticDb> for Program {
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
self
}
}
impl Upcast<dyn SourceDb> for Program {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
}
impl Upcast<dyn ResolverDb> for Program {
fn upcast(&self) -> &(dyn ResolverDb + 'static) {
self
}
}
impl ResolverDb for Program {}
impl SemanticDb for Program {}
impl SourceDb for Program {
fn file_system(&self) -> &dyn FileSystem {
&*self.fs
}
fn vfs(&self) -> &Vfs {
&self.vfs
}
}
impl Database for Program {}
impl Db for Program {}
impl salsa::ParallelDatabase for Program {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
vfs: self.vfs.snapshot(),
fs: self.fs.clone(),
workspace: self.workspace.clone(),
})
}
}
#[derive(Clone, Debug)]
pub struct FileWatcherChange {
path: FileSystemPathBuf,
#[allow(unused)]
kind: FileChangeKind,
}
impl FileWatcherChange {
pub fn new(path: FileSystemPathBuf, kind: FileChangeKind) -> Self {
Self { path, kind }
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
Created,
Modified,
Deleted,
}
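A rough sketch of how a host might wire these types together; the `Workspace` constructor is not part of this diff, so `workspace` is taken as given, and `MemoryFileSystem` is used purely as an illustrative `FileSystem` implementation:

```rust
use ruff_db::file_system::{FileSystemPathBuf, MemoryFileSystem};

// `workspace: Workspace` is assumed to be constructed elsewhere.
let mut program = Program::new(workspace, MemoryFileSystem::default());

// Forward a change reported by the file watcher so that Salsa invalidates any
// queries that depend on the touched file.
program.apply_changes([FileWatcherChange::new(
    FileSystemPathBuf::from("src/foo.py"),
    FileChangeKind::Modified,
)]);
```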

View File

@@ -1,62 +0,0 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
Py37,
Py38,
#[default]
Py39,
Py310,
Py311,
Py312,
Py313,
}
impl TargetVersion {
const fn as_str(self) -> &'static str {
match self {
Self::Py37 => "py37",
Self::Py38 => "py38",
Self::Py39 => "py39",
Self::Py310 => "py310",
Self::Py311 => "py311",
Self::Py312 => "py312",
Self::Py313 => "py313",
}
}
}
impl std::fmt::Display for TargetVersion {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.as_str())
}
}
impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
fn from(value: TargetVersion) -> Self {
match value {
TargetVersion::Py37 => Self::PY37,
TargetVersion::Py38 => Self::PY38,
TargetVersion::Py39 => Self::PY39,
TargetVersion::Py310 => Self::PY310,
TargetVersion::Py311 => Self::PY311,
TargetVersion::Py312 => Self::PY312,
TargetVersion::Py313 => Self::PY313,
}
}
}
#[cfg(test)]
mod tests {
use crate::target_version::TargetVersion;
use red_knot_python_semantic::PythonVersion;
#[test]
fn same_default_as_python_version() {
assert_eq!(
PythonVersion::from(TargetVersion::default()),
PythonVersion::default()
);
}
}

View File

@@ -1 +0,0 @@

View File

@@ -0,0 +1,82 @@
use std::path::Path;
use crate::program::{FileChangeKind, FileWatcherChange};
use anyhow::Context;
use notify::event::{CreateKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
use ruff_db::file_system::FileSystemPath;
pub struct FileWatcher {
watcher: RecommendedWatcher,
}
pub trait EventHandler: Send + 'static {
fn handle(&self, changes: Vec<FileWatcherChange>);
}
impl<F> EventHandler for F
where
F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
fn handle(&self, changes: Vec<FileWatcherChange>) {
let f = self;
f(changes);
}
}
impl FileWatcher {
pub fn new<E>(handler: E) -> anyhow::Result<Self>
where
E: EventHandler,
{
Self::from_handler(Box::new(handler))
}
fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
match changes {
Ok(event) => {
// TODO verify that this handles all events correctly
let change_kind = match event.kind {
EventKind::Create(CreateKind::File) => FileChangeKind::Created,
EventKind::Modify(_) => FileChangeKind::Modified,
EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
_ => {
return;
}
};
let mut changes = Vec::new();
for path in event.paths {
if path.is_file() {
if let Some(fs_path) = FileSystemPath::from_std_path(&path) {
changes.push(FileWatcherChange::new(
fs_path.to_path_buf(),
change_kind,
));
}
}
}
if !changes.is_empty() {
handler.handle(changes);
}
}
// TODO proper error handling
Err(err) => {
panic!("Error: {err}");
}
}
})
.context("Failed to create file watcher.")?;
Ok(Self { watcher })
}
pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
self.watcher.watch(path, RecursiveMode::Recursive)?;
Ok(())
}
}
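A usage sketch, assuming it runs inside a function returning `anyhow::Result<()>`; the channel plumbing is illustrative and not part of this diff:

```rust
use std::path::Path;
use std::sync::mpsc;

let (sender, receiver) = mpsc::channel();

// The closure satisfies the blanket `EventHandler` impl above.
let mut watcher = FileWatcher::new(move |changes| {
    // Ignore send errors if the receiving side has already shut down.
    let _ = sender.send(changes);
})?;
watcher.watch_folder(Path::new("src"))?;

// Elsewhere, the main loop would drain the receiver and call `Program::apply_changes`.
for changes in receiver {
    println!("observed {} change(s)", changes.len());
}
```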

File diff suppressed because it is too large

View File

@@ -1,5 +1,5 @@
[package]
name = "red_knot_vendored"
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
@@ -12,20 +12,25 @@ license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }
compact_str = { workspace = true }
camino = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }
[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }
zip = { workspace = true }
[dev-dependencies]
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }
[features]
zstd = ["zip/zstd"]
deflate = ["zip/deflate"]
[lints]
workspace = true

View File

@@ -1,5 +1,9 @@
# Vendored types for the stdlib
# Red Knot
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_vendored/vendor/typeshed`. The file `crates/red_knot_vendored/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
A work-in-progress multi-file module resolver for Ruff.
## Vendored types for the stdlib
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).

View File

@@ -3,7 +3,7 @@
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_vendored/vendor/typeshed` change.
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.
use std::fs::File;
use std::path::Path;
@@ -23,23 +23,8 @@ const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
let mut zip = ZipWriter::new(writer);
// Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
// [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
// by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
// (WASM itself is already slower than a native build for a specific platform).
// We can't use `#[cfg(...)]` here because the target-arch in a build script is the
// architecture of the system running the build script and not the architecture of the build-target.
// That's why we use the `TARGET` environment variable here.
let method = if cfg!(feature = "zstd") {
CompressionMethod::Zstd
} else if cfg!(feature = "deflate") {
CompressionMethod::Deflated
} else {
CompressionMethod::Stored
};
let options = FileOptions::default()
.compression_method(method)
.compression_method(CompressionMethod::Zstd)
.unix_permissions(0o644);
for entry in walkdir::WalkDir::new(directory_path) {
@@ -69,7 +54,7 @@ fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
}
fn main() {
println!("cargo::rerun-if-changed={TYPESHED_SOURCE_DIR}");
println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
assert!(
Path::new(TYPESHED_SOURCE_DIR).is_dir(),
"Where is typeshed?"

View File

@@ -0,0 +1,264 @@
use ruff_db::Upcast;
use crate::resolver::{
file_to_module,
internal::{ModuleNameIngredient, ModuleResolverSettings},
resolve_module_query,
};
use crate::typeshed::parse_typeshed_versions;
#[salsa::jar(db=Db)]
pub struct Jar(
ModuleNameIngredient<'_>,
ModuleResolverSettings,
resolve_module_query,
file_to_module,
parse_typeshed_versions,
);
pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}
#[cfg(test)]
pub(crate) mod tests {
use std::sync;
use salsa::DebugWithDb;
use ruff_db::file_system::{FileSystem, FileSystemPathBuf, MemoryFileSystem, OsFileSystem};
use ruff_db::vfs::Vfs;
use crate::resolver::{set_module_resolution_settings, RawModuleResolutionSettings};
use crate::supported_py_version::TargetVersion;
use super::*;
#[salsa::db(Jar, ruff_db::Jar)]
pub(crate) struct TestDb {
storage: salsa::Storage<Self>,
file_system: TestFileSystem,
events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
vfs: Vfs,
}
impl TestDb {
pub(crate) fn new() -> Self {
Self {
storage: salsa::Storage::default(),
file_system: TestFileSystem::Memory(MemoryFileSystem::default()),
events: sync::Arc::default(),
vfs: Vfs::with_stubbed_vendored(),
}
}
/// Returns the memory file system.
///
/// ## Panics
/// If this test db isn't using a memory file system.
pub(crate) fn memory_file_system(&self) -> &MemoryFileSystem {
if let TestFileSystem::Memory(fs) = &self.file_system {
fs
} else {
panic!("The test db is not using a memory file system");
}
}
/// Uses the real file system instead of the memory file system.
///
/// This is useful for testing advanced file system features like permissions, symlinks, etc.
///
/// Note that any files written to the memory file system won't be copied over.
pub(crate) fn with_os_file_system(&mut self) {
self.file_system = TestFileSystem::Os(OsFileSystem);
}
#[allow(unused)]
pub(crate) fn vfs_mut(&mut self) -> &mut Vfs {
&mut self.vfs
}
/// Takes the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");
let events = inner.get_mut().unwrap();
std::mem::take(&mut *events)
}
/// Clears the salsa events.
///
/// ## Panics
/// If there are any pending salsa snapshots.
pub(crate) fn clear_salsa_events(&mut self) {
self.take_salsa_events();
}
}
impl Upcast<dyn ruff_db::Db> for TestDb {
fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
self
}
}
impl ruff_db::Db for TestDb {
fn file_system(&self) -> &dyn ruff_db::file_system::FileSystem {
self.file_system.inner()
}
fn vfs(&self) -> &ruff_db::vfs::Vfs {
&self.vfs
}
}
impl Db for TestDb {}
impl salsa::Database for TestDb {
fn salsa_event(&self, event: salsa::Event) {
tracing::trace!("event: {:?}", event.debug(self));
let mut events = self.events.lock().unwrap();
events.push(event);
}
}
impl salsa::ParallelDatabase for TestDb {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
file_system: self.file_system.snapshot(),
events: self.events.clone(),
vfs: self.vfs.snapshot(),
})
}
}
enum TestFileSystem {
Memory(MemoryFileSystem),
#[allow(unused)]
Os(OsFileSystem),
}
impl TestFileSystem {
fn inner(&self) -> &dyn FileSystem {
match self {
Self::Memory(inner) => inner,
Self::Os(inner) => inner,
}
}
fn snapshot(&self) -> Self {
match self {
Self::Memory(inner) => Self::Memory(inner.snapshot()),
Self::Os(inner) => Self::Os(inner.snapshot()),
}
}
}
pub(crate) struct TestCaseBuilder {
db: TestDb,
src: FileSystemPathBuf,
custom_typeshed: FileSystemPathBuf,
site_packages: FileSystemPathBuf,
target_version: Option<TargetVersion>,
}
impl TestCaseBuilder {
#[must_use]
pub(crate) fn with_target_version(mut self, target_version: TargetVersion) -> Self {
self.target_version = Some(target_version);
self
}
pub(crate) fn build(self) -> TestCase {
let TestCaseBuilder {
mut db,
src,
custom_typeshed,
site_packages,
target_version,
} = self;
let settings = RawModuleResolutionSettings {
target_version: target_version.unwrap_or_default(),
extra_paths: vec![],
workspace_root: src.clone(),
custom_typeshed: Some(custom_typeshed.clone()),
site_packages: Some(site_packages.clone()),
};
set_module_resolution_settings(&mut db, settings);
TestCase {
db,
src,
custom_typeshed,
site_packages,
}
}
}
pub(crate) struct TestCase {
pub(crate) db: TestDb,
pub(crate) src: FileSystemPathBuf,
pub(crate) custom_typeshed: FileSystemPathBuf,
pub(crate) site_packages: FileSystemPathBuf,
}
pub(crate) fn create_resolver_builder() -> std::io::Result<TestCaseBuilder> {
static VERSIONS_DATA: &str = "\
asyncio: 3.8- # 'Regular' package on py38+
asyncio.tasks: 3.9-3.11
collections: 3.9- # 'Regular' package on py39+
functools: 3.8-
importlib: 3.9- # Namespace package on py39+
xml: 3.8-3.8 # Namespace package on py38 only
";
let db = TestDb::new();
let src = FileSystemPathBuf::from("src");
let site_packages = FileSystemPathBuf::from("site_packages");
let custom_typeshed = FileSystemPathBuf::from("typeshed");
let fs = db.memory_file_system();
fs.create_directory_all(&src)?;
fs.create_directory_all(&site_packages)?;
fs.create_directory_all(&custom_typeshed)?;
fs.write_file(custom_typeshed.join("stdlib/VERSIONS"), VERSIONS_DATA)?;
// Regular package on py38+
fs.create_directory_all(custom_typeshed.join("stdlib/asyncio"))?;
fs.touch(custom_typeshed.join("stdlib/asyncio/__init__.pyi"))?;
fs.write_file(
custom_typeshed.join("stdlib/asyncio/tasks.pyi"),
"class Task: ...",
)?;
// Regular package on py39+
fs.create_directory_all(custom_typeshed.join("stdlib/collections"))?;
fs.touch(custom_typeshed.join("stdlib/collections/__init__.pyi"))?;
// Namespace package on py38 only
fs.create_directory_all(custom_typeshed.join("stdlib/xml"))?;
fs.touch(custom_typeshed.join("stdlib/xml/etree.pyi"))?;
// Namespace package on py39+
fs.create_directory_all(custom_typeshed.join("stdlib/importlib"))?;
fs.touch(custom_typeshed.join("stdlib/importlib/abc.pyi"))?;
fs.write_file(
custom_typeshed.join("stdlib/functools.pyi"),
"def update_wrapper(): ...",
)?;
Ok(TestCaseBuilder {
db,
src,
custom_typeshed,
site_packages,
target_version: None,
})
}
}
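The tests further below use this fixture roughly as follows; the module name and file contents are illustrative only, and the snippet assumes it runs inside a `#[test]` returning `std::io::Result<()>`:

```rust
use crate::{resolve_module, ModuleName};

// Build the fixture with an explicit target version.
let TestCase { db, src, .. } = create_resolver_builder()?
    .with_target_version(TargetVersion::Py39)
    .build();

// Add a first-party module to the in-memory file system...
db.memory_file_system()
    .write_file(src.join("example.py"), "x = 1")?;

// ...and resolve it through the public API.
let module = resolve_module(&db, ModuleName::new_static("example").unwrap());
assert!(module.is_some());
```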

View File

@@ -0,0 +1,15 @@
mod db;
mod module;
mod module_name;
mod path;
mod resolver;
mod state;
mod supported_py_version;
mod typeshed;
pub use db::{Db, Jar};
pub use module::{Module, ModuleKind};
pub use module_name::ModuleName;
pub use resolver::{resolve_module, set_module_resolution_settings, RawModuleResolutionSettings};
pub use supported_py_version::TargetVersion;
pub use typeshed::{TypeshedVersionsParseError, TypeshedVersionsParseErrorKind};
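Putting the re-exported pieces together, usage is expected to look roughly like this; `db` is assumed to be some host-defined type implementing this crate's `Db` trait, and the paths are placeholders:

```rust
use red_knot_module_resolver::{
    resolve_module, set_module_resolution_settings, ModuleName, RawModuleResolutionSettings,
    TargetVersion,
};
use ruff_db::file_system::FileSystemPathBuf;

// `db` implements `red_knot_module_resolver::Db`; constructing it is host-specific.
set_module_resolution_settings(
    &mut db,
    RawModuleResolutionSettings {
        target_version: TargetVersion::Py310,
        extra_paths: vec![],
        workspace_root: FileSystemPathBuf::from("src"),
        custom_typeshed: None,
        site_packages: Some(FileSystemPathBuf::from(".venv/lib/python3.10/site-packages")),
    },
);

if let Some(module) = resolve_module(&db, ModuleName::new_static("collections.abc").unwrap()) {
    // `module.file()` is the `VfsFile` holding the resolved stub or source file.
    println!("resolved as a {:?} module", module.kind());
}
```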

View File

@@ -1,10 +1,11 @@
use std::fmt::Formatter;
use std::sync::Arc;
use ruff_db::files::File;
use ruff_db::vfs::VfsFile;
use super::path::SearchPath;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::path::{ModuleResolutionPathBuf, ModuleResolutionPathRef};
/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
@@ -16,8 +17,8 @@ impl Module {
pub(crate) fn new(
name: ModuleName,
kind: ModuleKind,
search_path: SearchPath,
file: File,
search_path: Arc<ModuleResolutionPathBuf>,
file: VfsFile,
) -> Self {
Self {
inner: Arc::new(ModuleInner {
@@ -35,13 +36,13 @@ impl Module {
}
/// The file containing the source code that defines this module
pub fn file(&self) -> File {
pub fn file(&self) -> VfsFile {
self.inner.file
}
/// The search path from which the module was resolved.
pub(crate) fn search_path(&self) -> &SearchPath {
&self.inner.search_path
pub(crate) fn search_path(&self) -> ModuleResolutionPathRef {
ModuleResolutionPathRef::from(&*self.inner.search_path)
}
/// Determine whether this module is a single-file module or a package
@@ -61,12 +62,23 @@ impl std::fmt::Debug for Module {
}
}
impl salsa::DebugWithDb<dyn Db> for Module {
fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
f.debug_struct("Module")
.field("name", &self.name())
.field("kind", &self.kind())
.field("file", &self.file().debug(db.upcast()))
.field("search_path", &self.search_path())
.finish()
}
}
#[derive(PartialEq, Eq)]
struct ModuleInner {
name: ModuleName,
kind: ModuleKind,
search_path: SearchPath,
file: File,
search_path: Arc<ModuleResolutionPathBuf>,
file: VfsFile,
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -77,9 +89,3 @@ pub enum ModuleKind {
/// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
Package,
}
impl ModuleKind {
pub const fn is_package(self) -> bool {
matches!(self, ModuleKind::Package)
}
}

View File

@@ -42,7 +42,7 @@ impl ModuleName {
/// ## Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
/// assert_eq!(ModuleName::new_static(""), None);
@@ -55,7 +55,8 @@ impl ModuleName {
#[inline]
#[must_use]
pub fn new_static(name: &'static str) -> Option<Self> {
Self::is_valid_name(name).then(|| Self(CompactString::const_new(name)))
// TODO(Micha): Use CompactString::const_new once we upgrade to 0.8 https://github.com/ParkMyCar/compact_str/pull/336
Self::is_valid_name(name).then(|| Self(CompactString::from(name)))
}
#[must_use]
@@ -68,7 +69,7 @@ impl ModuleName {
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
/// ```
@@ -82,7 +83,7 @@ impl ModuleName {
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
/// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
@@ -101,7 +102,7 @@ impl ModuleName {
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
///
@@ -133,7 +134,7 @@ impl ModuleName {
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
/// use red_knot_module_resolver::ModuleName;
///
/// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
/// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
@@ -168,24 +169,6 @@ impl ModuleName {
};
Some(Self(name))
}
/// Extend `self` with the components of `other`
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// let mut module_name = ModuleName::new_static("foo").unwrap();
/// module_name.extend(&ModuleName::new_static("bar").unwrap());
/// assert_eq!(&module_name, "foo.bar");
/// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
/// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
/// ```
pub fn extend(&mut self, other: &ModuleName) {
self.0.push('.');
self.0.push_str(other);
}
}
impl Deref for ModuleName {

View File

@@ -0,0 +1,997 @@
/// Internal abstractions for differentiating between different kinds of search paths.
///
/// TODO(Alex): Should we use different types for absolute vs relative paths?
/// <https://github.com/astral-sh/ruff/pull/12141#discussion_r1667010245>
use std::fmt;
use ruff_db::file_system::{FileSystemPath, FileSystemPathBuf};
use ruff_db::vfs::{system_path_to_file, VfsFile};
use crate::module_name::ModuleName;
use crate::state::ResolverState;
use crate::typeshed::TypeshedVersionsQueryResult;
/// Enumeration of the different kinds of search paths type checkers are expected to support.
///
/// N.B. Although we don't implement `Ord` for this enum, they are ordered in terms of the
/// priority that we want to give these modules when resolving them,
/// as per [the order given in the typing spec]
///
/// [the order given in the typing spec]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum ModuleResolutionPathBufInner {
Extra(FileSystemPathBuf),
FirstParty(FileSystemPathBuf),
StandardLibrary(FileSystemPathBuf),
SitePackages(FileSystemPathBuf),
}
impl ModuleResolutionPathBufInner {
fn push(&mut self, component: &str) {
let extension = camino::Utf8Path::new(component).extension();
let inner = match self {
Self::Extra(ref mut path) => {
if let Some(extension) = extension {
assert!(
matches!(extension, "pyi" | "py"),
"Extension must be `py` or `pyi`; got `{extension}`"
);
}
path
}
Self::FirstParty(ref mut path) => {
if let Some(extension) = extension {
assert!(
matches!(extension, "pyi" | "py"),
"Extension must be `py` or `pyi`; got `{extension}`"
);
}
path
}
Self::StandardLibrary(ref mut path) => {
if let Some(extension) = extension {
assert_eq!(
extension, "pyi",
"Extension must be `pyi`; got `{extension}`"
);
}
path
}
Self::SitePackages(ref mut path) => {
if let Some(extension) = extension {
assert!(
matches!(extension, "pyi" | "py"),
"Extension must be `py` or `pyi`; got `{extension}`"
);
}
path
}
};
assert!(
inner.extension().is_none(),
"Cannot push part {component} to {inner}, which already has an extension"
);
inner.push(component);
}
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub(crate) struct ModuleResolutionPathBuf(ModuleResolutionPathBufInner);
impl ModuleResolutionPathBuf {
/// Push a new part to the path,
/// while maintaining the invariant that the path can only have `.py` or `.pyi` extensions.
/// For the stdlib variant specifically, it may only have a `.pyi` extension.
///
/// ## Panics:
/// If a component with an invalid extension is passed
pub(crate) fn push(&mut self, component: &str) {
self.0.push(component);
}
#[must_use]
pub(crate) fn extra(path: impl Into<FileSystemPathBuf>) -> Option<Self> {
let path = path.into();
path.extension()
.map_or(true, |ext| matches!(ext, "py" | "pyi"))
.then_some(Self(ModuleResolutionPathBufInner::Extra(path)))
}
#[must_use]
pub(crate) fn first_party(path: impl Into<FileSystemPathBuf>) -> Option<Self> {
let path = path.into();
path.extension()
.map_or(true, |ext| matches!(ext, "pyi" | "py"))
.then_some(Self(ModuleResolutionPathBufInner::FirstParty(path)))
}
#[must_use]
pub(crate) fn standard_library(path: impl Into<FileSystemPathBuf>) -> Option<Self> {
let path = path.into();
path.extension()
.map_or(true, |ext| ext == "pyi")
.then_some(Self(ModuleResolutionPathBufInner::StandardLibrary(path)))
}
#[must_use]
pub(crate) fn stdlib_from_typeshed_root(typeshed_root: &FileSystemPath) -> Option<Self> {
Self::standard_library(typeshed_root.join(FileSystemPath::new("stdlib")))
}
#[must_use]
pub(crate) fn site_packages(path: impl Into<FileSystemPathBuf>) -> Option<Self> {
let path = path.into();
path.extension()
.map_or(true, |ext| matches!(ext, "pyi" | "py"))
.then_some(Self(ModuleResolutionPathBufInner::SitePackages(path)))
}
#[must_use]
pub(crate) fn is_regular_package(&self, search_path: &Self, resolver: &ResolverState) -> bool {
ModuleResolutionPathRef::from(self).is_regular_package(search_path, resolver)
}
#[must_use]
pub(crate) fn is_directory(&self, search_path: &Self, resolver: &ResolverState) -> bool {
ModuleResolutionPathRef::from(self).is_directory(search_path, resolver)
}
#[must_use]
pub(crate) fn with_pyi_extension(&self) -> Self {
ModuleResolutionPathRef::from(self).with_pyi_extension()
}
#[must_use]
pub(crate) fn with_py_extension(&self) -> Option<Self> {
ModuleResolutionPathRef::from(self).with_py_extension()
}
#[must_use]
pub(crate) fn relativize_path<'a>(
&'a self,
absolute_path: &'a (impl AsRef<FileSystemPath> + ?Sized),
) -> Option<ModuleResolutionPathRef<'a>> {
ModuleResolutionPathRef::from(self).relativize_path(absolute_path.as_ref())
}
/// Returns `None` if the path doesn't exist, isn't accessible, or if the path points to a directory.
pub(crate) fn to_vfs_file(
&self,
search_path: &Self,
resolver: &ResolverState,
) -> Option<VfsFile> {
ModuleResolutionPathRef::from(self).to_vfs_file(search_path, resolver)
}
}
impl fmt::Debug for ModuleResolutionPathBuf {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let (name, path) = match &self.0 {
ModuleResolutionPathBufInner::Extra(path) => ("Extra", path),
ModuleResolutionPathBufInner::FirstParty(path) => ("FirstParty", path),
ModuleResolutionPathBufInner::SitePackages(path) => ("SitePackages", path),
ModuleResolutionPathBufInner::StandardLibrary(path) => ("StandardLibrary", path),
};
f.debug_tuple(&format!("ModuleResolutionPathBuf::{name}"))
.field(path)
.finish()
}
}
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
enum ModuleResolutionPathRefInner<'a> {
Extra(&'a FileSystemPath),
FirstParty(&'a FileSystemPath),
StandardLibrary(&'a FileSystemPath),
SitePackages(&'a FileSystemPath),
}
impl<'a> ModuleResolutionPathRefInner<'a> {
#[must_use]
fn query_stdlib_version<'db>(
module_path: &'a FileSystemPath,
stdlib_search_path: Self,
stdlib_root: &FileSystemPath,
resolver_state: &ResolverState<'db>,
) -> TypeshedVersionsQueryResult {
let Some(module_name) = stdlib_search_path
.relativize_path(module_path)
.and_then(Self::to_module_name)
else {
return TypeshedVersionsQueryResult::DoesNotExist;
};
let ResolverState {
db,
typeshed_versions,
target_version,
} = resolver_state;
typeshed_versions.query_module(&module_name, *db, stdlib_root, *target_version)
}
#[must_use]
fn is_directory(&self, search_path: Self, resolver: &ResolverState) -> bool {
match (self, search_path) {
(Self::Extra(path), Self::Extra(_)) => resolver.file_system().is_directory(path),
(Self::FirstParty(path), Self::FirstParty(_)) => resolver.file_system().is_directory(path),
(Self::SitePackages(path), Self::SitePackages(_)) => resolver.file_system().is_directory(path),
(Self::StandardLibrary(path), Self::StandardLibrary(stdlib_root)) => {
match Self::query_stdlib_version( path, search_path, stdlib_root, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists => resolver.file_system().is_directory(path),
TypeshedVersionsQueryResult::MaybeExists => resolver.file_system().is_directory(path),
}
}
(path, root) => unreachable!(
"The search path should always be the same variant as `self` (got: {path:?}, {root:?})"
)
}
}
#[must_use]
fn is_regular_package(&self, search_path: Self, resolver: &ResolverState) -> bool {
fn is_non_stdlib_pkg(state: &ResolverState, path: &FileSystemPath) -> bool {
let file_system = state.file_system();
file_system.exists(&path.join("__init__.py"))
|| file_system.exists(&path.join("__init__.pyi"))
}
match (self, search_path) {
(Self::Extra(path), Self::Extra(_)) => is_non_stdlib_pkg(resolver, path),
(Self::FirstParty(path), Self::FirstParty(_)) => is_non_stdlib_pkg(resolver, path),
(Self::SitePackages(path), Self::SitePackages(_)) => is_non_stdlib_pkg(resolver, path),
// Unlike the other variants:
// (1) Account for VERSIONS
// (2) Only test for `__init__.pyi`, not `__init__.py`
(Self::StandardLibrary(path), Self::StandardLibrary(stdlib_root)) => {
match Self::query_stdlib_version( path, search_path, stdlib_root, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists => resolver.db.file_system().exists(&path.join("__init__.pyi")),
TypeshedVersionsQueryResult::MaybeExists => resolver.db.file_system().exists(&path.join("__init__.pyi")),
}
}
(path, root) => unreachable!(
"The search path should always be the same variant as `self` (got: {path:?}, {root:?})"
)
}
}
fn to_vfs_file(self, search_path: Self, resolver: &ResolverState) -> Option<VfsFile> {
match (self, search_path) {
(Self::Extra(path), Self::Extra(_)) => system_path_to_file(resolver.db.upcast(), path),
(Self::FirstParty(path), Self::FirstParty(_)) => system_path_to_file(resolver.db.upcast(), path),
(Self::SitePackages(path), Self::SitePackages(_)) => {
system_path_to_file(resolver.db.upcast(), path)
}
(Self::StandardLibrary(path), Self::StandardLibrary(stdlib_root)) => {
match Self::query_stdlib_version(path, search_path, stdlib_root, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => None,
TypeshedVersionsQueryResult::Exists => system_path_to_file(resolver.db.upcast(), path),
TypeshedVersionsQueryResult::MaybeExists => system_path_to_file(resolver.db.upcast(), path)
}
}
(path, root) => unreachable!(
"The search path should always be the same variant as `self` (got: {path:?}, {root:?})"
)
}
}
#[must_use]
fn to_module_name(self) -> Option<ModuleName> {
let (fs_path, skip_final_part) = match self {
Self::Extra(path) | Self::FirstParty(path) | Self::SitePackages(path) => (
path,
path.ends_with("__init__.py") || path.ends_with("__init__.pyi"),
),
Self::StandardLibrary(path) => (path, path.ends_with("__init__.pyi")),
};
let parent_components = fs_path
.parent()?
.components()
.map(|component| component.as_str());
if skip_final_part {
ModuleName::from_components(parent_components)
} else {
ModuleName::from_components(parent_components.chain(fs_path.file_stem()))
}
}
#[must_use]
fn with_pyi_extension(&self) -> ModuleResolutionPathBufInner {
match self {
Self::Extra(path) => ModuleResolutionPathBufInner::Extra(path.with_extension("pyi")),
Self::FirstParty(path) => {
ModuleResolutionPathBufInner::FirstParty(path.with_extension("pyi"))
}
Self::StandardLibrary(path) => {
ModuleResolutionPathBufInner::StandardLibrary(path.with_extension("pyi"))
}
Self::SitePackages(path) => {
ModuleResolutionPathBufInner::SitePackages(path.with_extension("pyi"))
}
}
}
#[must_use]
fn with_py_extension(&self) -> Option<ModuleResolutionPathBufInner> {
match self {
Self::Extra(path) => Some(ModuleResolutionPathBufInner::Extra(
path.with_extension("py"),
)),
Self::FirstParty(path) => Some(ModuleResolutionPathBufInner::FirstParty(
path.with_extension("py"),
)),
Self::StandardLibrary(_) => None,
Self::SitePackages(path) => Some(ModuleResolutionPathBufInner::SitePackages(
path.with_extension("py"),
)),
}
}
#[must_use]
fn relativize_path(&self, absolute_path: &'a FileSystemPath) -> Option<Self> {
match self {
Self::Extra(root) => absolute_path.strip_prefix(root).ok().and_then(|path| {
path.extension()
.map_or(true, |ext| matches!(ext, "py" | "pyi"))
.then_some(Self::Extra(path))
}),
Self::FirstParty(root) => absolute_path.strip_prefix(root).ok().and_then(|path| {
path.extension()
.map_or(true, |ext| matches!(ext, "pyi" | "py"))
.then_some(Self::FirstParty(path))
}),
Self::StandardLibrary(root) => absolute_path.strip_prefix(root).ok().and_then(|path| {
path.extension()
.map_or(true, |ext| ext == "pyi")
.then_some(Self::StandardLibrary(path))
}),
Self::SitePackages(root) => absolute_path.strip_prefix(root).ok().and_then(|path| {
path.extension()
.map_or(true, |ext| matches!(ext, "pyi" | "py"))
.then_some(Self::SitePackages(path))
}),
}
}
}
#[derive(Clone, Copy, PartialEq, Eq)]
pub(crate) struct ModuleResolutionPathRef<'a>(ModuleResolutionPathRefInner<'a>);
impl<'a> ModuleResolutionPathRef<'a> {
#[must_use]
pub(crate) fn is_directory(
&self,
search_path: impl Into<Self>,
resolver: &ResolverState,
) -> bool {
self.0.is_directory(search_path.into().0, resolver)
}
#[must_use]
pub(crate) fn is_regular_package(
&self,
search_path: impl Into<Self>,
resolver: &ResolverState,
) -> bool {
self.0.is_regular_package(search_path.into().0, resolver)
}
#[must_use]
pub(crate) fn to_vfs_file(
self,
search_path: impl Into<Self>,
resolver: &ResolverState,
) -> Option<VfsFile> {
self.0.to_vfs_file(search_path.into().0, resolver)
}
#[must_use]
pub(crate) fn to_module_name(self) -> Option<ModuleName> {
self.0.to_module_name()
}
#[must_use]
pub(crate) fn with_pyi_extension(&self) -> ModuleResolutionPathBuf {
ModuleResolutionPathBuf(self.0.with_pyi_extension())
}
#[must_use]
pub(crate) fn with_py_extension(self) -> Option<ModuleResolutionPathBuf> {
self.0.with_py_extension().map(ModuleResolutionPathBuf)
}
#[must_use]
pub(crate) fn relativize_path(&self, absolute_path: &'a FileSystemPath) -> Option<Self> {
self.0.relativize_path(absolute_path).map(Self)
}
}
impl fmt::Debug for ModuleResolutionPathRef<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let (name, path) = match &self.0 {
ModuleResolutionPathRefInner::Extra(path) => ("Extra", path),
ModuleResolutionPathRefInner::FirstParty(path) => ("FirstParty", path),
ModuleResolutionPathRefInner::SitePackages(path) => ("SitePackages", path),
ModuleResolutionPathRefInner::StandardLibrary(path) => ("StandardLibrary", path),
};
f.debug_tuple(&format!("ModuleResolutionPathRef::{name}"))
.field(path)
.finish()
}
}
impl<'a> From<&'a ModuleResolutionPathBuf> for ModuleResolutionPathRef<'a> {
fn from(value: &'a ModuleResolutionPathBuf) -> Self {
let inner = match &value.0 {
ModuleResolutionPathBufInner::Extra(path) => ModuleResolutionPathRefInner::Extra(path),
ModuleResolutionPathBufInner::FirstParty(path) => {
ModuleResolutionPathRefInner::FirstParty(path)
}
ModuleResolutionPathBufInner::StandardLibrary(path) => {
ModuleResolutionPathRefInner::StandardLibrary(path)
}
ModuleResolutionPathBufInner::SitePackages(path) => {
ModuleResolutionPathRefInner::SitePackages(path)
}
};
ModuleResolutionPathRef(inner)
}
}
impl PartialEq<FileSystemPath> for ModuleResolutionPathRef<'_> {
fn eq(&self, other: &FileSystemPath) -> bool {
let fs_path = match self.0 {
ModuleResolutionPathRefInner::Extra(path) => path,
ModuleResolutionPathRefInner::FirstParty(path) => path,
ModuleResolutionPathRefInner::SitePackages(path) => path,
ModuleResolutionPathRefInner::StandardLibrary(path) => path,
};
fs_path == other
}
}
impl PartialEq<ModuleResolutionPathRef<'_>> for FileSystemPath {
fn eq(&self, other: &ModuleResolutionPathRef) -> bool {
other == self
}
}
impl PartialEq<FileSystemPathBuf> for ModuleResolutionPathRef<'_> {
fn eq(&self, other: &FileSystemPathBuf) -> bool {
self == &**other
}
}
impl PartialEq<ModuleResolutionPathRef<'_>> for FileSystemPathBuf {
fn eq(&self, other: &ModuleResolutionPathRef<'_>) -> bool {
&**self == other
}
}
#[cfg(test)]
mod tests {
use insta::assert_debug_snapshot;
use crate::db::tests::{create_resolver_builder, TestCase, TestDb};
use crate::supported_py_version::TargetVersion;
use crate::typeshed::LazyTypeshedVersions;
use super::*;
impl ModuleResolutionPathBuf {
#[must_use]
pub(crate) fn join(&self, component: &str) -> Self {
ModuleResolutionPathRef::from(self).join(component)
}
}
impl<'a> ModuleResolutionPathRef<'a> {
#[must_use]
fn join(
&self,
component: &'a (impl AsRef<FileSystemPath> + ?Sized),
) -> ModuleResolutionPathBuf {
let mut result = self.to_path_buf();
result.push(component.as_ref().as_str());
result
}
#[must_use]
pub(crate) fn to_path_buf(self) -> ModuleResolutionPathBuf {
let inner = match self.0 {
ModuleResolutionPathRefInner::Extra(path) => {
ModuleResolutionPathBufInner::Extra(path.to_path_buf())
}
ModuleResolutionPathRefInner::FirstParty(path) => {
ModuleResolutionPathBufInner::FirstParty(path.to_path_buf())
}
ModuleResolutionPathRefInner::StandardLibrary(path) => {
ModuleResolutionPathBufInner::StandardLibrary(path.to_path_buf())
}
ModuleResolutionPathRefInner::SitePackages(path) => {
ModuleResolutionPathBufInner::SitePackages(path.to_path_buf())
}
};
ModuleResolutionPathBuf(inner)
}
#[must_use]
pub(crate) const fn is_stdlib_search_path(&self) -> bool {
matches!(&self.0, ModuleResolutionPathRefInner::StandardLibrary(_))
}
}
#[test]
fn constructor_rejects_non_pyi_stdlib_paths() {
assert_eq!(ModuleResolutionPathBuf::standard_library("foo.py"), None);
assert_eq!(
ModuleResolutionPathBuf::standard_library("foo/__init__.py"),
None
);
}
#[test]
fn path_buf_debug_impl() {
assert_debug_snapshot!(
ModuleResolutionPathBuf::standard_library("foo/bar.pyi").unwrap(),
@r###"
ModuleResolutionPathBuf::StandardLibrary(
"foo/bar.pyi",
)
"###
);
}
#[test]
fn path_ref_debug_impl() {
assert_debug_snapshot!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::Extra(FileSystemPath::new("foo/bar.py"))),
@r###"
ModuleResolutionPathRef::Extra(
"foo/bar.py",
)
"###
);
}
#[test]
fn with_extension_methods() {
assert_eq!(
ModuleResolutionPathBuf::standard_library("foo")
.unwrap()
.with_py_extension(),
None
);
assert_eq!(
ModuleResolutionPathBuf::standard_library("foo")
.unwrap()
.with_pyi_extension(),
ModuleResolutionPathBuf(ModuleResolutionPathBufInner::StandardLibrary(
FileSystemPathBuf::from("foo.pyi")
))
);
assert_eq!(
ModuleResolutionPathBuf::first_party("foo/bar")
.unwrap()
.with_py_extension()
.unwrap(),
ModuleResolutionPathBuf(ModuleResolutionPathBufInner::FirstParty(
FileSystemPathBuf::from("foo/bar.py")
))
);
}
#[test]
fn module_name_1_part() {
assert_eq!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::Extra(FileSystemPath::new(
"foo"
)))
.to_module_name(),
ModuleName::new_static("foo")
);
assert_eq!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::StandardLibrary(
FileSystemPath::new("foo.pyi")
))
.to_module_name(),
ModuleName::new_static("foo")
);
assert_eq!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::FirstParty(
FileSystemPath::new("foo/__init__.py")
))
.to_module_name(),
ModuleName::new_static("foo")
);
}
#[test]
fn module_name_2_parts() {
assert_eq!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::StandardLibrary(
FileSystemPath::new("foo/bar")
))
.to_module_name(),
ModuleName::new_static("foo.bar")
);
assert_eq!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::Extra(FileSystemPath::new(
"foo/bar.pyi"
)))
.to_module_name(),
ModuleName::new_static("foo.bar")
);
assert_eq!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::SitePackages(
FileSystemPath::new("foo/bar/__init__.pyi")
))
.to_module_name(),
ModuleName::new_static("foo.bar")
);
}
#[test]
fn module_name_3_parts() {
assert_eq!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::SitePackages(
FileSystemPath::new("foo/bar/__init__.pyi")
))
.to_module_name(),
ModuleName::new_static("foo.bar")
);
assert_eq!(
ModuleResolutionPathRef(ModuleResolutionPathRefInner::SitePackages(
FileSystemPath::new("foo/bar/baz")
))
.to_module_name(),
ModuleName::new_static("foo.bar.baz")
);
}
#[test]
fn join() {
assert_eq!(
ModuleResolutionPathBuf::standard_library("foo")
.unwrap()
.join("bar"),
ModuleResolutionPathBuf(ModuleResolutionPathBufInner::StandardLibrary(
FileSystemPathBuf::from("foo/bar")
))
);
assert_eq!(
ModuleResolutionPathBuf::standard_library("foo")
.unwrap()
.join("bar.pyi"),
ModuleResolutionPathBuf(ModuleResolutionPathBufInner::StandardLibrary(
FileSystemPathBuf::from("foo/bar.pyi")
))
);
assert_eq!(
ModuleResolutionPathBuf::extra("foo")
.unwrap()
.join("bar.py"),
ModuleResolutionPathBuf(ModuleResolutionPathBufInner::Extra(
FileSystemPathBuf::from("foo/bar.py")
))
);
}
#[test]
#[should_panic(expected = "Extension must be `pyi`; got `py`")]
fn stdlib_path_invalid_join_py() {
ModuleResolutionPathBuf::standard_library("foo")
.unwrap()
.push("bar.py");
}
#[test]
#[should_panic(expected = "Extension must be `pyi`; got `rs`")]
fn stdlib_path_invalid_join_rs() {
ModuleResolutionPathBuf::standard_library("foo")
.unwrap()
.push("bar.rs");
}
#[test]
#[should_panic(expected = "Extension must be `py` or `pyi`; got `rs`")]
fn non_stdlib_path_invalid_join_rs() {
ModuleResolutionPathBuf::site_packages("foo")
.unwrap()
.push("bar.rs");
}
#[test]
#[should_panic(expected = "already has an extension")]
fn invalid_stdlib_join_too_many_extensions() {
ModuleResolutionPathBuf::standard_library("foo.pyi")
.unwrap()
.push("bar.pyi");
}
#[test]
fn relativize_stdlib_path_errors() {
let root = ModuleResolutionPathBuf::standard_library("foo/stdlib").unwrap();
// Must have a `.pyi` extension or no extension:
let bad_absolute_path = FileSystemPath::new("foo/stdlib/x.py");
assert_eq!(root.relativize_path(bad_absolute_path), None);
let second_bad_absolute_path = FileSystemPath::new("foo/stdlib/x.rs");
assert_eq!(root.relativize_path(second_bad_absolute_path), None);
// Must be a path that is a child of `root`:
let third_bad_absolute_path = FileSystemPath::new("bar/stdlib/x.pyi");
assert_eq!(root.relativize_path(third_bad_absolute_path), None);
}
#[test]
fn relativize_non_stdlib_path_errors() {
let root = ModuleResolutionPathBuf::extra("foo/stdlib").unwrap();
// Must have a `.py` extension, a `.pyi` extension, or no extension:
let bad_absolute_path = FileSystemPath::new("foo/stdlib/x.rs");
assert_eq!(root.relativize_path(bad_absolute_path), None);
// Must be a path that is a child of `root`:
let second_bad_absolute_path = FileSystemPath::new("bar/stdlib/x.pyi");
assert_eq!(root.relativize_path(second_bad_absolute_path), None);
}
#[test]
fn relativize_path() {
assert_eq!(
ModuleResolutionPathBuf::standard_library("foo/baz")
.unwrap()
.relativize_path("foo/baz/eggs/__init__.pyi")
.unwrap(),
ModuleResolutionPathRef(ModuleResolutionPathRefInner::StandardLibrary(
FileSystemPath::new("eggs/__init__.pyi")
))
);
}
fn py38_stdlib_test_case() -> (TestDb, ModuleResolutionPathBuf) {
let TestCase {
db,
custom_typeshed,
..
} = create_resolver_builder().unwrap().build();
let stdlib_module_path =
ModuleResolutionPathBuf::stdlib_from_typeshed_root(&custom_typeshed).unwrap();
(db, stdlib_module_path)
}
#[test]
fn mocked_typeshed_existing_regular_stdlib_pkg_py38() {
let (db, stdlib_path) = py38_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py38,
};
let asyncio_regular_package = stdlib_path.join("asyncio");
assert!(asyncio_regular_package.is_directory(&stdlib_path, &resolver));
assert!(asyncio_regular_package.is_regular_package(&stdlib_path, &resolver));
// Paths to directories don't resolve to VfsFiles
assert_eq!(
asyncio_regular_package.to_vfs_file(&stdlib_path, &resolver),
None
);
assert!(asyncio_regular_package
.join("__init__.pyi")
.to_vfs_file(&stdlib_path, &resolver)
.is_some());
// The `asyncio` package exists on Python 3.8, but the `asyncio.tasks` submodule does not,
// according to the `VERSIONS` file in our typeshed mock:
let asyncio_tasks_module = stdlib_path.join("asyncio/tasks.pyi");
assert_eq!(
asyncio_tasks_module.to_vfs_file(&stdlib_path, &resolver),
None
);
assert!(!asyncio_tasks_module.is_directory(&stdlib_path, &resolver));
assert!(!asyncio_tasks_module.is_regular_package(&stdlib_path, &resolver));
}
#[test]
fn mocked_typeshed_existing_namespace_stdlib_pkg_py38() {
let (db, stdlib_path) = py38_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py38,
};
let xml_namespace_package = stdlib_path.join("xml");
assert!(xml_namespace_package.is_directory(&stdlib_path, &resolver));
// Paths to directories don't resolve to VfsFiles
assert_eq!(
xml_namespace_package.to_vfs_file(&stdlib_path, &resolver),
None
);
assert!(!xml_namespace_package.is_regular_package(&stdlib_path, &resolver));
let xml_etree = stdlib_path.join("xml/etree.pyi");
assert!(!xml_etree.is_directory(&stdlib_path, &resolver));
assert!(xml_etree.to_vfs_file(&stdlib_path, &resolver).is_some());
assert!(!xml_etree.is_regular_package(&stdlib_path, &resolver));
}
#[test]
fn mocked_typeshed_single_file_stdlib_module_py38() {
let (db, stdlib_path) = py38_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py38,
};
let functools_module = stdlib_path.join("functools.pyi");
assert!(functools_module
.to_vfs_file(&stdlib_path, &resolver)
.is_some());
assert!(!functools_module.is_directory(&stdlib_path, &resolver));
assert!(!functools_module.is_regular_package(&stdlib_path, &resolver));
}
#[test]
fn mocked_typeshed_nonexistent_regular_stdlib_pkg_py38() {
let (db, stdlib_path) = py38_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py38,
};
let collections_regular_package = stdlib_path.join("collections");
assert_eq!(
collections_regular_package.to_vfs_file(&stdlib_path, &resolver),
None
);
assert!(!collections_regular_package.is_directory(&stdlib_path, &resolver));
assert!(!collections_regular_package.is_regular_package(&stdlib_path, &resolver));
}
#[test]
fn mocked_typeshed_nonexistent_namespace_stdlib_pkg_py38() {
let (db, stdlib_path) = py38_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py38,
};
let importlib_namespace_package = stdlib_path.join("importlib");
assert_eq!(
importlib_namespace_package.to_vfs_file(&stdlib_path, &resolver),
None
);
assert!(!importlib_namespace_package.is_directory(&stdlib_path, &resolver));
assert!(!importlib_namespace_package.is_regular_package(&stdlib_path, &resolver));
let importlib_abc = stdlib_path.join("importlib/abc.pyi");
assert_eq!(importlib_abc.to_vfs_file(&stdlib_path, &resolver), None);
assert!(!importlib_abc.is_directory(&stdlib_path, &resolver));
assert!(!importlib_abc.is_regular_package(&stdlib_path, &resolver));
}
#[test]
fn mocked_typeshed_nonexistent_single_file_module_py38() {
let (db, stdlib_path) = py38_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py38,
};
let non_existent = stdlib_path.join("doesnt_even_exist");
assert_eq!(non_existent.to_vfs_file(&stdlib_path, &resolver), None);
assert!(!non_existent.is_directory(&stdlib_path, &resolver));
assert!(!non_existent.is_regular_package(&stdlib_path, &resolver));
}
fn py39_stdlib_test_case() -> (TestDb, ModuleResolutionPathBuf) {
let TestCase {
db,
custom_typeshed,
..
} = create_resolver_builder()
.unwrap()
.with_target_version(TargetVersion::Py39)
.build();
let stdlib_module_path =
ModuleResolutionPathBuf::stdlib_from_typeshed_root(&custom_typeshed).unwrap();
(db, stdlib_module_path)
}
#[test]
fn mocked_typeshed_existing_regular_stdlib_pkgs_py39() {
let (db, stdlib_path) = py39_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py39,
};
// Since we've set the target version to Py39,
// `collections` should now exist as a directory, according to VERSIONS...
let collections_regular_package = stdlib_path.join("collections");
assert!(collections_regular_package.is_directory(&stdlib_path, &resolver));
assert!(collections_regular_package.is_regular_package(&stdlib_path, &resolver));
// (This is still `None`, as directories don't resolve to `Vfs` files)
assert_eq!(
collections_regular_package.to_vfs_file(&stdlib_path, &resolver),
None
);
assert!(collections_regular_package
.join("__init__.pyi")
.to_vfs_file(&stdlib_path, &resolver)
.is_some());
// ...and so should the `asyncio.tasks` submodule (though it's still not a directory):
let asyncio_tasks_module = stdlib_path.join("asyncio/tasks.pyi");
assert!(asyncio_tasks_module
.to_vfs_file(&stdlib_path, &resolver)
.is_some());
assert!(!asyncio_tasks_module.is_directory(&stdlib_path, &resolver));
assert!(!asyncio_tasks_module.is_regular_package(&stdlib_path, &resolver));
}
#[test]
fn mocked_typeshed_existing_namespace_stdlib_pkg_py39() {
let (db, stdlib_path) = py39_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py39,
};
// The `importlib` directory now also exists...
let importlib_namespace_package = stdlib_path.join("importlib");
assert!(importlib_namespace_package.is_directory(&stdlib_path, &resolver));
assert!(!importlib_namespace_package.is_regular_package(&stdlib_path, &resolver));
// (This is still `None`, as directories don't resolve to `Vfs` files)
assert_eq!(
importlib_namespace_package.to_vfs_file(&stdlib_path, &resolver),
None
);
// ...As do submodules in the `importlib` namespace package:
let importlib_abc = importlib_namespace_package.join("abc.pyi");
assert!(!importlib_abc.is_directory(&stdlib_path, &resolver));
assert!(!importlib_abc.is_regular_package(&stdlib_path, &resolver));
assert!(importlib_abc.to_vfs_file(&stdlib_path, &resolver).is_some());
}
#[test]
fn mocked_typeshed_nonexistent_namespace_stdlib_pkg_py39() {
let (db, stdlib_path) = py39_stdlib_test_case();
let resolver = ResolverState {
db: &db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version: TargetVersion::Py39,
};
// The `xml` package no longer exists on py39:
let xml_namespace_package = stdlib_path.join("xml");
assert_eq!(
xml_namespace_package.to_vfs_file(&stdlib_path, &resolver),
None
);
assert!(!xml_namespace_package.is_directory(&stdlib_path, &resolver));
assert!(!xml_namespace_package.is_regular_package(&stdlib_path, &resolver));
let xml_etree = xml_namespace_package.join("etree.pyi");
assert_eq!(xml_etree.to_vfs_file(&stdlib_path, &resolver), None);
assert!(!xml_etree.is_directory(&stdlib_path, &resolver));
assert!(!xml_etree.is_regular_package(&stdlib_path, &resolver));
}
}

View File

@@ -0,0 +1,985 @@
use std::ops::Deref;
use std::sync::Arc;
use ruff_db::file_system::FileSystemPathBuf;
use ruff_db::vfs::{vfs_path_to_file, VfsFile, VfsPath};
use crate::db::Db;
use crate::module::{Module, ModuleKind};
use crate::module_name::ModuleName;
use crate::path::ModuleResolutionPathBuf;
use crate::resolver::internal::ModuleResolverSettings;
use crate::state::ResolverState;
use crate::supported_py_version::TargetVersion;
/// Configures the module resolver settings.
///
/// Must be called before calling any other module resolution functions.
pub fn set_module_resolution_settings(db: &mut dyn Db, config: RawModuleResolutionSettings) {
// There's no concurrency issue here because we hold a `&mut dyn Db` reference. No other
// thread can mutate the `Db` while we're in this call, so using `try_get` to test if
// the settings have already been set is safe.
let resolved_settings = config.into_configuration_settings();
if let Some(existing) = ModuleResolverSettings::try_get(db) {
existing.set_settings(db).to(resolved_settings);
} else {
ModuleResolverSettings::new(db, resolved_settings);
}
}
/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
let interned_name = internal::ModuleNameIngredient::new(db, module_name);
resolve_module_query(db, interned_name)
}
/// Salsa query that resolves an interned [`ModuleNameIngredient`] to a module.
///
/// This query should not be called directly. Instead, use [`resolve_module`]. It only exists
/// because Salsa requires the module name to be an ingredient.
#[salsa::tracked]
pub(crate) fn resolve_module_query<'db>(
db: &'db dyn Db,
module_name: internal::ModuleNameIngredient<'db>,
) -> Option<Module> {
let _span = tracing::trace_span!("resolve_module", ?module_name).entered();
let name = module_name.name(db);
let (search_path, module_file, kind) = resolve_name(db, name)?;
let module = Module::new(name.clone(), kind, search_path, module_file);
Some(module)
}
/// Resolves the module for the given path.
///
/// Returns `None` if the path is not a module locatable via any of the known search paths.
#[allow(unused)]
pub(crate) fn path_to_module(db: &dyn Db, path: &VfsPath) -> Option<Module> {
// It's not entirely clear on first sight why this method calls `file_to_module` instead of
// it being the other way round, considering that the first thing that `file_to_module` does
// is to retrieve the file's path.
//
// The reason is that `file_to_module` is a tracked Salsa query, and Salsa queries require that
// all arguments are Salsa ingredients (something stored in Salsa). `Path`s aren't Salsa ingredients, but
// `VfsFile` is. So we retrieve the `path`'s `VfsFile` here so that we can make use of
// Salsa's caching and invalidation.
let file = vfs_path_to_file(db.upcast(), path)?;
file_to_module(db, file)
}
/// Resolves the module for the file with the given id.
///
/// Returns `None` if the file is not a module locatable via any of the known search paths.
#[salsa::tracked]
pub(crate) fn file_to_module(db: &dyn Db, file: VfsFile) -> Option<Module> {
let _span = tracing::trace_span!("file_to_module", ?file).entered();
let VfsPath::FileSystem(path) = file.path(db.upcast()) else {
todo!("VendoredPaths are not yet supported")
};
let resolver_settings = module_resolver_settings(db);
let relative_path = resolver_settings
.search_paths()
.iter()
.find_map(|root| root.relativize_path(path))?;
let module_name = relative_path.to_module_name()?;
// Resolve the module name to see if Python would resolve the name to the same path.
// If it doesn't, then that means that multiple modules have the same name in different
// root paths, but that the module corresponding to `path` is in a lower priority search path,
// in which case we ignore it.
let module = resolve_module(db, module_name)?;
if file == module.file() {
Some(module)
} else {
// This path is for a module with the same name but with a different precedence. For example:
// ```
// src/foo.py
// src/foo/__init__.py
// ```
// The module name of `src/foo.py` is `foo`, but the module loaded by Python is `src/foo/__init__.py`.
// That means we need to ignore `src/foo.py` even though it resolves to the same module name.
None
}
}
/// "Raw" configuration settings for module resolution: unvalidated, unnormalized
#[derive(Eq, PartialEq, Debug)]
pub struct RawModuleResolutionSettings {
/// The target Python version the user has specified
pub target_version: TargetVersion,
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
/// or pyright's stubPath configuration setting.
pub extra_paths: Vec<FileSystemPathBuf>,
/// The root of the workspace, used for finding first-party modules.
pub workspace_root: FileSystemPathBuf,
/// Optional (already validated) path to standard-library typeshed stubs.
/// If this is not provided, we will fallback to our vendored typeshed stubs
/// bundled as a zip file in the binary
pub custom_typeshed: Option<FileSystemPathBuf>,
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub site_packages: Option<FileSystemPathBuf>,
}
impl RawModuleResolutionSettings {
/// Implementation of the typing spec's [module resolution order]
///
/// TODO(Alex): this method does multiple `.unwrap()` calls when it should really return an error.
/// Each `.unwrap()` call is a point where we're validating a setting that the user would pass
/// and transforming it into an internal representation for a validated path.
/// Rather than panicking if a path fails to validate, we should display an error message to the user
/// and exit the process with a nonzero exit code.
/// This validation should probably be done outside of Salsa?
///
/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
fn into_configuration_settings(self) -> ModuleResolutionSettings {
let RawModuleResolutionSettings {
target_version,
extra_paths,
workspace_root,
site_packages,
custom_typeshed,
} = self;
let mut paths: Vec<ModuleResolutionPathBuf> = extra_paths
.into_iter()
.map(|fs_path| ModuleResolutionPathBuf::extra(fs_path).unwrap())
.collect();
paths.push(ModuleResolutionPathBuf::first_party(workspace_root).unwrap());
if let Some(custom_typeshed) = custom_typeshed {
paths.push(
ModuleResolutionPathBuf::stdlib_from_typeshed_root(&custom_typeshed).unwrap(),
);
}
// TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
if let Some(site_packages) = site_packages {
paths.push(ModuleResolutionPathBuf::site_packages(site_packages).unwrap());
}
ModuleResolutionSettings {
target_version,
search_paths: OrderedSearchPaths(paths.into_iter().map(Arc::new).collect()),
}
}
}
/// A resolved module resolution order as per the [typing spec]
///
/// [typing spec]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub(crate) struct OrderedSearchPaths(Vec<Arc<ModuleResolutionPathBuf>>);
impl Deref for OrderedSearchPaths {
type Target = [Arc<ModuleResolutionPathBuf>];
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct ModuleResolutionSettings {
search_paths: OrderedSearchPaths,
target_version: TargetVersion,
}
impl ModuleResolutionSettings {
pub(crate) fn search_paths(&self) -> &[Arc<ModuleResolutionPathBuf>] {
&self.search_paths
}
pub(crate) fn target_version(&self) -> TargetVersion {
self.target_version
}
}
// The singleton methods generated by Salsa are all `pub` instead of `pub(crate)`, which triggers
// `unreachable_pub`. Work around this by creating a module and allowing `unreachable_pub` for it.
// Salsa also generates uses of `_db` variables for `interned`, which triggers `clippy::used_underscore_binding`. Suppress that too.
// TODO(micha): Contribute a fix for this upstream where the singleton methods have the same visibility as the struct.
#[allow(unreachable_pub, clippy::used_underscore_binding)]
pub(crate) mod internal {
use crate::module_name::ModuleName;
use crate::resolver::ModuleResolutionSettings;
#[salsa::input(singleton)]
pub(crate) struct ModuleResolverSettings {
#[return_ref]
pub(super) settings: ModuleResolutionSettings,
}
/// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
///
/// This is needed because Salsa requires that all query arguments are salsa ingredients.
#[salsa::interned]
pub(crate) struct ModuleNameIngredient<'db> {
#[return_ref]
pub(super) name: ModuleName,
}
}
fn module_resolver_settings(db: &dyn Db) -> &ModuleResolutionSettings {
ModuleResolverSettings::get(db).settings(db)
}
/// Given a module name and a list of search paths in which to look up modules,
/// attempt to resolve the module name.
fn resolve_name(
db: &dyn Db,
name: &ModuleName,
) -> Option<(Arc<ModuleResolutionPathBuf>, VfsFile, ModuleKind)> {
let resolver_settings = module_resolver_settings(db);
let resolver_state = ResolverState::new(db, resolver_settings.target_version());
for search_path in resolver_settings.search_paths() {
let mut components = name.components();
let module_name = components.next_back()?;
match resolve_package(search_path, components, &resolver_state) {
Ok(resolved_package) => {
let mut package_path = resolved_package.path;
package_path.push(module_name);
// Must be an `__init__.pyi` or `__init__.py`, or it isn't a package.
let kind = if package_path.is_directory(search_path, &resolver_state) {
package_path.push("__init__");
ModuleKind::Package
} else {
ModuleKind::Module
};
// TODO Implement full https://peps.python.org/pep-0561/#type-checker-module-resolution-order resolution
if let Some(stub) = package_path
.with_pyi_extension()
.to_vfs_file(search_path, &resolver_state)
{
return Some((search_path.clone(), stub, kind));
}
if let Some(module) = package_path
.with_py_extension()
.and_then(|path| path.to_vfs_file(search_path, &resolver_state))
{
return Some((search_path.clone(), module, kind));
}
// For regular packages, don't search the next search path. All files of that
// package must be in the same location
if resolved_package.kind.is_regular_package() {
return None;
}
}
Err(parent_kind) => {
if parent_kind.is_regular_package() {
// For regular packages, don't search the next search path.
return None;
}
}
}
}
None
}
fn resolve_package<'a, 'db, I>(
module_search_path: &ModuleResolutionPathBuf,
components: I,
resolver_state: &ResolverState<'db>,
) -> Result<ResolvedPackage, PackageKind>
where
I: Iterator<Item = &'a str>,
{
let mut package_path = module_search_path.clone();
// `true` if inside a folder that is a namespace package (has no `__init__.py`).
// Namespace packages are special because they can be spread across multiple search paths.
// https://peps.python.org/pep-0420/
let mut in_namespace_package = false;
// `true` if resolving a sub-package. For example, `true` when resolving `bar` of `foo.bar`.
let mut in_sub_package = false;
// For `foo.bar.baz`, test that `foo` and `foo/bar` both contain an `__init__.py`.
for folder in components {
package_path.push(folder);
let is_regular_package =
package_path.is_regular_package(module_search_path, resolver_state);
if is_regular_package {
in_namespace_package = false;
} else if package_path.is_directory(module_search_path, resolver_state) {
// A directory without an `__init__.py` is a namespace package; continue with the next folder.
in_namespace_package = true;
} else if in_namespace_package {
// Package not found but it is part of a namespace package.
return Err(PackageKind::Namespace);
} else if in_sub_package {
// A regular sub package wasn't found.
return Err(PackageKind::Regular);
} else {
// We couldn't find `foo` for `foo.bar.baz`, search the next search path.
return Err(PackageKind::Root);
}
in_sub_package = true;
}
let kind = if in_namespace_package {
PackageKind::Namespace
} else if in_sub_package {
PackageKind::Regular
} else {
PackageKind::Root
};
Ok(ResolvedPackage {
kind,
path: package_path,
})
}
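// Illustrative walk-through (not part of this change set). Assume a single search path
// containing `parent/child/__init__.py` but no `parent/__init__.py`, so `parent` is a
// namespace package:
//
// resolve_package(search_path, ["parent", "child"].into_iter(), &state)
// returns Ok(ResolvedPackage { path: .../parent/child, kind: PackageKind::Regular });
//
// resolve_package(search_path, ["parent", "missing"].into_iter(), &state)
// returns Err(PackageKind::Namespace), so `resolve_name` moves on and tries the
// remaining search paths.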
#[derive(Debug)]
struct ResolvedPackage {
path: ModuleResolutionPathBuf,
kind: PackageKind,
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum PackageKind {
/// A root package or module. E.g. `foo` in `foo.bar.baz` or just `foo`.
Root,
/// A regular sub-package where the parent contains an `__init__.py`.
///
/// For example, `bar` in `foo.bar` when the `foo` directory contains an `__init__.py`.
Regular,
/// A sub-package in a namespace package. A namespace package is a package without an `__init__.py`.
///
/// For example, `bar` in `foo.bar` if the `foo` directory contains no `__init__.py`.
Namespace,
}
impl PackageKind {
const fn is_regular_package(self) -> bool {
matches!(self, PackageKind::Regular)
}
}
#[cfg(test)]
mod tests {
use ruff_db::file_system::FileSystemPath;
use ruff_db::vfs::{system_path_to_file, VfsFile, VfsPath};
use crate::db::tests::{create_resolver_builder, TestCase};
use crate::module::ModuleKind;
use crate::module_name::ModuleName;
use super::*;
fn setup_resolver_test() -> TestCase {
create_resolver_builder().unwrap().build()
}
#[test]
fn first_party_module() -> anyhow::Result<()> {
let TestCase { db, src, .. } = setup_resolver_test();
let foo_module_name = ModuleName::new_static("foo").unwrap();
let foo_path = src.join("foo.py");
db.memory_file_system()
.write_file(&foo_path, "print('Hello, world!')")?;
let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
assert_eq!(
Some(&foo_module),
resolve_module(&db, foo_module_name.clone()).as_ref()
);
assert_eq!("foo", foo_module.name());
assert_eq!(&src, &foo_module.search_path());
assert_eq!(ModuleKind::Module, foo_module.kind());
assert_eq!(&foo_path, foo_module.file().path(&db));
assert_eq!(
Some(foo_module),
path_to_module(&db, &VfsPath::FileSystem(foo_path))
);
Ok(())
}
#[test]
fn stdlib() {
let TestCase {
db,
custom_typeshed,
..
} = setup_resolver_test();
let stdlib_dir =
ModuleResolutionPathBuf::stdlib_from_typeshed_root(&custom_typeshed).unwrap();
let functools_module_name = ModuleName::new_static("functools").unwrap();
let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
assert_eq!(
Some(&functools_module),
resolve_module(&db, functools_module_name).as_ref()
);
assert_eq!(stdlib_dir, functools_module.search_path().to_path_buf());
assert_eq!(ModuleKind::Module, functools_module.kind());
let expected_functools_path =
VfsPath::FileSystem(custom_typeshed.join("stdlib/functools.pyi"));
assert_eq!(&expected_functools_path, functools_module.file().path(&db));
assert_eq!(
Some(functools_module),
path_to_module(&db, &expected_functools_path)
);
}
fn create_module_names(raw_names: &[&str]) -> Vec<ModuleName> {
raw_names
.iter()
.map(|raw| ModuleName::new(raw).unwrap())
.collect()
}
#[test]
fn stdlib_resolution_respects_versions_file_py38_existing_modules() {
let TestCase {
db,
custom_typeshed,
..
} = setup_resolver_test();
let existing_modules = create_module_names(&["asyncio", "functools", "xml.etree"]);
for module_name in existing_modules {
let resolved_module = resolve_module(&db, module_name.clone()).unwrap_or_else(|| {
panic!("Expected module {module_name} to exist in the mock stdlib")
});
let search_path = resolved_module.search_path();
assert_eq!(
&custom_typeshed.join("stdlib"),
&search_path,
"Search path for {module_name} was unexpectedly {search_path:?}"
);
assert!(
search_path.is_stdlib_search_path(),
"Expected a stdlib search path, but got {search_path:?}"
);
}
}
#[test]
fn stdlib_resolution_respects_versions_file_py38_nonexisting_modules() {
let TestCase { db, .. } = setup_resolver_test();
let nonexisting_modules = create_module_names(&[
"collections",
"importlib",
"importlib.abc",
"xml",
"asyncio.tasks",
]);
for module_name in nonexisting_modules {
assert!(
resolve_module(&db, module_name.clone()).is_none(),
"Unexpectedly resolved a module for {module_name}"
);
}
}
#[test]
fn stdlib_resolution_respects_versions_file_py39_existing_modules() {
let TestCase {
db,
custom_typeshed,
..
} = create_resolver_builder()
.unwrap()
.with_target_version(TargetVersion::Py39)
.build();
let existing_modules = create_module_names(&[
"asyncio",
"functools",
"importlib.abc",
"collections",
"asyncio.tasks",
]);
for module_name in existing_modules {
let resolved_module = resolve_module(&db, module_name.clone()).unwrap_or_else(|| {
panic!("Expected module {module_name} to exist in the mock stdlib")
});
let search_path = resolved_module.search_path();
assert_eq!(
&custom_typeshed.join("stdlib"),
&search_path,
"Search path for {module_name} was unexpectedly {search_path:?}"
);
assert!(
search_path.is_stdlib_search_path(),
"Expected a stdlib search path, but got {search_path:?}"
);
}
}
#[test]
fn stdlib_resolution_respects_versions_file_py39_nonexisting_modules() {
let TestCase { db, .. } = create_resolver_builder()
.unwrap()
.with_target_version(TargetVersion::Py39)
.build();
let nonexisting_modules = create_module_names(&["importlib", "xml", "xml.etree"]);
for module_name in nonexisting_modules {
assert!(
resolve_module(&db, module_name.clone()).is_none(),
"Unexpectedly resolved a module for {module_name}"
);
}
}
#[test]
fn first_party_precedence_over_stdlib() -> anyhow::Result<()> {
let TestCase { db, src, .. } = setup_resolver_test();
let first_party_functools_path = src.join("functools.py");
db.memory_file_system()
.write_file(&first_party_functools_path, "def update_wrapper(): ...")?;
let functools_module_name = ModuleName::new_static("functools").unwrap();
let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
assert_eq!(
Some(&functools_module),
resolve_module(&db, functools_module_name).as_ref()
);
assert_eq!(&src, &functools_module.search_path());
assert_eq!(ModuleKind::Module, functools_module.kind());
assert_eq!(
&first_party_functools_path,
functools_module.file().path(&db)
);
assert_eq!(
Some(functools_module),
path_to_module(&db, &VfsPath::FileSystem(first_party_functools_path))
);
Ok(())
}
#[test]
fn resolve_package() -> anyhow::Result<()> {
let TestCase { src, db, .. } = setup_resolver_test();
let foo_dir = src.join("foo");
let foo_path = foo_dir.join("__init__.py");
db.memory_file_system()
.write_file(&foo_path, "print('Hello, world!')")?;
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
assert_eq!("foo", foo_module.name());
assert_eq!(&src, &foo_module.search_path());
assert_eq!(&foo_path, foo_module.file().path(&db));
assert_eq!(
Some(&foo_module),
path_to_module(&db, &VfsPath::FileSystem(foo_path)).as_ref()
);
// Resolving by directory doesn't resolve to the init file.
assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_dir)));
Ok(())
}
#[test]
fn package_priority_over_module() -> anyhow::Result<()> {
let TestCase { db, src, .. } = setup_resolver_test();
let foo_dir = src.join("foo");
let foo_init = foo_dir.join("__init__.py");
db.memory_file_system()
.write_file(&foo_init, "print('Hello, world!')")?;
let foo_py = src.join("foo.py");
db.memory_file_system()
.write_file(&foo_py, "print('Hello, world!')")?;
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
assert_eq!(&src, &foo_module.search_path());
assert_eq!(&foo_init, foo_module.file().path(&db));
assert_eq!(ModuleKind::Package, foo_module.kind());
assert_eq!(
Some(foo_module),
path_to_module(&db, &VfsPath::FileSystem(foo_init))
);
assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_py)));
Ok(())
}
#[test]
fn typing_stub_over_module() -> anyhow::Result<()> {
let TestCase { db, src, .. } = setup_resolver_test();
let foo_stub = src.join("foo.pyi");
let foo_py = src.join("foo.py");
db.memory_file_system()
.write_files([(&foo_stub, "x: int"), (&foo_py, "print('Hello, world!')")])?;
let foo = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
assert_eq!(&src, &foo.search_path());
assert_eq!(&foo_stub, foo.file().path(&db));
assert_eq!(
Some(foo),
path_to_module(&db, &VfsPath::FileSystem(foo_stub))
);
assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_py)));
Ok(())
}
#[test]
fn sub_packages() -> anyhow::Result<()> {
let TestCase { db, src, .. } = setup_resolver_test();
let foo = src.join("foo");
let bar = foo.join("bar");
let baz = bar.join("baz.py");
db.memory_file_system().write_files([
(&foo.join("__init__.py"), ""),
(&bar.join("__init__.py"), ""),
(&baz, "print('Hello, world!')"),
])?;
let baz_module =
resolve_module(&db, ModuleName::new_static("foo.bar.baz").unwrap()).unwrap();
assert_eq!(&src, &baz_module.search_path());
assert_eq!(&baz, baz_module.file().path(&db));
assert_eq!(
Some(baz_module),
path_to_module(&db, &VfsPath::FileSystem(baz))
);
Ok(())
}
#[test]
fn namespace_package() -> anyhow::Result<()> {
let TestCase {
db,
src,
site_packages,
..
} = setup_resolver_test();
// From [PEP420](https://peps.python.org/pep-0420/#nested-namespace-packages).
// But uses `src` for `project1` and `site_packages2` for `project2`.
// ```
// src
// parent
// child
// one.py
// site_packages
// parent
// child
// two.py
// ```
let parent1 = src.join("parent");
let child1 = parent1.join("child");
let one = child1.join("one.py");
let parent2 = site_packages.join("parent");
let child2 = parent2.join("child");
let two = child2.join("two.py");
db.memory_file_system().write_files([
(&one, "print('Hello, world!')"),
(&two, "print('Hello, world!')"),
])?;
let one_module =
resolve_module(&db, ModuleName::new_static("parent.child.one").unwrap()).unwrap();
assert_eq!(
Some(one_module),
path_to_module(&db, &VfsPath::FileSystem(one))
);
let two_module =
resolve_module(&db, ModuleName::new_static("parent.child.two").unwrap()).unwrap();
assert_eq!(
Some(two_module),
path_to_module(&db, &VfsPath::FileSystem(two))
);
Ok(())
}
#[test]
fn regular_package_in_namespace_package() -> anyhow::Result<()> {
let TestCase {
db,
src,
site_packages,
..
} = setup_resolver_test();
// Adopted test case from the [PEP420 examples](https://peps.python.org/pep-0420/#nested-namespace-packages).
// The `src/parent/child` package is a regular package. Therefore, `site_packages/parent/child/two.py` should not be resolved.
// ```
// src
// parent
// child
// one.py
// site_packages
// parent
// child
// two.py
// ```
let parent1 = src.join("parent");
let child1 = parent1.join("child");
let one = child1.join("one.py");
let parent2 = site_packages.join("parent");
let child2 = parent2.join("child");
let two = child2.join("two.py");
db.memory_file_system().write_files([
(&child1.join("__init__.py"), "print('Hello, world!')"),
(&one, "print('Hello, world!')"),
(&two, "print('Hello, world!')"),
])?;
let one_module =
resolve_module(&db, ModuleName::new_static("parent.child.one").unwrap()).unwrap();
assert_eq!(
Some(one_module),
path_to_module(&db, &VfsPath::FileSystem(one))
);
assert_eq!(
None,
resolve_module(&db, ModuleName::new_static("parent.child.two").unwrap())
);
Ok(())
}
#[test]
fn module_search_path_priority() -> anyhow::Result<()> {
let TestCase {
db,
src,
site_packages,
..
} = setup_resolver_test();
let foo_src = src.join("foo.py");
let foo_site_packages = site_packages.join("foo.py");
db.memory_file_system()
.write_files([(&foo_src, ""), (&foo_site_packages, "")])?;
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
assert_eq!(&src, &foo_module.search_path());
assert_eq!(&foo_src, foo_module.file().path(&db));
assert_eq!(
Some(foo_module),
path_to_module(&db, &VfsPath::FileSystem(foo_src))
);
assert_eq!(
None,
path_to_module(&db, &VfsPath::FileSystem(foo_site_packages))
);
Ok(())
}
#[test]
#[cfg(target_family = "unix")]
fn symlink() -> anyhow::Result<()> {
let TestCase {
mut db,
src,
site_packages,
custom_typeshed,
} = setup_resolver_test();
db.with_os_file_system();
let temp_dir = tempfile::tempdir()?;
let root = FileSystemPath::from_std_path(temp_dir.path()).unwrap();
let src = root.join(src);
let site_packages = root.join(site_packages);
let custom_typeshed = root.join(custom_typeshed);
let foo = src.join("foo.py");
let bar = src.join("bar.py");
std::fs::create_dir_all(src.as_std_path())?;
std::fs::create_dir_all(site_packages.as_std_path())?;
std::fs::create_dir_all(custom_typeshed.as_std_path())?;
std::fs::write(foo.as_std_path(), "")?;
std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;
let settings = RawModuleResolutionSettings {
target_version: TargetVersion::Py38,
extra_paths: vec![],
workspace_root: src.clone(),
site_packages: Some(site_packages.clone()),
custom_typeshed: Some(custom_typeshed.clone()),
};
set_module_resolution_settings(&mut db, settings);
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();
assert_ne!(foo_module, bar_module);
assert_eq!(&src, &foo_module.search_path());
assert_eq!(&foo, foo_module.file().path(&db));
// `foo` and `bar` shouldn't resolve to the same file
assert_eq!(&src, &bar_module.search_path());
assert_eq!(&bar, bar_module.file().path(&db));
assert_eq!(&foo, foo_module.file().path(&db));
assert_ne!(&foo_module, &bar_module);
assert_eq!(
Some(foo_module),
path_to_module(&db, &VfsPath::FileSystem(foo))
);
assert_eq!(
Some(bar_module),
path_to_module(&db, &VfsPath::FileSystem(bar))
);
Ok(())
}
#[test]
fn deleting_an_unrelated_file_doesnt_change_module_resolution() -> anyhow::Result<()> {
let TestCase { mut db, src, .. } = setup_resolver_test();
let foo_path = src.join("foo.py");
let bar_path = src.join("bar.py");
db.memory_file_system()
.write_files([(&foo_path, "x = 1"), (&bar_path, "y = 2")])?;
let foo_module_name = ModuleName::new_static("foo").unwrap();
let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
let bar = system_path_to_file(&db, &bar_path).expect("bar.py to exist");
db.clear_salsa_events();
// Delete `bar.py`
db.memory_file_system().remove_file(&bar_path)?;
bar.touch(&mut db);
// Re-query the foo module. The foo module should still be cached because `bar.py` isn't relevant
// for resolving `foo`.
let foo_module2 = resolve_module(&db, foo_module_name);
assert!(!db
.take_salsa_events()
.iter()
.any(|event| { matches!(event.kind, salsa::EventKind::WillExecute { .. }) }));
assert_eq!(Some(foo_module), foo_module2);
Ok(())
}
#[test]
fn adding_a_file_on_which_the_module_resolution_depends_on_invalidates_the_query(
) -> anyhow::Result<()> {
let TestCase { mut db, src, .. } = setup_resolver_test();
let foo_path = src.join("foo.py");
let foo_module_name = ModuleName::new_static("foo").unwrap();
assert_eq!(resolve_module(&db, foo_module_name.clone()), None);
// Now write the foo file
db.memory_file_system().write_file(&foo_path, "x = 1")?;
VfsFile::touch_path(&mut db, &VfsPath::FileSystem(foo_path.clone()));
let foo_file = system_path_to_file(&db, &foo_path).expect("foo.py to exist");
let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
assert_eq!(foo_file, foo_module.file());
Ok(())
}
#[test]
fn removing_a_file_that_the_module_resolution_depends_on_invalidates_the_query(
) -> anyhow::Result<()> {
let TestCase { mut db, src, .. } = setup_resolver_test();
let foo_path = src.join("foo.py");
let foo_init_path = src.join("foo/__init__.py");
db.memory_file_system()
.write_files([(&foo_path, "x = 1"), (&foo_init_path, "x = 2")])?;
let foo_module_name = ModuleName::new_static("foo").unwrap();
let foo_module = resolve_module(&db, foo_module_name.clone()).expect("foo module to exist");
assert_eq!(&foo_init_path, foo_module.file().path(&db));
// Delete `foo/__init__.py` and the `foo` folder. `foo` should now resolve to `foo.py`
db.memory_file_system().remove_file(&foo_init_path)?;
db.memory_file_system()
.remove_directory(foo_init_path.parent().unwrap())?;
VfsFile::touch_path(&mut db, &VfsPath::FileSystem(foo_init_path));
let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
assert_eq!(&foo_path, foo_module.file().path(&db));
Ok(())
}
}

View File

@@ -0,0 +1,25 @@
use ruff_db::file_system::FileSystem;
use crate::db::Db;
use crate::supported_py_version::TargetVersion;
use crate::typeshed::LazyTypeshedVersions;
pub(crate) struct ResolverState<'db> {
pub(crate) db: &'db dyn Db,
pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
pub(crate) target_version: TargetVersion,
}
impl<'db> ResolverState<'db> {
pub(crate) fn new(db: &'db dyn Db, target_version: TargetVersion) -> Self {
Self {
db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version,
}
}
pub(crate) fn file_system(&self) -> &dyn FileSystem {
self.db.file_system()
}
}

View File

@@ -0,0 +1,14 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TargetVersion {
Py37,
#[default]
Py38,
Py39,
Py310,
Py311,
Py312,
Py313,
}

View File

@@ -1,24 +1,22 @@
use ruff_db::vendored::VendoredFileSystem;
use std::sync::LazyLock;
mod versions;
// The file path here is hardcoded in this crate's `build.rs` script.
// Luckily this crate will fail to build if this file isn't available at build time.
static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
pub fn file_system() -> &'static VendoredFileSystem {
static VENDORED_TYPESHED_STUBS: LazyLock<VendoredFileSystem> =
LazyLock::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
&VENDORED_TYPESHED_STUBS
}
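// Illustrative usage sketch (not part of this change set): reading a vendored stub
// straight out of the bundled zip via the accessor above. `read_to_string` is assumed
// here based on the `read_to_string("stdlib/VERSIONS")` call elsewhere in this compare.
//
// let functools_stub = file_system()
// .read_to_string("stdlib/functools.pyi")
// .expect("functools stub to be vendored");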
pub(crate) use versions::{
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsQueryResult,
};
pub use versions::{TypeshedVersionsParseError, TypeshedVersionsParseErrorKind};
#[cfg(test)]
mod tests {
use std::io::{self, Read};
use std::path::Path;
use ruff_db::vendored::VendoredPath;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::vfs::VendoredPath;
use super::*;
// The file path here is hardcoded in this crate's `build.rs` script.
// Luckily this crate will fail to build if this file isn't available at build time.
const TYPESHED_ZIP_BYTES: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
#[test]
fn typeshed_zip_created_at_build_time() {
@@ -41,7 +39,7 @@ mod tests {
#[test]
fn typeshed_vfs_consistent_with_vendored_stubs() {
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
let vendored_typeshed_stubs = file_system();
let vendored_typeshed_stubs = VendoredFileSystem::new(TYPESHED_ZIP_BYTES).unwrap();
let mut empty_iterator = true;
for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
@@ -71,7 +69,7 @@ mod tests {
let vendored_path_kind = vendored_typeshed_stubs
.metadata(vendored_path)
.unwrap_or_else(|_| {
.unwrap_or_else(|| {
panic!(
"Expected metadata for {vendored_path:?} to be retrievable from the `VendoredFileSystem!

View File

@@ -1,30 +1,77 @@
use std::cell::OnceCell;
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;
use ruff_db::file_system::FileSystemPath;
use ruff_db::source::source_text;
use ruff_db::vfs::{system_path_to_file, VfsFile};
use rustc_hash::FxHashMap;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, PythonVersion};
use crate::supported_py_version::TargetVersion;
pub(in crate::module_resolver) fn vendored_typeshed_versions(db: &dyn Db) -> TypeshedVersions {
TypeshedVersions::from_str(
&db.vendored()
.read_to_string("stdlib/VERSIONS")
.expect("The vendored typeshed stubs should contain a VERSIONS file"),
)
.expect("The VERSIONS file in the vendored typeshed stubs should be well-formed")
#[derive(Debug)]
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
impl<'db> LazyTypeshedVersions<'db> {
#[must_use]
pub(crate) fn new() -> Self {
Self(OnceCell::new())
}
/// Query whether a module exists at runtime in the stdlib on a certain Python version.
///
/// Simply probing whether a file exists in typeshed is insufficient for this question,
/// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
/// will still be available (either in a custom typeshed dir or in our vendored copy)
/// even if the user specified Python 3.8 as the target version.
///
/// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
/// as to whether the module exists on the specified target version. However, VERSIONS does not
/// provide comprehensive information on all submodules, meaning that this method sometimes
/// returns [`TypeshedVersionsQueryResult::MaybeExists`].
/// See [`TypeshedVersionsQueryResult`] for more details.
#[must_use]
pub(crate) fn query_module(
&self,
module: &ModuleName,
db: &'db dyn Db,
stdlib_root: &FileSystemPath,
target_version: TargetVersion,
) -> TypeshedVersionsQueryResult {
let versions = self.0.get_or_init(|| {
let versions_path = stdlib_root.join("VERSIONS");
let Some(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
todo!(
"Still need to figure out how to handle VERSIONS files being deleted \
from custom typeshed directories! Expected a file to exist at {versions_path}"
)
};
// TODO(Alex/Micha): If VERSIONS is invalid,
// this should invalidate not just the specific module resolution we're currently attempting,
// but all type inference that depends on any standard-library types.
// Unwrapping here is not correct...
parse_typeshed_versions(db, versions_file).as_ref().unwrap()
});
versions.query_module(module, PyVersion::from(target_version))
}
}
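// Illustrative sketch (not part of this change set): how a parsed `TypeshedVersions`
// answers the runtime-existence question for different target versions, written as it
// might appear in a test in this module. The VERSIONS content below is made up for
// illustration; the assertions mirror the behaviour exercised by the existing tests.
#[cfg(test)]
#[test]
fn versions_query_sketch() {
let versions: TypeshedVersions = "asyncio: 3.0-\nasyncio.staggered: 3.8-\n"
.parse()
.unwrap();
let staggered = ModuleName::new_static("asyncio.staggered").unwrap();
// `asyncio.staggered` was added in Python 3.8, so it "does not exist" on 3.7...
assert_eq!(
versions.query_module(&staggered, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
// ...but it exists on 3.8 and later.
assert_eq!(
versions.query_module(&staggered, TargetVersion::Py38.into()),
TypeshedVersionsQueryResult::Exists
);
}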
pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
Program::get(db).search_paths(db).typeshed_versions()
#[salsa::tracked(return_ref)]
pub(crate) fn parse_typeshed_versions(
db: &dyn Db,
versions_file: VfsFile,
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
let file_content = source_text(db.upcast(), versions_file);
file_content.parse()
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct TypeshedVersionsParseError {
pub struct TypeshedVersionsParseError {
line_number: Option<NonZeroU16>,
reason: TypeshedVersionsParseErrorKind,
}
@@ -57,7 +104,7 @@ impl std::error::Error for TypeshedVersionsParseError {
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum TypeshedVersionsParseErrorKind {
pub enum TypeshedVersionsParseErrorKind {
TooManyLines(NonZeroUsize),
UnexpectedNumberOfColons,
InvalidModuleName(String),
@@ -67,6 +114,7 @@ pub(super) enum TypeshedVersionsParseErrorKind {
version: String,
err: std::num::ParseIntError,
},
EmptyVersionsFile,
}
impl fmt::Display for TypeshedVersionsParseErrorKind {
@@ -95,6 +143,7 @@ impl fmt::Display for TypeshedVersionsParseErrorKind {
f,
"Failed to convert '{version}' to a pair of integers due to {err}",
),
Self::EmptyVersionsFile => f.write_str("Versions file was empty!"),
}
}
}
@@ -109,10 +158,10 @@ impl TypeshedVersions {
}
#[must_use]
pub(in crate::module_resolver) fn query_module(
fn query_module(
&self,
module: &ModuleName,
target_version: PythonVersion,
target_version: PyVersion,
) -> TypeshedVersionsQueryResult {
if let Some(range) = self.exact(module) {
if range.contains(target_version) {
@@ -139,7 +188,7 @@ impl TypeshedVersions {
}
}
/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
/// "Does this module exist in the stdlib at runtime on a certain target version?"
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
pub(crate) enum TypeshedVersionsQueryResult {
@@ -241,7 +290,14 @@ impl FromStr for TypeshedVersions {
};
}
Ok(Self(map))
if map.is_empty() {
Err(TypeshedVersionsParseError {
line_number: None,
reason: TypeshedVersionsParseErrorKind::EmptyVersionsFile,
})
} else {
Ok(Self(map))
}
}
}
@@ -257,13 +313,13 @@ impl fmt::Display for TypeshedVersions {
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum PyVersionRange {
AvailableFrom(RangeFrom<PythonVersion>),
AvailableWithin(RangeInclusive<PythonVersion>),
AvailableFrom(RangeFrom<PyVersion>),
AvailableWithin(RangeInclusive<PyVersion>),
}
impl PyVersionRange {
#[must_use]
fn contains(&self, version: PythonVersion) -> bool {
fn contains(&self, version: PyVersion) -> bool {
match self {
Self::AvailableFrom(inner) => inner.contains(&version),
Self::AvailableWithin(inner) => inner.contains(&version),
@@ -277,14 +333,9 @@ impl FromStr for PyVersionRange {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('-').map(str::trim);
match (parts.next(), parts.next(), parts.next()) {
(Some(lower), Some(""), None) => {
let lower = PythonVersion::from_versions_file_string(lower)?;
Ok(Self::AvailableFrom(lower..))
}
(Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
(Some(lower), Some(upper), None) => {
let lower = PythonVersion::from_versions_file_string(lower)?;
let upper = PythonVersion::from_versions_file_string(upper)?;
Ok(Self::AvailableWithin(lower..=upper))
Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
}
_ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
}
@@ -302,20 +353,74 @@ impl fmt::Display for PyVersionRange {
}
}
impl PythonVersion {
fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
struct PyVersion {
major: u8,
minor: u8,
}
impl FromStr for PyVersion {
type Err = TypeshedVersionsParseErrorKind;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('.').map(str::trim);
let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
s.to_string(),
));
};
PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
TypeshedVersionsParseErrorKind::IntegerParsingFailure {
version: s.to_string(),
err: int_parse_error,
let major = match u8::from_str(major) {
Ok(major) => major,
Err(err) => {
return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
version: s.to_string(),
err,
})
}
})
};
let minor = match u8::from_str(minor) {
Ok(minor) => minor,
Err(err) => {
return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
version: s.to_string(),
err,
})
}
};
Ok(Self { major, minor })
}
}
impl fmt::Display for PyVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let PyVersion { major, minor } = self;
write!(f, "{major}.{minor}")
}
}
impl From<TargetVersion> for PyVersion {
fn from(value: TargetVersion) -> Self {
match value {
TargetVersion::Py37 => PyVersion { major: 3, minor: 7 },
TargetVersion::Py38 => PyVersion { major: 3, minor: 8 },
TargetVersion::Py39 => PyVersion { major: 3, minor: 9 },
TargetVersion::Py310 => PyVersion {
major: 3,
minor: 10,
},
TargetVersion::Py311 => PyVersion {
major: 3,
minor: 11,
},
TargetVersion::Py312 => PyVersion {
major: 3,
minor: 12,
},
TargetVersion::Py313 => PyVersion {
major: 3,
minor: 13,
},
}
}
}
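// Illustrative sketch (not part of this change set): how a version string from a
// VERSIONS line maps onto `PyVersion`, written as it might appear in a test in this
// module. The inputs are made up for illustration.
#[cfg(test)]
#[test]
fn py_version_parsing_sketch() {
let version: PyVersion = "3.10".parse().unwrap();
assert_eq!(version, PyVersion { major: 3, minor: 10 });
assert_eq!(version.to_string(), "3.10");
// A string without exactly one period is rejected.
assert!(matches!(
"3".parse::<PyVersion>(),
Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(_))
));
}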
@@ -324,12 +429,10 @@ mod tests {
use std::num::{IntErrorKind, NonZeroU16};
use std::path::Path;
use insta::assert_snapshot;
use crate::db::tests::TestDb;
use super::*;
use insta::assert_snapshot;
const TYPESHED_STDLIB_DIR: &str = "stdlib";
#[allow(unsafe_code)]
@@ -349,9 +452,12 @@ mod tests {
#[test]
fn can_parse_vendored_versions_file() {
let db = TestDb::new();
let versions_data = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/vendor/typeshed/stdlib/VERSIONS"
));
let versions = vendored_typeshed_versions(&db);
let versions = TypeshedVersions::from_str(versions_data).unwrap();
assert!(versions.len() > 100);
assert!(versions.len() < 1000);
@@ -361,37 +467,36 @@ mod tests {
assert!(versions.contains_exact(&asyncio));
assert_eq!(
versions.query_module(&asyncio, PythonVersion::PY310),
versions.query_module(&asyncio, TargetVersion::Py310.into()),
TypeshedVersionsQueryResult::Exists
);
assert!(versions.contains_exact(&asyncio_staggered));
assert_eq!(
versions.query_module(&asyncio_staggered, PythonVersion::PY38),
versions.query_module(&asyncio_staggered, TargetVersion::Py38.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
versions.query_module(&asyncio_staggered, PythonVersion::PY37),
versions.query_module(&asyncio_staggered, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
assert!(versions.contains_exact(&audioop));
assert_eq!(
versions.query_module(&audioop, PythonVersion::PY312),
versions.query_module(&audioop, TargetVersion::Py312.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
versions.query_module(&audioop, PythonVersion::PY313),
versions.query_module(&audioop, TargetVersion::Py313.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
#[test]
fn typeshed_versions_consistent_with_vendored_stubs() {
let db = TestDb::new();
let vendored_typeshed_versions = vendored_typeshed_versions(&db);
let vendored_typeshed_dir =
Path::new(env!("CARGO_MANIFEST_DIR")).join("../red_knot_vendored/vendor/typeshed");
const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
let mut empty_iterator = true;
@@ -459,11 +564,11 @@ foo: 3.8- # trailing comment
";
let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
assert_eq!(parsed_versions.len(), 3);
assert_snapshot!(parsed_versions.to_string(), @r"
assert_snapshot!(parsed_versions.to_string(), @r###"
bar: 2.7-3.10
bar.baz: 3.1-3.9
foo: 3.8-
"
"###
);
}
@@ -474,15 +579,15 @@ foo: 3.8- # trailing comment
assert!(parsed_versions.contains_exact(&bar));
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY37),
parsed_versions.query_module(&bar, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY310),
parsed_versions.query_module(&bar, TargetVersion::Py310.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar, PythonVersion::PY311),
parsed_versions.query_module(&bar, TargetVersion::Py311.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
@@ -494,15 +599,15 @@ foo: 3.8- # trailing comment
assert!(parsed_versions.contains_exact(&foo));
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY37),
parsed_versions.query_module(&foo, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY38),
parsed_versions.query_module(&foo, TargetVersion::Py38.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&foo, PythonVersion::PY311),
parsed_versions.query_module(&foo, TargetVersion::Py311.into()),
TypeshedVersionsQueryResult::Exists
);
}
@@ -514,15 +619,15 @@ foo: 3.8- # trailing comment
assert!(parsed_versions.contains_exact(&bar_baz));
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
parsed_versions.query_module(&bar_baz, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
parsed_versions.query_module(&bar_baz, TargetVersion::Py39.into()),
TypeshedVersionsQueryResult::Exists
);
assert_eq!(
parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
parsed_versions.query_module(&bar_baz, TargetVersion::Py310.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
@@ -534,15 +639,15 @@ foo: 3.8- # trailing comment
assert!(!parsed_versions.contains_exact(&bar_eggs));
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
parsed_versions.query_module(&bar_eggs, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::MaybeExists
);
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
parsed_versions.query_module(&bar_eggs, TargetVersion::Py310.into()),
TypeshedVersionsQueryResult::MaybeExists
);
assert_eq!(
parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
parsed_versions.query_module(&bar_eggs, TargetVersion::Py311.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
@@ -554,15 +659,40 @@ foo: 3.8- # trailing comment
assert!(!parsed_versions.contains_exact(&spam));
assert_eq!(
parsed_versions.query_module(&spam, PythonVersion::PY37),
parsed_versions.query_module(&spam, TargetVersion::Py37.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
assert_eq!(
parsed_versions.query_module(&spam, PythonVersion::PY313),
parsed_versions.query_module(&spam, TargetVersion::Py313.into()),
TypeshedVersionsQueryResult::DoesNotExist
);
}
#[test]
fn invalid_empty_versions_file() {
assert_eq!(
TypeshedVersions::from_str(""),
Err(TypeshedVersionsParseError {
line_number: None,
reason: TypeshedVersionsParseErrorKind::EmptyVersionsFile
})
);
assert_eq!(
TypeshedVersions::from_str(" "),
Err(TypeshedVersionsParseError {
line_number: None,
reason: TypeshedVersionsParseErrorKind::EmptyVersionsFile
})
);
assert_eq!(
TypeshedVersions::from_str(" \n \n \n "),
Err(TypeshedVersionsParseError {
line_number: None,
reason: TypeshedVersionsParseErrorKind::EmptyVersionsFile
})
);
}
#[test]
fn invalid_huge_versions_file() {
let offset = 100;

View File

@@ -21,7 +21,7 @@ the project the stubs are for, but instead report them here to typeshed.**
Further documentation on stub files, typeshed, and Python's typing system in
general, can also be found at https://typing.readthedocs.io/en/latest/.
Typeshed supports Python versions 3.8 to 3.13.
Typeshed supports Python versions 3.8 and up.
## Using

View File

@@ -0,0 +1 @@
dcab6e88883c629ede9637fb011958f8b4918f52

View File

@@ -20,51 +20,35 @@
__future__: 3.0-
__main__: 3.0-
_ast: 3.0-
_asyncio: 3.0-
_bisect: 3.0-
_blake2: 3.6-
_bootlocale: 3.4-3.9
_bz2: 3.3-
_codecs: 3.0-
_collections_abc: 3.3-
_compat_pickle: 3.1-
_compression: 3.5-
_contextvars: 3.7-
_csv: 3.0-
_ctypes: 3.0-
_curses: 3.0-
_dbm: 3.0-
_decimal: 3.3-
_dummy_thread: 3.0-3.8
_dummy_threading: 3.0-3.8
_frozen_importlib: 3.0-
_frozen_importlib_external: 3.5-
_gdbm: 3.0-
_heapq: 3.0-
_imp: 3.0-
_interpchannels: 3.13-
_interpqueues: 3.13-
_interpreters: 3.13-
_io: 3.0-
_json: 3.0-
_locale: 3.0-
_lsprof: 3.0-
_lzma: 3.3-
_markupbase: 3.0-
_msi: 3.0-3.12
_msi: 3.0-
_operator: 3.4-
_osx_support: 3.0-
_posixsubprocess: 3.2-
_py_abc: 3.7-
_pydecimal: 3.5-
_queue: 3.7-
_random: 3.0-
_sitebuiltins: 3.4-
_socket: 3.0- # present in 3.0 at runtime, but not in typeshed
_sqlite3: 3.0-
_ssl: 3.0-
_stat: 3.4-
_struct: 3.0-
_thread: 3.0-
_threading_local: 3.0-
_tkinter: 3.0-
@@ -128,7 +112,6 @@ curses: 3.0-
dataclasses: 3.7-
datetime: 3.0-
dbm: 3.0-
dbm.sqlite3: 3.13-
decimal: 3.0-
difflib: 3.0-
dis: 3.0-
@@ -170,15 +153,10 @@ imghdr: 3.0-3.12
imp: 3.0-3.11
importlib: 3.0-
importlib._abc: 3.10-
importlib._bootstrap: 3.0-
importlib._bootstrap_external: 3.5-
importlib.metadata: 3.8-
importlib.metadata._meta: 3.10-
importlib.metadata.diagnose: 3.13-
importlib.readers: 3.10-
importlib.resources: 3.7-
importlib.resources._common: 3.11-
importlib.resources._functional: 3.13-
importlib.resources.abc: 3.11-
importlib.resources.readers: 3.11-
importlib.resources.simple: 3.11-

View File

@@ -1,12 +1,13 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import ( # noqa: Y022,Y038
from typing import ( # noqa: Y022,Y038,Y057
AbstractSet as Set,
AsyncGenerator as AsyncGenerator,
AsyncIterable as AsyncIterable,
AsyncIterator as AsyncIterator,
Awaitable as Awaitable,
ByteString as ByteString,
Callable as Callable,
Collection as Collection,
Container as Container,
@@ -58,12 +59,8 @@ __all__ = [
"ValuesView",
"Sequence",
"MutableSequence",
"ByteString",
]
if sys.version_info < (3, 14):
from typing import ByteString as ByteString # noqa: Y057
__all__ += ["ByteString"]
if sys.version_info >= (3, 12):
__all__ += ["Buffer"]
@@ -73,8 +70,6 @@ _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers.
@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented
def __eq__(self, value: object, /) -> bool: ...
if sys.version_info >= (3, 13):
def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
@@ -88,8 +83,6 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented
@final
class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented
def __eq__(self, value: object, /) -> bool: ...
if sys.version_info >= (3, 13):
def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

View File

@@ -1,19 +1,18 @@
import csv
import sys
from _typeshed import SupportsWrite
from collections.abc import Iterable, Iterator
from typing import Any, Final
from typing import Any, Final, Literal
from typing_extensions import TypeAlias
__version__: Final[str]
QUOTE_ALL: Final = 1
QUOTE_MINIMAL: Final = 0
QUOTE_NONE: Final = 3
QUOTE_NONNUMERIC: Final = 2
QUOTE_ALL: Literal[1]
QUOTE_MINIMAL: Literal[0]
QUOTE_NONE: Literal[3]
QUOTE_NONNUMERIC: Literal[2]
if sys.version_info >= (3, 12):
QUOTE_STRINGS: Final = 4
QUOTE_NOTNULL: Final = 5
QUOTE_STRINGS: Literal[4]
QUOTE_NOTNULL: Literal[5]
# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
# However, using literals in situations like these can cause false-positives (see #7258)
@@ -21,8 +20,6 @@ _QuotingType: TypeAlias = int
class Error(Exception): ...
_DialectLike: TypeAlias = str | Dialect | csv.Dialect | type[Dialect | csv.Dialect]
class Dialect:
delimiter: str
quotechar: str | None
@@ -32,18 +29,9 @@ class Dialect:
lineterminator: str
quoting: _QuotingType
strict: bool
def __init__(
self,
dialect: _DialectLike | None = ...,
delimiter: str = ",",
doublequote: bool = True,
escapechar: str | None = None,
lineterminator: str = "\r\n",
quotechar: str | None = '"',
quoting: _QuotingType = 0,
skipinitialspace: bool = False,
strict: bool = False,
) -> None: ...
def __init__(self) -> None: ...
_DialectLike: TypeAlias = str | Dialect | type[Dialect]
class _reader(Iterator[list[str]]):
@property

View File

@@ -2,7 +2,7 @@ import sys
from _typeshed import ReadableBuffer, WriteableBuffer
from abc import abstractmethod
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
from ctypes import CDLL, ArgumentError as ArgumentError, c_void_p
from ctypes import CDLL, ArgumentError as ArgumentError
from typing import Any, ClassVar, Generic, TypeVar, overload
from typing_extensions import Self, TypeAlias
@@ -51,8 +51,8 @@ class _CDataMeta(type):
# By default mypy complains about the following two methods, because strictly speaking cls
# might not be a Type[_CT]. However this can never actually happen, because the only class that
# uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
class _CData(metaclass=_CDataMeta):
_b_base_: int
@@ -64,6 +64,7 @@ class _CData(metaclass=_CDataMeta):
# Structure.from_buffer(...) # valid at runtime
# Structure(...).from_buffer(...) # invalid at runtime
#
@classmethod
def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ...
@classmethod
@@ -71,7 +72,7 @@ class _CData(metaclass=_CDataMeta):
@classmethod
def from_address(cls, address: int) -> Self: ...
@classmethod
def from_param(cls, value: Any, /) -> Self | _CArgObject: ...
def from_param(cls, obj: Any) -> Self | _CArgObject: ...
@classmethod
def in_dll(cls, library: CDLL, name: str) -> Self: ...
def __buffer__(self, flags: int, /) -> memoryview: ...
@@ -99,11 +100,8 @@ class _Pointer(_PointerLike, _CData, Generic[_CT]):
def __getitem__(self, key: slice, /) -> list[Any]: ...
def __setitem__(self, key: int, value: Any, /) -> None: ...
@overload
def POINTER(type: None, /) -> type[c_void_p]: ...
@overload
def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ...
def pointer(obj: _CT, /) -> _Pointer[_CT]: ...
def POINTER(type: type[_CT]) -> type[_Pointer[_CT]]: ...
def pointer(arg: _CT, /) -> _Pointer[_CT]: ...
class _CArgObject: ...
@@ -205,9 +203,9 @@ class Array(_CData, Generic[_CT]):
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
def addressof(obj: _CData, /) -> int: ...
def alignment(obj_or_type: _CData | type[_CData], /) -> int: ...
def addressof(obj: _CData) -> int: ...
def alignment(obj_or_type: _CData | type[_CData]) -> int: ...
def get_errno() -> int: ...
def resize(obj: _CData, size: int, /) -> None: ...
def set_errno(value: int, /) -> int: ...
def sizeof(obj_or_type: _CData | type[_CData], /) -> int: ...
def resize(obj: _CData, size: int) -> None: ...
def set_errno(value: int) -> int: ...
def sizeof(obj_or_type: _CData | type[_CData]) -> int: ...

View File

@@ -298,7 +298,7 @@ if sys.version_info >= (3, 9):
def getmouse() -> tuple[int, int, int, int, int]: ...
def getsyx() -> tuple[int, int]: ...
def getwin(file: SupportsRead[bytes], /) -> window: ...
def getwin(file: SupportsRead[bytes], /) -> _CursesWindow: ...
def halfdelay(tenths: int, /) -> None: ...
def has_colors() -> bool: ...
@@ -310,7 +310,7 @@ def has_il() -> bool: ...
def has_key(key: int, /) -> bool: ...
def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ...
def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ...
def initscr() -> window: ...
def initscr() -> _CursesWindow: ...
def intrflush(flag: bool, /) -> None: ...
def is_term_resized(nlines: int, ncols: int, /) -> bool: ...
def isendwin() -> bool: ...
@@ -321,8 +321,8 @@ def meta(yes: bool, /) -> None: ...
def mouseinterval(interval: int, /) -> None: ...
def mousemask(newmask: int, /) -> tuple[int, int]: ...
def napms(ms: int, /) -> int: ...
def newpad(nlines: int, ncols: int, /) -> window: ...
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> window: ...
def newpad(nlines: int, ncols: int, /) -> _CursesWindow: ...
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> _CursesWindow: ...
def nl(flag: bool = True, /) -> None: ...
def nocbreak() -> None: ...
def noecho() -> None: ...
@@ -368,7 +368,11 @@ def tparm(
) -> bytes: ...
def typeahead(fd: int, /) -> None: ...
def unctrl(ch: _ChType, /) -> bytes: ...
def unget_wch(ch: int | str, /) -> None: ...
if sys.version_info < (3, 12) or sys.platform != "darwin":
# The support for macos was dropped in 3.12
def unget_wch(ch: int | str, /) -> None: ...
def ungetch(ch: _ChType, /) -> None: ...
def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ...
def update_lines_cols() -> None: ...
@@ -378,7 +382,7 @@ def use_env(flag: bool, /) -> None: ...
class error(Exception): ...
@final
class window: # undocumented
class _CursesWindow:
encoding: str
@overload
def addch(self, ch: _ChType, attr: int = ...) -> None: ...
@@ -431,9 +435,9 @@ class window: # undocumented
def delch(self, y: int, x: int) -> None: ...
def deleteln(self) -> None: ...
@overload
def derwin(self, begin_y: int, begin_x: int) -> window: ...
def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
@overload
def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
def echochar(self, ch: _ChType, attr: int = ..., /) -> None: ...
def enclose(self, y: int, x: int, /) -> bool: ...
def erase(self) -> None: ...
@@ -443,10 +447,13 @@ class window: # undocumented
def getch(self) -> int: ...
@overload
def getch(self, y: int, x: int) -> int: ...
@overload
def get_wch(self) -> int | str: ...
@overload
def get_wch(self, y: int, x: int) -> int | str: ...
if sys.version_info < (3, 12) or sys.platform != "darwin":
# The support for macos was dropped in 3.12
@overload
def get_wch(self) -> int | str: ...
@overload
def get_wch(self, y: int, x: int) -> int | str: ...
@overload
def getkey(self) -> str: ...
@overload
@@ -493,7 +500,7 @@ class window: # undocumented
def instr(self, y: int, x: int, n: int = ...) -> bytes: ...
def is_linetouched(self, line: int, /) -> bool: ...
def is_wintouched(self) -> bool: ...
def keypad(self, yes: bool, /) -> None: ...
def keypad(self, yes: bool) -> None: ...
def leaveok(self, yes: bool) -> None: ...
def move(self, new_y: int, new_x: int) -> None: ...
def mvderwin(self, y: int, x: int) -> None: ...
@@ -505,16 +512,16 @@ class window: # undocumented
@overload
def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ...
@overload
def overlay(self, destwin: window) -> None: ...
def overlay(self, destwin: _CursesWindow) -> None: ...
@overload
def overlay(
self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
) -> None: ...
@overload
def overwrite(self, destwin: window) -> None: ...
def overwrite(self, destwin: _CursesWindow) -> None: ...
@overload
def overwrite(
self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
) -> None: ...
def putwin(self, file: IO[Any], /) -> None: ...
def redrawln(self, beg: int, num: int, /) -> None: ...
@@ -530,13 +537,13 @@ class window: # undocumented
def standend(self) -> None: ...
def standout(self) -> None: ...
@overload
def subpad(self, begin_y: int, begin_x: int) -> window: ...
def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
@overload
def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
@overload
def subwin(self, begin_y: int, begin_x: int) -> window: ...
def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
@overload
def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
def syncdown(self) -> None: ...
def syncok(self, flag: bool) -> None: ...
def syncup(self) -> None: ...
@@ -555,3 +562,4 @@ class _ncurses_version(NamedTuple):
patch: int
ncurses_version: _ncurses_version
window = _CursesWindow # undocumented

View File

@@ -1,45 +1,37 @@
import numbers
from _decimal import (
HAVE_CONTEXTVAR as HAVE_CONTEXTVAR,
HAVE_THREADS as HAVE_THREADS,
MAX_EMAX as MAX_EMAX,
MAX_PREC as MAX_PREC,
MIN_EMIN as MIN_EMIN,
MIN_ETINY as MIN_ETINY,
ROUND_05UP as ROUND_05UP,
ROUND_CEILING as ROUND_CEILING,
ROUND_DOWN as ROUND_DOWN,
ROUND_FLOOR as ROUND_FLOOR,
ROUND_HALF_DOWN as ROUND_HALF_DOWN,
ROUND_HALF_EVEN as ROUND_HALF_EVEN,
ROUND_HALF_UP as ROUND_HALF_UP,
ROUND_UP as ROUND_UP,
BasicContext as BasicContext,
DefaultContext as DefaultContext,
ExtendedContext as ExtendedContext,
__libmpdec_version__ as __libmpdec_version__,
__version__ as __version__,
getcontext as getcontext,
localcontext as localcontext,
setcontext as setcontext,
)
import sys
from collections.abc import Container, Sequence
from typing import Any, ClassVar, Literal, NamedTuple, overload
from types import TracebackType
from typing import Any, ClassVar, Final, Literal, NamedTuple, overload
from typing_extensions import Self, TypeAlias
_Decimal: TypeAlias = Decimal | int
_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int]
_ComparableNum: TypeAlias = Decimal | float | numbers.Rational
_TrapType: TypeAlias = type[DecimalException]
# At runtime, these classes are implemented in C as part of "_decimal".
# However, they consider themselves to live in "decimal", so we'll put them here.
__version__: Final[str]
__libmpdec_version__: Final[str]
class DecimalTuple(NamedTuple):
sign: int
digits: tuple[int, ...]
exponent: int | Literal["n", "N", "F"]
ROUND_DOWN: str
ROUND_HALF_UP: str
ROUND_HALF_EVEN: str
ROUND_CEILING: str
ROUND_FLOOR: str
ROUND_UP: str
ROUND_HALF_DOWN: str
ROUND_05UP: str
HAVE_CONTEXTVAR: bool
HAVE_THREADS: bool
MAX_EMAX: int
MAX_PREC: int
MIN_EMIN: int
MIN_ETINY: int
class DecimalException(ArithmeticError): ...
class Clamped(DecimalException): ...
class InvalidOperation(DecimalException): ...
@@ -55,6 +47,26 @@ class Overflow(Inexact, Rounded): ...
class Underflow(Inexact, Rounded, Subnormal): ...
class FloatOperation(DecimalException, TypeError): ...
def setcontext(context: Context, /) -> None: ...
def getcontext() -> Context: ...
if sys.version_info >= (3, 11):
def localcontext(
ctx: Context | None = None,
*,
prec: int | None = ...,
rounding: str | None = ...,
Emin: int | None = ...,
Emax: int | None = ...,
capitals: int | None = ...,
clamp: int | None = ...,
traps: dict[_TrapType, bool] | None = ...,
flags: dict[_TrapType, bool] | None = ...,
) -> _ContextManager: ...
else:
def localcontext(ctx: Context | None = None) -> _ContextManager: ...
class Decimal:
def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ...
@classmethod
@@ -156,6 +168,15 @@ class Decimal:
def __deepcopy__(self, memo: Any, /) -> Self: ...
def __format__(self, specifier: str, context: Context | None = ..., /) -> str: ...
class _ContextManager:
new_context: Context
saved_context: Context
def __init__(self, new_context: Context) -> None: ...
def __enter__(self) -> Context: ...
def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...
_TrapType: TypeAlias = type[DecimalException]
class Context:
# TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime,
# even settable attributes like `prec` and `rounding`,
@@ -254,3 +275,7 @@ class Context:
def to_integral_exact(self, x: _Decimal, /) -> Decimal: ...
def to_integral_value(self, x: _Decimal, /) -> Decimal: ...
def to_integral(self, x: _Decimal, /) -> Decimal: ...
DefaultContext: Context
BasicContext: Context
ExtendedContext: Context

View File

@@ -1,5 +1,5 @@
from _typeshed import structseq
from typing import Any, Final, Literal, SupportsIndex, final
from typing import Final, Literal, SupportsIndex, final
from typing_extensions import Buffer, Self
class ChannelError(RuntimeError): ...
@@ -72,15 +72,13 @@ class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, in
@property
def send_released(self) -> bool: ...
def create(unboundop: Literal[1, 2, 3]) -> ChannelID: ...
def create() -> ChannelID: ...
def destroy(cid: SupportsIndex) -> None: ...
def list_all() -> list[ChannelID]: ...
def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ...
def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: ...
def recv(cid: SupportsIndex, default: object = ...) -> object: ...
def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ...
def get_count(cid: SupportsIndex) -> int: ...
def get_info(cid: SupportsIndex) -> ChannelInfo: ...
def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: ...
def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ...

View File

@@ -0,0 +1,100 @@
import sys
from _typeshed import StrPath
from collections.abc import Mapping
LC_CTYPE: int
LC_COLLATE: int
LC_TIME: int
LC_MONETARY: int
LC_NUMERIC: int
LC_ALL: int
CHAR_MAX: int
def setlocale(category: int, locale: str | None = None, /) -> str: ...
def localeconv() -> Mapping[str, int | str | list[int]]: ...
if sys.version_info >= (3, 11):
def getencoding() -> str: ...
def strcoll(os1: str, os2: str, /) -> int: ...
def strxfrm(string: str, /) -> str: ...
# native gettext functions
# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
if sys.platform != "win32":
LC_MESSAGES: int
ABDAY_1: int
ABDAY_2: int
ABDAY_3: int
ABDAY_4: int
ABDAY_5: int
ABDAY_6: int
ABDAY_7: int
ABMON_1: int
ABMON_2: int
ABMON_3: int
ABMON_4: int
ABMON_5: int
ABMON_6: int
ABMON_7: int
ABMON_8: int
ABMON_9: int
ABMON_10: int
ABMON_11: int
ABMON_12: int
DAY_1: int
DAY_2: int
DAY_3: int
DAY_4: int
DAY_5: int
DAY_6: int
DAY_7: int
ERA: int
ERA_D_T_FMT: int
ERA_D_FMT: int
ERA_T_FMT: int
MON_1: int
MON_2: int
MON_3: int
MON_4: int
MON_5: int
MON_6: int
MON_7: int
MON_8: int
MON_9: int
MON_10: int
MON_11: int
MON_12: int
CODESET: int
D_T_FMT: int
D_FMT: int
T_FMT: int
T_FMT_AMPM: int
AM_STR: int
PM_STR: int
RADIXCHAR: int
THOUSEP: int
YESEXPR: int
NOEXPR: int
CRNCYSTR: int
ALT_DIGITS: int
def nl_langinfo(key: int, /) -> str: ...
# This is dependent on `libintl.h` which is a part of `gettext`
# system dependency. These functions might be missing.
# But, we always say that they are present.
def gettext(msg: str, /) -> str: ...
def dgettext(domain: str | None, msg: str, /) -> str: ...
def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ...
def textdomain(domain: str | None, /) -> str: ...
def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ...
def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ...
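
As background for the gettext-related bindings stubbed above, a small illustrative sketch of how they are usually reached through the public `locale` module (the "myapp" domain and catalog path are hypothetical):

import locale

locale.setlocale(locale.LC_ALL, "")  # adopt the user's locale settings
conv = locale.localeconv()           # numeric/monetary formatting info
print(conv["decimal_point"])

# The stub declares the gettext wrappers unconditionally, but as the comment
# above notes they may be missing when libintl is unavailable.
if hasattr(locale, "gettext"):
    locale.bindtextdomain("myapp", "/usr/share/locale")  # hypothetical domain/path
    print(locale.gettext("hello"))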

View File

@@ -1,16 +1,18 @@
import sys
from _typeshed import SupportsGetItem
from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence
from operator import attrgetter as attrgetter, itemgetter as itemgetter, methodcaller as methodcaller
from typing import Any, AnyStr, Protocol, SupportsAbs, SupportsIndex, TypeVar, overload
from typing_extensions import ParamSpec, TypeAlias, TypeIs
from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, SupportsIndex, TypeVar, final, overload
from typing_extensions import ParamSpec, TypeAlias, TypeVarTuple, Unpack
_R = TypeVar("_R")
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_K = TypeVar("_K")
_V = TypeVar("_V")
_P = ParamSpec("_P")
_Ts = TypeVarTuple("_Ts")
# The following protocols return "Any" instead of bool, since the comparison
# operators can be overloaded to return an arbitrary object. For example,
@@ -90,6 +92,40 @@ def setitem(a: MutableSequence[_T], b: slice, c: Sequence[_T], /) -> None: ...
@overload
def setitem(a: MutableMapping[_K, _V], b: _K, c: _V, /) -> None: ...
def length_hint(obj: object, default: int = 0, /) -> int: ...
@final
class attrgetter(Generic[_T_co]):
@overload
def __new__(cls, attr: str, /) -> attrgetter[Any]: ...
@overload
def __new__(cls, attr: str, attr2: str, /) -> attrgetter[tuple[Any, Any]]: ...
@overload
def __new__(cls, attr: str, attr2: str, attr3: str, /) -> attrgetter[tuple[Any, Any, Any]]: ...
@overload
def __new__(cls, attr: str, attr2: str, attr3: str, attr4: str, /) -> attrgetter[tuple[Any, Any, Any, Any]]: ...
@overload
def __new__(cls, attr: str, /, *attrs: str) -> attrgetter[tuple[Any, ...]]: ...
def __call__(self, obj: Any, /) -> _T_co: ...
@final
class itemgetter(Generic[_T_co]):
@overload
def __new__(cls, item: _T, /) -> itemgetter[_T]: ...
@overload
def __new__(cls, item1: _T1, item2: _T2, /, *items: Unpack[_Ts]) -> itemgetter[tuple[_T1, _T2, Unpack[_Ts]]]: ...
# __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie:
# TypeVar "_KT_contra@SupportsGetItem" is contravariant
# "tuple[int, int]" is incompatible with protocol "SupportsIndex"
# preventing [_T_co, ...] instead of [Any, ...]
#
# A suspected mypy issue prevents using [..., _T] instead of [..., Any] here.
# https://github.com/python/mypy/issues/14032
def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ...
@final
class methodcaller:
def __init__(self, name: str, /, *args: Any, **kwargs: Any) -> None: ...
def __call__(self, obj: Any) -> Any: ...
def iadd(a: Any, b: Any, /) -> Any: ...
def iand(a: Any, b: Any, /) -> Any: ...
def iconcat(a: Any, b: Any, /) -> Any: ...
@@ -109,7 +145,3 @@ if sys.version_info >= (3, 11):
def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ...
if sys.version_info >= (3, 14):
def is_none(a: object, /) -> TypeIs[None]: ...
def is_not_none(a: _T | None, /) -> TypeIs[_T]: ...
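
Because the overloads and variance comments above are easier to follow with the callables in action, here is a short usage sketch (plain standard library, unrelated to the signature changes in this diff):

from operator import attrgetter, itemgetter, methodcaller

rows = [{"name": "b", "size": 2}, {"name": "a", "size": 1}]
# itemgetter returns a single value for one key and a tuple for several keys,
# which is what the overloads above model.
by_name = sorted(rows, key=itemgetter("name"))
pair = itemgetter("name", "size")(rows[0])  # ("b", 2)

# methodcaller("strip") calls .strip() on whatever it is given.
stripped = [methodcaller("strip")(s) for s in ["  spam ", "eggs"]]

get_imag = attrgetter("imag")
print(get_imag(3 + 4j))  # 4.0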

View File

@@ -1,5 +1,5 @@
from collections.abc import Iterable, Sequence
from typing import Final, TypeVar
from typing import TypeVar
_T = TypeVar("_T")
_K = TypeVar("_K")
@@ -7,15 +7,15 @@ _V = TypeVar("_V")
__all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"]
_UNIVERSAL_CONFIG_VARS: Final[tuple[str, ...]] # undocumented
_COMPILER_CONFIG_VARS: Final[tuple[str, ...]] # undocumented
_INITPRE: Final[str] # undocumented
_UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented
_COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented
_INITPRE: str # undocumented
def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented
def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented
def _find_build_tool(toolname: str) -> str: ... # undocumented
_SYSTEM_VERSION: Final[str | None] # undocumented
_SYSTEM_VERSION: str | None # undocumented
def _get_system_version() -> str: ... # undocumented
def _remove_original_values(_config_vars: dict[str, str]) -> None: ... # undocumented

View File

@@ -1,7 +1,6 @@
import sys
from _typeshed import ReadableBuffer, WriteableBuffer
from collections.abc import Iterable
from socket import error as error, gaierror as gaierror, herror as herror, timeout as timeout
from typing import Any, SupportsIndex, overload
from typing_extensions import TypeAlias
@@ -667,6 +666,18 @@ if sys.platform != "win32":
if sys.platform != "win32" and sys.platform != "darwin":
IPX_TYPE: int
# ===== Exceptions =====
error = OSError
class herror(error): ...
class gaierror(error): ...
if sys.version_info >= (3, 10):
timeout = TimeoutError
else:
class timeout(error): ...
# ===== Classes =====
class socket:
@@ -676,9 +687,8 @@ class socket:
def type(self) -> int: ...
@property
def proto(self) -> int: ...
# F811: "Redefinition of unused `timeout`"
@property
def timeout(self) -> float | None: ... # noqa: F811
def timeout(self) -> float | None: ...
if sys.platform == "win32":
def __init__(
self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | bytes | None = ...
@@ -778,9 +788,7 @@ def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: ...
def inet_pton(address_family: int, ip_string: str, /) -> bytes: ...
def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: ...
def getdefaulttimeout() -> float | None: ...
# F811: "Redefinition of unused `timeout`"
def setdefaulttimeout(timeout: float | None, /) -> None: ... # noqa: F811
def setdefaulttimeout(timeout: float | None, /) -> None: ...
if sys.platform != "win32":
def sethostname(name: str, /) -> None: ...
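
A brief, hedged illustration of the exception aliases re-declared above: on Python 3.10+ `socket.timeout` is simply `TimeoutError`, and `socket.error` has long been `OSError` (the address below is an unroutable example chosen only to trigger a timeout):

import socket
import sys

if sys.version_info >= (3, 10):
    assert socket.timeout is TimeoutError  # alias, not a subclass, since 3.10
assert socket.error is OSError

s = socket.socket()
s.settimeout(0.01)
try:
    s.connect(("10.255.255.1", 81))  # expected to time out or fail
except socket.timeout:
    print("timed out")
except OSError as exc:
    print("other socket error:", exc)
finally:
    s.close()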

View File

@@ -0,0 +1,117 @@
import sys
from typing import Literal
SF_APPEND: Literal[0x00040000]
SF_ARCHIVED: Literal[0x00010000]
SF_IMMUTABLE: Literal[0x00020000]
SF_NOUNLINK: Literal[0x00100000]
SF_SNAPSHOT: Literal[0x00200000]
ST_MODE: Literal[0]
ST_INO: Literal[1]
ST_DEV: Literal[2]
ST_NLINK: Literal[3]
ST_UID: Literal[4]
ST_GID: Literal[5]
ST_SIZE: Literal[6]
ST_ATIME: Literal[7]
ST_MTIME: Literal[8]
ST_CTIME: Literal[9]
S_IFIFO: Literal[0o010000]
S_IFLNK: Literal[0o120000]
S_IFREG: Literal[0o100000]
S_IFSOCK: Literal[0o140000]
S_IFBLK: Literal[0o060000]
S_IFCHR: Literal[0o020000]
S_IFDIR: Literal[0o040000]
# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux.
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int
S_ISUID: Literal[0o4000]
S_ISGID: Literal[0o2000]
S_ISVTX: Literal[0o1000]
S_IRWXU: Literal[0o0700]
S_IRUSR: Literal[0o0400]
S_IWUSR: Literal[0o0200]
S_IXUSR: Literal[0o0100]
S_IRWXG: Literal[0o0070]
S_IRGRP: Literal[0o0040]
S_IWGRP: Literal[0o0020]
S_IXGRP: Literal[0o0010]
S_IRWXO: Literal[0o0007]
S_IROTH: Literal[0o0004]
S_IWOTH: Literal[0o0002]
S_IXOTH: Literal[0o0001]
S_ENFMT: Literal[0o2000]
S_IREAD: Literal[0o0400]
S_IWRITE: Literal[0o0200]
S_IEXEC: Literal[0o0100]
UF_APPEND: Literal[0x00000004]
UF_COMPRESSED: Literal[0x00000020] # OS X 10.6+ only
UF_HIDDEN: Literal[0x00008000]  # OS X 10.5+ only
UF_IMMUTABLE: Literal[0x00000002]
UF_NODUMP: Literal[0x00000001]
UF_NOUNLINK: Literal[0x00000010]
UF_OPAQUE: Literal[0x00000008]
def S_IMODE(mode: int, /) -> int: ...
def S_IFMT(mode: int, /) -> int: ...
def S_ISBLK(mode: int, /) -> bool: ...
def S_ISCHR(mode: int, /) -> bool: ...
def S_ISDIR(mode: int, /) -> bool: ...
def S_ISDOOR(mode: int, /) -> bool: ...
def S_ISFIFO(mode: int, /) -> bool: ...
def S_ISLNK(mode: int, /) -> bool: ...
def S_ISPORT(mode: int, /) -> bool: ...
def S_ISREG(mode: int, /) -> bool: ...
def S_ISSOCK(mode: int, /) -> bool: ...
def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...
if sys.platform == "win32":
IO_REPARSE_TAG_SYMLINK: int
IO_REPARSE_TAG_MOUNT_POINT: int
IO_REPARSE_TAG_APPEXECLINK: int
if sys.platform == "win32":
FILE_ATTRIBUTE_ARCHIVE: Literal[32]
FILE_ATTRIBUTE_COMPRESSED: Literal[2048]
FILE_ATTRIBUTE_DEVICE: Literal[64]
FILE_ATTRIBUTE_DIRECTORY: Literal[16]
FILE_ATTRIBUTE_ENCRYPTED: Literal[16384]
FILE_ATTRIBUTE_HIDDEN: Literal[2]
FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768]
FILE_ATTRIBUTE_NORMAL: Literal[128]
FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192]
FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072]
FILE_ATTRIBUTE_OFFLINE: Literal[4096]
FILE_ATTRIBUTE_READONLY: Literal[1]
FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024]
FILE_ATTRIBUTE_SPARSE_FILE: Literal[512]
FILE_ATTRIBUTE_SYSTEM: Literal[4]
FILE_ATTRIBUTE_TEMPORARY: Literal[256]
FILE_ATTRIBUTE_VIRTUAL: Literal[65536]
if sys.version_info >= (3, 13):
SF_SETTABLE: Literal[0x3FFF0000]
# https://github.com/python/cpython/issues/114081#issuecomment-2119017790
# SF_RESTRICTED: Literal[0x00080000]
SF_FIRMLINK: Literal[0x00800000]
SF_DATALESS: Literal[0x40000000]
SF_SUPPORTED: Literal[0x9F0000]
SF_SYNTHETIC: Literal[0xC0000000]
UF_TRACKED: Literal[0x00000040]
UF_DATAVAULT: Literal[0x00000080]
UF_SETTABLE: Literal[0x0000FFFF]
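
To ground the constants and predicates above, a minimal sketch of typical use through the public `stat` module (the path is arbitrary):

import os
import stat

st = os.stat(".")
mode = st.st_mode

print(stat.S_ISDIR(mode))       # True for a directory
print(stat.filemode(mode))      # e.g. "drwxr-xr-x"
print(oct(stat.S_IMODE(mode)))  # permission bits only

# As the comment above explains, S_IFDOOR is 0 on platforms without door
# files (e.g. Linux), so this is always False there.
print(stat.S_ISDOOR(mode))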

View File

@@ -1,4 +1,3 @@
import signal
import sys
from _typeshed import structseq
from collections.abc import Callable
@@ -14,46 +13,23 @@ error = RuntimeError
def _count() -> int: ...
@final
class LockType:
def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
def release(self) -> None: ...
def locked(self) -> bool: ...
def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ...
def release_lock(self) -> None: ...
def locked_lock(self) -> bool: ...
def __enter__(self) -> bool: ...
def __exit__(
self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
) -> None: ...
if sys.version_info >= (3, 13):
@final
class _ThreadHandle:
ident: int
def join(self, timeout: float | None = None, /) -> None: ...
def is_done(self) -> bool: ...
def _set_done(self) -> None: ...
def start_joinable_thread(
function: Callable[[], object], handle: _ThreadHandle | None = None, daemon: bool = True
) -> _ThreadHandle: ...
lock = LockType
@overload
def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ...
def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]]) -> int: ...
@overload
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ...
if sys.version_info >= (3, 10):
def interrupt_main(signum: signal.Signals = ..., /) -> None: ...
else:
def interrupt_main() -> None: ...
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any]) -> int: ...
def interrupt_main() -> None: ...
def exit() -> NoReturn: ...
def allocate_lock() -> LockType: ...
def get_ident() -> int: ...
def stack_size(size: int = 0, /) -> int: ...
def stack_size(size: int = ...) -> int: ...
TIMEOUT_MAX: float
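
For readers less familiar with the low-level `_thread` API stubbed above, a short sketch (most code should use the higher-level `threading` module; this is purely illustrative):

import _thread
import time

lock = _thread.allocate_lock()  # returns a LockType instance
results = []

def worker(n):
    with lock:  # LockType supports the context-manager protocol shown above
        results.append(n * n)

for i in range(3):
    _thread.start_new_thread(worker, (i,))

time.sleep(0.1)  # crude wait; real joining needs threading or the 3.13+ handles above
print(sorted(results))  # [0, 1, 4]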

View File

@@ -1,5 +1,5 @@
from typing import Any
from typing_extensions import Self, TypeAlias
from typing_extensions import TypeAlias
from weakref import ReferenceType
__all__ = ["local"]
@@ -12,7 +12,6 @@ class _localimpl:
def create_dict(self) -> _LocalDict: ...
class local:
def __new__(cls, /, *args: Any, **kw: Any) -> Self: ...
def __getattribute__(self, name: str) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...
def __delattr__(self, name: str) -> None: ...
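
The `local` class stubbed above mirrors the behaviour exposed as `threading.local`; a tiny sketch of that behaviour (standard library only):

import threading

ctx = threading.local()
ctx.value = "main"

def show():
    # Each thread gets its own namespace: 'value' was only set in the main thread.
    print(getattr(ctx, "value", "<unset>"))

t = threading.Thread(target=show)
t.start()
t.join()
print(ctx.value)  # still "main" in the main thread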

View File

@@ -1,6 +1,6 @@
import sys
from collections.abc import Callable
from typing import Any, ClassVar, Final, final
from typing import Any, ClassVar, Literal, final
from typing_extensions import TypeAlias
# _tkinter is meant to be only used internally by tkinter, but some tkinter
@@ -95,19 +95,19 @@ class TkappType:
def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ...
# These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS
ALL_EVENTS: Final = -3
FILE_EVENTS: Final = 8
IDLE_EVENTS: Final = 32
TIMER_EVENTS: Final = 16
WINDOW_EVENTS: Final = 4
ALL_EVENTS: Literal[-3]
FILE_EVENTS: Literal[8]
IDLE_EVENTS: Literal[32]
TIMER_EVENTS: Literal[16]
WINDOW_EVENTS: Literal[4]
DONT_WAIT: Final = 2
EXCEPTION: Final = 8
READABLE: Final = 2
WRITABLE: Final = 4
DONT_WAIT: Literal[2]
EXCEPTION: Literal[8]
READABLE: Literal[2]
WRITABLE: Literal[4]
TCL_VERSION: Final[str]
TK_VERSION: Final[str]
TCL_VERSION: str
TK_VERSION: str
@final
class TkttType:

Some files were not shown because too many files have changed in this diff Show More