Compare commits

1 commit

PYI034 ... editables-

| Author | SHA1 | Date |
|---|---|---|
|  | 09e8599e91 |  |
@@ -20,7 +20,7 @@
  "extensions": [
    "ms-python.python",
    "rust-lang.rust-analyzer",
    "fill-labs.dependi",
    "serayuzgur.crates",
    "tamasfe.even-better-toml",
    "Swellaby.vscode-rust-test-adapter",
    "charliermarsh.ruff"
4 .github/CODEOWNERS vendored

@@ -17,5 +17,5 @@
/scripts/fuzz-parser/ @AlexWaygood

# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/red_knot* @carljm @MichaReiser @AlexWaygood
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood
27 .github/renovate.json5 vendored

@@ -8,32 +8,15 @@
  semanticCommits: "disabled",
  separateMajorMinor: false,
  prHourlyLimit: 10,
  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
  cargo: {
    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
    rangeStrategy: "update-lockfile",
  },
  pep621: {
    // The default for this package manager is to only search for `pyproject.toml` files
    // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
    fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
  },
  pip_requirements: {
    // The default for this package manager is to run on all requirements.txt files:
    // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
    // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
    // outside the `docs/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
    // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
    // a "negative glob pattern".
    // See:
    // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
    // - https://docs.renovatebot.com/configuration-options/#ignorepaths
    // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
    ignorePaths: ["!docs/requirements*.txt"]
  },
  npm: {
    // The default for this package manager is to only search for `package.json` files
    // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
    fileMatch: ["^playground/.*package\\.json$"],
  },
  "pre-commit": {

@@ -65,14 +48,6 @@
      matchManagers: ["cargo"],
      enabled: false,
    },
    {
      // `mkdocs-material` requires a manual update to keep the version in sync
      // with `mkdocs-material-insider`.
      // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
      matchManagers: ["pip_requirements"],
      matchPackagePatterns: ["mkdocs-material"],
      enabled: false,
    },
    {
      groupName: "pre-commit dependencies",
      matchManagers: ["pre-commit"],
2 .github/workflows/build-binaries.yml vendored

@@ -63,7 +63,7 @@ jobs:

  macos-x86_64:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: macos-14
    runs-on: macos-12
    steps:
      - uses: actions/checkout@v4
        with:
250 .github/workflows/build-docker.yml vendored

@@ -17,21 +17,12 @@ on:
    paths:
      - .github/workflows/build-docker.yml

env:
  RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff

jobs:
  docker-build:
    name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
  docker-publish:
    name: Build Docker image (ghcr.io/astral-sh/ruff)
    runs-on: ubuntu-latest
    environment:
      name: release
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@v4
        with:
@@ -45,6 +36,12 @@ jobs:
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/astral-sh/ruff

      - name: Check tag consistency
        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
        run: |
@@ -58,233 +55,14 @@ jobs:
            echo "Releasing ${version}"
          fi
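Only the tail of the `Check tag consistency` step is visible across these two hunks. A hypothetical reconstruction of such a guard, in which the variable names and the exact comparison are assumptions rather than the workflow's actual code:

```bash
# Hypothetical sketch of a tag-consistency guard; values are illustrative.
set -euo pipefail

RELEASE_TAG="0.7.3"        # stand-in for fromJson(inputs.plan).announcement_tag
EXPECTED_VERSION="0.7.3"   # stand-in for the version docker/metadata computed

if [ "${RELEASE_TAG}" != "${EXPECTED_VERSION}" ]; then
  echo "Tag ${RELEASE_TAG} does not match expected version ${EXPECTED_VERSION}" >&2
  exit 1
else
  version="${RELEASE_TAG}"
  echo "Releasing ${version}"
fi
```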
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.RUFF_BASE_IMG }}
          # Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
          tags: |
            type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
            type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}

      - name: Normalize Platform Pair (replace / with -)
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_TUPLE=${platform//\//-}" >> $GITHUB_ENV

      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: .
          platforms: ${{ matrix.platform }}
          cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
          cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}

      - name: Export digests
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"

      - name: Upload digests
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_TUPLE }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1
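Both the platform normalization and the digest export above lean on plain bash parameter expansion rather than external tools. A standalone sketch with illustrative values:

```bash
# Illustrative values; in the workflow these come from the build matrix
# and from the docker/build-push-action step output.
platform="linux/amd64"
digest="sha256:4b825dc642cb6eb9a060e54bf8d69288fbee4904"

# ${var//pattern/repl} replaces every match: "linux/amd64" -> "linux-amd64",
# producing a string that is safe to use in artifact and cache-scope names.
echo "${platform//\//-}"   # linux-amd64

# ${var#prefix} strips a leading prefix: keep only the hex part of the
# digest so it can serve as a file name under /tmp/digests.
echo "${digest#sha256:}"   # 4b825dc642cb6eb9a060e54bf8d69288fbee4904
```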
  docker-publish:
    name: Publish Docker image (ghcr.io/astral-sh/ruff)
    runs-on: ubuntu-latest
    environment:
      name: release
    needs:
      - docker-build
    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - uses: docker/setup-buildx-action@v3

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.RUFF_BASE_IMG }}
          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
          tags: |
            type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
            type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

      - uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
      - name: Create manifest list and push
        working-directory: /tmp/digests
        # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
        # The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
        # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
        run: |
          docker buildx imagetools create \
            $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.RUFF_BASE_IMG }}@sha256:%s ' *)
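The comments above describe the expansion in prose; a standalone sketch of the same two pieces, with a made-up metadata payload standing in for what `docker/metadata-action` exports:

```bash
# Dummy stand-in for $DOCKER_METADATA_OUTPUT_JSON.
DOCKER_METADATA_OUTPUT_JSON='{"tags":["ghcr.io/astral-sh/ruff:0.7.3","ghcr.io/astral-sh/ruff:0.7"]}'

# Map each tag to "-t <tag>" and join with spaces:
jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
# -t ghcr.io/astral-sh/ruff:0.7.3 -t ghcr.io/astral-sh/ruff:0.7

# printf repeats its format string for every argument, so a directory of
# digest files (two dummy names here) expands to one image reference each:
printf 'ghcr.io/astral-sh/ruff@sha256:%s ' deadbeef cafebabe
# ghcr.io/astral-sh/ruff@sha256:deadbeef ghcr.io/astral-sh/ruff@sha256:cafebabe
```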
  docker-publish-extra:
    name: Publish additional Docker image based on ${{ matrix.image-mapping }}
    runs-on: ubuntu-latest
    environment:
      name: release
    needs:
      - docker-publish
    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
    strategy:
      fail-fast: false
      matrix:
        # Mapping of base image followed by a comma followed by one or more base tags (comma separated)
        # Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
        image-mapping:
          - alpine:3.20,alpine3.20,alpine
          - debian:bookworm-slim,bookworm-slim,debian-slim
          - buildpack-deps:bookworm,bookworm,debian
    steps:
      - uses: docker/setup-buildx-action@v3

      - uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate Dynamic Dockerfile Tags
        shell: bash
        run: |
          set -euo pipefail

          # Extract the image and tags from the matrix variable
          IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"

          # Generate Dockerfile content
          cat <<EOF > Dockerfile
          FROM ${BASE_IMAGE}
          COPY --from=${{ env.RUFF_BASE_IMG }}:latest /ruff /usr/local/bin/ruff
          ENTRYPOINT []
          CMD ["/usr/local/bin/ruff"]
          EOF

          # Initialize a variable to store all tag docker metadata patterns
          TAG_PATTERNS=""

          # Loop through all base tags and append its docker metadata pattern to the list
          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
          IFS=','; for TAG in ${BASE_TAGS}; do
            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
            TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
          done

          # Remove the trailing newline from the pattern list
          TAG_PATTERNS="${TAG_PATTERNS%\\n}"

          # Export image cache name
          echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> $GITHUB_ENV

          # Export tag patterns using the multiline env var syntax
          {
            echo "TAG_PATTERNS<<EOF"
            echo -e "${TAG_PATTERNS}"
            echo EOF
          } >> $GITHUB_ENV
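The final block above uses GitHub Actions' multiline environment-file syntax (`NAME<<DELIMITER ... DELIMITER`). Outside of Actions, `$GITHUB_ENV` is just a file, so the mechanism can be tried locally; a minimal sketch with illustrative values:

```bash
# Simulate the Actions environment file locally.
GITHUB_ENV=$(mktemp)

TAG_PATTERNS="type=raw,value=alpine\ntype=raw,value=alpine3.20"

{
  echo "TAG_PATTERNS<<EOF"
  echo -e "${TAG_PATTERNS}"   # -e turns the literal \n into real newlines
  echo EOF
} >> "$GITHUB_ENV"

cat "$GITHUB_ENV"
# TAG_PATTERNS<<EOF
# type=raw,value=alpine
# type=raw,value=alpine3.20
# EOF
```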
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        # ghcr.io prefers index level annotations
        env:
          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
        with:
          images: ${{ env.RUFF_BASE_IMG }}
          flavor: |
            latest=false
          tags: |
            ${{ env.TAG_PATTERNS }}

      - name: Build and push
      - name: "Build and push Docker image"
        uses: docker/build-push-action@v6
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          # We do not really need to cache here as the Dockerfile is tiny
          #cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
          #cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          # Reuse the builder
          cache-from: type=gha
          cache-to: type=gha,mode=max
          push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
          tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
          labels: ${{ steps.meta.outputs.labels }}
          annotations: ${{ steps.meta.outputs.annotations }}
  # This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
  # show the ruff base image first since GitHub always shows the last updated image digests
  # This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
  docker-republish:
    name: Annotate Docker image (ghcr.io/astral-sh/ruff)
    runs-on: ubuntu-latest
    environment:
      name: release
    needs:
      - docker-publish-extra
    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - uses: docker/setup-buildx-action@v3

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        env:
          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
        with:
          images: ${{ env.RUFF_BASE_IMG }}
          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
          tags: |
            type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
            type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}

      - uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
      - name: Create manifest list and push
        working-directory: /tmp/digests
        # The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
        # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
        # The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
        # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
        run: |
          readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
          docker buildx imagetools create \
            "${annotations[@]}" \
            $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.RUFF_BASE_IMG }}@sha256:%s ' *)
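A standalone sketch of the `readarray` pattern above, with dummy annotation lines; building an array keeps each `key=value` pair intact as a single argument even when it contains spaces:

```bash
# Dummy stand-in for $DOCKER_METADATA_OUTPUT_ANNOTATIONS (one entry per line).
DOCKER_METADATA_OUTPUT_ANNOTATIONS='index:org.opencontainers.image.description=An extremely fast Python linter
index:org.opencontainers.image.licenses=MIT'

# Split on newlines (-t drops the trailing newline from each element) ...
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"

# ... and turn every line into its own quoted --annotation argument.
annotations=()
for line in "${lines[@]}"; do
  annotations+=(--annotation "$line")
done

printf '%s\n' "${annotations[@]}"
# --annotation
# index:org.opencontainers.image.description=An extremely fast Python linter
# --annotation
# index:org.opencontainers.image.licenses=MIT
```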
33 .github/workflows/ci.yaml vendored

@@ -16,7 +16,7 @@ env:
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10
  PACKAGE_NAME: ruff
  PYTHON_VERSION: "3.12"
  PYTHON_VERSION: "3.11"

jobs:
  determine_changes:

@@ -37,7 +37,7 @@ jobs:
        with:
          fetch-depth: 0

      - uses: tj-actions/changed-files@v45
      - uses: tj-actions/changed-files@v44
        id: changed
        with:
          files_yaml: |

@@ -111,7 +111,7 @@ jobs:
      - name: "Clippy"
        run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
      - name: "Clippy (wasm)"
        run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
        run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings

  cargo-test-linux:
    name: "cargo test (linux)"
@@ -142,13 +142,6 @@ jobs:

      # Check for broken links in the documentation.
      - run: cargo doc --all --no-deps
        env:
          RUSTDOCFLAGS: "-D warnings"
      # Use --document-private-items so that all our doc comments are kept in
      # sync, not just public items. Eventually we should do this for all
      # crates; for now add crates here as they are warning-clean to prevent
      # regression.
      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
        env:
          # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
          RUSTDOCFLAGS: "-D warnings"
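Since `cargo doc` has no check mode, the step above promotes rustdoc warnings (including broken intra-doc links) to hard errors via `RUSTDOCFLAGS`. The same check can be reproduced locally, for instance for one of the crates listed above:

```bash
# Fail the doc build on any rustdoc warning, private items included.
RUSTDOCFLAGS="-D warnings" cargo doc --no-deps -p ruff_db --document-private-items
```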
@@ -193,19 +186,15 @@ jobs:
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@v4
        with:
          node-version: 20
          node-version: 18
          cache: "npm"
          cache-dependency-path: playground/package-lock.json
      - uses: jetli/wasm-pack-action@v0.4.0
      - uses: Swatinem/rust-cache@v2
      - name: "Test ruff_wasm"
      - name: "Run wasm-pack"
        run: |
          cd crates/ruff_wasm
          wasm-pack test --node
      - name: "Test red_knot_wasm"
        run: |
          cd crates/red_knot_wasm
          wasm-pack test --node

  cargo-build-release:
    name: "cargo build (release)"
@@ -518,8 +507,6 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.13"
      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@v0.9.0

@@ -527,15 +514,13 @@ jobs:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
      - name: "Install Rust toolchain"
        run: rustup show
      - name: Install uv
        uses: astral-sh/setup-uv@v3
      - uses: Swatinem/rust-cache@v2
      - name: "Install Insiders dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        run: uv pip install -r docs/requirements-insiders.txt --system
        run: pip install -r docs/requirements-insiders.txt
      - name: "Install dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: uv pip install -r docs/requirements.txt --system
        run: pip install -r docs/requirements.txt
      - name: "Update README File"
        run: python scripts/transform_readme.py --target mkdocs
      - name: "Generate docs"
@@ -612,7 +597,7 @@ jobs:
          just test

  benchmarks:
    runs-on: ubuntu-22.04
    runs-on: ubuntu-latest
    needs: determine_changes
    if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
    timeout-minutes: 20

@@ -634,7 +619,7 @@ jobs:
        run: cargo codspeed build --features codspeed -p ruff_benchmark

      - name: "Run benchmarks"
        uses: CodSpeedHQ/action@v3
        uses: CodSpeedHQ/action@v2
        with:
          run: cargo codspeed run
          token: ${{ secrets.CODSPEED_TOKEN }}
2 .github/workflows/pr-comment.yaml vendored

@@ -23,7 +23,6 @@ jobs:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Parse pull request number
        id: pr-number

@@ -44,7 +43,6 @@ jobs:
          path: pr/ecosystem
          workflow_conclusion: completed
          if_no_artifact_found: ignore
          allow_forks: true

      - name: Generate comment content
        id: generate-comment
13 .github/workflows/publish-docs.yml vendored

@@ -34,10 +34,10 @@ jobs:
      - name: "Set docs version"
        run: |
          version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
          # if version is missing, use 'latest'
          if [ -z "$version" ]; then
            echo "Using 'latest' as version"
            version="latest"
          # if version is missing, exit with error
          if [[ -z "$version" ]]; then
            echo "Can't build docs without a version."
            exit 1
          fi

          # Use version as display name for now

@@ -104,8 +104,8 @@ jobs:
        run: |
          branch_name="${{ env.branch_name }}"

          git config user.name "astral-docs-bot"
          git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
          git config user.name "$GITHUB_ACTOR"
          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"

          git checkout -b $branch_name
          git add site/ruff

@@ -145,7 +145,6 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          branch_name="${{ env.branch_name }}"

          # auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
          # give the PR a few seconds to be created before trying to auto-merge it
          sleep 10
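The first hunk above swaps a missing-version fallback for a hard failure. A minimal side-by-side sketch of the two branches; the helper names are ours, and `$1` stands in for the version derived from the release plan or workflow input:

```bash
# Variant A: fall back to a 'latest' docs build when no version is given.
use_latest_fallback() {
  local version="$1"
  if [ -z "$version" ]; then
    echo "Using 'latest' as version"
    version="latest"
  fi
  echo "building docs for: $version"
}

# Variant B: refuse to build docs without an explicit version.
fail_without_version() {
  local version="$1"
  if [[ -z "$version" ]]; then
    echo "Can't build docs without a version."
    return 1
  fi
  echo "building docs for: $version"
}

use_latest_fallback ""           # -> builds 'latest'
fail_without_version "" || true  # -> errors out
```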
4 .github/workflows/publish-playground.yml vendored

@@ -29,7 +29,7 @@ jobs:
        run: rustup target add wasm32-unknown-unknown
      - uses: actions/setup-node@v4
        with:
          node-version: 20
          node-version: 18
          cache: "npm"
          cache-dependency-path: playground/package-lock.json
      - uses: jetli/wasm-pack-action@v0.4.0

@@ -47,7 +47,7 @@ jobs:
        working-directory: playground
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
        uses: cloudflare/wrangler-action@v3.11.0
        uses: cloudflare/wrangler-action@v3.7.0
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
8 .github/workflows/publish-pypi.yml vendored

@@ -21,12 +21,14 @@ jobs:
      # For PyPI's trusted publishing.
      id-token: write
    steps:
      - name: "Install uv"
        uses: astral-sh/setup-uv@v3
      - uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          path: wheels
          merge-multiple: true
      - name: Publish to PyPi
        run: uv publish -v wheels/*
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          skip-existing: true
          packages-dir: wheels
          verbose: true
2 .github/workflows/publish-wasm.yml vendored

@@ -43,7 +43,7 @@ jobs:
      - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
      - uses: actions/setup-node@v4
        with:
          node-version: 20
          node-version: 18
          registry-url: "https://registry.npmjs.org"
      - name: "Publish (dry-run)"
        if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
4 .github/workflows/release.yml vendored

@@ -1,5 +1,3 @@
# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#

@@ -66,7 +64,7 @@ jobs:
        # we specify bash to get pipefail; it guards against the `curl` command
        # failing. otherwise `sh` won't catch that `curl` returned non-0
        shell: bash
        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.1/cargo-dist-installer.sh | sh"
        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
      - name: Cache cargo-dist
        uses: actions/upload-artifact@v4
        with:
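The `shell: bash` comment refers to GitHub Actions running bash steps with `-eo pipefail`, while the default `sh` shell does not set pipefail. Without it, a pipeline's exit status is that of its last command, so a failed download piped into `sh` can still "succeed". A minimal demonstration, with `false` standing in for a failing `curl`:

```bash
set +o pipefail
false | sh -s -- 2>/dev/null
echo "without pipefail: $?"   # 0 - the download failure would be masked

set -o pipefail
false | sh -s -- 2>/dev/null
echo "with pipefail: $?"      # 1 - the failure propagates
```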
14 .github/workflows/sync_typeshed.yaml vendored

@@ -37,13 +37,13 @@ jobs:
      - name: Sync typeshed
        id: sync
        run: |
          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
          mkdir ruff/crates/red_knot_vendored/vendor/typeshed
          cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
          cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
          cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
          mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
          cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
          cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
          cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
      - name: Commit the changes
        id: commit
        if: ${{ steps.sync.outcome == 'success' }}
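The only change in this hunk is the destination crate; the command sequence itself is identical. A sketch folding the repeated recipe into one function (the helper name and its argument are ours, not part of the workflow):

```bash
# Re-vendor typeshed into a destination directory and record its provenance.
vendor_typeshed() {
  local vendor_dir="$1"    # e.g. ruff/crates/red_knot_vendored/vendor/typeshed
  rm -rf "$vendor_dir"
  mkdir -p "$vendor_dir"
  cp typeshed/README.md typeshed/LICENSE "$vendor_dir"
  cp -r typeshed/stdlib "$vendor_dir/stdlib"
  rm -rf "$vendor_dir/stdlib/@tests"   # drop typeshed's own test suite
  # record the exact upstream commit the vendored copy came from
  git -C typeshed rev-parse HEAD > "$vendor_dir/source_commit.txt"
}
```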
8 .gitignore vendored

@@ -21,14 +21,6 @@ flamegraph.svg
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*

# samply profiles
profile.json

# tracing-flame traces
tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg

###
# Rust.gitignore
###
@@ -14,9 +14,6 @@ MD041: false
# MD013/line-length
MD013: false

# MD014/commands-show-output
MD014: false

# MD024/no-duplicate-heading
MD024:
  # Allow when nested under different parents e.g. CHANGELOG.md
@@ -1,13 +1,10 @@
fail_fast: false
fail_fast: true

exclude: |
  (?x)^(
    crates/red_knot_vendored/vendor/.*|
    crates/red_knot_workspace/resources/.*|
    crates/red_knot_module_resolver/vendor/.*|
    crates/ruff_linter/resources/.*|
    crates/ruff_linter/src/rules/.*/snapshots/.*|
    crates/ruff_notebook/resources/.*|
    crates/ruff_server/resources/.*|
    crates/ruff/resources/.*|
    crates/ruff_python_formatter/resources/.*|
    crates/ruff_python_formatter/tests/snapshots/.*|

@@ -17,18 +14,17 @@ exclude: |

repos:
  - repo: https://github.com/abravalheri/validate-pyproject
    rev: v0.22
    rev: v0.18
    hooks:
      - id: validate-pyproject

  - repo: https://github.com/executablebooks/mdformat
    rev: 0.7.18
    rev: 0.7.17
    hooks:
      - id: mdformat
        additional_dependencies:
          - mdformat-mkdocs
          - mdformat-admon
          - mdformat-footnote
        exclude: |
          (?x)^(
            docs/formatter/black\.md

@@ -36,7 +32,7 @@ repos:
          )$

  - repo: https://github.com/igorshubovych/markdownlint-cli
    rev: v0.42.0
    rev: v0.41.0
    hooks:
      - id: markdownlint-fix
        exclude: |

@@ -45,21 +41,8 @@ repos:
          | docs/\w+\.md
          )$

  - repo: https://github.com/adamchainz/blacken-docs
    rev: 1.19.1
    hooks:
      - id: blacken-docs
        args: ["--pyi", "--line-length", "130"]
        files: '^crates/.*/resources/mdtest/.*\.md'
        exclude: |
          (?x)^(
            .*?invalid(_.+)_syntax.md
          )$
        additional_dependencies:
          - black==24.10.0

  - repo: https://github.com/crate-ci/typos
    rev: v1.27.0
    rev: v1.23.2
    hooks:
      - id: typos

@@ -73,17 +56,22 @@ repos:
        pass_filenames: false # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.7.2
    rev: v0.5.2
    hooks:
      - id: ruff-format
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
        types_or: [python, pyi]
        require_serial: true
        exclude: |
          (?x)^(
            crates/ruff_linter/resources/.*|
            crates/ruff_python_formatter/resources/.*
          )$

  # Prettier
  - repo: https://github.com/rbubley/mirrors-prettier
    rev: v3.3.3
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v3.1.0
    hooks:
      - id: prettier
        types: [yaml]
@@ -1,56 +1,5 @@
# Breaking Changes

## 0.7.0

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
  ([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
  This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
  See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
  `TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
  the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
  [`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
  instead.

## 0.6.0

- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).

  You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):

  ```toml
  [tool.ruff.lint.per-file-ignores]
  "*.ipynb" = ["E501"] # disable line-too-long in notebooks
  ```

  If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
  section-specific `exclude` option to do so. For example, the following would only lint Jupyter
  Notebook files and not format them:

  ```toml
  [tool.ruff.format]
  exclude = ["*.ipynb"]
  ```

  And, conversely, the following would only format Jupyter Notebook files and not lint them:

  ```toml
  [tool.ruff.lint]
  exclude = ["*.ipynb"]
  ```

  You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:

  ```toml
  [tool.ruff]
  extend-exclude = ["*.ipynb"]
  ```

## 0.5.0

- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
605 CHANGELOG.md

@@ -1,610 +1,5 @@
# Changelog

## 0.7.3

### Preview features

- Formatter: Disallow single-line implicit concatenated strings ([#13928](https://github.com/astral-sh/ruff/pull/13928))
- \[`flake8-pyi`\] Include all Python file types for `PYI006` and `PYI066` ([#14059](https://github.com/astral-sh/ruff/pull/14059))
- \[`flake8-simplify`\] Implement `split-of-static-string` (`SIM905`) ([#14008](https://github.com/astral-sh/ruff/pull/14008))
- \[`refurb`\] Implement `subclass-builtin` (`FURB189`) ([#14105](https://github.com/astral-sh/ruff/pull/14105))
- \[`ruff`\] Improve diagnostic messages and docs (`RUF031`, `RUF032`, `RUF034`) ([#14068](https://github.com/astral-sh/ruff/pull/14068))

### Rule changes

- Detect items that hash to same value in duplicate sets (`B033`, `PLC0208`) ([#14064](https://github.com/astral-sh/ruff/pull/14064))
- \[`eradicate`\] Better detection of IntelliJ language injection comments (`ERA001`) ([#14094](https://github.com/astral-sh/ruff/pull/14094))
- \[`flake8-pyi`\] Add autofix for `docstring-in-stub` (`PYI021`) ([#14150](https://github.com/astral-sh/ruff/pull/14150))
- \[`flake8-pyi`\] Update `duplicate-literal-member` (`PYI062`) to always provide an autofix ([#14188](https://github.com/astral-sh/ruff/pull/14188))
- \[`pyflakes`\] Detect items that hash to same value in duplicate dictionaries (`F601`) ([#14065](https://github.com/astral-sh/ruff/pull/14065))
- \[`ruff`\] Fix false positive for decorators (`RUF028`) ([#14061](https://github.com/astral-sh/ruff/pull/14061))

### Bug fixes

- Avoid parsing joint rule codes as distinct codes in `# noqa` ([#12809](https://github.com/astral-sh/ruff/pull/12809))
- \[`eradicate`\] Ignore `# language=` in commented-out-code rule (`ERA001`) ([#14069](https://github.com/astral-sh/ruff/pull/14069))
- \[`flake8-bugbear`\] Do not run `mutable-argument-default` on stubs (`B006`) ([#14058](https://github.com/astral-sh/ruff/pull/14058))
- \[`flake8-builtins`\] Skip lambda expressions in `builtin-argument-shadowing` (`A002`) ([#14144](https://github.com/astral-sh/ruff/pull/14144))
- \[`flake8-comprehension`\] Also remove trailing comma while fixing `C409` and `C419` ([#14097](https://github.com/astral-sh/ruff/pull/14097))
- \[`flake8-simplify`\] Allow `open` without context manager in `return` statement (`SIM115`) ([#14066](https://github.com/astral-sh/ruff/pull/14066))
- \[`pylint`\] Respect hash-equivalent literals in `iteration-over-set` (`PLC0208`) ([#14063](https://github.com/astral-sh/ruff/pull/14063))
- \[`pylint`\] Update known dunder methods for Python 3.13 (`PLW3201`) ([#14146](https://github.com/astral-sh/ruff/pull/14146))
- \[`pyupgrade`\] Ignore kwarg unpacking for `UP044` ([#14053](https://github.com/astral-sh/ruff/pull/14053))
- \[`refurb`\] Parse more exotic decimal strings in `verbose-decimal-constructor` (`FURB157`) ([#14098](https://github.com/astral-sh/ruff/pull/14098))

### Documentation

- Add links to missing related options within rule documentations ([#13971](https://github.com/astral-sh/ruff/pull/13971))
- Add rule short code to mkdocs tags to allow searching via rule codes ([#14040](https://github.com/astral-sh/ruff/pull/14040))
## 0.7.2

### Preview features

- Fix formatting of single with-item with trailing comment ([#14005](https://github.com/astral-sh/ruff/pull/14005))
- \[`pyupgrade`\] Add PEP 646 `Unpack` conversion to `*` with fix (`UP044`) ([#13988](https://github.com/astral-sh/ruff/pull/13988))

### Rule changes

- Regenerate `known_stdlibs.rs` with stdlibs 2024.10.25 ([#13963](https://github.com/astral-sh/ruff/pull/13963))
- \[`flake8-no-pep420`\] Skip namespace package enforcement for PEP 723 scripts (`INP001`) ([#13974](https://github.com/astral-sh/ruff/pull/13974))

### Server

- Fix server panic when undoing an edit ([#14010](https://github.com/astral-sh/ruff/pull/14010))

### Bug fixes

- Fix issues in discovering ruff in pip build environments ([#13881](https://github.com/astral-sh/ruff/pull/13881))
- \[`flake8-type-checking`\] Fix false positive for `singledispatchmethod` (`TCH003`) ([#13941](https://github.com/astral-sh/ruff/pull/13941))
- \[`flake8-type-checking`\] Treat return type of `singledispatch` as runtime-required (`TCH003`) ([#13957](https://github.com/astral-sh/ruff/pull/13957))

### Documentation

- \[`flake8-simplify`\] Include caveats of enabling `if-else-block-instead-of-if-exp` (`SIM108`) ([#14019](https://github.com/astral-sh/ruff/pull/14019))

## 0.7.1

### Preview features

- Fix `E221` and `E222` to flag missing or extra whitespace around `==` operator ([#13890](https://github.com/astral-sh/ruff/pull/13890))
- Formatter: Alternate quotes for strings inside f-strings in preview ([#13860](https://github.com/astral-sh/ruff/pull/13860))
- Formatter: Join implicit concatenated strings when they fit on a line ([#13663](https://github.com/astral-sh/ruff/pull/13663))
- \[`pylint`\] Restrict `iteration-over-set` to only work on sets of literals (`PLC0208`) ([#13731](https://github.com/astral-sh/ruff/pull/13731))

### Rule changes

- \[`flake8-type-checking`\] Support auto-quoting when annotations contain quotes ([#11811](https://github.com/astral-sh/ruff/pull/11811))

### Server

- Avoid indexing the workspace for single-file mode ([#13770](https://github.com/astral-sh/ruff/pull/13770))

### Bug fixes

- Make `ARG002` compatible with `EM101` when raising `NotImplementedError` ([#13714](https://github.com/astral-sh/ruff/pull/13714))

### Other changes

- Introduce more Docker tags for Ruff (similar to uv) ([#13274](https://github.com/astral-sh/ruff/pull/13274))
## 0.7.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.7.0) for a migration guide and overview of the changes!

### Breaking changes

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
  ([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
  This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
  See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
  `TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
  the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
  [`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
  instead.

### Formatter preview style

- Normalize implicit concatenated f-string quotes per part ([#13539](https://github.com/astral-sh/ruff/pull/13539))

### Preview linter features

- \[`refurb`\] Implement `hardcoded-string-charset` (`FURB156`) ([#13530](https://github.com/astral-sh/ruff/pull/13530))
- \[`refurb`\] Count codepoints not bytes for `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13631](https://github.com/astral-sh/ruff/pull/13631))

### Rule changes

- \[`pylint`\] Mark `PLE1141` fix as unsafe ([#13629](https://github.com/astral-sh/ruff/pull/13629))
- \[`flake8-async`\] Consider async generators to be "checkpoints" for `cancel-scope-no-checkpoint` (`ASYNC100`) ([#13639](https://github.com/astral-sh/ruff/pull/13639))
- \[`flake8-bugbear`\] Do not suggest setting parameter `strict=` to `False` in `B905` diagnostic message ([#13656](https://github.com/astral-sh/ruff/pull/13656))
- \[`flake8-todos`\] Only flag the word "TODO", not words starting with "todo" (`TD006`) ([#13640](https://github.com/astral-sh/ruff/pull/13640))
- \[`pycodestyle`\] Fix whitespace-related false positives and false negatives inside type-parameter lists (`E231`, `E251`) ([#13704](https://github.com/astral-sh/ruff/pull/13704))
- \[`flake8-simplify`\] Stabilize preview behavior for `SIM115` so that the rule can detect files
  being opened from a wider range of standard-library functions ([#12959](https://github.com/astral-sh/ruff/pull/12959)).

### CLI

- Add explanation of fixable in `--statistics` command ([#13774](https://github.com/astral-sh/ruff/pull/13774))

### Bug fixes

- \[`pyflakes`\] Allow `ipytest` cell magic (`F401`) ([#13745](https://github.com/astral-sh/ruff/pull/13745))
- \[`flake8-use-pathlib`\] Fix `PTH123` false positive when `open` is passed a file descriptor ([#13616](https://github.com/astral-sh/ruff/pull/13616))
- \[`flake8-bandit`\] Detect patterns from multi line SQL statements (`S608`) ([#13574](https://github.com/astral-sh/ruff/pull/13574))
- \[`flake8-pyi`\] Fix dropped expressions in `PYI030` autofix ([#13727](https://github.com/astral-sh/ruff/pull/13727))
## 0.6.9

### Preview features

- Fix codeblock dynamic line length calculation for indented docstring examples ([#13523](https://github.com/astral-sh/ruff/pull/13523))
- \[`refurb`\] Mark `FURB118` fix as unsafe ([#13613](https://github.com/astral-sh/ruff/pull/13613))

### Rule changes

- \[`pydocstyle`\] Don't raise `D208` when last line is non-empty ([#13372](https://github.com/astral-sh/ruff/pull/13372))
- \[`pylint`\] Preserve trivia (i.e. comments) in `PLR5501` autofix ([#13573](https://github.com/astral-sh/ruff/pull/13573))

### Configuration

- \[`pyflakes`\] Add `allow-unused-imports` setting for `unused-import` rule (`F401`) ([#13601](https://github.com/astral-sh/ruff/pull/13601))

### Bug fixes

- Support ruff discovery in pip build environments ([#13591](https://github.com/astral-sh/ruff/pull/13591))
- \[`flake8-bugbear`\] Avoid short circuiting `B017` for multiple context managers ([#13609](https://github.com/astral-sh/ruff/pull/13609))
- \[`pylint`\] Do not offer an invalid fix for `PLR1716` when the comparisons contain parenthesis ([#13527](https://github.com/astral-sh/ruff/pull/13527))
- \[`pyupgrade`\] Fix `UP043` to apply to `collections.abc.Generator` and `collections.abc.AsyncGenerator` ([#13611](https://github.com/astral-sh/ruff/pull/13611))
- \[`refurb`\] Fix handling of slices in tuples for `FURB118`, e.g., `x[:, 1]` ([#13518](https://github.com/astral-sh/ruff/pull/13518))

### Documentation

- Update GitHub Action link to `astral-sh/ruff-action` ([#13551](https://github.com/astral-sh/ruff/pull/13551))
## 0.6.8

### Preview features

- Remove unnecessary parentheses around `match case` clauses ([#13510](https://github.com/astral-sh/ruff/pull/13510))
- Parenthesize overlong `if` guards in `match..case` clauses ([#13513](https://github.com/astral-sh/ruff/pull/13513))
- Detect basic wildcard imports in `ruff analyze graph` ([#13486](https://github.com/astral-sh/ruff/pull/13486))
- \[`pylint`\] Implement `boolean-chained-comparison` (`R1716`) ([#13435](https://github.com/astral-sh/ruff/pull/13435))

### Rule changes

- \[`flake8-simplify`\] Detect `SIM910` when using variadic keyword arguments, i.e., `**kwargs` ([#13503](https://github.com/astral-sh/ruff/pull/13503))
- \[`pyupgrade`\] Avoid false negatives with non-reference shadowed bindings of loop variables (`UP028`) ([#13504](https://github.com/astral-sh/ruff/pull/13504))

### Bug fixes

- Detect tuples bound to variadic positional arguments i.e. `*args` ([#13512](https://github.com/astral-sh/ruff/pull/13512))
- Exit gracefully on broken pipe errors ([#13485](https://github.com/astral-sh/ruff/pull/13485))
- Avoid panic when analyze graph hits broken pipe ([#13484](https://github.com/astral-sh/ruff/pull/13484))

### Performance

- Reuse `BTreeSets` in module resolver ([#13440](https://github.com/astral-sh/ruff/pull/13440))
- Skip traversal for non-compound statements ([#13441](https://github.com/astral-sh/ruff/pull/13441))
## 0.6.7

### Preview features

- Add Python version support to ruff analyze CLI ([#13426](https://github.com/astral-sh/ruff/pull/13426))
- Add `exclude` support to `ruff analyze` ([#13425](https://github.com/astral-sh/ruff/pull/13425))
- Fix parentheses around return type annotations ([#13381](https://github.com/astral-sh/ruff/pull/13381))

### Rule changes

- \[`pycodestyle`\] Fix: Don't autofix if the first line ends in a question mark? (D400) ([#13399](https://github.com/astral-sh/ruff/pull/13399))

### Bug fixes

- Respect `lint.exclude` in ruff check `--add-noqa` ([#13427](https://github.com/astral-sh/ruff/pull/13427))

### Performance

- Avoid tracking module resolver files in Salsa ([#13437](https://github.com/astral-sh/ruff/pull/13437))
- Use `forget` for module resolver database ([#13438](https://github.com/astral-sh/ruff/pull/13438))
## 0.6.6

### Preview features

- \[`refurb`\] Skip `slice-to-remove-prefix-or-suffix` (`FURB188`) when non-trivial slice steps are present ([#13405](https://github.com/astral-sh/ruff/pull/13405))
- Add a subcommand to generate dependency graphs ([#13402](https://github.com/astral-sh/ruff/pull/13402))

### Formatter

- Fix placement of inline parameter comments ([#13379](https://github.com/astral-sh/ruff/pull/13379))

### Server

- Fix off-by one error in the `LineIndex::offset` calculation ([#13407](https://github.com/astral-sh/ruff/pull/13407))

### Bug fixes

- \[`fastapi`\] Respect FastAPI aliases in route definitions ([#13394](https://github.com/astral-sh/ruff/pull/13394))
- \[`pydocstyle`\] Respect word boundaries when detecting function signature in docs ([#13388](https://github.com/astral-sh/ruff/pull/13388))

### Documentation

- Add backlinks to rule overview linter ([#13368](https://github.com/astral-sh/ruff/pull/13368))
- Fix documentation for editor vim plugin ALE ([#13348](https://github.com/astral-sh/ruff/pull/13348))
- Fix rendering of `FURB188` docs ([#13406](https://github.com/astral-sh/ruff/pull/13406))
## 0.6.5

### Preview features

- \[`pydoclint`\] Ignore `DOC201` when function name is `__new__` ([#13300](https://github.com/astral-sh/ruff/pull/13300))
- \[`refurb`\] Implement `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13256](https://github.com/astral-sh/ruff/pull/13256))

### Rule changes

- \[`eradicate`\] Ignore script-comments with multiple end-tags (`ERA001`) ([#13283](https://github.com/astral-sh/ruff/pull/13283))
- \[`pyflakes`\] Improve error message for `UndefinedName` when a builtin was added in a newer version than specified in Ruff config (`F821`) ([#13293](https://github.com/astral-sh/ruff/pull/13293))

### Server

- Add support for extensionless Python files for server ([#13326](https://github.com/astral-sh/ruff/pull/13326))
- Fix configuration inheritance for configurations specified in the LSP settings ([#13285](https://github.com/astral-sh/ruff/pull/13285))

### Bug fixes

- \[`ruff`\] Handle unary operators in `decimal-from-float-literal` (`RUF032`) ([#13275](https://github.com/astral-sh/ruff/pull/13275))

### CLI

- Only include rules with diagnostics in SARIF metadata ([#13268](https://github.com/astral-sh/ruff/pull/13268))

### Playground

- Add "Copy as pyproject.toml/ruff.toml" and "Paste from TOML" ([#13328](https://github.com/astral-sh/ruff/pull/13328))
- Fix errors not shown for restored snippet on page load ([#13262](https://github.com/astral-sh/ruff/pull/13262))
## 0.6.4

### Preview features

- \[`flake8-builtins`\] Use dynamic builtins list based on Python version ([#13172](https://github.com/astral-sh/ruff/pull/13172))
- \[`pydoclint`\] Permit yielding `None` in `DOC402` and `DOC403` ([#13148](https://github.com/astral-sh/ruff/pull/13148))
- \[`pylint`\] Update diagnostic message for `PLW3201` ([#13194](https://github.com/astral-sh/ruff/pull/13194))
- \[`ruff`\] Implement `post-init-default` (`RUF033`) ([#13192](https://github.com/astral-sh/ruff/pull/13192))
- \[`ruff`\] Implement useless if-else (`RUF034`) ([#13218](https://github.com/astral-sh/ruff/pull/13218))

### Rule changes

- \[`flake8-pyi`\] Respect `pep8_naming.classmethod-decorators` settings when determining if a method is a classmethod in `custom-type-var-return-type` (`PYI019`) ([#13162](https://github.com/astral-sh/ruff/pull/13162))
- \[`flake8-pyi`\] Teach various rules that annotations might be stringized ([#12951](https://github.com/astral-sh/ruff/pull/12951))
- \[`pylint`\] Avoid `no-self-use` for `attrs`-style validators ([#13166](https://github.com/astral-sh/ruff/pull/13166))
- \[`pylint`\] Recurse into subscript subexpressions when searching for list/dict lookups (`PLR1733`, `PLR1736`) ([#13186](https://github.com/astral-sh/ruff/pull/13186))
- \[`pyupgrade`\] Detect `aiofiles.open` calls in `UP015` ([#13173](https://github.com/astral-sh/ruff/pull/13173))
- \[`pyupgrade`\] Mark `sys.version_info[0] < 3` and similar comparisons as outdated (`UP036`) ([#13175](https://github.com/astral-sh/ruff/pull/13175))

### CLI

- Enrich messages of SARIF results ([#13180](https://github.com/astral-sh/ruff/pull/13180))
- Handle singular case for incompatible rules warning in `ruff format` output ([#13212](https://github.com/astral-sh/ruff/pull/13212))

### Bug fixes

- \[`pydocstyle`\] Improve heuristics for detecting Google-style docstrings ([#13142](https://github.com/astral-sh/ruff/pull/13142))
- \[`refurb`\] Treat `sep` arguments with effects as unsafe removals (`FURB105`) ([#13165](https://github.com/astral-sh/ruff/pull/13165))
## 0.6.3

### Preview features

- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))

### Rule changes

- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))

### Bug fixes

- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://github.com/astral-sh/ruff/pull/13105))
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))
## 0.6.2

### Preview features

- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for `fastAPI` paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
- \[`ruff`\] Implement check for Decimal called with a float literal (`RUF032`) ([#12909](https://github.com/astral-sh/ruff/pull/12909))

### Rule changes

- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))

### Server

- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))

### Bug fixes

- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
- \[`pylint`\] Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))

### Documentation

- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))
## 0.6.1

This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
Ruff changed its behavior to lint and format Jupyter notebooks by default;
however, due to an oversight, these files were still excluded by default if
Ruff was run via pre-commit, leading to inconsistent behavior.
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).

### Preview features

- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))

### Rule changes

- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))

### Server

- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))

### Other changes

- \[`flake8-naming`\] Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))

## 0.6.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!

### Breaking changes

See also the "Remapped rules" section, which may result in disabled rules.

- Lint and format Jupyter Notebooks by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

### Deprecations

The following rules are now deprecated:

- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)

### Remapped rules

The following rules have been remapped to new rule codes:

- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`

### Stabilization

The following rules have been stabilized and are no longer in preview:

- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)

The following behaviors have been stabilized:

- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.

The following fixes have been stabilized:

- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)

### Preview features

- \[`flake8-simplify`\] Further simplify to binary in preview (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))

### Rule changes

- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))

### Server

- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))

### Bug fixes

- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
- \[`flake8-return`\] Only add `return None` at the end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
- \[`ruff`\] Skip tuples with slice expressions (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))

### Other changes

- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))

## 0.5.7

### Preview features

- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (spaces and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))

### Rule changes

- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))

### Server

- Ignore non-file workspace URL; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))

### CLI

- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))

### Bug fixes

- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
- \[`flake8-bandit`\] Avoid false positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
- \[`flake8-comprehensions`\] Do not flag set comprehensions passed to `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
- \[`pydoclint`\] Try both "Raises" section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))

## 0.5.6

Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
You can opt out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.

```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```

### Preview features

- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))

### Rule changes

- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))

### Server

- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))

### Configuration

- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))

### Bug fixes

- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))

### Other changes

- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
- Improve consistency between linter rules in determining whether a function is a property ([#12581](https://github.com/astral-sh/ruff/pull/12581))

## 0.5.5

### Preview features

- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency` (`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))

### Rule changes

- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))

### Formatter

- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))

### Server

- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))

### Bug fixes

- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))

### Documentation

- Add Eglot setup guide for the Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))

## 0.5.4

### Rule changes

- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))

### Bug fixes

- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))

### Documentation

- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))

## 0.5.3

**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped

CONTRIBUTING.md
@@ -2,6 +2,35 @@

Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.

- [The Basics](#the-basics)
    - [Prerequisites](#prerequisites)
    - [Development](#development)
    - [Project Structure](#project-structure)
    - [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
        - [Rule naming convention](#rule-naming-convention)
        - [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
    - [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- [MkDocs](#mkdocs)
- [Release Process](#release-process)
    - [Creating a new release](#creating-a-new-release)
- [Ecosystem CI](#ecosystem-ci)
- [Benchmarking and Profiling](#benchmarking-and-profiling)
    - [CPython Benchmark](#cpython-benchmark)
    - [Microbenchmarks](#microbenchmarks)
        - [Benchmark-driven Development](#benchmark-driven-development)
        - [PR Summary](#pr-summary)
        - [Tips](#tips)
    - [Profiling Projects](#profiling-projects)
        - [Linux](#linux)
        - [Mac](#mac)
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
    - [Compilation Pipeline](#compilation-pipeline)
    - [Import Categorization](#import-categorization)
        - [Project root](#project-root)
        - [Package root](#package-root)
        - [Import categorization](#import-categorization-1)

## The Basics

Ruff welcomes contributions in the form of pull requests.
@@ -29,14 +58,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:

```shell
cargo install cargo-insta
```

You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
run Python utility commands.
And you'll need pre-commit to run some validation checks:

```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
```

You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:

```shell
uv tool install pre-commit
pre-commit install
```

@@ -64,7 +95,7 @@ and that it passes both the lint and test validation checks:

```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```

These checks will run on GitHub Actions when you open your pull request, but running them locally
@@ -265,20 +296,26 @@ To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

1. Install MkDocs and Material for MkDocs with:

    ```shell
    pip install -r docs/requirements.txt
    ```

1. Generate the MkDocs site with:

    ```shell
    uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
    python scripts/generate_mkdocs.py
    ```

1. Run the development server with:

    ```shell
    # For contributors.
    uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
    mkdocs serve -f mkdocs.public.yml

    # For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
    uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
    mkdocs serve -f mkdocs.insiders.yml
    ```

The documentation should then be available locally at
@@ -296,34 +333,22 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

### Creating a new release

1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`

1. Run `./scripts/release.sh`; this command will:

    - Generate a temporary virtual environment with `rooster`
    - Generate a changelog entry in `CHANGELOG.md`
    - Update versions in `pyproject.toml` and `Cargo.toml`
    - Update references to versions in the `README.md` and documentation
    - Display contributors for the release

1. The changelog should then be editorialized for consistency

    - Often, labels will be missing from pull requests; these will need to be manually organized into the proper section
    - Changes should be edited to be user-facing descriptions, avoiding internal details

1. Highlight any breaking changes in `BREAKING_CHANGES.md`

1. Run `cargo check`. This should update the lock file with new versions.

1. Create a pull request with the changelog and version updates

1. Merge the PR

1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:

    - The new version number (without starting `v`)

1. The release workflow will do the following:

    1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
       uploaded anything; you can restart after pushing a fix. If you just need to rerun the build,
       make sure you're [re-running all the failed
@@ -334,25 +359,14 @@ even patch releases may contain [non-backwards-compatible changes](https://semve

    1. Attach artifacts to draft GitHub release
    1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
       downstream jobs manually if needed.

1. Verify the GitHub release:

    1. The Changelog should match the content of `CHANGELOG.md`
    1. Append the contributors from the `scripts/release.sh` script
    1. Append the contributors from the `bump.sh` script

1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).

    1. One can determine if an update is needed when
       `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
    1. Once run successfully, you should follow the link in the output to create a PR.

1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
   [`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
   the release instructions in those repositories. `ruff-lsp` should always be updated
   before `ruff-vscode`.

    This step is generally not required for a patch release, but should always be done
    for a minor release.

1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.

## Ecosystem CI

@@ -360,8 +374,9 @@ GitHub Actions will run your changes against a number of real-world projects fro

report on any linter or formatter differences. You can also run those checks locally via:

```shell
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
```

See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
@@ -374,7 +389,7 @@ We have several ways of benchmarking and profiling Ruff:

- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects

> **Note**
> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.
@@ -388,18 +403,12 @@ which makes it a good target for benchmarking.

```shell
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```

Install `hyperfine`:

```shell
cargo install hyperfine
```

To benchmark the release build:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"

Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
  Time (mean ± σ):     293.8 ms ±   3.2 ms    [User: 2384.6 ms, System: 90.3 ms]
```
@@ -418,7 +427,7 @@ To benchmark against the ecosystem's existing tools:

```shell
hyperfine --ignore-failure --warmup 5 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
  "pyflakes crates/ruff_linter/resources/test/cpython" \
  "autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
  "pycodestyle crates/ruff_linter/resources/test/cpython" \
```
@@ -464,7 +473,7 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:

```shell
cargo build --release && hyperfine --warmup 10 \
  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```

You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
@@ -521,8 +530,6 @@ You can run the benchmarks with

```shell
cargo benchmark
```

`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`

#### Benchmark-driven Development

Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
@@ -561,7 +568,7 @@ cargo install critcmp

#### Tips

- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
  to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for cleaner output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
@@ -898,11 +905,15 @@ There are three ways in which an import can be categorized as "first-party":

   package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
   automatically. This check is as simple as comparing the first segment of the current file's
   module path to the first segment of the import.
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
   sets the directories to scan when identifying first-party imports. The algorithm is
   straightforward: given an import, like `import foo`, iterate over the directories enumerated in
   the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
   file `foo.py`.

   By default, `src` is set to the project root, along with the `"src"` subdirectory in the project root.
   This ensures that Ruff supports both flat and "src" layouts out of the box.
   By default, `src` is set to the project root. In the above example, we'd want to set
   `src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
   as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
   unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
   the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
   explicitly.
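
To make the source-root check concrete, here is a minimal sketch in Rust (a hypothetical, simplified helper for illustration; Ruff's actual implementation lives in its import-categorization code and handles additional cases, such as stub files):

```rust
use std::path::PathBuf;

/// Minimal sketch of the source-root check: `import <module>` is treated as
/// first-party if any source root contains a directory `<module>/` or a file
/// `<module>.py`. (Hypothetical helper; not Ruff's actual implementation.)
fn is_first_party(module: &str, src_roots: &[PathBuf]) -> bool {
    src_roots.iter().any(|root| {
        root.join(module).is_dir() || root.join(format!("{module}.py")).is_file()
    })
}

fn main() {
    // With `src = ["./src"]`, `import foo` checks `./src/foo/` and `./src/foo.py`.
    let roots = vec![PathBuf::from("./src")];
    println!("first-party: {}", is_first_party("foo", &roots));
}
```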

Cargo.lock (generated)
File diff suppressed because it is too large

Cargo.toml
@@ -4,7 +4,7 @@ resolver = "2"

[workspace.package]
edition = "2021"
rust-version = "1.80"
rust-version = "1.75"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -14,10 +14,9 @@ license = "MIT"

[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db", default-features = false }
ruff_db = { path = "crates/ruff_db" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
@@ -34,18 +33,15 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }

ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
red_knot_vendored = { path = "crates/red_knot_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }

red_knot = { path = "crates/red_knot" }
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_test = { path = "crates/red_knot_test" }
red_knot_workspace = { path = "crates/red_knot_workspace", default-features = false }

aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
@@ -65,23 +61,17 @@ compact_str = "0.8.0"

criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.3.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.8.0" }
fern = { version = "0.7.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = { version = "0.15.0", default-features = false, features = [
    "raw-entry",
    "inline-more",
] }
hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.6.0" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
@@ -96,28 +86,29 @@ libcst = { version = "1.1.0", default-features = false }

log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
    "proposed",
    "proposed",
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "7.0.0" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.7.1" }
pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.5.0" }
quick-junit = { version = "0.4.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "254c749b02cde2fd29852a7463a33e800b771758" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -125,7 +116,7 @@ serde-wasm-bindgen = { version = "0.6.4" }

serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
    "macros",
    "macros",
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
@@ -140,13 +131,9 @@ thiserror = { version = "1.0.58" }

tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
    "env-filter",
    "fmt",
] }
tracing-tree = { version = "0.4.0" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
@@ -156,24 +143,20 @@ unicode-normalization = { version = "0.1.23" }

ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
    "v4",
    "fast-rng",
    "macro-diagnostics",
    "js",
    "v4",
    "fast-rng",
    "macro-diagnostics",
    "js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = [
    "cfg(fuzzing)",
    "cfg(codspeed)",
] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -189,9 +172,8 @@ missing_panics_doc = "allow"

module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
# To allow `#[allow(clippy::all)]` in `crates/ruff_python_parser/src/python.rs`.
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
print_stdout = "warn"
@@ -204,10 +186,6 @@ get_unwrap = "warn"

rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
# nursery rules
redundant_clone = "warn"
debug_assert_with_mut_call = "warn"
unused_peekable = "warn"

[profile.release]
# Note that we set these explicitly, and these values
@@ -250,9 +228,9 @@ inherits = "release"

# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.22.1"
cargo-dist-version = "0.18.0"
# CI backends to support
ci = "github"
ci = ["github"]
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
@@ -261,33 +239,33 @@ windows-archive = ".zip"

unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
    "aarch64-apple-darwin",
    "aarch64-pc-windows-msvc",
    "aarch64-unknown-linux-gnu",
    "aarch64-unknown-linux-musl",
    "arm-unknown-linux-musleabihf",
    "armv7-unknown-linux-gnueabihf",
    "armv7-unknown-linux-musleabihf",
    "i686-pc-windows-msvc",
    "i686-unknown-linux-gnu",
    "i686-unknown-linux-musl",
    "powerpc64-unknown-linux-gnu",
    "powerpc64le-unknown-linux-gnu",
    "s390x-unknown-linux-gnu",
    "x86_64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "x86_64-unknown-linux-gnu",
    "x86_64-unknown-linux-musl",
    "aarch64-apple-darwin",
    "aarch64-pc-windows-msvc",
    "aarch64-unknown-linux-gnu",
    "aarch64-unknown-linux-musl",
    "arm-unknown-linux-musleabihf",
    "armv7-unknown-linux-gnueabihf",
    "armv7-unknown-linux-musleabihf",
    "i686-pc-windows-msvc",
    "i686-unknown-linux-gnu",
    "i686-unknown-linux-musl",
    "powerpc64-unknown-linux-gnu",
    "powerpc64le-unknown-linux-gnu",
    "s390x-unknown-linux-gnu",
    "x86_64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "x86_64-unknown-linux-gnu",
    "x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
# Which actions to run on pull requests
# Publish jobs to run in CI
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase cargo-dist should use to create the GitHub release
# The stage during which the GitHub Release should be created
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
@@ -295,15 +273,9 @@ build-local-artifacts = false

local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
post-announce-jobs = [
    "./notify-dependents",
    "./publish-docs",
    "./publish-playground",
]
# Announcement jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = "CARGO_HOME"

@@ -1,4 +1,4 @@

FROM --platform=$BUILDPLATFORM ubuntu AS build
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME

LICENSE
@@ -1371,28 +1371,3 @@ are:

OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

- pydoclint, licensed as follows:
"""
MIT License

Copyright (c) 2023 jsh9

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

README.md
@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.

- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).

1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)

## Getting Started<a id="getting-started"></a>
## Getting Started

For more, see the [documentation](https://docs.astral.sh/ruff/).

@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh

powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.7.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.7.3/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.5.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.3/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff

```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.7.3
  rev: v0.5.3
  hooks:
    # Run the linter.
    - id: ruff
@@ -179,10 +179,11 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff

    - id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/astral-sh/ruff-action):
[`ruff-action`](https://github.com/chartboost/ruff-action):

```yaml
name: Ruff
@@ -192,10 +193,10 @@ jobs:

    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/ruff-action@v1
      - uses: chartboost/ruff-action@v1
```

### Configuration<a id="configuration"></a>
### Configuration

Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -291,7 +292,7 @@ features that may change prior to stabilization.

See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.

## Rules<a id="rules"></a>
## Rules

<!-- Begin section: Rules -->

@@ -367,21 +368,21 @@ quality tools, including:

For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).

## Contributing<a id="contributing"></a>
## Contributing

Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).

You can also join us on [**Discord**](https://discord.com/invite/astral-sh).

## Support<a id="support"></a>
## Support

Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).

## Acknowledgements<a id="acknowledgements"></a>
## Acknowledgements

Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -405,7 +406,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a

Ruff is released under the MIT license.

## Who's Using Ruff?<a id="whos-using-ruff"></a>
## Who's Using Ruff?

Ruff is used by a number of major open-source projects and companies, including:

@@ -417,14 +418,12 @@ Ruff is used by a number of major open-source projects and companies, including:

- [Babel](https://github.com/python-babel/babel)
- Benchling ([Refac](https://github.com/benchling/refac))
- [Bokeh](https://github.com/bokeh/bokeh)
- CrowdCent ([NumerBlox](https://github.com/crowdcent/numerblox)) <!-- typos: ignore -->
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
- CERN ([Indico](https://getindico.io/))
- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
@@ -435,7 +434,6 @@ Ruff is used by a number of major open-source projects and companies, including:

- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
  [Datasets](https://github.com/huggingface/datasets),
  [Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
@@ -525,7 +523,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m

<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```

## License<a id="license"></a>
## License

This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)

@@ -1,6 +1,6 @@

[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_vendored/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
@@ -8,11 +8,10 @@ hel = "hel"

whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
pn = "pn" # `import panel as pd` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`
Numer = "Numer" # Library name 'NumerBlox' in "Who's Using Ruff?"

[default]
extend-ignore-re = [

@@ -10,12 +10,4 @@ doc-valid-idents = [

    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
]

@@ -12,28 +12,25 @@ license.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
red_knot_module_resolver = { workspace = true }
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }

ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }

anyhow = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
notify = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }

[lints]
workspace = true

Binary file not shown. (Before: 40 KiB)
@@ -1,128 +0,0 @@

# Tracing

Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing. They should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.

## Verbosity levels

The CLI supports different verbosity levels.

- default: Only show errors and warnings.
- `-v` activates `info!`: Show generally useful information such as paths of configuration files, detected platform, etc. It's not a lot of messages; it's something you'd activate in CI by default (`cargo build`, e.g., shows you which packages are fresh).
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of this is wasted; it's really slow, and we dump e.g. the entire resolution graph. Only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.

## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in a confused logging output where messages from different threads are intertwined.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
|
||||
|
||||
### Examples

#### Show all debug messages

Shows debug messages from all crates.

```bash
RED_KNOT_LOG=debug
```

#### Show salsa query execution messages

Shows the salsa `execute: my_query` messages in addition to all Red Knot messages.

```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```

#### Show typing traces

Only show traces for the `red_knot_python_semantic::types` module.

```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```

Note: Ensure that you use `-vvv` to see tracing spans.

#### Show messages for a single file

Shows all messages that are inside of a span for a specific file.

```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```

**Note**: Tracing still shows all spans because tracing can't know, at the time of entering a span,
whether one of its children will have the file `x.py`.

**Note**: Salsa currently logs entire memoized values (in our case, the source text and parsed AST).
This very quickly leads to extremely long outputs.

## Tracing and Salsa

Be mindful about using `tracing` in Salsa queries, especially at the `warn` or `error` level, because it isn't guaranteed
that the query will execute again after restoring from a persistent cache, in which case the user won't see the message.

For example, don't use `tracing` to show the user a message when generating a lint violation fails:
the message would only be shown when linting the file for the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
won't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result, or use a Salsa accumulator.

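A minimal sketch of the "failure as part of the result" pattern, in plain Rust (the `LintResult` type and the `check_line_length` helper are invented for illustration; in Red Knot the corresponding functions are Salsa-tracked queries):

```rust
/// Sketch only: failures are stored in the memoized result instead of being
/// logged, so they are replayed on every access, including after a cache restore.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
struct LintResult {
    diagnostics: Vec<String>,
    /// Failures encountered while linting, carried in the result rather than
    /// emitted through `tracing`.
    failures: Vec<String>,
}

fn lint_file(source: &str) -> LintResult {
    let mut result = LintResult::default();

    match check_line_length(source) {
        Ok(mut diagnostics) => result.diagnostics.append(&mut diagnostics),
        // Don't `tracing::warn!` here; record the failure in the result instead.
        Err(error) => result
            .failures
            .push(format!("line-length check failed: {error}")),
    }

    result
}

fn check_line_length(source: &str) -> Result<Vec<String>, String> {
    Ok(source
        .lines()
        .enumerate()
        .filter(|(_, line)| line.chars().count() > 88)
        .map(|(line_number, _)| format!("Line {} is too long", line_number + 1))
        .collect())
}

fn main() {
    // The failure (if any) is part of the value, not a side effect.
    let result = lint_file("x = 1\n");
    assert!(result.failures.is_empty());
}
```
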
## Tracing in tests

You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.

```rust
use ruff_db::testing::setup_logging;

#[test]
fn test() {
    let _logging = setup_logging();

    tracing::info!("This message will be printed to stderr");
}
```

Note: Most test runners capture stderr and only show its output when a test fails.

Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be called **once**.

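If the default filter is too noisy for a test, `ruff_db::testing::setup_logging_with_filter` can narrow it down. A hedged sketch (it's an assumption here that the function takes an `EnvFilter`-style directive string, suggested by its name rather than by a documented signature):

```rust
use ruff_db::testing::setup_logging_with_filter;

#[test]
fn filtered_test() {
    // Assumption: the argument is an `EnvFilter`-style directive string.
    // Keep the returned guard alive for the duration of the test.
    let _logging = setup_logging_with_filter("red_knot_python_semantic::types=trace");

    tracing::trace!("Only messages matching the filter are printed to stderr");
}
```
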
## Release builds

`trace!` events are removed in release builds.

## Profiling

Red Knot writes a folded stack trace to a file named `tracing.folded` in the current directory when the environment variable `RED_KNOT_LOG_PROFILE` is set to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```

You can convert the textual representation into a visual one using `inferno`.

```shell
cargo install inferno
```

```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg

# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```

![Example flamegraph](./tracing-flamegraph.png)

See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.

2 crates/red_knot/src/cli/mod.rs Normal file
@@ -0,0 +1,2 @@
pub(crate) mod target_version;
pub(crate) mod verbosity;

34 crates/red_knot/src/cli/target_version.rs Normal file
@@ -0,0 +1,34 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        ruff_db::program::TargetVersion::from(*self).fmt(f)
    }
}

impl From<TargetVersion> for ruff_db::program::TargetVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::Py37,
            TargetVersion::Py38 => Self::Py38,
            TargetVersion::Py39 => Self::Py39,
            TargetVersion::Py310 => Self::Py310,
            TargetVersion::Py311 => Self::Py311,
            TargetVersion::Py312 => Self::Py312,
            TargetVersion::Py313 => Self::Py313,
        }
    }
}

34 crates/red_knot/src/cli/verbosity.rs Normal file
@@ -0,0 +1,34 @@
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    Info,
    Debug,
    Trace,
}

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns `None` if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> Option<VerbosityLevel> {
        match self.verbose {
            0 => None,
            1 => Some(VerbosityLevel::Info),
            2 => Some(VerbosityLevel::Debug),
            _ => Some(VerbosityLevel::Trace),
        }
    }
}

200 crates/red_knot/src/db.rs Normal file
@@ -0,0 +1,200 @@
use std::panic::{AssertUnwindSafe, RefUnwindSafe};
use std::sync::Arc;

use salsa::{Cancelled, Database, DbWithJar};

use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::program::{Program, ProgramSettings};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};

use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
use crate::watch::{FileChangeKind, FileWatcherChange};
use crate::workspace::{check_file, Package, Workspace, WorkspaceMetadata};

pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}

#[salsa::jar(db=Db)]
pub struct Jar(
    Workspace,
    Package,
    lint_syntax,
    lint_semantic,
    unwind_if_cancelled,
);

#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
pub struct RootDatabase {
    workspace: Option<Workspace>,
    storage: salsa::Storage<RootDatabase>,
    files: Files,
    system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
}

impl RootDatabase {
    pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
    where
        S: System + 'static + Send + Sync + RefUnwindSafe,
    {
        let mut db = Self {
            workspace: None,
            storage: salsa::Storage::default(),
            files: Files::default(),
            system: Arc::new(system),
        };

        let workspace = Workspace::from_metadata(&db, workspace);
        // Initialize the `Program` singleton
        Program::from_settings(&db, settings);

        db.workspace = Some(workspace);
        db
    }

    pub fn workspace(&self) -> Workspace {
        // SAFETY: The workspace is always initialized in `new`.
        self.workspace.unwrap()
    }

    #[tracing::instrument(level = "debug", skip(self, changes))]
    pub fn apply_changes(&mut self, changes: Vec<FileWatcherChange>) {
        let workspace = self.workspace();
        let workspace_path = workspace.root(self).to_path_buf();

        // TODO: Optimize change tracking by only reloading a package if a file that is part of the package was changed.
        let mut structural_change = false;
        for change in changes {
            if matches!(
                change.path.file_name(),
                Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
            ) {
                // Changes to ignore files or settings can change the workspace structure or add/remove files
                // from packages.
                structural_change = true;
            } else {
                match change.kind {
                    FileChangeKind::Created => {
                        // Reload the package when a new file was added. This is necessary because the file might be excluded
                        // by a gitignore.
                        if workspace.package(self, &change.path).is_some() {
                            structural_change = true;
                        }
                    }
                    FileChangeKind::Modified => {}
                    FileChangeKind::Deleted => {
                        if let Some(package) = workspace.package(self, &change.path) {
                            if let Some(file) = system_path_to_file(self, &change.path) {
                                package.remove_file(self, file);
                            }
                        }
                    }
                }
            }

            File::touch_path(self, &change.path);
        }

        if structural_change {
            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
                Ok(metadata) => {
                    tracing::debug!("Reload workspace after structural change.");
                    // TODO: Handle changes in the program settings.
                    workspace.reload(self, metadata);
                }
                Err(error) => {
                    tracing::error!("Failed to load workspace; keeping the old workspace: {error}");
                }
            }
        }
    }

    /// Checks all open files in the workspace and its dependencies.
    pub fn check(&self) -> Result<Vec<String>, Cancelled> {
        self.with_db(|db| db.workspace().check(db))
    }

    pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
        self.with_db(|db| check_file(db, file))
    }

    pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
    where
        F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
    {
        // The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
        // Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
        // storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
        // unwind safe.
        //
        // Having to use `AssertUnwindSafe` isn't as big a deal as it might seem because
        // the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
        // They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
        // On top of that, `Cancelled` only catches specific Salsa panics and propagates all other panics.
        //
        // That still leaves us with possible logical bugs in two sources:
        // * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
        //   Reviewing Salsa code specifically around unwind safety seems doable.
        // * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
        //   and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
        //   certainly makes it harder to catch these issues in our user code.
        //
        // For now, this is the only solution at hand unless Salsa decides to change its design.
        // [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
        let db = &AssertUnwindSafe(self);
        Cancelled::catch(|| f(db))
    }
}

impl Upcast<dyn SemanticDb> for RootDatabase {
    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
        self
    }
}

impl Upcast<dyn SourceDb> for RootDatabase {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
}

impl Upcast<dyn ResolverDb> for RootDatabase {
    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        self
    }
}

impl ResolverDb for RootDatabase {}

impl SemanticDb for RootDatabase {}

impl SourceDb for RootDatabase {
    fn vendored(&self) -> &VendoredFileSystem {
        vendored_typeshed_stubs()
    }

    fn system(&self) -> &dyn System {
        &*self.system
    }

    fn files(&self) -> &Files {
        &self.files
    }
}

impl Database for RootDatabase {}

impl Db for RootDatabase {}

impl salsa::ParallelDatabase for RootDatabase {
    fn snapshot(&self) -> salsa::Snapshot<Self> {
        salsa::Snapshot::new(Self {
            workspace: self.workspace,
            storage: self.storage.snapshot(),
            files: self.files.snapshot(),
            system: self.system.clone(),
        })
    }
}

@@ -1,3 +1,6 @@
use crate::db::Jar;

pub mod db;
pub mod lint;
pub mod watch;
pub mod workspace;

274 crates/red_knot/src/lint.rs Normal file
@@ -0,0 +1,274 @@
use std::cell::RefCell;
use std::ops::Deref;
use std::time::Duration;

use tracing::trace_span;

use red_knot_module_resolver::ModuleName;
use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasTy, SemanticModel};
use ruff_db::files::File;
use ruff_db::parsed::{parsed_module, ParsedModule};
use ruff_db::source::{source_text, SourceText};
use ruff_python_ast as ast;
use ruff_python_ast::visitor::{walk_stmt, Visitor};

use crate::db::Db;

/// Workaround query to test whether the computation should be cancelled.
/// Ideally, we'd push for Salsa to expose an API for testing whether cancellation was requested.
#[salsa::tracked]
#[allow(unused_variables)]
pub(crate) fn unwind_if_cancelled(db: &dyn Db) {}

#[salsa::tracked(return_ref)]
pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
    #[allow(clippy::print_stdout)]
    if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
        for i in 0..10 {
            unwind_if_cancelled(db);

            println!("RED_KNOT_SLOW_LINT is set, sleeping for {i}/10 seconds");
            std::thread::sleep(Duration::from_secs(1));
        }
    }

    let mut diagnostics = Vec::new();

    let source = source_text(db.upcast(), file_id);
    lint_lines(&source, &mut diagnostics);

    let parsed = parsed_module(db.upcast(), file_id);

    if parsed.errors().is_empty() {
        let ast = parsed.syntax();

        let mut visitor = SyntaxLintVisitor {
            diagnostics,
            source: &source,
        };
        visitor.visit_body(&ast.body);
        diagnostics = visitor.diagnostics;
    } else {
        diagnostics.extend(parsed.errors().iter().map(ToString::to_string));
    }

    Diagnostics::from(diagnostics)
}

fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
    for (line_number, line) in source.lines().enumerate() {
        if line.len() < 88 {
            continue;
        }

        let char_count = line.chars().count();
        if char_count > 88 {
            diagnostics.push(format!(
                "Line {} is too long ({} characters)",
                line_number + 1,
                char_count
            ));
        }
    }
}

#[salsa::tracked(return_ref)]
pub(crate) fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
    let _span = trace_span!("lint_semantic", ?file_id).entered();

    let source = source_text(db.upcast(), file_id);
    let parsed = parsed_module(db.upcast(), file_id);
    let semantic = SemanticModel::new(db.upcast(), file_id);

    if !parsed.is_valid() {
        return Diagnostics::Empty;
    }

    let context = SemanticLintContext {
        source,
        parsed,
        semantic,
        diagnostics: RefCell::new(Vec::new()),
    };

    SemanticVisitor { context: &context }.visit_body(parsed.suite());

    Diagnostics::from(context.diagnostics.take())
}

fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
    match import {
        AnyImportRef::Import(import) => {
            for alias in &import.names {
                let ty = alias.ty(&context.semantic);

                if ty.is_unbound() {
                    context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
                }
            }
        }
        AnyImportRef::ImportFrom(import) => {
            for alias in &import.names {
                let ty = alias.ty(&context.semantic);

                if ty.is_unbound() {
                    context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
                }
            }
        }
    }
}

fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
    let semantic = &context.semantic;

    // TODO we should have a special marker on the real typing module (from typeshed) so if you
    // have your own "typing" module in your project, we don't consider it THE typing module (and
    // same for other stdlib modules that our lint rules care about)
    let Some(typing) = semantic.resolve_module(ModuleName::new("typing").unwrap()) else {
        return;
    };

    let override_ty = semantic.global_symbol_ty(&typing, "override");

    let Type::Class(class_ty) = class.ty(semantic) else {
        return;
    };

    for function in class
        .body
        .iter()
        .filter_map(|stmt| stmt.as_function_def_stmt())
    {
        let Type::Function(ty) = function.ty(semantic) else {
            return;
        };

        // TODO this shouldn't make direct use of the Db; see comment on SemanticModel::db
        let db = semantic.db();

        if ty.has_decorator(db, override_ty) {
            let method_name = ty.name(db);
            if class_ty
                .inherited_class_member(db, &method_name)
                .is_unbound()
            {
                // TODO should have a qualname() method to support nested classes
                context.push_diagnostic(format!(
                    "Method {}.{} is decorated with `typing.override` but does not override any base class method",
                    class_ty.name(db),
                    method_name,
                ));
            }
        }
    }
}

pub(crate) struct SemanticLintContext<'a> {
    source: SourceText,
    parsed: &'a ParsedModule,
    semantic: SemanticModel<'a>,
    diagnostics: RefCell<Vec<String>>,
}

impl<'db> SemanticLintContext<'db> {
    #[allow(unused)]
    pub(crate) fn source_text(&self) -> &str {
        self.source.as_str()
    }

    #[allow(unused)]
    pub(crate) fn ast(&self) -> &'db ast::ModModule {
        self.parsed.syntax()
    }

    pub(crate) fn push_diagnostic(&self, diagnostic: String) {
        self.diagnostics.borrow_mut().push(diagnostic);
    }

    #[allow(unused)]
    pub(crate) fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
        self.diagnostics.get_mut().extend(diagnostics);
    }
}

#[derive(Debug)]
struct SyntaxLintVisitor<'a> {
    diagnostics: Vec<String>,
    source: &'a str,
}

impl Visitor<'_> for SyntaxLintVisitor<'_> {
    fn visit_string_literal(&mut self, string_literal: &'_ ast::StringLiteral) {
        // A very naive implementation of a "use double quotes" rule
        let text = &self.source[string_literal.range];

        if text.starts_with('\'') {
            self.diagnostics
                .push("Use double quotes for strings".to_string());
        }
    }
}

struct SemanticVisitor<'a> {
    context: &'a SemanticLintContext<'a>,
}

impl Visitor<'_> for SemanticVisitor<'_> {
    fn visit_stmt(&mut self, stmt: &ast::Stmt) {
        match stmt {
            ast::Stmt::ClassDef(class) => {
                lint_bad_override(self.context, class);
            }
            ast::Stmt::Import(import) => {
                lint_unresolved_imports(self.context, AnyImportRef::Import(import));
            }
            ast::Stmt::ImportFrom(import) => {
                lint_unresolved_imports(self.context, AnyImportRef::ImportFrom(import));
            }
            _ => {}
        }

        walk_stmt(self, stmt);
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Diagnostics {
    Empty,
    List(Vec<String>),
}

impl Diagnostics {
    pub fn as_slice(&self) -> &[String] {
        match self {
            Diagnostics::Empty => &[],
            Diagnostics::List(list) => list.as_slice(),
        }
    }
}

impl Deref for Diagnostics {
    type Target = [String];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl From<Vec<String>> for Diagnostics {
    fn from(value: Vec<String>) -> Self {
        if value.is_empty() {
            Diagnostics::Empty
        } else {
            Diagnostics::List(value)
        }
    }
}

#[derive(Copy, Clone, Debug)]
enum AnyImportRef<'a> {
    Import(&'a ast::StmtImport),
    ImportFrom(&'a ast::StmtImportFrom),
}

@@ -1,254 +0,0 @@
//! Sets up logging for Red Knot

use anyhow::Context;
use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns [`VerbosityLevel::Default`] if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> VerbosityLevel {
        match self.verbose {
            0 => VerbosityLevel::Default,
            1 => VerbosityLevel::Verbose,
            2 => VerbosityLevel::ExtraVerbose,
            _ => VerbosityLevel::Trace,
        }
    }
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    /// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN) level.
    Default,

    /// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO) level.
    /// Corresponds to `-v`.
    Verbose,

    /// Enables a more verbose tracing format and emits Ruff and Red Knot events up to the [`DEBUG`](tracing::Level::DEBUG) level.
    /// Corresponds to `-vv`.
    ExtraVerbose,

    /// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
    Trace,
}

impl VerbosityLevel {
    const fn level_filter(self) -> LevelFilter {
        match self {
            VerbosityLevel::Default => LevelFilter::WARN,
            VerbosityLevel::Verbose => LevelFilter::INFO,
            VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
            VerbosityLevel::Trace => LevelFilter::TRACE,
        }
    }

    pub(crate) const fn is_trace(self) -> bool {
        matches!(self, VerbosityLevel::Trace)
    }

    pub(crate) const fn is_extra_verbose(self) -> bool {
        matches!(self, VerbosityLevel::ExtraVerbose)
    }
}

pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
    use tracing_subscriber::prelude::*;

    // The `RED_KNOT_LOG` environment variable overrides the default log level.
    let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
        EnvFilter::builder()
            .parse(log_env_variable)
            .context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
    } else {
        match level {
            VerbosityLevel::Default => {
                // Show warning traces
                EnvFilter::default().add_directive(LevelFilter::WARN.into())
            }
            level => {
                let level_filter = level.level_filter();

                // Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
                let filter = EnvFilter::default().add_directive(
                    format!("red_knot={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                );

                filter.add_directive(
                    format!("ruff={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                )
            }
        }
    };

    let (profiling_layer, guard) = setup_profile();

    let registry = tracing_subscriber::registry()
        .with(filter)
        .with(profiling_layer);

    if level.is_trace() {
        let subscriber = registry.with(
            tracing_tree::HierarchicalLayer::default()
                .with_indent_lines(true)
                .with_indent_amount(2)
                .with_bracketed_fields(true)
                .with_thread_ids(true)
                .with_targets(true)
                .with_writer(std::io::stderr)
                .with_timer(tracing_tree::time::Uptime::default()),
        );

        subscriber.init();
    } else {
        let subscriber = registry.with(
            tracing_subscriber::fmt::layer()
                .event_format(RedKnotFormat {
                    display_level: true,
                    display_timestamp: level.is_extra_verbose(),
                    show_spans: false,
                })
                .with_writer(std::io::stderr),
        );

        subscriber.init();
    }

    Ok(TracingGuard {
        _flame_guard: guard,
    })
}

#[allow(clippy::type_complexity)]
fn setup_profile<S>() -> (
    Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
    Option<tracing_flame::FlushGuard<BufWriter<File>>>,
)
where
    S: Subscriber + for<'span> LookupSpan<'span>,
{
    if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
        let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
            .expect("Flame layer to be created");
        (Some(layer), Some(guard))
    } else {
        (None, None)
    }
}

pub(crate) struct TracingGuard {
    _flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
}

struct RedKnotFormat {
    display_timestamp: bool,
    display_level: bool,
    show_spans: bool,
}

/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
impl<S, N> FormatEvent<S, N> for RedKnotFormat
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();
        let ansi = writer.has_ansi_escapes();

        if self.display_timestamp {
            let timestamp = chrono::Local::now()
                .format("%Y-%m-%d %H:%M:%S.%f")
                .to_string();
            if ansi {
                write!(writer, "{} ", timestamp.dimmed())?;
            } else {
                write!(
                    writer,
                    "{} ",
                    chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
                )?;
            }
        }

        if self.display_level {
            let level = meta.level();
            // Same colors as tracing
            if ansi {
                let formatted_level = level.to_string();
                match *level {
                    tracing::Level::TRACE => {
                        write!(writer, "{} ", formatted_level.purple().bold())?;
                    }
                    tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
                    tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
                    tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
                    tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
                }
            } else {
                write!(writer, "{level} ")?;
            }
        }

        if self.show_spans {
            let span = event.parent();
            let mut seen = false;

            let span = span
                .and_then(|id| ctx.span(id))
                .or_else(|| ctx.lookup_current());

            let scope = span.into_iter().flat_map(|span| span.scope().from_root());

            for span in scope {
                seen = true;
                if ansi {
                    write!(writer, "{}:", span.metadata().name().bold())?;
                } else {
                    write!(writer, "{}:", span.metadata().name())?;
                }
            }

            if seen {
                writer.write_char(' ')?;
            }
        }

        ctx.field_format().format_fields(writer.by_ref(), event)?;

        writeln!(writer)
    }
}

@@ -1,39 +1,35 @@
use std::process::{ExitCode, Termination};
use std::sync::Mutex;

use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;
use target_version::TargetVersion;
use salsa::ParallelDatabase;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;

use crate::logging::{setup_tracing, Verbosity};
use red_knot::db::RootDatabase;
use red_knot::watch::FileWatcher;
use red_knot::watch::FileWatcherChange;
use red_knot::workspace::WorkspaceMetadata;
use ruff_db::program::{ProgramSettings, SearchPathSettings};
use ruff_db::system::{OsSystem, System, SystemPathBuf};

mod logging;
mod target_version;
mod verbosity;
use cli::target_version::TargetVersion;
use cli::verbosity::{Verbosity, VerbosityLevel};

mod cli;

#[derive(Debug, Parser)]
#[command(
    author,
    name = "red-knot",
    about = "An extremely fast Python type checker."
    about = "An experimental multifile analysis backend for Ruff"
)]
#[command(version)]
struct Args {
    #[command(subcommand)]
    pub(crate) command: Option<Command>,

    #[arg(
        long,
        help = "Changes the current working directory.",
@@ -42,17 +38,6 @@ struct Args {
    )]
    current_directory: Option<SystemPathBuf>,

    #[arg(
        long,
        help = "Path to the virtual environment the project uses",
        long_help = "\
Path to the virtual environment the project uses. \
If provided, red-knot will use the `site-packages` directory of this virtual environment \
to resolve type information for the project's third-party dependencies.",
        value_name = "PATH"
    )]
    venv_path: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "DIRECTORY",
@@ -65,135 +50,62 @@ to resolve type information for the project's third-party dependencies.",
        value_name = "PATH",
        help = "Additional path to use as a module-resolution source (can be passed multiple times)"
    )]
    extra_search_path: Option<Vec<SystemPathBuf>>,
    extra_search_path: Vec<SystemPathBuf>,

    #[arg(
        long,
        help = "Python version to assume when resolving types",
        value_name = "VERSION"
    )]
    target_version: Option<TargetVersion>,
    #[arg(long, help = "Python version to assume when resolving types", default_value_t = TargetVersion::default(), value_name="VERSION")]
    target_version: TargetVersion,

    #[clap(flatten)]
    verbosity: Verbosity,

    #[arg(
        long,
        help = "Run in watch mode by re-running whenever files change",
        short = 'W'
    )]
    watch: bool,
}

impl Args {
    fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
        let mut configuration = Configuration::default();
#[allow(
    clippy::print_stdout,
    clippy::unnecessary_wraps,
    clippy::print_stderr,
    clippy::dbg_macro
)]
pub fn main() -> anyhow::Result<()> {
    let Args {
        current_directory,
        custom_typeshed_dir,
        extra_search_path: extra_paths,
        target_version,
        verbosity,
    } = Args::parse_from(std::env::args().collect::<Vec<_>>());

        if let Some(target_version) = self.target_version {
            configuration.target_version = Some(target_version.into());
        }
    let verbosity = verbosity.level();
    countme::enable(verbosity == Some(VerbosityLevel::Trace));
    setup_tracing(verbosity);

        if let Some(venv_path) = &self.venv_path {
            configuration.search_paths.site_packages = Some(SitePackages::Derived {
                venv_path: SystemPath::absolute(venv_path, cli_cwd),
            });
        }

        if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
            configuration.search_paths.custom_typeshed =
                Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
        }

        if let Some(extra_search_paths) = &self.extra_search_path {
            configuration.search_paths.extra_paths = extra_search_paths
                .iter()
                .map(|path| Some(SystemPath::absolute(path, cli_cwd)))
                .collect();
        }

        configuration
    }
}

#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Start the language server
    Server,
}

#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
    run().unwrap_or_else(|error| {
        use std::io::Write;

        // Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
        let mut stderr = std::io::stderr().lock();

        // This communicates that this isn't a linter error but Red Knot itself hard-errored for
        // some reason (e.g. failed to resolve the configuration)
        writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
        // Currently we generally only see one error, but e.g. with io errors when resolving
        // the configuration it is helpful to chain errors ("resolving configuration failed" ->
        // "failed to read file: subdir/pyproject.toml")
        for cause in error.chain() {
            writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
        }

        ExitStatus::Error
    })
}

fn run() -> anyhow::Result<ExitStatus> {
    let args = Args::parse_from(std::env::args());

    if matches!(args.command, Some(Command::Server)) {
        return run_server().map(|()| ExitStatus::Success);
    }

    let verbosity = args.verbosity.level();
    countme::enable(verbosity.is_trace());
    let _guard = setup_tracing(verbosity)?;

    // The base path that all CLI arguments are resolved relative to.
    let cli_base_path = {
        let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
        SystemPathBuf::from_path_buf(cwd)
            .map_err(|path| {
                anyhow!(
                    "The current working directory `{}` contains non-Unicode characters. Red Knot only supports Unicode paths.",
                    path.display()
                )
            })?
    let cwd = if let Some(cwd) = current_directory {
        let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
        SystemPathBuf::from_utf8_path_buf(canonicalized)
    } else {
        let cwd = std::env::current_dir().unwrap();
        SystemPathBuf::from_path_buf(cwd).unwrap()
    };

    let cwd = args
        .current_directory
        .as_ref()
        .map(|cwd| {
            if cwd.as_std_path().is_dir() {
                Ok(SystemPath::absolute(cwd, &cli_base_path))
            } else {
                Err(anyhow!(
                    "Provided current-directory path `{cwd}` is not a directory"
                ))
            }
        })
        .transpose()?
        .unwrap_or_else(|| cli_base_path.clone());

    let system = OsSystem::new(cwd.clone());
    let cli_configuration = args.to_configuration(&cwd);
    let workspace_metadata = WorkspaceMetadata::from_path(
        system.current_directory(),
        &system,
        Some(cli_configuration.clone()),
    )?;
    let workspace_metadata =
        WorkspaceMetadata::from_path(system.current_directory(), &system).unwrap();

    // TODO: Respect the settings from the workspace metadata when resolving the program settings.
    let program_settings = ProgramSettings {
        target_version: target_version.into(),
        search_paths: SearchPathSettings {
            extra_paths,
            workspace_root: workspace_metadata.root().to_path_buf(),
            custom_typeshed: custom_typeshed_dir,
            site_packages: None,
        },
    };

    // TODO: Use the `program_settings` to compute the key for the database's persistent
    // cache and load the cache if it exists.
    let mut db = RootDatabase::new(workspace_metadata, system)?;
    let mut db = RootDatabase::new(workspace_metadata, program_settings, system);

    let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
    let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);

    // Listen to Ctrl+C and abort the watch mode.
    let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -205,162 +117,125 @@ fn run() -> anyhow::Result<ExitStatus> {
        }
    })?;

    let exit_status = if args.watch {
        main_loop.watch(&mut db)?
    } else {
        main_loop.run(&mut db)
    };
    let file_changes_notifier = main_loop.file_changes_notifier();

    tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
    // Watch for file changes and re-trigger the analysis.
    let mut file_watcher = FileWatcher::new(move |changes| {
        file_changes_notifier.notify(changes);
    })?;

    std::mem::forget(db);
    file_watcher.watch_folder(db.workspace().root(&db).as_std_path())?;

    Ok(exit_status)
}
    main_loop.run(&mut db);

#[derive(Copy, Clone)]
pub enum ExitStatus {
    /// Checking was successful and there were no errors.
    Success = 0,
    println!("{}", countme::get_all());

    /// Checking was successful but there were errors.
    Failure = 1,

    /// Checking failed.
    Error = 2,
}

impl Termination for ExitStatus {
    fn report(self) -> ExitCode {
        ExitCode::from(self as u8)
    }
    Ok(())
}

struct MainLoop {
    /// Sender that can be used to send messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,

    /// Receiver for the messages sent **to** the main loop.
    verbosity: Option<VerbosityLevel>,
    orchestrator: crossbeam_channel::Sender<OrchestratorMessage>,
    receiver: crossbeam_channel::Receiver<MainLoopMessage>,

    /// The file system watcher, if running in watch mode.
    watcher: Option<WorkspaceWatcher>,

    cli_configuration: Configuration,
}

impl MainLoop {
    fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
        let (sender, receiver) = crossbeam_channel::bounded(10);
    fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
        let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
        let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);

        let mut orchestrator = Orchestrator {
            receiver: orchestrator_receiver,
            main_loop: main_loop_sender.clone(),
            revision: 0,
        };

        std::thread::spawn(move || {
            orchestrator.run();
        });

        (
            Self {
                sender: sender.clone(),
                receiver,
                watcher: None,
                cli_configuration,
                verbosity,
                orchestrator: orchestrator_sender,
                receiver: main_loop_receiver,
            },
            MainLoopCancellationToken {
                sender: main_loop_sender,
            },
            MainLoopCancellationToken { sender },
        )
    }

    fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
        tracing::debug!("Starting watch mode");
        let sender = self.sender.clone();
        let watcher = watch::directory_watcher(move |event| {
            sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
        })?;

        self.watcher = Some(WorkspaceWatcher::new(watcher, db));

        self.run(db);

        Ok(ExitStatus::Success)
    fn file_changes_notifier(&self) -> FileChangesNotifier {
        FileChangesNotifier {
            sender: self.orchestrator.clone(),
        }
    }

    fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
        self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
    #[allow(clippy::print_stderr)]
    fn run(self, db: &mut RootDatabase) {
        self.orchestrator.send(OrchestratorMessage::Run).unwrap();

        let result = self.main_loop(db);
        for message in &self.receiver {
            tracing::trace!("Main Loop: Tick");

        tracing::debug!("Exiting main loop");

        result
    }

    fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
        // Schedule the first check.
        tracing::debug!("Starting main loop");

        let mut revision = 0u64;

        while let Ok(message) = self.receiver.recv() {
            match message {
                MainLoopMessage::CheckWorkspace => {
                MainLoopMessage::CheckWorkspace { revision } => {
                    let db = db.snapshot();
                    let sender = self.sender.clone();
                    let orchestrator = self.orchestrator.clone();

                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || {
                        if let Ok(result) = db.check() {
                            // Send the result back to the main loop for printing.
                            sender
                                .send(MainLoopMessage::CheckCompleted { result, revision })
                            orchestrator
                                .send(OrchestratorMessage::CheckCompleted {
                                    diagnostics: result,
                                    revision,
                                })
                                .unwrap();
                        }
                    });
                }

                MainLoopMessage::CheckCompleted {
                    result,
                    revision: check_revision,
                } => {
                    let has_diagnostics = !result.is_empty();
                    if check_revision == revision {
                        #[allow(clippy::print_stdout)]
                        for diagnostic in result {
                            println!("{}", diagnostic.display(db));
                        }
                    } else {
                        tracing::debug!(
                            "Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
                        );
                    }

                    if self.watcher.is_none() {
                        return if has_diagnostics {
                            ExitStatus::Failure
                        } else {
                            ExitStatus::Success
                        };
                    }

                    tracing::trace!("Counts after last check:\n{}", countme::get_all());
                }

                MainLoopMessage::ApplyChanges(changes) => {
                    revision += 1;
                    // Automatically cancels any pending queries and waits for them to complete.
                    db.apply_changes(changes, Some(&self.cli_configuration));
                    if let Some(watcher) = self.watcher.as_mut() {
                        watcher.update(db);
                    db.apply_changes(changes);
                    }
                MainLoopMessage::CheckCompleted(diagnostics) => {
                    eprintln!("{}", diagnostics.join("\n"));
                    if self.verbosity == Some(VerbosityLevel::Trace) {
                        eprintln!("{}", countme::get_all());
                    }
                    self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
                }
                MainLoopMessage::Exit => {
                    // Cancel any pending queries and wait for them to complete.
                    // TODO: Don't use Salsa internal APIs
                    // [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
                    let _ = db.zalsa_mut();
                    return ExitStatus::Success;
                    if self.verbosity == Some(VerbosityLevel::Trace) {
                        eprintln!("{}", countme::get_all());
                    }
                    return;
                }
            }

            tracing::debug!("Waiting for next main loop message.");
        }
    }
}

    ExitStatus::Success
impl Drop for MainLoop {
    fn drop(&mut self) {
        self.orchestrator
            .send(OrchestratorMessage::Shutdown)
            .unwrap();
    }
}

#[derive(Debug, Clone)]
struct FileChangesNotifier {
    sender: crossbeam_channel::Sender<OrchestratorMessage>,
}

impl FileChangesNotifier {
    fn notify(&self, changes: Vec<FileWatcherChange>) {
        self.sender
            .send(OrchestratorMessage::FileChanges(changes))
            .unwrap();
    }
}

@@ -375,14 +250,170 @@ impl MainLoopCancellationToken {
    }
}

struct Orchestrator {
    /// Sends messages to the main loop.
    main_loop: crossbeam_channel::Sender<MainLoopMessage>,
    /// Receives messages from the main loop.
    receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
    revision: usize,
}

impl Orchestrator {
    #[allow(clippy::print_stderr)]
    fn run(&mut self) {
        while let Ok(message) = self.receiver.recv() {
            match message {
                OrchestratorMessage::Run => {
                    self.main_loop
                        .send(MainLoopMessage::CheckWorkspace {
                            revision: self.revision,
                        })
                        .unwrap();
                }

                OrchestratorMessage::CheckCompleted {
                    diagnostics,
                    revision,
                } => {
                    // Only take the diagnostics if they are for the latest revision.
                    if self.revision == revision {
                        self.main_loop
                            .send(MainLoopMessage::CheckCompleted(diagnostics))
                            .unwrap();
                    } else {
                        tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
                    }
                }

                OrchestratorMessage::FileChanges(changes) => {
                    // Request cancellation, but wait until all analysis tasks have completed to
                    // avoid stale messages in the next main loop.

                    self.revision += 1;
                    self.debounce_changes(changes);
                }
                OrchestratorMessage::Shutdown => {
                    return self.shutdown();
                }
            }
        }
    }

    fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
        loop {
            // Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 10 ms.
            crossbeam_channel::select! {
                recv(self.receiver) -> message => {
                    match message {
                        Ok(OrchestratorMessage::Shutdown) => {
                            return self.shutdown();
                        }
                        Ok(OrchestratorMessage::FileChanges(file_changes)) => {
                            changes.extend(file_changes);
                        }

                        Ok(OrchestratorMessage::CheckCompleted { .. }) => {
                            // Disregard any outdated completion message.
                        }
                        Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),

                        Err(_) => {
                            // There are no more senders, no point in waiting for more messages
                            return;
                        }
                    }
                },
                default(std::time::Duration::from_millis(10)) => {
                    // No more file changes after 10 ms, send the changes and schedule a new analysis
                    self.main_loop.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
                    self.main_loop.send(MainLoopMessage::CheckWorkspace { revision: self.revision}).unwrap();
                    return;
                }
            }
        }
    }

    #[allow(clippy::unused_self)]
    fn shutdown(&self) {
        tracing::trace!("Shutting down orchestrator.");
    }
}

/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
    CheckWorkspace,
    CheckCompleted {
        result: Vec<Box<dyn Diagnostic>>,
        revision: u64,
    },
    ApplyChanges(Vec<watch::ChangeEvent>),
    CheckWorkspace { revision: usize },
    CheckCompleted(Vec<String>),
    ApplyChanges(Vec<FileWatcherChange>),
    Exit,
}

#[derive(Debug)]
enum OrchestratorMessage {
    Run,
    Shutdown,

    CheckCompleted {
        diagnostics: Vec<String>,
        revision: usize,
    },

    FileChanges(Vec<FileWatcherChange>),
}

fn setup_tracing(verbosity: Option<VerbosityLevel>) {
    let trace_level = match verbosity {
        None => Level::WARN,
        Some(VerbosityLevel::Info) => Level::INFO,
        Some(VerbosityLevel::Debug) => Level::DEBUG,
        Some(VerbosityLevel::Trace) => Level::TRACE,
    };

    let subscriber = Registry::default().with(
        tracing_tree::HierarchicalLayer::default()
            .with_indent_lines(true)
            .with_indent_amount(2)
            .with_bracketed_fields(true)
            .with_thread_ids(true)
            .with_targets(true)
            .with_writer(|| Box::new(std::io::stderr()))
            .with_timer(Uptime::default())
            .with_filter(LoggingFilter { trace_level }),
    );

    tracing::subscriber::set_global_default(subscriber).unwrap();
}

struct LoggingFilter {
    trace_level: Level,
}

impl LoggingFilter {
    fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
        let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
            self.trace_level
        } else {
            Level::INFO
        };

        meta.level() <= &filter
    }
}

impl<S> Filter<S> for LoggingFilter {
    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
        self.is_enabled(meta)
    }

    fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
        if self.is_enabled(meta) {
            Interest::always()
        } else {
            Interest::never()
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        Some(LevelFilter::from_level(self.trace_level))
    }
}

@@ -1,48 +0,0 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl TargetVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "py37",
            Self::Py38 => "py38",
            Self::Py39 => "py39",
            Self::Py310 => "py310",
            Self::Py311 => "py311",
            Self::Py312 => "py312",
            Self::Py313 => "py313",
        }
    }
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::PY37,
            TargetVersion::Py38 => Self::PY38,
            TargetVersion::Py39 => Self::PY39,
            TargetVersion::Py310 => Self::PY310,
            TargetVersion::Py311 => Self::PY311,
            TargetVersion::Py312 => Self::PY312,
            TargetVersion::Py313 => Self::PY313,
        }
    }
}

@@ -1 +0,0 @@

111
crates/red_knot/src/watch.rs
Normal file
111
crates/red_knot/src/watch.rs
Normal file
@@ -0,0 +1,111 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use notify::event::{CreateKind, ModifyKind, RemoveKind};
|
||||
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
|
||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
||||
|
||||
pub struct FileWatcher {
|
||||
watcher: RecommendedWatcher,
|
||||
}
|
||||
|
||||
pub trait EventHandler: Send + 'static {
|
||||
fn handle(&self, changes: Vec<FileWatcherChange>);
|
||||
}
|
||||
|
||||
impl<F> EventHandler for F
|
||||
where
|
||||
F: Fn(Vec<FileWatcherChange>) + Send + 'static,
|
||||
{
|
||||
fn handle(&self, changes: Vec<FileWatcherChange>) {
|
||||
let f = self;
|
||||
f(changes);
|
||||
}
|
||||
}
|
||||
|
||||
impl FileWatcher {
|
||||
pub fn new<E>(handler: E) -> anyhow::Result<Self>
|
||||
where
|
||||
E: EventHandler,
|
||||
{
|
||||
Self::from_handler(Box::new(handler))
|
||||
}
|
||||
|
||||
fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
|
||||
let watcher = recommended_watcher(move |event: notify::Result<Event>| {
|
||||
match event {
|
||||
Ok(event) => {
|
||||
// TODO verify that this handles all events correctly
|
||||
let change_kind = match event.kind {
|
||||
EventKind::Create(CreateKind::File) => FileChangeKind::Created,
|
||||
EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::From)) => {
|
||||
FileChangeKind::Deleted
|
||||
}
|
||||
EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::To)) => {
|
||||
FileChangeKind::Created
|
||||
}
|
||||
EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Any)) => {
|
||||
// TODO Introduce a better catch all event for cases that we don't understand.
|
||||
FileChangeKind::Created
|
||||
}
|
||||
EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Both)) => {
|
||||
todo!("Handle both create and delete event.");
|
||||
}
|
||||
EventKind::Modify(_) => FileChangeKind::Modified,
|
||||
EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
|
||||
_ => {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let mut changes = Vec::new();
|
||||
|
||||
for path in event.paths {
|
||||
if let Some(fs_path) = SystemPath::from_std_path(&path) {
|
||||
changes
|
||||
.push(FileWatcherChange::new(fs_path.to_path_buf(), change_kind));
|
||||
}
|
||||
}
|
||||
|
||||
if !changes.is_empty() {
|
||||
handler.handle(changes);
|
||||
}
|
||||
}
|
||||
// TODO proper error handling
|
||||
Err(err) => {
|
||||
panic!("Error: {err}");
|
||||
}
|
||||
}
|
||||
})
|
||||
.context("Failed to create file watcher.")?;
|
||||
|
||||
Ok(Self { watcher })
|
||||
}
|
||||
|
||||
pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
|
||||
self.watcher.watch(path, RecursiveMode::Recursive)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FileWatcherChange {
|
||||
pub path: SystemPathBuf,
|
||||
#[allow(unused)]
|
||||
pub kind: FileChangeKind,
|
||||
}
|
||||
|
||||
impl FileWatcherChange {
|
||||
pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
|
||||
Self { path, kind }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
pub enum FileChangeKind {
|
||||
Created,
|
||||
Modified,
|
||||
Deleted,
|
||||
}
|
||||
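For illustration only (not part of the diff): a closure satisfies the blanket `EventHandler` impl above, so a watcher could be wired up roughly like this. The logging body is invented for the sketch.

fn watch_workspace(root: &std::path::Path) -> anyhow::Result<FileWatcher> {
    let mut watcher = FileWatcher::new(|changes: Vec<FileWatcherChange>| {
        for change in changes {
            // Both fields derive `Debug`, so this prints e.g. `Created: "/ws/a.py"`.
            println!("{:?}: {:?}", change.kind, change.path);
        }
    })?;
    watcher.watch_folder(root)?;
    Ok(watcher)
}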
344  crates/red_knot/src/workspace.rs  Normal file
@@ -0,0 +1,344 @@
// TODO: Fix clippy warnings created by salsa macros
#![allow(clippy::used_underscore_binding)]

use std::{collections::BTreeMap, sync::Arc};

use rustc_hash::{FxBuildHasher, FxHashSet};

pub use metadata::{PackageMetadata, WorkspaceMetadata};
use ruff_db::{
    files::{system_path_to_file, File},
    system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
};
use ruff_python_ast::{name::Name, PySourceType};

use crate::{
    db::Db,
    lint::{lint_semantic, lint_syntax, Diagnostics},
};

mod metadata;

/// The project workspace as a Salsa ingredient.
///
/// A workspace consists of one or multiple packages. Packages can be nested. A file in a workspace
/// belongs to no or exactly one package (files can't belong to multiple packages).
///
/// How workspaces and packages are discovered is TBD. For now, a workspace can be any directory,
/// and it always contains a single package which has the same root as the workspace.
///
/// ## Examples
///
/// ```text
/// app-1/
///     pyproject.toml
///     src/
///         ... python files
///
/// app-2/
///     pyproject.toml
///     src/
///         ... python files
///
/// shared/
///     pyproject.toml
///     src/
///         ... python files
///
/// pyproject.toml
/// ```
///
/// The above project structure has three packages: `app-1`, `app-2`, and `shared`.
/// Each of the packages can define their own settings in their `pyproject.toml` file, but
/// they must be compatible. For example, each package can define a different `requires-python` range,
/// but the ranges must overlap.
///
/// ## How is a workspace different from a program?
/// There are two (related) motivations:
///
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
///    without introducing a cyclic dependency. The workspace is defined in a higher level crate
///    where it can reference these setting types.
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
///    it remains the same workspace. That's why program is a narrowed view of the workspace only
///    holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Workspace {
    #[id]
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are open in the workspace.
    ///
    /// Setting the open files to a non-`None` value changes `check` to only check the
    /// open files rather than all files in the workspace.
    #[return_ref]
    open_file_set: Option<Arc<FxHashSet<File>>>,

    /// The (first-party) packages in this workspace.
    #[return_ref]
    package_tree: BTreeMap<SystemPathBuf, Package>,
}

/// A first-party package in a workspace.
#[salsa::input]
pub struct Package {
    #[return_ref]
    pub name: Name,

    /// The path to the root directory of the package.
    #[id]
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are part of this package.
    #[return_ref]
    file_set: Arc<FxHashSet<File>>,
    // TODO: Add the loaded settings.
}

impl Workspace {
    /// Discovers the closest workspace at `path` and returns its metadata.
    pub fn from_metadata(db: &dyn Db, metadata: WorkspaceMetadata) -> Self {
        let mut packages = BTreeMap::new();

        for package in metadata.packages {
            packages.insert(package.root.clone(), Package::from_metadata(db, package));
        }

        Workspace::new(db, metadata.root, None, packages)
    }

    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    pub fn packages(self, db: &dyn Db) -> impl Iterator<Item = Package> + '_ {
        self.package_tree(db).values().copied()
    }

    pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
        assert_eq!(self.root(db), metadata.root());

        let mut old_packages = self.package_tree(db).clone();
        let mut new_packages = BTreeMap::new();

        for package_metadata in metadata.packages {
            let path = package_metadata.root().to_path_buf();

            let package = if let Some(old_package) = old_packages.remove(&path) {
                old_package.update(db, package_metadata);
                old_package
            } else {
                Package::from_metadata(db, package_metadata)
            };

            new_packages.insert(path, package);
        }

        self.set_package_tree(db).to(new_packages);
    }

    pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
        let path = metadata.root().to_path_buf();

        if let Some(package) = self.package_tree(db).get(&path).copied() {
            package.update(db, metadata);
            Ok(())
        } else {
            Err(anyhow::anyhow!("Package {path} not found"))
        }
    }

    /// Returns the closest package to which the first-party `path` belongs.
    ///
    /// Returns `None` if the `path` is outside of any package or if `file` isn't a first-party file
    /// (e.g. third-party dependencies or `excluded`).
    pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
        let packages = self.package_tree(db);

        let (package_path, package) = packages.range(..path.to_path_buf()).next_back()?;

        if path.starts_with(package_path) {
            Some(*package)
        } else {
            None
        }
    }

    /// Checks all open files in the workspace and its dependencies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn check(self, db: &dyn Db) -> Vec<String> {
        let mut result = Vec::new();

        if let Some(open_files) = self.open_files(db) {
            for file in open_files {
                result.extend_from_slice(&check_file(db, *file));
            }
        } else {
            for package in self.packages(db) {
                result.extend(package.check(db));
            }
        }

        result
    }

    /// Opens a file in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn open_file(self, db: &mut dyn Db, file: File) {
        let mut open_files = self.take_open_files(db);
        open_files.insert(file);
        self.set_open_files(db, open_files);
    }

    /// Closes a file in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
        let mut open_files = self.take_open_files(db);
        let removed = open_files.remove(&file);

        if removed {
            self.set_open_files(db, open_files);
        }

        removed
    }

    /// Returns the open files in the workspace or `None` if the entire workspace should be checked.
    pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
        self.open_file_set(db).as_deref()
    }

    /// Sets the open files in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
        self.set_open_file_set(db).to(Some(Arc::new(open_files)));
    }

    /// This takes the open files from the workspace and returns them.
    ///
    /// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
    pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
        let open_files = self.open_file_set(db).clone();

        if let Some(open_files) = open_files {
            // Salsa will cancel any pending queries and remove its own reference to `open_files`
            // so that the reference counter to `open_files` now drops to 1.
            self.set_open_file_set(db).to(None);

            Arc::try_unwrap(open_files).unwrap()
        } else {
            FxHashSet::default()
        }
    }
}

impl Package {
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    /// Returns `true` if `file` is a first-party file part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).contains(&file)
    }

    pub fn files(self, db: &dyn Db) -> &FxHashSet<File> {
        self.file_set(db)
    }

    pub fn remove_file(self, db: &mut dyn Db, file: File) -> bool {
        let mut files_arc = self.file_set(db).clone();

        // Set a dummy value. Salsa will cancel any pending queries and remove its own reference to `files`
        // so that the reference counter to `files` now drops to 1.
        self.set_file_set(db).to(Arc::new(FxHashSet::default()));

        let files = Arc::get_mut(&mut files_arc).unwrap();
        let removed = files.remove(&file);
        self.set_file_set(db).to(files_arc);

        removed
    }

    pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
        let mut result = Vec::new();
        for file in self.files(db) {
            let diagnostics = check_file(db, *file);
            result.extend_from_slice(&diagnostics);
        }

        result
    }

    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
        let files = discover_package_files(db, metadata.root());

        Self::new(db, metadata.name, metadata.root, Arc::new(files))
    }

    fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
        let root = self.root(db);
        assert_eq!(root, metadata.root());

        let files = discover_package_files(db, root);

        self.set_name(db).to(metadata.name);
        self.set_file_set(db).to(Arc::new(files));
    }
}

pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
    let mut diagnostics = Vec::new();
    diagnostics.extend_from_slice(lint_syntax(db, file));
    diagnostics.extend_from_slice(lint_semantic(db, file));
    Diagnostics::from(diagnostics)
}

fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
    let paths = std::sync::Mutex::new(Vec::new());

    db.system().walk_directory(path).run(|| {
        Box::new(|entry| {
            match entry {
                Ok(entry) => {
                    // Skip over any non python files to avoid creating too many entries in `Files`.
                    if entry.file_type().is_file()
                        && entry
                            .path()
                            .extension()
                            .and_then(PySourceType::try_from_extension)
                            .is_some()
                    {
                        let mut paths = paths.lock().unwrap();
                        paths.push(entry.into_path());
                    }
                }
                Err(error) => {
                    // TODO Handle error
                    tracing::error!("Failed to walk path: {error}");
                }
            }

            WalkState::Continue
        })
    });

    let paths = paths.into_inner().unwrap();
    let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);

    for path in paths {
        // If this returns `None`, then the file was deleted between the `walk_directory` call and now.
        // We can ignore this.
        if let Some(file) = system_path_to_file(db.upcast(), &path) {
            files.insert(file);
        }
    }

    files
}
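For illustration only (not part of the diff): `Workspace::package` above finds the owning package with an ordered-map trick — take the greatest key that sorts before the query path, then confirm containment with `starts_with`. A self-contained sketch of the same idea; plain `String` keys and string prefixes stand in for `SystemPathBuf` and the component-wise `SystemPath::starts_with`, and all names are invented.

use std::collections::BTreeMap;

fn closest_root<'a>(tree: &'a BTreeMap<String, &'a str>, path: &str) -> Option<&'a str> {
    // The greatest package root sorting strictly before `path`...
    let (root, name) = tree.range(..path.to_string()).next_back()?;
    // ...owns `path` only if it is actually a prefix of it.
    path.starts_with(root.as_str()).then_some(*name)
}

fn main() {
    let mut tree = BTreeMap::new();
    tree.insert("/ws/app-1".to_string(), "app-1");
    tree.insert("/ws/shared".to_string(), "shared");
    assert_eq!(closest_root(&tree, "/ws/app-1/src/main.py"), Some("app-1"));
    assert_eq!(closest_root(&tree, "/tmp/other.py"), None);
}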
@@ -1,4 +1,3 @@
use crate::workspace::settings::{Configuration, WorkspaceSettings};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;

@@ -8,8 +7,6 @@ pub struct WorkspaceMetadata {

    /// The (first-party) packages in this workspace.
    pub(super) packages: Vec<PackageMetadata>,

    pub(super) settings: WorkspaceSettings,
}

/// A first-party package in a workspace.
@@ -24,18 +21,16 @@ pub struct PackageMetadata {

impl WorkspaceMetadata {
    /// Discovers the closest workspace at `path` and returns its metadata.
    pub fn from_path(
        path: &SystemPath,
        system: &dyn System,
        base_configuration: Option<Configuration>,
    ) -> anyhow::Result<WorkspaceMetadata> {
        assert!(
            system.is_directory(path),
            "Workspace root path must be a directory"
        );
        tracing::debug!("Searching for workspace in '{path}'");
    pub fn from_path(path: &SystemPath, system: &dyn System) -> anyhow::Result<WorkspaceMetadata> {
        let root = if system.is_file(path) {
            path.parent().unwrap().to_path_buf()
        } else {
            path.to_path_buf()
        };

        let root = path.to_path_buf();
        if !system.is_directory(&root) {
            anyhow::bail!("no workspace found at {:?}", root);
        }

        // TODO: Discover package name from `pyproject.toml`.
        let package_name: Name = path.file_name().unwrap_or("<root>").into();
@@ -45,20 +40,9 @@ impl WorkspaceMetadata {
            root: root.clone(),
        };

        // TODO: Load the configuration from disk.
        let mut configuration = Configuration::default();

        if let Some(base_configuration) = base_configuration {
            configuration.extend(base_configuration);
        }

        // TODO: Respect the package configurations when resolving settings (e.g. for the target version).
        let settings = configuration.into_workspace_settings(&root);

        let workspace = WorkspaceMetadata {
            root,
            packages: vec![package],
            settings,
        };

        Ok(workspace)
@@ -71,10 +55,6 @@ impl WorkspaceMetadata {
    pub fn packages(&self) -> &[PackageMetadata] {
        &self.packages
    }

    pub fn settings(&self) -> &WorkspaceSettings {
        &self.settings
    }
}

impl PackageMetadata {
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
[package]
name = "red_knot_vendored"
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
@@ -12,20 +12,28 @@ license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }

compact_str = { workspace = true }
camino = { workspace = true }
once_cell = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }
zip = { workspace = true }

[dev-dependencies]
walkdir = { workspace = true }
ruff_db = { workspace = true, features = ["os"] }

[features]
zstd = ["zip/zstd"]
deflate = ["zip/deflate"]
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }

[lints]
workspace = true

@@ -1,5 +1,9 @@
# Vendored types for the stdlib
# Red Knot

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_vendored/vendor/typeshed`. The file `crates/red_knot_vendored/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
A work-in-progress multifile module resolver for Ruff.

## Vendored types for the stdlib

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
@@ -3,7 +3,7 @@
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_vendored/vendor/typeshed` change.
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;
@@ -23,23 +23,8 @@ const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
    // [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
    // by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
    // (WASM itself is already slower than a native build for a specific platform).
    // We can't use `#[cfg(...)]` here because the target-arch in a build script is the
    // architecture of the system running the build script and not the architecture of the build-target.
    // That's why we use the `TARGET` environment variable here.
    let method = if cfg!(feature = "zstd") {
        CompressionMethod::Zstd
    } else if cfg!(feature = "deflate") {
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Stored
    };

    let options = FileOptions::default()
        .compression_method(method)
        .compression_method(CompressionMethod::Zstd)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
@@ -69,7 +54,7 @@ fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
}

fn main() {
    println!("cargo::rerun-if-changed={TYPESHED_SOURCE_DIR}");
    println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
    assert!(
        Path::new(TYPESHED_SOURCE_DIR).is_dir(),
        "Where is typeshed?"
126  crates/red_knot_module_resolver/src/db.rs  Normal file
@@ -0,0 +1,126 @@
use ruff_db::Upcast;

use crate::resolver::{
    editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
    module_resolution_settings, resolve_module_query,
};
use crate::typeshed::parse_typeshed_versions;

#[salsa::jar(db=Db)]
pub struct Jar(
    ModuleNameIngredient<'_>,
    module_resolution_settings,
    editable_install_resolution_paths,
    resolve_module_query,
    file_to_module,
    parse_typeshed_versions,
);

pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync;

    use salsa::DebugWithDb;

    use ruff_db::files::Files;
    use ruff_db::system::{DbWithTestSystem, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;

    use crate::vendored_typeshed_stubs;

    use super::*;

    #[salsa::db(Jar, ruff_db::Jar)]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        system: TestSystem,
        vendored: VendoredFileSystem,
        files: Files,
        events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
    }

    impl TestDb {
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().snapshot(),
                events: sync::Arc::default(),
                files: Files::default(),
            }
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }

        /// Clears the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }
    }

    impl Upcast<dyn ruff_db::Db> for TestDb {
        fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
            self
        }
    }

    impl ruff_db::Db for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
        }

        fn system(&self) -> &dyn ruff_db::system::System {
            &self.system
        }

        fn files(&self) -> &Files {
            &self.files
        }
    }

    impl Db for TestDb {}

    impl DbWithTestSystem for TestDb {
        fn test_system(&self) -> &TestSystem {
            &self.system
        }

        fn test_system_mut(&mut self) -> &mut TestSystem {
            &mut self.system
        }
    }

    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                system: self.system.snapshot(),
                vendored: self.vendored.snapshot(),
                files: self.files.snapshot(),
                events: self.events.clone(),
            })
        }
    }
}
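For illustration only (not part of the diff): `take_salsa_events` above relies on a general pattern — events are pushed through a shared `Arc<Mutex<Vec<_>>>`, and a consumer with exclusive access drains them via `Arc::get_mut`. A standalone sketch of that pattern, with all names invented:

use std::sync::{Arc, Mutex};

struct Recorder {
    events: Arc<Mutex<Vec<String>>>,
}

impl Recorder {
    fn record(&self, event: impl Into<String>) {
        self.events.lock().unwrap().push(event.into());
    }

    /// Drains recorded events; panics if another clone of the Arc is still alive.
    fn take_events(&mut self) -> Vec<String> {
        let inner = Arc::get_mut(&mut self.events).expect("no other handles");
        std::mem::take(inner.get_mut().unwrap())
    }
}

fn main() {
    let mut recorder = Recorder { events: Arc::default() };
    recorder.record("did work");
    assert_eq!(recorder.take_events(), vec!["did work".to_string()]);
    assert!(recorder.take_events().is_empty());
}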
18  crates/red_knot_module_resolver/src/lib.rs  Normal file
@@ -0,0 +1,18 @@
mod db;
mod module;
mod module_name;
mod path;
mod resolver;
mod state;
mod typeshed;

#[cfg(test)]
mod testing;

pub use db::{Db, Jar};
pub use module::{Module, ModuleKind};
pub use module_name::ModuleName;
pub use resolver::resolve_module;
pub use typeshed::{
    vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
};
@@ -3,8 +3,9 @@ use std::sync::Arc;

use ruff_db::files::File;

use super::path::SearchPath;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::path::{ModuleResolutionPathBuf, ModuleResolutionPathRef};

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
@@ -16,7 +17,7 @@ impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: SearchPath,
        search_path: Arc<ModuleResolutionPathBuf>,
        file: File,
    ) -> Self {
        Self {
@@ -40,8 +41,8 @@ impl Module {
    }

    /// The search path from which the module was resolved.
    pub(crate) fn search_path(&self) -> &SearchPath {
        &self.inner.search_path
    pub(crate) fn search_path(&self) -> ModuleResolutionPathRef {
        ModuleResolutionPathRef::from(&*self.inner.search_path)
    }

    /// Determine whether this module is a single-file module or a package
@@ -61,11 +62,22 @@ impl std::fmt::Debug for Module {
    }
}

impl salsa::DebugWithDb<dyn Db> for Module {
    fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file().debug(db.upcast()))
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: SearchPath,
    search_path: Arc<ModuleResolutionPathBuf>,
    file: File,
}

@@ -77,9 +89,3 @@ pub enum ModuleKind {
    /// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
    Package,
}

impl ModuleKind {
    pub const fn is_package(self) -> bool {
        matches!(self, ModuleKind::Package)
    }
}
@@ -42,7 +42,7 @@ impl ModuleName {
    /// ## Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
@@ -68,7 +68,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
@@ -82,7 +82,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
@@ -101,7 +101,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
@@ -133,7 +133,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
    /// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
@@ -168,24 +168,6 @@ impl ModuleName {
        };
        Some(Self(name))
    }

    /// Extend `self` with the components of `other`
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// let mut module_name = ModuleName::new_static("foo").unwrap();
    /// module_name.extend(&ModuleName::new_static("bar").unwrap());
    /// assert_eq!(&module_name, "foo.bar");
    /// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
    /// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
    /// ```
    pub fn extend(&mut self, other: &ModuleName) {
        self.0.push('.');
        self.0.push_str(other);
    }
}

impl Deref for ModuleName {
1286  crates/red_knot_module_resolver/src/path.rs  Normal file
File diff suppressed because it is too large
30  crates/red_knot_module_resolver/src/state.rs  Normal file
@@ -0,0 +1,30 @@
use ruff_db::program::TargetVersion;
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;

use crate::db::Db;
use crate::typeshed::LazyTypeshedVersions;

pub(crate) struct ResolverState<'db> {
    pub(crate) db: &'db dyn Db,
    pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
    pub(crate) target_version: TargetVersion,
}

impl<'db> ResolverState<'db> {
    pub(crate) fn new(db: &'db dyn Db, target_version: TargetVersion) -> Self {
        Self {
            db,
            typeshed_versions: LazyTypeshedVersions::new(),
            target_version,
        }
    }

    pub(crate) fn system(&self) -> &dyn System {
        self.db.system()
    }

    pub(crate) fn vendored(&self) -> &VendoredFileSystem {
        self.db.vendored()
    }
}
@@ -1,10 +1,8 @@
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;

use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::{ProgramSettings, SitePackages};

/// A test case for the module resolver.
///
@@ -14,11 +12,8 @@ pub(crate) struct TestCase<T> {
    pub(crate) db: TestDb,
    pub(crate) src: SystemPathBuf,
    pub(crate) stdlib: T,
    // Most test cases only ever need a single `site-packages` directory,
    // so this is a single directory instead of a `Vec` of directories,
    // like it is in `ruff_db::Program`.
    pub(crate) site_packages: SystemPathBuf,
    pub(crate) target_version: PythonVersion,
    pub(crate) target_version: TargetVersion,
}

/// A `(file_name, file_contents)` tuple
@@ -100,7 +95,7 @@ pub(crate) struct UnspecifiedTypeshed;
/// to `()`.
pub(crate) struct TestCaseBuilder<T> {
    typeshed_option: T,
    target_version: PythonVersion,
    target_version: TargetVersion,
    first_party_files: Vec<FileSpec>,
    site_packages_files: Vec<FileSpec>,
}
@@ -119,7 +114,7 @@ impl<T> TestCaseBuilder<T> {
    }

    /// Specify the target Python version the module resolver should assume
    pub(crate) fn with_target_version(mut self, target_version: PythonVersion) -> Self {
    pub(crate) fn with_target_version(mut self, target_version: TargetVersion) -> Self {
        self.target_version = target_version;
        self
    }
@@ -130,8 +125,6 @@ impl<T> TestCaseBuilder<T> {
        files: impl IntoIterator<Item = FileSpec>,
    ) -> SystemPathBuf {
        let root = location.as_ref().to_path_buf();
        // Make sure to create the directory even if the list of files is empty:
        db.memory_file_system().create_directory_all(&root).unwrap();
        db.write_files(
            files
                .into_iter()
@@ -146,7 +139,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
    pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
        Self {
            typeshed_option: UnspecifiedTypeshed,
            target_version: PythonVersion::default(),
            target_version: TargetVersion::default(),
            first_party_files: vec![],
            site_packages_files: vec![],
        }
@@ -179,7 +172,6 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
            first_party_files,
            site_packages_files,
        } = self;

        TestCaseBuilder {
            typeshed_option: typeshed,
            target_version,
@@ -196,7 +188,6 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
            site_packages,
            target_version,
        } = self.with_custom_typeshed(MockedTypeshed::default()).build();

        TestCase {
            db,
            src,
@@ -223,19 +214,16 @@ impl TestCaseBuilder<MockedTypeshed> {
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
        let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

        Program::from_settings(
        Program::new(
            &db,
            &ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: Some(typeshed.clone()),
                    site_packages: SitePackages::Known(vec![site_packages.clone()]),
                },
            },
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: Some(typeshed.clone()),
                site_packages: Some(site_packages.clone()),
            },
        )
        .expect("Valid program settings");
        );

        TestCase {
            db,
@@ -279,17 +267,16 @@ impl TestCaseBuilder<VendoredTypeshed> {
        Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

        Program::from_settings(
        Program::new(
            &db,
            &ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    site_packages: SitePackages::Known(vec![site_packages.clone()]),
                    ..SearchPathSettings::new(src.clone())
                },
            },
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: None,
                site_packages: Some(site_packages.clone()),
            },
        )
        .expect("Valid search path settings");
        );

        TestCase {
            db,
8  crates/red_knot_module_resolver/src/typeshed.rs  Normal file
@@ -0,0 +1,8 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(crate) use self::versions::{
    parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsQueryResult,
};
pub use self::versions::{TypeshedVersionsParseError, TypeshedVersionsParseErrorKind};

mod vendored;
mod versions;
@@ -1,13 +1,14 @@
use once_cell::sync::Lazy;

use ruff_db::vendored::VendoredFileSystem;
use std::sync::LazyLock;

// The file path here is hardcoded in this crate's `build.rs` script.
// Luckily this crate will fail to build if this file isn't available at build time.
static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));

pub fn file_system() -> &'static VendoredFileSystem {
    static VENDORED_TYPESHED_STUBS: LazyLock<VendoredFileSystem> =
        LazyLock::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
pub fn vendored_typeshed_stubs() -> &'static VendoredFileSystem {
    static VENDORED_TYPESHED_STUBS: Lazy<VendoredFileSystem> =
        Lazy::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
    &VENDORED_TYPESHED_STUBS
}

@@ -41,7 +42,7 @@ mod tests {
    #[test]
    fn typeshed_vfs_consistent_with_vendored_stubs() {
        let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
        let vendored_typeshed_stubs = file_system();
        let vendored_typeshed_stubs = vendored_typeshed_stubs();

        let mut empty_iterator = true;
        for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
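For illustration only (not part of the diff): both sides of this hunk implement the same lazily-initialized-static pattern; only the primitive differs (`std::sync::LazyLock`, stable since Rust 1.80, versus the older `once_cell::sync::Lazy`). A minimal sketch of the pattern:

use std::sync::LazyLock;

fn load() -> String {
    // Stand-in for the expensive work done by `VendoredFileSystem::new_static`.
    "initialized once".to_string()
}

pub fn shared() -> &'static String {
    // The static lives inside the accessor, so initialization happens on the
    // first call and every later call returns the same reference.
    static VALUE: LazyLock<String> = LazyLock::new(load);
    &VALUE
}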
@@ -1,30 +1,96 @@
use std::cell::OnceCell;
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;

use once_cell::sync::Lazy;
use ruff_db::program::TargetVersion;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;

use ruff_db::files::{system_path_to_file, File};

use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, PythonVersion};

pub(in crate::module_resolver) fn vendored_typeshed_versions(db: &dyn Db) -> TypeshedVersions {
use super::vendored::vendored_typeshed_stubs;

#[derive(Debug)]
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);

impl<'db> LazyTypeshedVersions<'db> {
    #[must_use]
    pub(crate) fn new() -> Self {
        Self(OnceCell::new())
    }

    /// Query whether a module exists at runtime in the stdlib on a certain Python version.
    ///
    /// Simply probing whether a file exists in typeshed is insufficient for this question,
    /// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
    /// will still be available (either in a custom typeshed dir or in our vendored copy)
    /// even if the user specified Python 3.8 as the target version.
    ///
    /// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
    /// as to whether the module exists on the specified target version. However, VERSIONS does not
    /// provide comprehensive information on all submodules, meaning that this method sometimes
    /// returns [`TypeshedVersionsQueryResult::MaybeExists`].
    /// See [`TypeshedVersionsQueryResult`] for more details.
    #[must_use]
    pub(crate) fn query_module(
        &self,
        db: &'db dyn Db,
        module: &ModuleName,
        stdlib_root: Option<&SystemPath>,
        target_version: TargetVersion,
    ) -> TypeshedVersionsQueryResult {
        let versions = self.0.get_or_init(|| {
            let versions_path = if let Some(system_path) = stdlib_root {
                system_path.join("VERSIONS")
            } else {
                return &VENDORED_VERSIONS;
            };
            let Some(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
                todo!(
                    "Still need to figure out how to handle VERSIONS files being deleted \
                    from custom typeshed directories! Expected a file to exist at {versions_path}"
                )
            };
            // TODO(Alex/Micha): If VERSIONS is invalid,
            // this should invalidate not just the specific module resolution we're currently attempting,
            // but all type inference that depends on any standard-library types.
            // Unwrapping here is not correct...
            parse_typeshed_versions(db, versions_file).as_ref().unwrap()
        });
        versions.query_module(module, PyVersion::from(target_version))
    }
}

#[salsa::tracked(return_ref)]
pub(crate) fn parse_typeshed_versions(
    db: &dyn Db,
    versions_file: File,
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
    // TODO: Handle IO errors
    let file_content = versions_file
        .read_to_string(db.upcast())
        .unwrap_or_default();
    file_content.parse()
}

static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
    TypeshedVersions::from_str(
        &db.vendored()
        &vendored_typeshed_stubs()
            .read_to_string("stdlib/VERSIONS")
            .expect("The vendored typeshed stubs should contain a VERSIONS file"),
            .unwrap(),
    )
    .expect("The VERSIONS file in the vendored typeshed stubs should be well-formed")
}

pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
    Program::get(db).search_paths(db).typeshed_versions()
}
    .unwrap()
});

#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct TypeshedVersionsParseError {
pub struct TypeshedVersionsParseError {
    line_number: Option<NonZeroU16>,
    reason: TypeshedVersionsParseErrorKind,
}
@@ -57,7 +123,7 @@ impl std::error::Error for TypeshedVersionsParseError {
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum TypeshedVersionsParseErrorKind {
pub enum TypeshedVersionsParseErrorKind {
    TooManyLines(NonZeroUsize),
    UnexpectedNumberOfColons,
    InvalidModuleName(String),
@@ -109,10 +175,10 @@ impl TypeshedVersions {
    }

    #[must_use]
    pub(in crate::module_resolver) fn query_module(
    fn query_module(
        &self,
        module: &ModuleName,
        target_version: PythonVersion,
        target_version: PyVersion,
    ) -> TypeshedVersionsQueryResult {
        if let Some(range) = self.exact(module) {
            if range.contains(target_version) {
@@ -139,7 +205,7 @@ impl TypeshedVersions {
    }
}

/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
/// "Does this module exist in the stdlib at runtime on a certain target version?"
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
pub(crate) enum TypeshedVersionsQueryResult {
@@ -257,13 +323,13 @@ impl fmt::Display for TypeshedVersions {

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum PyVersionRange {
    AvailableFrom(RangeFrom<PythonVersion>),
    AvailableWithin(RangeInclusive<PythonVersion>),
    AvailableFrom(RangeFrom<PyVersion>),
    AvailableWithin(RangeInclusive<PyVersion>),
}

impl PyVersionRange {
    #[must_use]
    fn contains(&self, version: PythonVersion) -> bool {
    fn contains(&self, version: PyVersion) -> bool {
        match self {
            Self::AvailableFrom(inner) => inner.contains(&version),
            Self::AvailableWithin(inner) => inner.contains(&version),
@@ -277,14 +343,9 @@ impl FromStr for PyVersionRange {
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('-').map(str::trim);
        match (parts.next(), parts.next(), parts.next()) {
            (Some(lower), Some(""), None) => {
                let lower = PythonVersion::from_versions_file_string(lower)?;
                Ok(Self::AvailableFrom(lower..))
            }
            (Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
            (Some(lower), Some(upper), None) => {
                let lower = PythonVersion::from_versions_file_string(lower)?;
                let upper = PythonVersion::from_versions_file_string(upper)?;
                Ok(Self::AvailableWithin(lower..=upper))
                Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
            }
            _ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
        }
@@ -302,20 +363,74 @@ impl fmt::Display for PyVersionRange {
    }
}

impl PythonVersion {
    fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
struct PyVersion {
    major: u8,
    minor: u8,
}

impl FromStr for PyVersion {
    type Err = TypeshedVersionsParseErrorKind;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('.').map(str::trim);
        let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
            return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                s.to_string(),
            ));
        };
        PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
            TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                version: s.to_string(),
                err: int_parse_error,
        let major = match u8::from_str(major) {
            Ok(major) => major,
            Err(err) => {
                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                    version: s.to_string(),
                    err,
                })
            }
        })
        };
        let minor = match u8::from_str(minor) {
            Ok(minor) => minor,
            Err(err) => {
                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                    version: s.to_string(),
                    err,
                })
            }
        };
        Ok(Self { major, minor })
    }
}

impl fmt::Display for PyVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PyVersion { major, minor } = self;
        write!(f, "{major}.{minor}")
    }
}

impl From<TargetVersion> for PyVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => PyVersion { major: 3, minor: 7 },
            TargetVersion::Py38 => PyVersion { major: 3, minor: 8 },
            TargetVersion::Py39 => PyVersion { major: 3, minor: 9 },
            TargetVersion::Py310 => PyVersion {
                major: 3,
                minor: 10,
            },
            TargetVersion::Py311 => PyVersion {
                major: 3,
                minor: 11,
            },
            TargetVersion::Py312 => PyVersion {
                major: 3,
                minor: 12,
            },
            TargetVersion::Py313 => PyVersion {
                major: 3,
                minor: 13,
            },
        }
    }
}

@@ -325,8 +440,7 @@ mod tests {
    use std::path::Path;

    use insta::assert_snapshot;

    use crate::db::tests::TestDb;
    use ruff_db::program::TargetVersion;

    use super::*;

@@ -349,9 +463,12 @@ mod tests {

    #[test]
    fn can_parse_vendored_versions_file() {
        let db = TestDb::new();
        let versions_data = include_str!(concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/vendor/typeshed/stdlib/VERSIONS"
        ));

        let versions = vendored_typeshed_versions(&db);
        let versions = TypeshedVersions::from_str(versions_data).unwrap();
        assert!(versions.len() > 100);
        assert!(versions.len() < 1000);

@@ -361,37 +478,36 @@ mod tests {

        assert!(versions.contains_exact(&asyncio));
        assert_eq!(
            versions.query_module(&asyncio, PythonVersion::PY310),
            versions.query_module(&asyncio, TargetVersion::Py310.into()),
            TypeshedVersionsQueryResult::Exists
        );

        assert!(versions.contains_exact(&asyncio_staggered));
        assert_eq!(
            versions.query_module(&asyncio_staggered, PythonVersion::PY38),
            versions.query_module(&asyncio_staggered, TargetVersion::Py38.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            versions.query_module(&asyncio_staggered, PythonVersion::PY37),
            versions.query_module(&asyncio_staggered, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );

        assert!(versions.contains_exact(&audioop));
        assert_eq!(
            versions.query_module(&audioop, PythonVersion::PY312),
            versions.query_module(&audioop, TargetVersion::Py312.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            versions.query_module(&audioop, PythonVersion::PY313),
            versions.query_module(&audioop, TargetVersion::Py313.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }

    #[test]
    fn typeshed_versions_consistent_with_vendored_stubs() {
        let db = TestDb::new();
        let vendored_typeshed_versions = vendored_typeshed_versions(&db);
        let vendored_typeshed_dir =
            Path::new(env!("CARGO_MANIFEST_DIR")).join("../red_knot_vendored/vendor/typeshed");
        const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
        let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
        let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();

        let mut empty_iterator = true;

@@ -474,15 +590,15 @@ foo: 3.8- # trailing comment

        assert!(parsed_versions.contains_exact(&bar));
        assert_eq!(
            parsed_versions.query_module(&bar, PythonVersion::PY37),
            parsed_versions.query_module(&bar, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar, PythonVersion::PY310),
            parsed_versions.query_module(&bar, TargetVersion::Py310.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar, PythonVersion::PY311),
            parsed_versions.query_module(&bar, TargetVersion::Py311.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }
@@ -494,15 +610,15 @@ foo: 3.8- # trailing comment

        assert!(parsed_versions.contains_exact(&foo));
        assert_eq!(
            parsed_versions.query_module(&foo, PythonVersion::PY37),
            parsed_versions.query_module(&foo, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
        assert_eq!(
            parsed_versions.query_module(&foo, PythonVersion::PY38),
            parsed_versions.query_module(&foo, TargetVersion::Py38.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&foo, PythonVersion::PY311),
            parsed_versions.query_module(&foo, TargetVersion::Py311.into()),
            TypeshedVersionsQueryResult::Exists
        );
    }
@@ -514,15 +630,15 @@ foo: 3.8- # trailing comment

        assert!(parsed_versions.contains_exact(&bar_baz));
        assert_eq!(
            parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
            parsed_versions.query_module(&bar_baz, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
            parsed_versions.query_module(&bar_baz, TargetVersion::Py39.into()),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
            parsed_versions.query_module(&bar_baz, TargetVersion::Py310.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }
@@ -534,15 +650,15 @@ foo: 3.8- # trailing comment

        assert!(!parsed_versions.contains_exact(&bar_eggs));
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::MaybeExists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py310.into()),
            TypeshedVersionsQueryResult::MaybeExists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py311.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }
@@ -554,11 +670,11 @@ foo: 3.8- # trailing comment

        assert!(!parsed_versions.contains_exact(&spam));
        assert_eq!(
            parsed_versions.query_module(&spam, PythonVersion::PY37),
            parsed_versions.query_module(&spam, TargetVersion::Py37.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
        assert_eq!(
            parsed_versions.query_module(&spam, PythonVersion::PY313),
            parsed_versions.query_module(&spam, TargetVersion::Py313.into()),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }
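For illustration only (not part of the diff): a typeshed VERSIONS line such as `_bootlocale: 3.4-3.9` parses to an inclusive range, while `_codecs: 3.0-` parses to an open-ended one — the `AvailableWithin`/`AvailableFrom` split above. A self-contained sketch of that range semantics, with all names invented:

#[derive(PartialEq, PartialOrd)]
struct V(u8, u8); // (major, minor), ordered lexicographically by the derive

enum Range {
    From(V),      // "3.0-"    => available from 3.0 onwards
    Within(V, V), // "3.4-3.9" => available from 3.4 through 3.9 inclusive
}

impl Range {
    fn contains(&self, v: &V) -> bool {
        match self {
            Range::From(lo) => v >= lo,
            Range::Within(lo, hi) => v >= lo && v <= hi,
        }
    }
}

fn main() {
    assert!(Range::From(V(3, 0)).contains(&V(3, 12)));
    assert!(!Range::Within(V(3, 4), V(3, 9)).contains(&V(3, 10)));
}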
@@ -21,7 +21,7 @@ the project the stubs are for, but instead report them here to typeshed.**
Further documentation on stub files, typeshed, and Python's typing system in
general, can also be found at https://typing.readthedocs.io/en/latest/.

Typeshed supports Python versions 3.8 to 3.13.
Typeshed supports Python versions 3.8 and up.

## Using

1  crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt  vendored  Normal file
@@ -0,0 +1 @@
f863db6bc5242348ceaa6a3bca4e59aa9e62faaa
@@ -20,9 +20,7 @@
__future__: 3.0-
__main__: 3.0-
_ast: 3.0-
_asyncio: 3.0-
_bisect: 3.0-
_blake2: 3.6-
_bootlocale: 3.4-3.9
_codecs: 3.0-
_collections_abc: 3.3-
@@ -34,19 +32,16 @@ _curses: 3.0-
_decimal: 3.3-
_dummy_thread: 3.0-3.8
_dummy_threading: 3.0-3.8
_frozen_importlib: 3.0-
_frozen_importlib_external: 3.5-
_heapq: 3.0-
_imp: 3.0-
_interpchannels: 3.13-
_interpqueues: 3.13-
_interpreters: 3.13-
_io: 3.0-
_json: 3.0-
_locale: 3.0-
_lsprof: 3.0-
_markupbase: 3.0-
_msi: 3.0-3.12
_msi: 3.0-
_operator: 3.4-
_osx_support: 3.0-
_posixsubprocess: 3.2-
@@ -55,8 +50,6 @@ _pydecimal: 3.5-
_random: 3.0-
_sitebuiltins: 3.4-
_socket: 3.0- # present in 3.0 at runtime, but not in typeshed
_sqlite3: 3.0-
_ssl: 3.0-
_stat: 3.4-
_thread: 3.0-
_threading_local: 3.0-
@@ -163,15 +156,11 @@ imghdr: 3.0-3.12
imp: 3.0-3.11
importlib: 3.0-
importlib._abc: 3.10-
importlib._bootstrap: 3.0-
importlib._bootstrap_external: 3.5-
importlib.metadata: 3.8-
importlib.metadata._meta: 3.10-
importlib.metadata.diagnose: 3.13-
importlib.readers: 3.10-
importlib.resources: 3.7-
importlib.resources._common: 3.11-
importlib.resources._functional: 3.13-
importlib.resources.abc: 3.11-
importlib.resources.readers: 3.11-
importlib.resources.simple: 3.11-
File diff suppressed because it is too large
@@ -1,12 +1,13 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import (  # noqa: Y022,Y038
from typing import (  # noqa: Y022,Y038,Y057
    AbstractSet as Set,
    AsyncGenerator as AsyncGenerator,
    AsyncIterable as AsyncIterable,
    AsyncIterator as AsyncIterator,
    Awaitable as Awaitable,
    ByteString as ByteString,
    Callable as Callable,
    Collection as Collection,
    Container as Container,
@@ -58,12 +59,8 @@ __all__ = [
    "ValuesView",
    "Sequence",
    "MutableSequence",
    "ByteString",
]
if sys.version_info < (3, 14):
    from typing import ByteString as ByteString  # noqa: Y057

    __all__ += ["ByteString"]

if sys.version_info >= (3, 12):
    __all__ += ["Buffer"]

@@ -1,19 +1,18 @@
import csv
import sys
from _typeshed import SupportsWrite
from collections.abc import Iterable, Iterator
from typing import Any, Final
from typing import Any, Final, Literal
from typing_extensions import TypeAlias

__version__: Final[str]

QUOTE_ALL: Final = 1
QUOTE_MINIMAL: Final = 0
QUOTE_NONE: Final = 3
QUOTE_NONNUMERIC: Final = 2
QUOTE_ALL: Literal[1]
QUOTE_MINIMAL: Literal[0]
QUOTE_NONE: Literal[3]
QUOTE_NONNUMERIC: Literal[2]
if sys.version_info >= (3, 12):
    QUOTE_STRINGS: Final = 4
    QUOTE_NOTNULL: Final = 5
    QUOTE_STRINGS: Literal[4]
    QUOTE_NOTNULL: Literal[5]

# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
# However, using literals in situations like these can cause false-positives (see #7258)
@@ -21,8 +20,6 @@ _QuotingType: TypeAlias = int

class Error(Exception): ...

_DialectLike: TypeAlias = str | Dialect | csv.Dialect | type[Dialect | csv.Dialect]

class Dialect:
    delimiter: str
    quotechar: str | None
@@ -32,18 +29,9 @@ class Dialect:
    lineterminator: str
    quoting: _QuotingType
    strict: bool
    def __init__(
        self,
        dialect: _DialectLike | None = ...,
        delimiter: str = ",",
        doublequote: bool = True,
        escapechar: str | None = None,
        lineterminator: str = "\r\n",
        quotechar: str | None = '"',
        quoting: _QuotingType = 0,
        skipinitialspace: bool = False,
        strict: bool = False,
    ) -> None: ...
    def __init__(self) -> None: ...

_DialectLike: TypeAlias = str | Dialect | type[Dialect]

class _reader(Iterator[list[str]]):
    @property
||||
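The `QUOTE_*` hunk swaps `QUOTE_ALL: Literal[1]` annotations for `QUOTE_ALL: Final = 1` assignments; in a stub, `Final = 1` still pins the literal value for type checkers while also marking the name as non-reassignable. At runtime the constants are ordinary ints either way (illustrative sketch):

# Illustrative sketch (not part of the diff): the csv quoting constants are
# plain ints at runtime; the stub-level change from `QUOTE_ALL: Literal[1]`
# to `QUOTE_ALL: Final = 1` keeps the literal type while also telling type
# checkers the name must not be reassigned.
import csv
import io

buf = io.StringIO()
writer = csv.writer(buf, quoting=csv.QUOTE_ALL)  # QUOTE_ALL == 1
writer.writerow(["a", 1])
print(buf.getvalue())  # '"a","1"\r\n'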
@@ -2,7 +2,7 @@ import sys
from _typeshed import ReadableBuffer, WriteableBuffer
from abc import abstractmethod
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
from ctypes import CDLL, ArgumentError as ArgumentError, c_void_p
from ctypes import CDLL, ArgumentError as ArgumentError
from typing import Any, ClassVar, Generic, TypeVar, overload
from typing_extensions import Self, TypeAlias

@@ -51,8 +51,8 @@ class _CDataMeta(type):
    # By default mypy complains about the following two methods, because strictly speaking cls
    # might not be a Type[_CT]. However this can never actually happen, because the only class that
    # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
    def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
    def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
    def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
    def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]

class _CData(metaclass=_CDataMeta):
    _b_base_: int
@@ -71,7 +71,7 @@ class _CData(metaclass=_CDataMeta):
    @classmethod
    def from_address(cls, address: int) -> Self: ...
    @classmethod
    def from_param(cls, value: Any, /) -> Self | _CArgObject: ...
    def from_param(cls, obj: Any) -> Self | _CArgObject: ...
    @classmethod
    def in_dll(cls, library: CDLL, name: str) -> Self: ...
    def __buffer__(self, flags: int, /) -> memoryview: ...
@@ -99,9 +99,6 @@ class _Pointer(_PointerLike, _CData, Generic[_CT]):
    def __getitem__(self, key: slice, /) -> list[Any]: ...
    def __setitem__(self, key: int, value: Any, /) -> None: ...

@overload
def POINTER(type: None, /) -> type[c_void_p]: ...
@overload
def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ...
def pointer(obj: _CT, /) -> _Pointer[_CT]: ...

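The `_CDataMeta.__mul__`/`__rmul__` methods in the hunk above are what give the `ctype * length` idiom its type; only the ignore comments differ between the two sides. A runtime sketch of the idiom (illustrative only):

# Illustrative sketch (not part of the diff): _CDataMeta.__mul__ is what
# types the `ctype * length` idiom, in which multiplying a ctypes type by
# an int yields an Array subclass of that type.
from ctypes import c_int, sizeof

IntArray3 = c_int * 3  # handled by _CDataMeta.__mul__
arr = IntArray3(1, 2, 3)
print(list(arr))  # [1, 2, 3]
print(sizeof(arr) == 3 * sizeof(c_int))  # True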
@@ -298,7 +298,7 @@ if sys.version_info >= (3, 9):

def getmouse() -> tuple[int, int, int, int, int]: ...
def getsyx() -> tuple[int, int]: ...
def getwin(file: SupportsRead[bytes], /) -> window: ...
def getwin(file: SupportsRead[bytes], /) -> _CursesWindow: ...
def halfdelay(tenths: int, /) -> None: ...
def has_colors() -> bool: ...

@@ -310,7 +310,7 @@ def has_il() -> bool: ...
def has_key(key: int, /) -> bool: ...
def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ...
def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ...
def initscr() -> window: ...
def initscr() -> _CursesWindow: ...
def intrflush(flag: bool, /) -> None: ...
def is_term_resized(nlines: int, ncols: int, /) -> bool: ...
def isendwin() -> bool: ...
@@ -321,8 +321,8 @@ def meta(yes: bool, /) -> None: ...
def mouseinterval(interval: int, /) -> None: ...
def mousemask(newmask: int, /) -> tuple[int, int]: ...
def napms(ms: int, /) -> int: ...
def newpad(nlines: int, ncols: int, /) -> window: ...
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> window: ...
def newpad(nlines: int, ncols: int, /) -> _CursesWindow: ...
def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> _CursesWindow: ...
def nl(flag: bool = True, /) -> None: ...
def nocbreak() -> None: ...
def noecho() -> None: ...
@@ -368,7 +368,11 @@ def tparm(
) -> bytes: ...
def typeahead(fd: int, /) -> None: ...
def unctrl(ch: _ChType, /) -> bytes: ...
def unget_wch(ch: int | str, /) -> None: ...

if sys.version_info < (3, 12) or sys.platform != "darwin":
    # The support for macos was dropped in 3.12
    def unget_wch(ch: int | str, /) -> None: ...

def ungetch(ch: _ChType, /) -> None: ...
def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ...
def update_lines_cols() -> None: ...
@@ -378,7 +382,7 @@ def use_env(flag: bool, /) -> None: ...
class error(Exception): ...

@final
class window: # undocumented
class _CursesWindow:
    encoding: str
    @overload
    def addch(self, ch: _ChType, attr: int = ...) -> None: ...
@@ -431,9 +435,9 @@ class window: # undocumented
    def delch(self, y: int, x: int) -> None: ...
    def deleteln(self) -> None: ...
    @overload
    def derwin(self, begin_y: int, begin_x: int) -> window: ...
    def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
    @overload
    def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
    def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
    def echochar(self, ch: _ChType, attr: int = ..., /) -> None: ...
    def enclose(self, y: int, x: int, /) -> bool: ...
    def erase(self) -> None: ...
@@ -443,10 +447,13 @@ class window: # undocumented
    def getch(self) -> int: ...
    @overload
    def getch(self, y: int, x: int) -> int: ...
    @overload
    def get_wch(self) -> int | str: ...
    @overload
    def get_wch(self, y: int, x: int) -> int | str: ...
    if sys.version_info < (3, 12) or sys.platform != "darwin":
        # The support for macos was dropped in 3.12
        @overload
        def get_wch(self) -> int | str: ...
        @overload
        def get_wch(self, y: int, x: int) -> int | str: ...

    @overload
    def getkey(self) -> str: ...
    @overload
@@ -493,7 +500,7 @@ class window: # undocumented
    def instr(self, y: int, x: int, n: int = ...) -> bytes: ...
    def is_linetouched(self, line: int, /) -> bool: ...
    def is_wintouched(self) -> bool: ...
    def keypad(self, yes: bool, /) -> None: ...
    def keypad(self, yes: bool) -> None: ...
    def leaveok(self, yes: bool) -> None: ...
    def move(self, new_y: int, new_x: int) -> None: ...
    def mvderwin(self, y: int, x: int) -> None: ...
@@ -505,16 +512,16 @@ class window: # undocumented
    @overload
    def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ...
    @overload
    def overlay(self, destwin: window) -> None: ...
    def overlay(self, destwin: _CursesWindow) -> None: ...
    @overload
    def overlay(
        self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
        self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
    ) -> None: ...
    @overload
    def overwrite(self, destwin: window) -> None: ...
    def overwrite(self, destwin: _CursesWindow) -> None: ...
    @overload
    def overwrite(
        self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
        self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int
    ) -> None: ...
    def putwin(self, file: IO[Any], /) -> None: ...
    def redrawln(self, beg: int, num: int, /) -> None: ...
@@ -530,13 +537,13 @@ class window: # undocumented
    def standend(self) -> None: ...
    def standout(self) -> None: ...
    @overload
    def subpad(self, begin_y: int, begin_x: int) -> window: ...
    def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
    @overload
    def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
    def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
    @overload
    def subwin(self, begin_y: int, begin_x: int) -> window: ...
    def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ...
    @overload
    def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ...
    def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ...
    def syncdown(self) -> None: ...
    def syncok(self, flag: bool) -> None: ...
    def syncup(self) -> None: ...
@@ -555,3 +562,4 @@ class _ncurses_version(NamedTuple):
    patch: int

ncurses_version: _ncurses_version
window = _CursesWindow # undocumented
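The curses hunks rename the window class between the documented public name `window` and the older private `_CursesWindow` (with `window = _CursesWindow` as a compatibility alias), so user code can annotate with `curses.window` against either stub. A sketch (illustrative only; requires a real terminal, and curses is unavailable on Windows):

# Illustrative sketch (not part of the diff): regardless of whether the stub
# defines `window` directly or aliases `window = _CursesWindow`, annotations
# in user code refer to `curses.window`.
import curses


def main(stdscr: "curses.window") -> None:
    stdscr.addstr(0, 0, "hello from curses")
    stdscr.refresh()
    stdscr.getch()


if __name__ == "__main__":
    curses.wrapper(main)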
@@ -1,45 +1,37 @@
import numbers
from _decimal import (
    HAVE_CONTEXTVAR as HAVE_CONTEXTVAR,
    HAVE_THREADS as HAVE_THREADS,
    MAX_EMAX as MAX_EMAX,
    MAX_PREC as MAX_PREC,
    MIN_EMIN as MIN_EMIN,
    MIN_ETINY as MIN_ETINY,
    ROUND_05UP as ROUND_05UP,
    ROUND_CEILING as ROUND_CEILING,
    ROUND_DOWN as ROUND_DOWN,
    ROUND_FLOOR as ROUND_FLOOR,
    ROUND_HALF_DOWN as ROUND_HALF_DOWN,
    ROUND_HALF_EVEN as ROUND_HALF_EVEN,
    ROUND_HALF_UP as ROUND_HALF_UP,
    ROUND_UP as ROUND_UP,
    BasicContext as BasicContext,
    DefaultContext as DefaultContext,
    ExtendedContext as ExtendedContext,
    __libmpdec_version__ as __libmpdec_version__,
    __version__ as __version__,
    getcontext as getcontext,
    localcontext as localcontext,
    setcontext as setcontext,
)
import sys
from collections.abc import Container, Sequence
from typing import Any, ClassVar, Literal, NamedTuple, overload
from types import TracebackType
from typing import Any, ClassVar, Final, Literal, NamedTuple, overload
from typing_extensions import Self, TypeAlias

_Decimal: TypeAlias = Decimal | int
_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int]
_ComparableNum: TypeAlias = Decimal | float | numbers.Rational
_TrapType: TypeAlias = type[DecimalException]

# At runtime, these classes are implemented in C as part of "_decimal".
# However, they consider themselves to live in "decimal", so we'll put them here.
__version__: Final[str]
__libmpdec_version__: Final[str]

class DecimalTuple(NamedTuple):
    sign: int
    digits: tuple[int, ...]
    exponent: int | Literal["n", "N", "F"]

ROUND_DOWN: str
ROUND_HALF_UP: str
ROUND_HALF_EVEN: str
ROUND_CEILING: str
ROUND_FLOOR: str
ROUND_UP: str
ROUND_HALF_DOWN: str
ROUND_05UP: str
HAVE_CONTEXTVAR: bool
HAVE_THREADS: bool
MAX_EMAX: int
MAX_PREC: int
MIN_EMIN: int
MIN_ETINY: int

class DecimalException(ArithmeticError): ...
class Clamped(DecimalException): ...
class InvalidOperation(DecimalException): ...
@@ -55,6 +47,26 @@ class Overflow(Inexact, Rounded): ...
class Underflow(Inexact, Rounded, Subnormal): ...
class FloatOperation(DecimalException, TypeError): ...

def setcontext(context: Context, /) -> None: ...
def getcontext() -> Context: ...

if sys.version_info >= (3, 11):
    def localcontext(
        ctx: Context | None = None,
        *,
        prec: int | None = ...,
        rounding: str | None = ...,
        Emin: int | None = ...,
        Emax: int | None = ...,
        capitals: int | None = ...,
        clamp: int | None = ...,
        traps: dict[_TrapType, bool] | None = ...,
        flags: dict[_TrapType, bool] | None = ...,
    ) -> _ContextManager: ...

else:
    def localcontext(ctx: Context | None = None) -> _ContextManager: ...

class Decimal:
    def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ...
    @classmethod
@@ -156,6 +168,15 @@ class Decimal:
    def __deepcopy__(self, memo: Any, /) -> Self: ...
    def __format__(self, specifier: str, context: Context | None = ..., /) -> str: ...

class _ContextManager:
    new_context: Context
    saved_context: Context
    def __init__(self, new_context: Context) -> None: ...
    def __enter__(self) -> Context: ...
    def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...

_TrapType: TypeAlias = type[DecimalException]

class Context:
    # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime,
    # even settable attributes like `prec` and `rounding`,
@@ -254,3 +275,7 @@ class Context:
    def to_integral_exact(self, x: _Decimal, /) -> Decimal: ...
    def to_integral_value(self, x: _Decimal, /) -> Decimal: ...
    def to_integral(self, x: _Decimal, /) -> Decimal: ...

DefaultContext: Context
BasicContext: Context
ExtendedContext: Context
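The `localcontext` hunk adds the 3.11+ overload that accepts context attributes as keyword arguments. A sketch of both call styles (illustrative only):

# Illustrative sketch (not part of the diff): on 3.11+ localcontext() accepts
# context attributes as keyword arguments, matching the stub's first overload;
# on older versions you mutate the context inside the `with` block instead.
import sys
from decimal import Decimal, localcontext

if sys.version_info >= (3, 11):
    with localcontext(prec=6):
        print(Decimal(1) / Decimal(7))  # 0.142857
else:
    with localcontext() as ctx:
        ctx.prec = 6
        print(Decimal(1) / Decimal(7))  # 0.142857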
@@ -1,5 +1,5 @@
from _typeshed import structseq
from typing import Any, Final, Literal, SupportsIndex, final
from typing import Final, Literal, SupportsIndex, final
from typing_extensions import Buffer, Self

class ChannelError(RuntimeError): ...
@@ -72,15 +72,13 @@ class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, in
    @property
    def send_released(self) -> bool: ...

def create(unboundop: Literal[1, 2, 3]) -> ChannelID: ...
def create() -> ChannelID: ...
def destroy(cid: SupportsIndex) -> None: ...
def list_all() -> list[ChannelID]: ...
def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ...
def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: ...
def recv(cid: SupportsIndex, default: object = ...) -> object: ...
def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ...
def get_count(cid: SupportsIndex) -> int: ...
def get_info(cid: SupportsIndex) -> ChannelInfo: ...
def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: ...
def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ...
@@ -7,7 +7,7 @@ _Configs: TypeAlias = Literal["default", "isolated", "legacy", "empty", ""]

class InterpreterError(Exception): ...
class InterpreterNotFoundError(InterpreterError): ...
class NotShareableError(ValueError): ...
class NotShareableError(Exception): ...

class CrossInterpreterBufferView:
    def __buffer__(self, flags: int, /) -> memoryview: ...
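`_interpchannels` is a private CPython 3.13+ module, so the sketch below exists only to make the signatures in the hunk concrete; it follows the newer side (`create(unboundop)`, `recv(...) -> tuple[Any, Literal[1, 2, 3]]`) and should not be treated as a stable API:

# Illustrative sketch (not part of the diff): exercises the private
# _interpchannels API exactly as the newer stub above types it; the exact
# create()/recv() shapes differ between the two sides of this hunk.
import sys

if sys.version_info >= (3, 13):
    import _interpchannels

    cid = _interpchannels.create(2)  # newer signature takes an unboundop
    _interpchannels.send(cid, "ping", blocking=True)
    obj, unboundop = _interpchannels.recv(cid)
    print(obj)  # 'ping'
    _interpchannels.destroy(cid)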
100
crates/red_knot_module_resolver/vendor/typeshed/stdlib/_locale.pyi
vendored
Normal file
@@ -0,0 +1,100 @@
import sys
from _typeshed import StrPath
from collections.abc import Mapping

LC_CTYPE: int
LC_COLLATE: int
LC_TIME: int
LC_MONETARY: int
LC_NUMERIC: int
LC_ALL: int
CHAR_MAX: int

def setlocale(category: int, locale: str | None = None, /) -> str: ...
def localeconv() -> Mapping[str, int | str | list[int]]: ...

if sys.version_info >= (3, 11):
    def getencoding() -> str: ...

def strcoll(os1: str, os2: str, /) -> int: ...
def strxfrm(string: str, /) -> str: ...

# native gettext functions
# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
if sys.platform != "win32":
    LC_MESSAGES: int

    ABDAY_1: int
    ABDAY_2: int
    ABDAY_3: int
    ABDAY_4: int
    ABDAY_5: int
    ABDAY_6: int
    ABDAY_7: int

    ABMON_1: int
    ABMON_2: int
    ABMON_3: int
    ABMON_4: int
    ABMON_5: int
    ABMON_6: int
    ABMON_7: int
    ABMON_8: int
    ABMON_9: int
    ABMON_10: int
    ABMON_11: int
    ABMON_12: int

    DAY_1: int
    DAY_2: int
    DAY_3: int
    DAY_4: int
    DAY_5: int
    DAY_6: int
    DAY_7: int

    ERA: int
    ERA_D_T_FMT: int
    ERA_D_FMT: int
    ERA_T_FMT: int

    MON_1: int
    MON_2: int
    MON_3: int
    MON_4: int
    MON_5: int
    MON_6: int
    MON_7: int
    MON_8: int
    MON_9: int
    MON_10: int
    MON_11: int
    MON_12: int

    CODESET: int
    D_T_FMT: int
    D_FMT: int
    T_FMT: int
    T_FMT_AMPM: int
    AM_STR: int
    PM_STR: int

    RADIXCHAR: int
    THOUSEP: int
    YESEXPR: int
    NOEXPR: int
    CRNCYSTR: int
    ALT_DIGITS: int

    def nl_langinfo(key: int, /) -> str: ...

    # This is dependent on `libintl.h` which is a part of `gettext`
    # system dependency. These functions might be missing.
    # But, we always say that they are present.
    def gettext(msg: str, /) -> str: ...
    def dgettext(domain: str | None, msg: str, /) -> str: ...
    def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ...
    def textdomain(domain: str | None, /) -> str: ...
    def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ...
    def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ...
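The constants in the POSIX-only block above are keys for `nl_langinfo`, and the public `locale` module re-exports them. A sketch (illustrative only; POSIX-only, per the same platform guard the stub uses):

# Illustrative sketch (not part of the diff): the ABDAY_*/MON_*/D_FMT
# constants above are nl_langinfo() keys, available only where the stub
# guards them (sys.platform != "win32").
import sys

if sys.platform != "win32":
    import locale

    locale.setlocale(locale.LC_TIME, "C")
    print(locale.nl_langinfo(locale.D_FMT))  # e.g. '%m/%d/%y'
    print(locale.nl_langinfo(locale.ABDAY_1))  # e.g. 'Sun'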
@@ -1,16 +1,18 @@
import sys
from _typeshed import SupportsGetItem
from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence
from operator import attrgetter as attrgetter, itemgetter as itemgetter, methodcaller as methodcaller
from typing import Any, AnyStr, Protocol, SupportsAbs, SupportsIndex, TypeVar, overload
from typing_extensions import ParamSpec, TypeAlias, TypeIs
from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, SupportsIndex, TypeVar, final, overload
from typing_extensions import ParamSpec, TypeAlias, TypeVarTuple, Unpack

_R = TypeVar("_R")
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_K = TypeVar("_K")
_V = TypeVar("_V")
_P = ParamSpec("_P")
_Ts = TypeVarTuple("_Ts")

# The following protocols return "Any" instead of bool, since the comparison
# operators can be overloaded to return an arbitrary object. For example,
@@ -90,6 +92,40 @@ def setitem(a: MutableSequence[_T], b: slice, c: Sequence[_T], /) -> None: ...
@overload
def setitem(a: MutableMapping[_K, _V], b: _K, c: _V, /) -> None: ...
def length_hint(obj: object, default: int = 0, /) -> int: ...
@final
class attrgetter(Generic[_T_co]):
    @overload
    def __new__(cls, attr: str, /) -> attrgetter[Any]: ...
    @overload
    def __new__(cls, attr: str, attr2: str, /) -> attrgetter[tuple[Any, Any]]: ...
    @overload
    def __new__(cls, attr: str, attr2: str, attr3: str, /) -> attrgetter[tuple[Any, Any, Any]]: ...
    @overload
    def __new__(cls, attr: str, attr2: str, attr3: str, attr4: str, /) -> attrgetter[tuple[Any, Any, Any, Any]]: ...
    @overload
    def __new__(cls, attr: str, /, *attrs: str) -> attrgetter[tuple[Any, ...]]: ...
    def __call__(self, obj: Any, /) -> _T_co: ...

@final
class itemgetter(Generic[_T_co]):
    @overload
    def __new__(cls, item: _T, /) -> itemgetter[_T]: ...
    @overload
    def __new__(cls, item1: _T1, item2: _T2, /, *items: Unpack[_Ts]) -> itemgetter[tuple[_T1, _T2, Unpack[_Ts]]]: ...
    # __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie:
    # TypeVar "_KT_contra@SupportsGetItem" is contravariant
    # "tuple[int, int]" is incompatible with protocol "SupportsIndex"
    # preventing [_T_co, ...] instead of [Any, ...]
    #
    # A suspected mypy issue prevents using [..., _T] instead of [..., Any] here.
    # https://github.com/python/mypy/issues/14032
    def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ...

@final
class methodcaller:
    def __init__(self, name: str, /, *args: Any, **kwargs: Any) -> None: ...
    def __call__(self, obj: Any) -> Any: ...

def iadd(a: Any, b: Any, /) -> Any: ...
def iand(a: Any, b: Any, /) -> Any: ...
def iconcat(a: Any, b: Any, /) -> Any: ...
@@ -109,7 +145,3 @@ if sys.version_info >= (3, 11):
    def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...

def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ...

if sys.version_info >= (3, 14):
    def is_none(a: object, /) -> TypeIs[None]: ...
    def is_not_none(a: _T | None, /) -> TypeIs[_T]: ...
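The `attrgetter`/`itemgetter` overloads added in this hunk encode the runtime rule that a single key returns one value while several keys return a tuple. A sketch (illustrative only):

# Illustrative sketch (not part of the diff): the attrgetter/itemgetter
# overloads above encode that a single key returns one value while multiple
# keys return a tuple.
from operator import attrgetter, itemgetter

row = {"name": "ruff", "stars": 1}
print(itemgetter("name")(row))  # 'ruff'
print(itemgetter("name", "stars")(row))  # ('ruff', 1)

get_real = attrgetter("real")
print(get_real(3 + 4j))  # 3.0
print(attrgetter("real", "imag")(3 + 4j))  # (3.0, 4.0)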
@@ -1,5 +1,5 @@
from collections.abc import Iterable, Sequence
from typing import Final, TypeVar
from typing import TypeVar

_T = TypeVar("_T")
_K = TypeVar("_K")
@@ -7,15 +7,15 @@ _V = TypeVar("_V")

__all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"]

_UNIVERSAL_CONFIG_VARS: Final[tuple[str, ...]] # undocumented
_COMPILER_CONFIG_VARS: Final[tuple[str, ...]] # undocumented
_INITPRE: Final[str] # undocumented
_UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented
_COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented
_INITPRE: str # undocumented

def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented
def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented
def _find_build_tool(toolname: str) -> str: ... # undocumented

_SYSTEM_VERSION: Final[str | None] # undocumented
_SYSTEM_VERSION: str | None # undocumented

def _get_system_version() -> str: ... # undocumented
def _remove_original_values(_config_vars: dict[str, str]) -> None: ... # undocumented
@@ -1,7 +1,6 @@
import sys
from _typeshed import ReadableBuffer, WriteableBuffer
from collections.abc import Iterable
from socket import error as error, gaierror as gaierror, herror as herror, timeout as timeout
from typing import Any, SupportsIndex, overload
from typing_extensions import TypeAlias

@@ -667,6 +666,18 @@ if sys.platform != "win32":
if sys.platform != "win32" and sys.platform != "darwin":
    IPX_TYPE: int

# ===== Exceptions =====

error = OSError

class herror(error): ...
class gaierror(error): ...

if sys.version_info >= (3, 10):
    timeout = TimeoutError
else:
    class timeout(error): ...

# ===== Classes =====

class socket:
@@ -676,9 +687,8 @@ class socket:
    def type(self) -> int: ...
    @property
    def proto(self) -> int: ...
    # F811: "Redefinition of unused `timeout`"
    @property
    def timeout(self) -> float | None: ... # noqa: F811
    def timeout(self) -> float | None: ...
    if sys.platform == "win32":
        def __init__(
            self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | bytes | None = ...
@@ -778,9 +788,7 @@ def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: ...
def inet_pton(address_family: int, ip_string: str, /) -> bytes: ...
def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: ...
def getdefaulttimeout() -> float | None: ...

# F811: "Redefinition of unused `timeout`"
def setdefaulttimeout(timeout: float | None, /) -> None: ... # noqa: F811
def setdefaulttimeout(timeout: float | None, /) -> None: ...

if sys.platform != "win32":
    def sethostname(name: str, /) -> None: ...
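The exceptions hunk reflects that `socket.timeout` became an alias of the builtin `TimeoutError` in Python 3.10. A sketch (illustrative only; the unroutable address is an assumption used to force a timeout):

# Illustrative sketch (not part of the diff): since Python 3.10,
# socket.timeout is literally TimeoutError, as the aliasing in this hunk
# reflects, so either name catches a timed-out call.
import socket

print(socket.timeout is TimeoutError)  # True on 3.10+

s = socket.socket()
s.settimeout(0.01)
try:
    s.connect(("10.255.255.1", 9))  # unroutable address; likely times out
except TimeoutError:
    print("timed out")
except OSError as exc:
    print(f"other socket error: {exc}")
finally:
    s.close()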
117
crates/red_knot_module_resolver/vendor/typeshed/stdlib/_stat.pyi
vendored
Normal file
@@ -0,0 +1,117 @@
import sys
from typing import Literal

SF_APPEND: Literal[0x00040000]
SF_ARCHIVED: Literal[0x00010000]
SF_IMMUTABLE: Literal[0x00020000]
SF_NOUNLINK: Literal[0x00100000]
SF_SNAPSHOT: Literal[0x00200000]

ST_MODE: Literal[0]
ST_INO: Literal[1]
ST_DEV: Literal[2]
ST_NLINK: Literal[3]
ST_UID: Literal[4]
ST_GID: Literal[5]
ST_SIZE: Literal[6]
ST_ATIME: Literal[7]
ST_MTIME: Literal[8]
ST_CTIME: Literal[9]

S_IFIFO: Literal[0o010000]
S_IFLNK: Literal[0o120000]
S_IFREG: Literal[0o100000]
S_IFSOCK: Literal[0o140000]
S_IFBLK: Literal[0o060000]
S_IFCHR: Literal[0o020000]
S_IFDIR: Literal[0o040000]

# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux.
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int

S_ISUID: Literal[0o4000]
S_ISGID: Literal[0o2000]
S_ISVTX: Literal[0o1000]

S_IRWXU: Literal[0o0700]
S_IRUSR: Literal[0o0400]
S_IWUSR: Literal[0o0200]
S_IXUSR: Literal[0o0100]

S_IRWXG: Literal[0o0070]
S_IRGRP: Literal[0o0040]
S_IWGRP: Literal[0o0020]
S_IXGRP: Literal[0o0010]

S_IRWXO: Literal[0o0007]
S_IROTH: Literal[0o0004]
S_IWOTH: Literal[0o0002]
S_IXOTH: Literal[0o0001]

S_ENFMT: Literal[0o2000]
S_IREAD: Literal[0o0400]
S_IWRITE: Literal[0o0200]
S_IEXEC: Literal[0o0100]

UF_APPEND: Literal[0x00000004]
UF_COMPRESSED: Literal[0x00000020] # OS X 10.6+ only
UF_HIDDEN: Literal[0x00008000] # OX X 10.5+ only
UF_IMMUTABLE: Literal[0x00000002]
UF_NODUMP: Literal[0x00000001]
UF_NOUNLINK: Literal[0x00000010]
UF_OPAQUE: Literal[0x00000008]

def S_IMODE(mode: int, /) -> int: ...
def S_IFMT(mode: int, /) -> int: ...
def S_ISBLK(mode: int, /) -> bool: ...
def S_ISCHR(mode: int, /) -> bool: ...
def S_ISDIR(mode: int, /) -> bool: ...
def S_ISDOOR(mode: int, /) -> bool: ...
def S_ISFIFO(mode: int, /) -> bool: ...
def S_ISLNK(mode: int, /) -> bool: ...
def S_ISPORT(mode: int, /) -> bool: ...
def S_ISREG(mode: int, /) -> bool: ...
def S_ISSOCK(mode: int, /) -> bool: ...
def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...

if sys.platform == "win32":
    IO_REPARSE_TAG_SYMLINK: int
    IO_REPARSE_TAG_MOUNT_POINT: int
    IO_REPARSE_TAG_APPEXECLINK: int

if sys.platform == "win32":
    FILE_ATTRIBUTE_ARCHIVE: Literal[32]
    FILE_ATTRIBUTE_COMPRESSED: Literal[2048]
    FILE_ATTRIBUTE_DEVICE: Literal[64]
    FILE_ATTRIBUTE_DIRECTORY: Literal[16]
    FILE_ATTRIBUTE_ENCRYPTED: Literal[16384]
    FILE_ATTRIBUTE_HIDDEN: Literal[2]
    FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768]
    FILE_ATTRIBUTE_NORMAL: Literal[128]
    FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192]
    FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072]
    FILE_ATTRIBUTE_OFFLINE: Literal[4096]
    FILE_ATTRIBUTE_READONLY: Literal[1]
    FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024]
    FILE_ATTRIBUTE_SPARSE_FILE: Literal[512]
    FILE_ATTRIBUTE_SYSTEM: Literal[4]
    FILE_ATTRIBUTE_TEMPORARY: Literal[256]
    FILE_ATTRIBUTE_VIRTUAL: Literal[65536]

if sys.version_info >= (3, 13):
    SF_SETTABLE: Literal[0x3FFF0000]
    # https://github.com/python/cpython/issues/114081#issuecomment-2119017790
    # SF_RESTRICTED: Literal[0x00080000]
    SF_FIRMLINK: Literal[0x00800000]
    SF_DATALESS: Literal[0x40000000]

    SF_SUPPORTED: Literal[0x9F0000]
    SF_SYNTHETIC: Literal[0xC0000000]

    UF_TRACKED: Literal[0x00000040]
    UF_DATAVAULT: Literal[0x00000080]
    UF_SETTABLE: Literal[0x0000FFFF]
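The `S_IF*` masks and `S_IS*` predicates above operate on `st_mode`; the public `stat` module re-exports this private `_stat` C module. A sketch (illustrative only):

# Illustrative sketch (not part of the diff): the S_IF* masks and S_IS*
# predicates above operate on st_mode; the public `stat` module re-exports
# this private `_stat` C module.
import os
import stat

mode = os.stat(".").st_mode
print(stat.S_ISDIR(mode))  # True
print(oct(stat.S_IFMT(mode)))  # 0o40000 for a directory
print(stat.filemode(mode))  # e.g. 'drwxr-xr-x'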
@@ -1,4 +1,3 @@
import signal
import sys
from _typeshed import structseq
from collections.abc import Callable
@@ -17,39 +16,16 @@ class LockType:
    def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
    def release(self) -> None: ...
    def locked(self) -> bool: ...
    def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ...
    def release_lock(self) -> None: ...
    def locked_lock(self) -> bool: ...
    def __enter__(self) -> bool: ...
    def __exit__(
        self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
    ) -> None: ...

if sys.version_info >= (3, 13):
    @final
    class _ThreadHandle:
        ident: int

        def join(self, timeout: float | None = None, /) -> None: ...
        def is_done(self) -> bool: ...
        def _set_done(self) -> None: ...

    def start_joinable_thread(
        function: Callable[[], object], handle: _ThreadHandle | None = None, daemon: bool = True
    ) -> _ThreadHandle: ...
    lock = LockType

@overload
def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ...
@overload
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ...

if sys.version_info >= (3, 10):
    def interrupt_main(signum: signal.Signals = ..., /) -> None: ...

else:
    def interrupt_main() -> None: ...

def interrupt_main() -> None: ...
def exit() -> NoReturn: ...
def allocate_lock() -> LockType: ...
def get_ident() -> int: ...
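The `_thread` hunk shows the low-level primitives beneath `threading`: `start_new_thread`, `allocate_lock`, and (on one side) the 3.13 joinable-thread handles. A sketch using only the stable primitives (illustrative only):

# Illustrative sketch (not part of the diff): _thread.start_new_thread is
# the primitive beneath threading.Thread; it returns a thread ident, and the
# LockType shown above is what allocate_lock() hands back.
import _thread

done = _thread.allocate_lock()  # LockType instance
done.acquire()


def worker(tag: str) -> None:
    print(f"hello from {tag}")
    done.release()


_thread.start_new_thread(worker, ("worker-1",))
done.acquire()  # block until the worker releases
print("worker finished:", done.locked())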
@@ -1,6 +1,6 @@
import sys
from collections.abc import Callable
from typing import Any, ClassVar, Final, final
from typing import Any, ClassVar, Literal, final
from typing_extensions import TypeAlias

# _tkinter is meant to be only used internally by tkinter, but some tkinter
@@ -95,19 +95,19 @@ class TkappType:
    def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ...

# These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS
ALL_EVENTS: Final = -3
FILE_EVENTS: Final = 8
IDLE_EVENTS: Final = 32
TIMER_EVENTS: Final = 16
WINDOW_EVENTS: Final = 4
ALL_EVENTS: Literal[-3]
FILE_EVENTS: Literal[8]
IDLE_EVENTS: Literal[32]
TIMER_EVENTS: Literal[16]
WINDOW_EVENTS: Literal[4]

DONT_WAIT: Final = 2
EXCEPTION: Final = 8
READABLE: Final = 2
WRITABLE: Final = 4
DONT_WAIT: Literal[2]
EXCEPTION: Literal[8]
READABLE: Literal[2]
WRITABLE: Literal[4]

TCL_VERSION: Final[str]
TK_VERSION: Final[str]
TCL_VERSION: str
TK_VERSION: str

@final
class TkttType: