Compare commits

1 commit

Author: Micha Reiser
SHA1: 82f33db5e6
Message: Inline NodeKey construction and avoid AnyNodeRef
Date: 2024-08-21 19:04:02 +02:00
1427 changed files with 12856 additions and 53829 deletions


@@ -20,7 +20,7 @@
"extensions": [
"ms-python.python",
"rust-lang.rust-analyzer",
"fill-labs.dependi",
"serayuzgur.crates",
"tamasfe.even-better-toml",
"Swellaby.vscode-rust-test-adapter",
"charliermarsh.ruff"


@@ -63,7 +63,7 @@ jobs:
macos-x86_64:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
runs-on: macos-14
runs-on: macos-12
steps:
- uses: actions/checkout@v4
with:


@@ -17,21 +17,12 @@ on:
paths:
- .github/workflows/build-docker.yml
env:
RUFF_BASE_IMG: ghcr.io/${{ github.repository_owner }}/ruff
jobs:
docker-build:
name: Build Docker image (ghcr.io/astral-sh/ruff) for ${{ matrix.platform }}
docker-publish:
name: Build Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- uses: actions/checkout@v4
with:
@@ -45,6 +36,12 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/astral-sh/ruff
- name: Check tag consistency
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
run: |
@@ -58,233 +55,14 @@ jobs:
echo "Releasing ${version}"
fi
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
tags: |
type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
- name: Normalize Platform Pair (replace / with -)
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_TUPLE=${platform//\//-}" >> $GITHUB_ENV
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: .
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=ruff-${{ env.PLATFORM_TUPLE }}
cache-to: type=gha,mode=min,scope=ruff-${{ env.PLATFORM_TUPLE }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.RUFF_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
- name: Export digests
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digests
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_TUPLE }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
docker-publish:
name: Publish Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-build
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
docker buildx imagetools create \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.RUFF_BASE_IMG }}@sha256:%s ' *)
docker-publish-extra:
name: Publish additional Docker image based on ${{ matrix.image-mapping }}
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
strategy:
fail-fast: false
matrix:
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
image-mapping:
- alpine:3.20,alpine3.20,alpine
- debian:bookworm-slim,bookworm-slim,debian-slim
- buildpack-deps:bookworm,bookworm,debian
steps:
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate Dynamic Dockerfile Tags
shell: bash
run: |
set -euo pipefail
# Extract the image and tags from the matrix variable
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"
# Generate Dockerfile content
cat <<EOF > Dockerfile
FROM ${BASE_IMAGE}
COPY --from=${{ env.RUFF_BASE_IMG }}:latest /ruff /usr/local/bin/ruff
ENTRYPOINT []
CMD ["/usr/local/bin/ruff"]
EOF
# Initialize a variable to store all tag docker metadata patterns
TAG_PATTERNS=""
# Loop through all base tags and append its docker metadata pattern to the list
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
IFS=','; for TAG in ${BASE_TAGS}; do
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
done
# Remove the trailing newline from the pattern list
TAG_PATTERNS="${TAG_PATTERNS%\\n}"
# Export image cache name
echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> $GITHUB_ENV
# Export tag patterns using the multiline env var syntax
{
echo "TAG_PATTERNS<<EOF"
echo -e "${TAG_PATTERNS}"
echo EOF
} >> $GITHUB_ENV
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
# ghcr.io prefers index level annotations
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
flavor: |
latest=false
tags: |
${{ env.TAG_PATTERNS }}
- name: Build and push
- name: "Build and push Docker image"
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64
# We do not really need to cache here as the Dockerfile is tiny
#cache-from: type=gha,scope=ruff-${{ env.IMAGE_REF }}
#cache-to: type=gha,mode=min,scope=ruff-${{ env.IMAGE_REF }}
push: true
tags: ${{ steps.meta.outputs.tags }}
# Reuse the builder
cache-from: type=gha
cache-to: type=gha,mode=max
push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
tags: ghcr.io/astral-sh/ruff:latest,ghcr.io/astral-sh/ruff:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
labels: ${{ steps.meta.outputs.labels }}
annotations: ${{ steps.meta.outputs.annotations }}
# This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/ruff/pkgs/container/ruff
# show the ruff base image first since GitHub always shows the last updated image digests
# This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
docker-republish:
name: Annotate Docker image (ghcr.io/astral-sh/ruff)
runs-on: ubuntu-latest
environment:
name: release
needs:
- docker-publish-extra
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
images: ${{ env.RUFF_BASE_IMG }}
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
tags: |
type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Create manifest list and push
working-directory: /tmp/digests
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
# The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
# The printf will expand the base image with the `<RUFF_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
# The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <RUFF_BASE_IMG>@sha256:<sha256_1> <RUFF_BASE_IMG>@sha256:<sha256_2> ...`
run: |
readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
docker buildx imagetools create \
"${annotations[@]}" \
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.RUFF_BASE_IMG }}@sha256:%s ' *)
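To make the two "Create manifest list and push" steps above easier to follow, here is a small sketch of how the command is assembled; the tag values are invented for illustration, and the final command shape simply restates what the workflow's own comments describe.

```shell
# Runnable sketch of the jq expansion used above (tag values are invented):
DOCKER_METADATA_OUTPUT_JSON='{"tags":["ghcr.io/astral-sh/ruff:0.7.1","ghcr.io/astral-sh/ruff:0.7"]}'
jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
# prints: -t ghcr.io/astral-sh/ruff:0.7.1 -t ghcr.io/astral-sh/ruff:0.7
#
# The printf substitution then contributes one "<image>@sha256:<digest>" argument per
# digest file in /tmp/digests, so the final command has the shape (digests truncated):
#   docker buildx imagetools create \
#     -t ghcr.io/astral-sh/ruff:0.7.1 -t ghcr.io/astral-sh/ruff:0.7 \
#     ghcr.io/astral-sh/ruff@sha256:<digest1> ghcr.io/astral-sh/ruff@sha256:<digest2>
```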


@@ -37,7 +37,7 @@ jobs:
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v45
- uses: tj-actions/changed-files@v44
id: changed
with:
files_yaml: |
@@ -148,7 +148,7 @@ jobs:
# sync, not just public items. Eventually we should do this for all
# crates; for now add crates here as they are warning-clean to prevent
# regression.
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"
@@ -193,7 +193,7 @@ jobs:
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
@@ -518,8 +518,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
@@ -527,15 +525,13 @@ jobs:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@v3
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: uv pip install -r docs/requirements.txt --system
run: pip install -r docs/requirements.txt
- name: "Update README File"
run: python scripts/transform_readme.py --target mkdocs
- name: "Generate docs"
@@ -612,7 +608,7 @@ jobs:
just test
benchmarks:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20


@@ -34,10 +34,10 @@ jobs:
- name: "Set docs version"
run: |
version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
# if version is missing, use 'latest'
if [ -z "$version" ]; then
echo "Using 'latest' as version"
version="latest"
# if version is missing, exit with error
if [[ -z "$version" ]]; then
echo "Can't build docs without a version."
exit 1
fi
# Use version as display name for now
@@ -145,7 +145,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
branch_name="${{ env.branch_name }}"
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
# give the PR a few seconds to be created before trying to auto-merge it
sleep 10


@@ -29,7 +29,7 @@ jobs:
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
@@ -47,7 +47,7 @@ jobs:
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.9.0
uses: cloudflare/wrangler-action@v3.7.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}


@@ -43,7 +43,7 @@ jobs:
- run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
- uses: actions/setup-node@v4
with:
node-version: 20
node-version: 18
registry-url: "https://registry.npmjs.org"
- name: "Publish (dry-run)"
if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}


@@ -1,5 +1,3 @@
# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
@@ -66,7 +64,7 @@ jobs:
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.1/cargo-dist-installer.sh | sh"
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
- name: Cache cargo-dist
uses: actions/upload-artifact@v4
with:


@@ -37,13 +37,13 @@ jobs:
- name: Sync typeshed
id: sync
run: |
rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
mkdir ruff/crates/red_knot_vendored/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
- name: Commit the changes
id: commit
if: ${{ steps.sync.outcome == 'success' }}


@@ -1,8 +1,8 @@
fail_fast: false
fail_fast: true
exclude: |
(?x)^(
crates/red_knot_vendored/vendor/.*|
crates/red_knot_python_semantic/vendor/.*|
crates/red_knot_workspace/resources/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
@@ -17,18 +17,17 @@ exclude: |
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.21
rev: v0.19
hooks:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.18
rev: 0.7.17
hooks:
- id: mdformat
additional_dependencies:
- mdformat-mkdocs
- mdformat-admon
- mdformat-footnote
exclude: |
(?x)^(
docs/formatter/black\.md
@@ -36,7 +35,7 @@ repos:
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.42.0
rev: v0.41.0
hooks:
- id: markdownlint-fix
exclude: |
@@ -45,17 +44,8 @@ repos:
| docs/\w+\.md
)$
- repo: https://github.com/adamchainz/blacken-docs
rev: 1.19.1
hooks:
- id: blacken-docs
args: ["--pyi", "--line-length", "130"]
files: '^crates/.*/resources/mdtest/.*\.md'
additional_dependencies:
- black==24.10.0
- repo: https://github.com/crate-ci/typos
rev: v1.26.0
rev: v1.23.6
hooks:
- id: typos
@@ -69,7 +59,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.0
rev: v0.6.1
hooks:
- id: ruff-format
- id: ruff
@@ -78,8 +68,8 @@ repos:
require_serial: true
# Prettier
- repo: https://github.com/rbubley/mirrors-prettier
rev: v3.3.3
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
hooks:
- id: prettier
types: [yaml]


@@ -1,18 +1,5 @@
# Breaking Changes
## 0.7.0
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
[`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
instead.
## 0.6.0
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))


@@ -1,292 +1,5 @@
# Changelog
## 0.7.1
### Preview features
- Fix `E221` and `E222` to flag missing or extra whitespace around `==` operator ([#13890](https://github.com/astral-sh/ruff/pull/13890))
- Formatter: Alternate quotes for strings inside f-strings in preview ([#13860](https://github.com/astral-sh/ruff/pull/13860))
- Formatter: Join implicit concatenated strings when they fit on a line ([#13663](https://github.com/astral-sh/ruff/pull/13663))
- \[`pylint`\] Restrict `iteration-over-set` to only work on sets of literals (`PLC0208`) ([#13731](https://github.com/astral-sh/ruff/pull/13731))
### Rule changes
- \[`flake8-type-checking`\] Support auto-quoting when annotations contain quotes ([#11811](https://github.com/astral-sh/ruff/pull/11811))
### Server
- Avoid indexing the workspace for single-file mode ([#13770](https://github.com/astral-sh/ruff/pull/13770))
### Bug fixes
- Make `ARG002` compatible with `EM101` when raising `NotImplementedError` ([#13714](https://github.com/astral-sh/ruff/pull/13714))
### Other changes
- Introduce more Docker tags for Ruff (similar to uv) ([#13274](https://github.com/astral-sh/ruff/pull/13274))
## 0.7.0
Check out the [blog post](https://astral.sh/blog/ruff-v0.7.0) for a migration guide and overview of the changes!
### Breaking changes
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
([#12838](https://github.com/astral-sh/ruff/pull/12838), [#13292](https://github.com/astral-sh/ruff/pull/13292)).
This was a change that we attempted to make in Ruff v0.6.0, but only partially made due to an error on our part.
See the [blog post](https://astral.sh/blog/ruff-v0.7.0) for more details.
- The `useless-try-except` rule (in our `tryceratops` category) has been recoded from `TRY302` to
`TRY203` ([#13502](https://github.com/astral-sh/ruff/pull/13502)). This ensures Ruff's code is consistent with
the same rule in the [`tryceratops`](https://github.com/guilatrova/tryceratops) linter.
- The `lint.allow-unused-imports` setting has been removed ([#13677](https://github.com/astral-sh/ruff/pull/13677)). Use
[`lint.pyflakes.allow-unused-imports`](https://docs.astral.sh/ruff/settings/#lint_pyflakes_allowed-unused-imports)
instead.
### Formatter preview style
- Normalize implicit concatenated f-string quotes per part ([#13539](https://github.com/astral-sh/ruff/pull/13539))
### Preview linter features
- \[`refurb`\] implement `hardcoded-string-charset` (FURB156) ([#13530](https://github.com/astral-sh/ruff/pull/13530))
- \[`refurb`\] Count codepoints not bytes for `slice-to-remove-prefix-or-suffix (FURB188)` ([#13631](https://github.com/astral-sh/ruff/pull/13631))
### Rule changes
- \[`pylint`\] Mark `PLE1141` fix as unsafe ([#13629](https://github.com/astral-sh/ruff/pull/13629))
- \[`flake8-async`\] Consider async generators to be "checkpoints" for `cancel-scope-no-checkpoint` (`ASYNC100`) ([#13639](https://github.com/astral-sh/ruff/pull/13639))
- \[`flake8-bugbear`\] Do not suggest setting parameter `strict=` to `False` in `B905` diagnostic message ([#13656](https://github.com/astral-sh/ruff/pull/13656))
- \[`flake8-todos`\] Only flag the word "TODO", not words starting with "todo" (`TD006`) ([#13640](https://github.com/astral-sh/ruff/pull/13640))
- \[`pycodestyle`\] Fix whitespace-related false positives and false negatives inside type-parameter lists (`E231`, `E251`) ([#13704](https://github.com/astral-sh/ruff/pull/13704))
- \[`flake8-simplify`\] Stabilize preview behavior for `SIM115` so that the rule can detect files
being opened from a wider range of standard-library functions ([#12959](https://github.com/astral-sh/ruff/pull/12959)).
### CLI
- Add explanation of fixable in `--statistics` command ([#13774](https://github.com/astral-sh/ruff/pull/13774))
### Bug fixes
- \[`pyflakes`\] Allow `ipytest` cell magic (`F401`) ([#13745](https://github.com/astral-sh/ruff/pull/13745))
- \[`flake8-use-pathlib`\] Fix `PTH123` false positive when `open` is passed a file descriptor ([#13616](https://github.com/astral-sh/ruff/pull/13616))
- \[`flake8-bandit`\] Detect patterns from multi line SQL statements (`S608`) ([#13574](https://github.com/astral-sh/ruff/pull/13574))
- \[`flake8-pyi`\] - Fix dropped expressions in `PYI030` autofix ([#13727](https://github.com/astral-sh/ruff/pull/13727))
## 0.6.9
### Preview features
- Fix codeblock dynamic line length calculation for indented docstring examples ([#13523](https://github.com/astral-sh/ruff/pull/13523))
- \[`refurb`\] Mark `FURB118` fix as unsafe ([#13613](https://github.com/astral-sh/ruff/pull/13613))
### Rule changes
- \[`pydocstyle`\] Don't raise `D208` when last line is non-empty ([#13372](https://github.com/astral-sh/ruff/pull/13372))
- \[`pylint`\] Preserve trivia (i.e. comments) in `PLR5501` autofix ([#13573](https://github.com/astral-sh/ruff/pull/13573))
### Configuration
- \[`pyflakes`\] Add `allow-unused-imports` setting for `unused-import` rule (`F401`) ([#13601](https://github.com/astral-sh/ruff/pull/13601))
### Bug fixes
- Support ruff discovery in pip build environments ([#13591](https://github.com/astral-sh/ruff/pull/13591))
- \[`flake8-bugbear`\] Avoid short circuiting `B017` for multiple context managers ([#13609](https://github.com/astral-sh/ruff/pull/13609))
- \[`pylint`\] Do not offer an invalid fix for `PLR1716` when the comparisons contain parenthesis ([#13527](https://github.com/astral-sh/ruff/pull/13527))
- \[`pyupgrade`\] Fix `UP043` to apply to `collections.abc.Generator` and `collections.abc.AsyncGenerator` ([#13611](https://github.com/astral-sh/ruff/pull/13611))
- \[`refurb`\] Fix handling of slices in tuples for `FURB118`, e.g., `x[:, 1]` ([#13518](https://github.com/astral-sh/ruff/pull/13518))
### Documentation
- Update GitHub Action link to `astral-sh/ruff-action` ([#13551](https://github.com/astral-sh/ruff/pull/13551))
## 0.6.8
### Preview features
- Remove unnecessary parentheses around `match case` clauses ([#13510](https://github.com/astral-sh/ruff/pull/13510))
- Parenthesize overlong `if` guards in `match..case` clauses ([#13513](https://github.com/astral-sh/ruff/pull/13513))
- Detect basic wildcard imports in `ruff analyze graph` ([#13486](https://github.com/astral-sh/ruff/pull/13486))
- \[`pylint`\] Implement `boolean-chained-comparison` (`R1716`) ([#13435](https://github.com/astral-sh/ruff/pull/13435))
### Rule changes
- \[`flake8-simplify`\] Detect `SIM910` when using variadic keyword arguments, i.e., `**kwargs` ([#13503](https://github.com/astral-sh/ruff/pull/13503))
- \[`pyupgrade`\] Avoid false negatives with non-reference shadowed bindings of loop variables (`UP028`) ([#13504](https://github.com/astral-sh/ruff/pull/13504))
### Bug fixes
- Detect tuples bound to variadic positional arguments i.e. `*args` ([#13512](https://github.com/astral-sh/ruff/pull/13512))
- Exit gracefully on broken pipe errors ([#13485](https://github.com/astral-sh/ruff/pull/13485))
- Avoid panic when analyze graph hits broken pipe ([#13484](https://github.com/astral-sh/ruff/pull/13484))
### Performance
- Reuse `BTreeSets` in module resolver ([#13440](https://github.com/astral-sh/ruff/pull/13440))
- Skip traversal for non-compound statements ([#13441](https://github.com/astral-sh/ruff/pull/13441))
## 0.6.7
### Preview features
- Add Python version support to ruff analyze CLI ([#13426](https://github.com/astral-sh/ruff/pull/13426))
- Add `exclude` support to `ruff analyze` ([#13425](https://github.com/astral-sh/ruff/pull/13425))
- Fix parentheses around return type annotations ([#13381](https://github.com/astral-sh/ruff/pull/13381))
### Rule changes
- \[`pycodestyle`\] Fix: Don't autofix if the first line ends in a question mark? (D400) ([#13399](https://github.com/astral-sh/ruff/pull/13399))
### Bug fixes
- Respect `lint.exclude` in ruff check `--add-noqa` ([#13427](https://github.com/astral-sh/ruff/pull/13427))
### Performance
- Avoid tracking module resolver files in Salsa ([#13437](https://github.com/astral-sh/ruff/pull/13437))
- Use `forget` for module resolver database ([#13438](https://github.com/astral-sh/ruff/pull/13438))
## 0.6.6
### Preview features
- \[`refurb`\] Skip `slice-to-remove-prefix-or-suffix` (`FURB188`) when non-trivial slice steps are present ([#13405](https://github.com/astral-sh/ruff/pull/13405))
- Add a subcommand to generate dependency graphs ([#13402](https://github.com/astral-sh/ruff/pull/13402))
### Formatter
- Fix placement of inline parameter comments ([#13379](https://github.com/astral-sh/ruff/pull/13379))
### Server
- Fix off-by one error in the `LineIndex::offset` calculation ([#13407](https://github.com/astral-sh/ruff/pull/13407))
### Bug fixes
- \[`fastapi`\] Respect FastAPI aliases in route definitions ([#13394](https://github.com/astral-sh/ruff/pull/13394))
- \[`pydocstyle`\] Respect word boundaries when detecting function signature in docs ([#13388](https://github.com/astral-sh/ruff/pull/13388))
### Documentation
- Add backlinks to rule overview linter ([#13368](https://github.com/astral-sh/ruff/pull/13368))
- Fix documentation for editor vim plugin ALE ([#13348](https://github.com/astral-sh/ruff/pull/13348))
- Fix rendering of `FURB188` docs ([#13406](https://github.com/astral-sh/ruff/pull/13406))
## 0.6.5
### Preview features
- \[`pydoclint`\] Ignore `DOC201` when function name is "__new__" ([#13300](https://github.com/astral-sh/ruff/pull/13300))
- \[`refurb`\] Implement `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13256](https://github.com/astral-sh/ruff/pull/13256))
### Rule changes
- \[`eradicate`\] Ignore script-comments with multiple end-tags (`ERA001`) ([#13283](https://github.com/astral-sh/ruff/pull/13283))
- \[`pyflakes`\] Improve error message for `UndefinedName` when a builtin was added in a newer version than specified in Ruff config (`F821`) ([#13293](https://github.com/astral-sh/ruff/pull/13293))
### Server
- Add support for extensionless Python files for server ([#13326](https://github.com/astral-sh/ruff/pull/13326))
- Fix configuration inheritance for configurations specified in the LSP settings ([#13285](https://github.com/astral-sh/ruff/pull/13285))
### Bug fixes
- \[`ruff`\] Handle unary operators in `decimal-from-float-literal` (`RUF032`) ([#13275](https://github.com/astral-sh/ruff/pull/13275))
### CLI
- Only include rules with diagnostics in SARIF metadata ([#13268](https://github.com/astral-sh/ruff/pull/13268))
### Playground
- Add "Copy as pyproject.toml/ruff.toml" and "Paste from TOML" ([#13328](https://github.com/astral-sh/ruff/pull/13328))
- Fix errors not shown for restored snippet on page load ([#13262](https://github.com/astral-sh/ruff/pull/13262))
## 0.6.4
### Preview features
- \[`flake8-builtins`\] Use dynamic builtins list based on Python version ([#13172](https://github.com/astral-sh/ruff/pull/13172))
- \[`pydoclint`\] Permit yielding `None` in `DOC402` and `DOC403` ([#13148](https://github.com/astral-sh/ruff/pull/13148))
- \[`pylint`\] Update diagnostic message for `PLW3201` ([#13194](https://github.com/astral-sh/ruff/pull/13194))
- \[`ruff`\] Implement `post-init-default` (`RUF033`) ([#13192](https://github.com/astral-sh/ruff/pull/13192))
- \[`ruff`\] Implement useless if-else (`RUF034`) ([#13218](https://github.com/astral-sh/ruff/pull/13218))
### Rule changes
- \[`flake8-pyi`\] Respect `pep8_naming.classmethod-decorators` settings when determining if a method is a classmethod in `custom-type-var-return-type` (`PYI019`) ([#13162](https://github.com/astral-sh/ruff/pull/13162))
- \[`flake8-pyi`\] Teach various rules that annotations might be stringized ([#12951](https://github.com/astral-sh/ruff/pull/12951))
- \[`pylint`\] Avoid `no-self-use` for `attrs`-style validators ([#13166](https://github.com/astral-sh/ruff/pull/13166))
- \[`pylint`\] Recurse into subscript subexpressions when searching for list/dict lookups (`PLR1733`, `PLR1736`) ([#13186](https://github.com/astral-sh/ruff/pull/13186))
- \[`pyupgrade`\] Detect `aiofiles.open` calls in `UP015` ([#13173](https://github.com/astral-sh/ruff/pull/13173))
- \[`pyupgrade`\] Mark `sys.version_info[0] < 3` and similar comparisons as outdated (`UP036`) ([#13175](https://github.com/astral-sh/ruff/pull/13175))
### CLI
- Enrich messages of SARIF results ([#13180](https://github.com/astral-sh/ruff/pull/13180))
- Handle singular case for incompatible rules warning in `ruff format` output ([#13212](https://github.com/astral-sh/ruff/pull/13212))
### Bug fixes
- \[`pydocstyle`\] Improve heuristics for detecting Google-style docstrings ([#13142](https://github.com/astral-sh/ruff/pull/13142))
- \[`refurb`\] Treat `sep` arguments with effects as unsafe removals (`FURB105`) ([#13165](https://github.com/astral-sh/ruff/pull/13165))
## 0.6.3
### Preview features
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))
### Rule changes
- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))
### Bug fixes
- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://github.com/astral-sh/ruff/pull/13105))
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))
## 0.6.2
### Preview features
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for `fastAPI` paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
- \[`ruff`\] Implement check for Decimal called with a float literal (RUF032) ([#12909](https://github.com/astral-sh/ruff/pull/12909))
### Rule changes
- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))
### Server
- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))
### Bug fixes
- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
- \[`pylint`\] - Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))
### Documentation
- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))
## 0.6.1
This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,


@@ -29,14 +29,16 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```
You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
run Python utility commands.
And you'll need pre-commit to run some validation checks:
```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
```
You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:
```shell
uv tool install pre-commit
pre-commit install
```
@@ -64,7 +66,7 @@ and that it passes both the lint and test validation checks:
```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```
These checks will run on GitHub Actions when you open your pull request, but running them locally
@@ -265,20 +267,26 @@ To preview any changes to the documentation locally:
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
1. Install MkDocs and Material for MkDocs with:
```shell
pip install -r docs/requirements.txt
```
1. Generate the MkDocs site with:
```shell
uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
python scripts/generate_mkdocs.py
```
1. Run the development server with:
```shell
# For contributors.
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
mkdocs serve -f mkdocs.public.yml
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
mkdocs serve -f mkdocs.insiders.yml
```
The documentation should then be available locally at
@@ -360,8 +368,9 @@ GitHub Actions will run your changes against a number of real-world projects fro
report on any linter or formatter differences. You can also run those checks locally via:
```shell
uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
pip install -e ./python/ruff-ecosystem
ruff-ecosystem check ruff "./target/debug/ruff"
ruff-ecosystem format ruff "./target/debug/ruff"
```
See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
@@ -388,18 +397,12 @@ which makes it a good target for benchmarking.
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```
Install `hyperfine`:
```shell
cargo install hyperfine
```
To benchmark the release build:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
@@ -418,7 +421,7 @@ To benchmark against the ecosystem's existing tools:
```shell
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"pyflakes crates/ruff_linter/resources/test/cpython" \
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
"pycodestyle crates/ruff_linter/resources/test/cpython" \
@@ -464,7 +467,7 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
@@ -521,8 +524,6 @@ You can run the benchmarks with
cargo benchmark
```
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
#### Benchmark-driven Development
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
@@ -561,7 +562,7 @@ cargo install critcmp
#### Tips
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for cleaner output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)

Cargo.lock (generated): file diff suppressed because it is too large (686 lines changed).


@@ -4,7 +4,7 @@ resolver = "2"
[workspace.package]
edition = "2021"
rust-version = "1.80"
rust-version = "1.76"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -14,10 +14,9 @@ license = "MIT"
[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db", default-features = false }
ruff_db = { path = "crates/ruff_db" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
@@ -34,18 +33,15 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }
ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
red_knot_vendored = { path = "crates/red_knot_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_test = { path = "crates/red_knot_test" }
red_knot_workspace = { path = "crates/red_knot_workspace", default-features = false }
red_knot_workspace = { path = "crates/red_knot_workspace" }
aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
@@ -65,19 +61,14 @@ compact_str = "0.8.0"
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
dir-test = { version = "0.3.0" }
drop_bomb = { version = "0.1.5" }
env_logger = { version = "0.11.0" }
etcetera = { version = "0.8.0" }
fern = { version = "0.7.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = { version = "0.15.0", default-features = false, features = [
"raw-entry",
"inline-more",
] }
hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
@@ -95,28 +86,29 @@ libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
"proposed",
"proposed",
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.7.1" }
pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.5.0" }
quick-junit = { version = "0.4.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "254c749b02cde2fd29852a7463a33e800b771758" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -124,7 +116,7 @@ serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
"macros",
"macros",
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
@@ -141,10 +133,7 @@ toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
"env-filter",
"fmt",
] }
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
tracing-tree = { version = "0.4.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
@@ -155,10 +144,10 @@ unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
"js",
"v4",
"fast-rng",
"macro-diagnostics",
"js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
@@ -169,10 +158,7 @@ zip = { version = "0.6.6", default-features = false }
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = [
"cfg(fuzzing)",
"cfg(codspeed)",
] }
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }
[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -244,9 +230,9 @@ inherits = "release"
# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.22.1"
cargo-dist-version = "0.18.0"
# CI backends to support
ci = "github"
ci = ["github"]
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
@@ -255,33 +241,33 @@ windows-archive = ".zip"
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
"aarch64-apple-darwin",
"aarch64-pc-windows-msvc",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"arm-unknown-linux-musleabihf",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"i686-unknown-linux-musl",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"aarch64-apple-darwin",
"aarch64-pc-windows-msvc",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"arm-unknown-linux-musleabihf",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"i686-unknown-linux-musl",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
# Which actions to run on pull requests
# Publish jobs to run in CI
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase cargo-dist should use to create the GitHub release
# The stage during which the GitHub Release should be created
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
@@ -289,15 +275,9 @@ build-local-artifacts = false
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
post-announce-jobs = [
"./notify-dependents",
"./publish-docs",
"./publish-playground",
]
# Announcement jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = "CARGO_HOME"


@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM ubuntu AS build
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME


@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)
## Getting Started<a id="getting-started"></a>
## Getting Started
For more, see the [documentation](https://docs.astral.sh/ruff/).
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.7.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.7.1/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.7.1
rev: v0.6.1
hooks:
# Run the linter.
- id: ruff
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/astral-sh/ruff-action):
[`ruff-action`](https://github.com/chartboost/ruff-action):
```yaml
name: Ruff
@@ -192,10 +192,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: astral-sh/ruff-action@v1
- uses: chartboost/ruff-action@v1
```
### Configuration<a id="configuration"></a>
### Configuration
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -291,7 +291,7 @@ features that may change prior to stabilization.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
## Rules<a id="rules"></a>
## Rules
<!-- Begin section: Rules -->
@@ -367,21 +367,21 @@ quality tools, including:
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
## Contributing<a id="contributing"></a>
## Contributing
Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
## Support<a id="support"></a>
## Support
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
## Acknowledgements<a id="acknowledgements"></a>
## Acknowledgements
Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -405,7 +405,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
Ruff is released under the MIT license.
## Who's Using Ruff?<a id="whos-using-ruff"></a>
## Who's Using Ruff?
Ruff is used by a number of major open-source projects and companies, including:
@@ -524,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```
## License<a id="license"></a>
## License
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)


@@ -1,6 +1,6 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_vendored/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
@@ -8,7 +8,7 @@ hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
pn = "pn" # `import panel as pn` is a thing
pn = "pn" # `import panel as pd` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`


@@ -13,8 +13,9 @@ license.workspace = true
[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true, features = ["zstd"] }
red_knot_workspace = { workspace = true }
red_knot_server = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
anyhow = { workspace = true }


@@ -13,17 +13,12 @@ The CLI supports different verbosity levels.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing-spans: At this level, you're logging everything. Most of this is wasted; it's really slow, and we dump e.g. the entire resolution graph. Only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.
## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
## `RED_KNOT_LOG`
By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).
The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confusing log output where messages from different threads are interleaved.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
### Examples
#### Show all debug messages
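Purely as an illustration (not content from the file in this hunk): combining the two environment variables described above might look like the sketch below. The `red_knot` binary name comes from this repository, while the exact flags and filter directive are assumptions.

```shell
# Hypothetical invocation; the flags and the filter value are illustrative assumptions.
# RAYON_NUM_THREADS=1 switches off parallel checking so log lines from different
# threads are not interleaved; RED_KNOT_LOG takes a tracing-subscriber EnvFilter
# directive, here limiting output to debug-level messages from the red_knot crate.
RAYON_NUM_THREADS=1 RED_KNOT_LOG=red_knot=debug red_knot -vv
```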


@@ -7,12 +7,12 @@ use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use salsa::plumbing::ZalsaDatabase;
use red_knot_python_semantic::SitePackages;
use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::site_packages::VirtualEnvironment;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;
@@ -65,14 +65,15 @@ to resolve type information for the project's third-party dependencies.",
value_name = "PATH",
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
)]
extra_search_path: Option<Vec<SystemPathBuf>>,
extra_search_path: Vec<SystemPathBuf>,
#[arg(
long,
help = "Python version to assume when resolving types",
value_name = "VERSION"
)]
target_version: Option<TargetVersion>,
default_value_t = TargetVersion::default(),
value_name="VERSION")
]
target_version: TargetVersion,
#[clap(flatten)]
verbosity: Verbosity,
@@ -85,36 +86,6 @@ to resolve type information for the project's third-party dependencies.",
watch: bool,
}
impl Args {
fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
let mut configuration = Configuration::default();
if let Some(target_version) = self.target_version {
configuration.target_version = Some(target_version.into());
}
if let Some(venv_path) = &self.venv_path {
configuration.search_paths.site_packages = Some(SitePackages::Derived {
venv_path: SystemPath::absolute(venv_path, cli_cwd),
});
}
if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
configuration.search_paths.custom_typeshed =
Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
}
if let Some(extra_search_paths) = &self.extra_search_path {
configuration.search_paths.extra_paths = extra_search_paths
.iter()
.map(|path| Some(SystemPath::absolute(path, cli_cwd)))
.collect();
}
configuration
}
}
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Start the language server
@@ -144,13 +115,22 @@ pub fn main() -> ExitStatus {
}
fn run() -> anyhow::Result<ExitStatus> {
let args = Args::parse_from(std::env::args().collect::<Vec<_>>());
let Args {
command,
current_directory,
custom_typeshed_dir,
extra_search_path: extra_paths,
venv_path,
target_version,
verbosity,
watch,
} = Args::parse_from(std::env::args().collect::<Vec<_>>());
if matches!(args.command, Some(Command::Server)) {
if matches!(command, Some(Command::Server)) {
return run_server().map(|()| ExitStatus::Success);
}
let verbosity = args.verbosity.level();
let verbosity = verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
@@ -160,21 +140,19 @@ fn run() -> anyhow::Result<ExitStatus> {
SystemPathBuf::from_path_buf(cwd)
.map_err(|path| {
anyhow!(
"The current working directory `{}` contains non-Unicode characters. Red Knot only supports Unicode paths.",
"The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
path.display()
)
})?
};
let cwd = args
.current_directory
.as_ref()
let cwd = current_directory
.map(|cwd| {
if cwd.as_std_path().is_dir() {
Ok(SystemPath::absolute(cwd, &cli_base_path))
Ok(SystemPath::absolute(&cwd, &cli_base_path))
} else {
Err(anyhow!(
"Provided current-directory path `{cwd}` is not a directory"
"Provided current-directory path '{cwd}' is not a directory."
))
}
})
@@ -182,18 +160,33 @@ fn run() -> anyhow::Result<ExitStatus> {
.unwrap_or_else(|| cli_base_path.clone());
let system = OsSystem::new(cwd.clone());
let cli_configuration = args.to_configuration(&cwd);
let workspace_metadata = WorkspaceMetadata::from_path(
system.current_directory(),
&system,
Some(cli_configuration.clone()),
)?;
let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;
// TODO: Verify the remaining search path settings eagerly.
let site_packages = venv_path
.map(|path| {
VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
.and_then(|venv| venv.site_packages_directories(&system))
})
.transpose()?
.unwrap_or_default();
// TODO: Respect the settings from the workspace metadata. when resolving the program settings.
let program_settings = ProgramSettings {
target_version: target_version.into(),
search_paths: SearchPathSettings {
extra_paths,
src_root: workspace_metadata.root().to_path_buf(),
custom_typeshed: custom_typeshed_dir,
site_packages,
},
};
// TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists.
let mut db = RootDatabase::new(workspace_metadata, system)?;
let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -205,7 +198,7 @@ fn run() -> anyhow::Result<ExitStatus> {
}
})?;
let exit_status = if args.watch {
let exit_status = if watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)
@@ -245,12 +238,10 @@ struct MainLoop {
/// The file system watcher, if running in watch mode.
watcher: Option<WorkspaceWatcher>,
cli_configuration: Configuration,
}
impl MainLoop {
fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
fn new() -> (Self, MainLoopCancellationToken) {
let (sender, receiver) = crossbeam_channel::bounded(10);
(
@@ -258,7 +249,6 @@ impl MainLoop {
sender: sender.clone(),
receiver,
watcher: None,
cli_configuration,
},
MainLoopCancellationToken { sender },
)
@@ -341,7 +331,7 @@ impl MainLoop {
MainLoopMessage::ApplyChanges(changes) => {
revision += 1;
// Automatically cancels any pending queries and waits for them to complete.
db.apply_changes(changes, Some(&self.cli_configuration));
db.apply_changes(changes);
if let Some(watcher) = self.watcher.as_mut() {
watcher.update(db);
}

View File

@@ -5,11 +5,12 @@ use std::time::Duration;
use anyhow::{anyhow, Context};
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
use red_knot_python_semantic::{
resolve_module, ModuleName, Program, ProgramSettings, PythonVersion, SearchPathSettings,
};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch;
use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration};
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File, FileError};
use ruff_db::source::source_text;
@@ -24,7 +25,7 @@ struct TestCase {
/// We need to hold on to it in the test case or the temp files get deleted.
_temp_dir: tempfile::TempDir,
root_dir: SystemPathBuf,
configuration: Configuration,
search_path_settings: SearchPathSettings,
}
impl TestCase {
@@ -40,16 +41,20 @@ impl TestCase {
&self.db
}
fn db_mut(&mut self) -> &mut RootDatabase {
&mut self.db
}
fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
self.try_stop_watch(Duration::from_secs(10))
.expect("Expected watch changes but observed none")
.expect("Expected watch changes but observed none.")
}
fn try_stop_watch(&mut self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
let watcher = self
.watcher
.take()
.expect("Cannot call `stop_watch` more than once");
.expect("Cannot call `stop_watch` more than once.");
let mut all_events = self
.changes_receiver
@@ -72,7 +77,7 @@ impl TestCase {
#[cfg(unix)]
fn take_watch_changes(&self) -> Vec<watch::ChangeEvent> {
self.try_take_watch_changes(Duration::from_secs(10))
.expect("Expected watch changes but observed none")
.expect("Expected watch changes but observed none.")
}
fn try_take_watch_changes(&self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
@@ -100,20 +105,16 @@ impl TestCase {
Some(all_events)
}
fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
self.db.apply_changes(changes, Some(&self.configuration));
}
fn update_search_path_settings(
&mut self,
configuration: SearchPathConfiguration,
f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
) -> anyhow::Result<()> {
let program = Program::get(self.db());
self.configuration.search_paths = configuration.clone();
let new_settings = configuration.into_settings(self.db.workspace().root(&self.db));
let new_settings = f(&self.search_path_settings);
program.update_search_paths(&mut self.db, &new_settings)?;
program.update_search_paths(&mut self.db, new_settings.clone())?;
self.search_path_settings = new_settings;
if let Some(watcher) = &mut self.watcher {
watcher.update(&self.db);
@@ -150,14 +151,14 @@ where
let absolute_path = workspace_path.join(relative_path);
if let Some(parent) = absolute_path.parent() {
std::fs::create_dir_all(parent).with_context(|| {
format!("Failed to create parent directory for file `{relative_path}`")
format!("Failed to create parent directory for file '{relative_path}'.",)
})?;
}
let mut file = std::fs::File::create(absolute_path.as_std_path())
.with_context(|| format!("Failed to open file `{relative_path}`"))?;
.with_context(|| format!("Failed to open file '{relative_path}'"))?;
file.write_all(content.as_bytes())
.with_context(|| format!("Failed to write to file `{relative_path}`"))?;
.with_context(|| format!("Failed to write to file '{relative_path}'"))?;
file.sync_data()?;
}
@@ -178,14 +179,17 @@ fn setup<F>(setup_files: F) -> anyhow::Result<TestCase>
where
F: SetupFiles,
{
setup_with_search_paths(setup_files, |_root, _workspace_path| {
SearchPathConfiguration::default()
setup_with_search_paths(setup_files, |_root, workspace_path| SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![],
})
}
fn setup_with_search_paths<F>(
setup_files: F,
create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathConfiguration,
create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathSettings,
) -> anyhow::Result<TestCase>
where
F: SetupFiles,
@@ -194,7 +198,7 @@ where
let root_path = SystemPath::from_std_path(temp_dir.path()).ok_or_else(|| {
anyhow!(
"Temporary directory `{}` is not a valid UTF-8 path.",
"Temp directory '{}' is not a valid UTF-8 path.",
temp_dir.path().display()
)
})?;
@@ -209,7 +213,7 @@ where
let workspace_path = root_path.join("workspace");
std::fs::create_dir_all(workspace_path.as_std_path())
.with_context(|| format!("Failed to create workspace directory `{workspace_path}`"))?;
.with_context(|| format!("Failed to create workspace directory '{workspace_path}'",))?;
setup_files
.setup(&root_path, &workspace_path)
@@ -217,34 +221,25 @@ where
let system = OsSystem::new(&workspace_path);
let search_paths = create_search_paths(&root_path, &workspace_path);
let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
let search_path_settings = create_search_paths(&root_path, workspace.root());
for path in search_paths
for path in search_path_settings
.extra_paths
.iter()
.flatten()
.chain(search_paths.custom_typeshed.iter())
.chain(search_paths.site_packages.iter().flat_map(|site_packages| {
if let SitePackages::Known(path) = site_packages {
path.as_slice()
} else {
&[]
}
}))
.chain(search_path_settings.site_packages.iter())
.chain(search_path_settings.custom_typeshed.iter())
{
std::fs::create_dir_all(path.as_std_path())
.with_context(|| format!("Failed to create search path `{path}`"))?;
.with_context(|| format!("Failed to create search path '{path}'"))?;
}
let configuration = Configuration {
target_version: Some(PythonVersion::PY312),
search_paths,
let settings = ProgramSettings {
target_version: PythonVersion::default(),
search_paths: search_path_settings.clone(),
};
let workspace =
WorkspaceMetadata::from_path(&workspace_path, &system, Some(configuration.clone()))?;
let db = RootDatabase::new(workspace, system)?;
let db = RootDatabase::new(workspace, settings, system)?;
let (sender, receiver) = crossbeam::channel::unbounded();
let watcher = directory_watcher(move |events| sender.send(events).unwrap())
@@ -259,7 +254,7 @@ where
watcher: Some(watcher),
_temp_dir: temp_dir,
root_dir: root_path,
configuration,
search_path_settings,
};
// Sometimes the file watcher reports changes for events that happened before the watcher was started.
@@ -312,7 +307,7 @@ fn new_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
let foo = case.system_file(&foo_path).expect("foo.py to exist.");
@@ -335,7 +330,7 @@ fn new_ignored_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(case.system_file(&foo_path).is_ok());
assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);
@@ -359,7 +354,7 @@ fn changed_file() -> anyhow::Result<()> {
assert!(!changes.is_empty());
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
assert_eq!(&case.collect_package_files(&foo_path), &[foo]);
@@ -382,7 +377,7 @@ fn deleted_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(!foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);
@@ -414,7 +409,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(!foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);
@@ -446,7 +441,7 @@ fn move_file_to_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
let foo_in_workspace = case.system_file(&foo_in_workspace_path)?;
@@ -474,7 +469,7 @@ fn rename_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(!foo.exists(case.db()));
@@ -501,10 +496,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
.with_context(|| "Failed to create __init__.py")?;
std::fs::write(a_original_path.as_std_path(), "").with_context(|| "Failed to create a.py")?;
let sub_a_module = resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap(),
);
let sub_a_module = resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap());
assert_eq!(sub_a_module, None);
assert_eq!(
@@ -518,7 +510,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
let init_file = case
.system_file(sub_new_path.join("__init__.py"))
@@ -528,11 +520,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
.expect("a.py to exist");
// `import sub.a` should now resolve
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_some());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());
assert_eq!(
case.collect_package_files(&case.workspace_path("bar.py")),
@@ -551,11 +539,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
])?;
let bar = case.system_file(case.workspace_path("bar.py")).unwrap();
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_some());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());
let sub_path = case.workspace_path("sub");
let init_file = case
@@ -577,14 +561,10 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_none());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
assert!(!init_file.exists(case.db()));
assert!(!a_file.exists(case.db()));
@@ -607,14 +587,10 @@ fn directory_renamed() -> anyhow::Result<()> {
let bar = case.system_file(case.workspace_path("bar.py")).unwrap();
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_some());
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("foo.baz").unwrap()
ModuleName::new_static("foo.baz").unwrap()
)
.is_none());
@@ -639,18 +615,14 @@ fn directory_renamed() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_none());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
// `import foo.baz` should now resolve
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("foo.baz").unwrap()
ModuleName::new_static("foo.baz").unwrap()
)
.is_some());
@@ -688,11 +660,7 @@ fn directory_deleted() -> anyhow::Result<()> {
let bar = case.system_file(case.workspace_path("bar.py")).unwrap();
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_some());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some(),);
let sub_path = case.workspace_path("sub");
@@ -712,14 +680,10 @@ fn directory_deleted() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_none());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
assert!(!init_file.exists(case.db()));
assert!(!a_file.exists(case.db()));
@@ -730,18 +694,20 @@ fn directory_deleted() -> anyhow::Result<()> {
#[test]
fn search_path() -> anyhow::Result<()> {
let mut case = setup_with_search_paths(
[("bar.py", "import sub.a")],
|root_path, _workspace_path| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
..SearchPathConfiguration::default()
},
)?;
let mut case =
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![root_path.join("site_packages")],
}
})?;
let site_packages = case.root_path().join("site_packages");
assert_eq!(
resolve_module(case.db(), &ModuleName::new("a").unwrap()),
resolve_module(case.db(), ModuleName::new("a").unwrap()),
None
);
@@ -749,9 +715,9 @@ fn search_path() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_some());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
assert_eq!(
case.collect_package_files(&case.workspace_path("bar.py")),
&[case.system_file(case.workspace_path("bar.py")).unwrap()]
@@ -767,12 +733,12 @@ fn add_search_path() -> anyhow::Result<()> {
let site_packages = case.workspace_path("site_packages");
std::fs::create_dir_all(site_packages.as_std_path())?;
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_none());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_none());
// Register site-packages as a search path.
case.update_search_path_settings(SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![site_packages.clone()])),
..SearchPathConfiguration::default()
case.update_search_path_settings(|settings| SearchPathSettings {
site_packages: vec![site_packages.clone()],
..settings.clone()
})
.expect("Search path settings to be valid");
@@ -780,28 +746,30 @@ fn add_search_path() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_some());
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
Ok(())
}
#[test]
fn remove_search_path() -> anyhow::Result<()> {
let mut case = setup_with_search_paths(
[("bar.py", "import sub.a")],
|root_path, _workspace_path| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
..SearchPathConfiguration::default()
},
)?;
let mut case =
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![root_path.join("site_packages")],
}
})?;
// Remove site packages from the search path settings.
let site_packages = case.root_path().join("site_packages");
case.update_search_path_settings(SearchPathConfiguration {
site_packages: None,
..SearchPathConfiguration::default()
case.update_search_path_settings(|settings| SearchPathSettings {
site_packages: vec![],
..settings.clone()
})
.expect("Search path settings to be valid");
@@ -814,48 +782,6 @@ fn remove_search_path() -> anyhow::Result<()> {
Ok(())
}
#[test]
fn changed_versions_file() -> anyhow::Result<()> {
let mut case = setup_with_search_paths(
|root_path: &SystemPath, workspace_path: &SystemPath| {
std::fs::write(workspace_path.join("bar.py").as_std_path(), "import sub.a")?;
std::fs::create_dir_all(root_path.join("typeshed/stdlib").as_std_path())?;
std::fs::write(root_path.join("typeshed/stdlib/VERSIONS").as_std_path(), "")?;
std::fs::write(
root_path.join("typeshed/stdlib/os.pyi").as_std_path(),
"# not important",
)?;
Ok(())
},
|root_path, _workspace_path| SearchPathConfiguration {
custom_typeshed: Some(root_path.join("typeshed")),
..SearchPathConfiguration::default()
},
)?;
// Unset the custom typeshed directory.
assert_eq!(
resolve_module(case.db(), &ModuleName::new("os").unwrap()),
None
);
std::fs::write(
case.root_path()
.join("typeshed/stdlib/VERSIONS")
.as_std_path(),
"os: 3.0-",
)?;
let changes = case.stop_watch();
case.apply_changes(changes);
assert!(resolve_module(case.db(), &ModuleName::new("os").unwrap()).is_some());
Ok(())
}
/// Watch a workspace that contains two files where one file is a hardlink to another.
///
/// Setup:
@@ -902,7 +828,7 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
@@ -973,7 +899,7 @@ fn hard_links_to_target_outside_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 2')");
@@ -1012,7 +938,7 @@ mod unix {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(
foo.permissions(case.db()),
@@ -1075,7 +1001,7 @@ mod unix {
let baz = resolve_module(
case.db().upcast(),
&ModuleName::new_static("bar.baz").unwrap(),
ModuleName::new_static("bar.baz").unwrap(),
)
.expect("Expected bar.baz to exist in site-packages.");
let baz_workspace = case.workspace_path("bar/baz.py");
@@ -1097,7 +1023,7 @@ mod unix {
let changes = case.take_watch_changes();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(
source_text(case.db(), baz.file()).as_str(),
@@ -1110,7 +1036,7 @@ mod unix {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(
source_text(case.db(), baz.file()).as_str(),
@@ -1156,7 +1082,7 @@ mod unix {
let baz = resolve_module(
case.db().upcast(),
&ModuleName::new_static("bar.baz").unwrap(),
ModuleName::new_static("bar.baz").unwrap(),
)
.expect("Expected bar.baz to exist in site-packages.");
let bar_baz = case.workspace_path("bar/baz.py");
@@ -1181,7 +1107,7 @@ mod unix {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
// The file watcher is guaranteed to emit one event for the changed file, but it isn't specified
// if the event is emitted for the "original" or linked path because both paths are watched.
@@ -1250,17 +1176,17 @@ mod unix {
Ok(())
},
|_root, workspace| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![
workspace.join(".venv/lib/python3.12/site-packages")
])),
..SearchPathConfiguration::default()
|_root, workspace| SearchPathSettings {
extra_paths: vec![],
src_root: workspace.to_path_buf(),
custom_typeshed: None,
site_packages: vec![workspace.join(".venv/lib/python3.12/site-packages")],
},
)?;
let baz = resolve_module(
case.db().upcast(),
&ModuleName::new_static("bar.baz").unwrap(),
ModuleName::new_static("bar.baz").unwrap(),
)
.expect("Expected bar.baz to exist in site-packages.");
let baz_site_packages_path =
@@ -1289,7 +1215,7 @@ mod unix {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(
source_text(case.db(), baz_original_file).as_str(),

View File

@@ -17,35 +17,35 @@ ruff_python_ast = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_python_literal = { workspace = true }
anyhow = { workspace = true }
bitflags = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
itertools = { workspace = true}
once_cell = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }
memchr = { workspace = true }
[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }
[dev-dependencies]
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }
red_knot_test = { workspace = true }
red_knot_vendored = { workspace = true }
anyhow = { workspace = true }
dir-test = {workspace = true}
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }
[lints]
workspace = true

View File

@@ -1,5 +1,9 @@
# Vendored types for the stdlib
# Red Knot
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_vendored/vendor/typeshed`. The file `crates/red_knot_vendored/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
Semantic analysis for the red-knot project.
## Vendored types for the stdlib
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).

View File

@@ -1,4 +1,87 @@
/// Rebuild the crate if a test file is added or removed from `resources/mdtest`.
pub fn main() {
println!("cargo::rerun-if-changed=resources/mdtest");
//! Build script to package our vendored typeshed files
//! into a zip archive that can be included in the Ruff binary.
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.
use std::fs::File;
use std::path::Path;
use path_slash::PathExt;
use zip::result::ZipResult;
use zip::write::{FileOptions, ZipWriter};
use zip::CompressionMethod;
const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
/// Recursively zip the contents of an entire directory.
///
/// This routine is adapted from a recipe at
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
let mut zip = ZipWriter::new(writer);
// Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
// [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
// by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
// (WASM itself is already slower than a native build for a specific platform).
// We can't use `#[cfg(...)]` here because the target-arch in a build script is the
// architecture of the system running the build script and not the architecture of the build-target.
// That's why we use the `TARGET` environment variable here.
let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
CompressionMethod::Deflated
} else {
CompressionMethod::Zstd
};
let options = FileOptions::default()
.compression_method(method)
.unix_permissions(0o644);
for entry in walkdir::WalkDir::new(directory_path) {
let dir_entry = entry.unwrap();
let absolute_path = dir_entry.path();
let normalized_relative_path = absolute_path
.strip_prefix(Path::new(directory_path))
.unwrap()
.to_slash()
.expect("Unexpected non-utf8 typeshed path!");
// Write file or directory explicitly
// Some unzip tools unzip files with directory paths correctly, some do not!
if absolute_path.is_file() {
println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
zip.start_file(normalized_relative_path, options)?;
let mut f = File::open(absolute_path)?;
std::io::copy(&mut f, &mut zip).unwrap();
} else if !normalized_relative_path.is_empty() {
// Only if not root! Avoids path spec / warning
// and mapname conversion failed error on unzip
println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
zip.add_directory(normalized_relative_path, options)?;
}
}
zip.finish()
}
fn main() {
println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
assert!(
Path::new(TYPESHED_SOURCE_DIR).is_dir(),
"Where is typeshed?"
);
let out_dir = std::env::var("OUT_DIR").unwrap();
// N.B. Deliberately using `format!()` instead of `Path::join()` here,
// so that we use `/` as a path separator on all platforms.
// That enables us to load the typeshed zip at compile time in `module.rs`
// (otherwise we'd have to dynamically determine the exact path to the typeshed zip
// based on the default path separator for the specific platform we're on,
// which can't be done at compile time.)
let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");
let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
}

View File

@@ -1,4 +0,0 @@
Markdown files within the `mdtest/` subdirectory are tests of type inference and type checking;
they are executed by the `tests/mdtest.rs` integration test.
See `crates/red_knot_test/README.md` for documentation of this test format.

View File

@@ -1,24 +0,0 @@
# Assignment with annotations
## Annotation only transparent to local inference
```py
x = 1
x: int
y = x
reveal_type(y) # revealed: Literal[1]
```
## Violates own annotation
```py
x: int = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]` is not assignable to `int`"
```
## Violates previous annotation
```py
x: int
x = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]` is not assignable to `int`"
```

View File

@@ -1,9 +0,0 @@
# Multi-target assignment
## Basic
```py
x = y = 1
reveal_type(x) # revealed: Literal[1]
reveal_type(y) # revealed: Literal[1]
```

View File

@@ -1,25 +0,0 @@
# Unbound
## Unbound
```py
x = foo
foo = 1
reveal_type(x) # revealed: Unbound
```
## Unbound class variable
Name lookups within a class scope fall back to globals, but lookups of class attributes don't.
```py
x = 1
class C:
y = x
if flag:
x = 2
reveal_type(C.x) # revealed: Literal[2]
reveal_type(C.y) # revealed: Literal[1]
```

View File

@@ -1,17 +0,0 @@
# Walrus operator
## Basic
```py
x = (y := 1) + 1
reveal_type(x) # revealed: Literal[2]
reveal_type(y) # revealed: Literal[1]
```
## Walrus self-addition
```py
x = 0
(x := x + 1)
reveal_type(x) # revealed: Literal[1]
```

View File

@@ -1,15 +0,0 @@
# Class attributes
## Union of attributes
```py
if flag:
class C:
x = 1
else:
class C:
x = 2
reveal_type(C.x) # revealed: Literal[1, 2]
```

View File

@@ -1,48 +0,0 @@
# Binary operations on booleans
## Basic Arithmetic
We try to be precise, and all operations except division will result in a `Literal` type.
```py
a = True
b = False
reveal_type(a + a) # revealed: Literal[2]
reveal_type(a + b) # revealed: Literal[1]
reveal_type(b + a) # revealed: Literal[1]
reveal_type(b + b) # revealed: Literal[0]
reveal_type(a - a) # revealed: Literal[0]
reveal_type(a - b) # revealed: Literal[1]
reveal_type(b - a) # revealed: Literal[-1]
reveal_type(b - b) # revealed: Literal[0]
reveal_type(a * a) # revealed: Literal[1]
reveal_type(a * b) # revealed: Literal[0]
reveal_type(b * a) # revealed: Literal[0]
reveal_type(b * b) # revealed: Literal[0]
reveal_type(a % a) # revealed: Literal[0]
reveal_type(b % a) # revealed: Literal[0]
reveal_type(a // a) # revealed: Literal[1]
reveal_type(b // a) # revealed: Literal[0]
reveal_type(a**a) # revealed: Literal[1]
reveal_type(a**b) # revealed: Literal[1]
reveal_type(b**a) # revealed: Literal[0]
reveal_type(b**b) # revealed: Literal[1]
# Division
reveal_type(a / a) # revealed: float
reveal_type(b / a) # revealed: float
b / b # error: [division-by-zero] "Cannot divide object of type `Literal[False]` by zero"
a / b # error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
# bitwise OR
reveal_type(a | a) # revealed: Literal[True]
reveal_type(a | b) # revealed: Literal[True]
reveal_type(b | a) # revealed: Literal[True]
reveal_type(b | b) # revealed: Literal[False]
```

View File

@@ -1,447 +0,0 @@
# Binary operations on instances
Binary operations in Python are implemented by means of magic double-underscore methods.
For references, see:
- <https://snarky.ca/unravelling-binary-arithmetic-operations-in-python/>
- <https://docs.python.org/3/reference/datamodel.html#emulating-numeric-types>
## Operations
We support inference for all Python's binary operators:
`+`, `-`, `*`, `@`, `/`, `//`, `%`, `**`, `<<`, `>>`, `&`, `^`, and `|`.
```py
class A:
def __add__(self, other) -> A:
return self
def __sub__(self, other) -> A:
return self
def __mul__(self, other) -> A:
return self
def __matmul__(self, other) -> A:
return self
def __truediv__(self, other) -> A:
return self
def __floordiv__(self, other) -> A:
return self
def __mod__(self, other) -> A:
return self
def __pow__(self, other) -> A:
return self
def __lshift__(self, other) -> A:
return self
def __rshift__(self, other) -> A:
return self
def __and__(self, other) -> A:
return self
def __xor__(self, other) -> A:
return self
def __or__(self, other) -> A:
return self
class B: ...
reveal_type(A() + B()) # revealed: A
reveal_type(A() - B()) # revealed: A
reveal_type(A() * B()) # revealed: A
reveal_type(A() @ B()) # revealed: A
reveal_type(A() / B()) # revealed: A
reveal_type(A() // B()) # revealed: A
reveal_type(A() % B()) # revealed: A
reveal_type(A() ** B()) # revealed: A
reveal_type(A() << B()) # revealed: A
reveal_type(A() >> B()) # revealed: A
reveal_type(A() & B()) # revealed: A
reveal_type(A() ^ B()) # revealed: A
reveal_type(A() | B()) # revealed: A
```
## Reflected
We also support inference for reflected operations:
```py
class A:
def __radd__(self, other) -> A:
return self
def __rsub__(self, other) -> A:
return self
def __rmul__(self, other) -> A:
return self
def __rmatmul__(self, other) -> A:
return self
def __rtruediv__(self, other) -> A:
return self
def __rfloordiv__(self, other) -> A:
return self
def __rmod__(self, other) -> A:
return self
def __rpow__(self, other) -> A:
return self
def __rlshift__(self, other) -> A:
return self
def __rrshift__(self, other) -> A:
return self
def __rand__(self, other) -> A:
return self
def __rxor__(self, other) -> A:
return self
def __ror__(self, other) -> A:
return self
class B: ...
reveal_type(B() + A()) # revealed: A
reveal_type(B() - A()) # revealed: A
reveal_type(B() * A()) # revealed: A
reveal_type(B() @ A()) # revealed: A
reveal_type(B() / A()) # revealed: A
reveal_type(B() // A()) # revealed: A
reveal_type(B() % A()) # revealed: A
reveal_type(B() ** A()) # revealed: A
reveal_type(B() << A()) # revealed: A
reveal_type(B() >> A()) # revealed: A
reveal_type(B() & A()) # revealed: A
reveal_type(B() ^ A()) # revealed: A
reveal_type(B() | A()) # revealed: A
```
## Returning a different type
The magic methods aren't required to return the type of `self`:
```py
class A:
def __add__(self, other) -> int:
return 1
def __rsub__(self, other) -> int:
return 1
class B: ...
reveal_type(A() + B()) # revealed: int
reveal_type(B() - A()) # revealed: int
```
## Non-reflected precedence in general
In general, if the left-hand side defines `__add__`, the right-hand side
defines `__radd__`, and the right-hand side is not a subtype of the left-hand
side, `lhs.__add__` will take precedence:
```py
class A:
def __add__(self, other: B) -> int:
return 42
class B:
def __radd__(self, other: A) -> str:
return "foo"
reveal_type(A() + B()) # revealed: int
# Edge case: C is a subtype of C, *but* if the two sides are of *equal* types,
# the lhs *still* takes precedence
class C:
def __add__(self, other: C) -> int:
return 42
def __radd__(self, other: C) -> str:
return "foo"
reveal_type(C() + C()) # revealed: int
```
## Reflected precedence for subtypes (in some cases)
If the right-hand operand is a subtype of the left-hand operand and has a
different implementation of the reflected method, the reflected method on the
right-hand operand takes precedence.
```py
class A:
def __add__(self, other) -> str:
return "foo"
def __radd__(self, other) -> str:
return "foo"
class MyString(str): ...
class B(A):
def __radd__(self, other) -> MyString:
return MyString()
reveal_type(A() + B()) # revealed: MyString
# N.B. Still a subtype of `A`, even though `A` does not appear directly in the class's `__bases__`
class C(B): ...
# TODO: we currently only understand direct subclasses as subtypes of the superclass.
# We need to iterate through the full MRO rather than just the class's bases;
# if we do, we'll understand `C` as a subtype of `A`, and correctly understand this as being
# `MyString` rather than `str`
reveal_type(A() + C()) # revealed: str
```
## Reflected precedence 2
If the right-hand operand is a subtype of the left-hand operand, but does not
override the reflected method, the left-hand operand's non-reflected method
still takes precedence:
```py
class A:
def __add__(self, other) -> str:
return "foo"
def __radd__(self, other) -> int:
return 42
class B(A): ...
reveal_type(A() + B()) # revealed: str
```
## Only reflected supported
For example, at runtime, `(1).__add__(1.2)` is `NotImplemented`, but
`(1.2).__radd__(1) == 2.2`, meaning that `1 + 1.2` succeeds at runtime
(producing `2.2`). The runtime tries the second one only if the first one
returns `NotImplemented` to signal failure.
Typeshed and other stubs annotate dunder-method calls that would return
`NotImplemented` as being "illegal" calls. `int.__add__` is annotated as only
"accepting" `int`s, even though it strictly-speaking "accepts" any other object
without raising an exception -- it will simply return `NotImplemented`,
allowing the runtime to try the `__radd__` method of the right-hand operand
as well.
```py
class A:
def __sub__(self, other: A) -> A:
return A()
class B:
def __rsub__(self, other: A) -> B:
return B()
# TODO: this should be `B` (the return annotation of `B.__rsub__`),
# because `A.__sub__` is annotated as only accepting `A`,
# but `B.__rsub__` will accept `A`.
reveal_type(A() - B()) # revealed: A
```
## Callable instances as dunders
Believe it or not, this is supported at runtime:
```py
class A:
def __call__(self, other) -> int:
return 42
class B:
__add__ = A()
reveal_type(B() + B()) # revealed: int
```
## Integration test: numbers from typeshed
```py
reveal_type(3j + 3.14) # revealed: complex
reveal_type(4.2 + 42) # revealed: float
reveal_type(3j + 3) # revealed: complex
# TODO should be complex, need to check arg type and fall back to `rhs.__radd__`
reveal_type(3.14 + 3j) # revealed: float
# TODO should be float, need to check arg type and fall back to `rhs.__radd__`
reveal_type(42 + 4.2) # revealed: int
# TODO should be complex, need to check arg type and fall back to `rhs.__radd__`
reveal_type(3 + 3j) # revealed: int
def returns_int() -> int:
return 42
def returns_bool() -> bool:
return True
x = returns_bool()
y = returns_int()
reveal_type(x + y) # revealed: int
reveal_type(4.2 + x) # revealed: float
# TODO should be float, need to check arg type and fall back to `rhs.__radd__`
reveal_type(y + 4.12) # revealed: int
```
## With literal types
When we have a literal type for one operand, we're able to fall back to the
instance handling for its instance super-type.
```py
class A:
def __add__(self, other) -> A:
return self
def __radd__(self, other) -> A:
return self
reveal_type(A() + 1) # revealed: A
# TODO should be `A` since `int.__add__` doesn't support `A` instances
reveal_type(1 + A()) # revealed: int
reveal_type(A() + "foo") # revealed: A
# TODO should be `A` since `str.__add__` doesn't support `A` instances
# TODO overloads
reveal_type("foo" + A()) # revealed: @Todo
reveal_type(A() + b"foo") # revealed: A
# TODO should be `A` since `bytes.__add__` doesn't support `A` instances
reveal_type(b"foo" + A()) # revealed: bytes
reveal_type(A() + ()) # revealed: A
# TODO this should be `A`, since `tuple.__add__` doesn't support `A` instances
reveal_type(() + A()) # revealed: @Todo
literal_string_instance = "foo" * 1_000_000_000
# the test is not testing what it's meant to be testing if this isn't a `LiteralString`:
reveal_type(literal_string_instance) # revealed: LiteralString
reveal_type(A() + literal_string_instance) # revealed: A
# TODO should be `A` since `str.__add__` doesn't support `A` instances
# TODO overloads
reveal_type(literal_string_instance + A()) # revealed: @Todo
```
## Operations involving instances of classes inheriting from `Any`
`Any` and `Unknown` represent a set of possible runtime objects, wherein the
bounds of the set are unknown. Whether the left-hand operand's dunder or the
right-hand operand's reflected dunder is used depends on whether the right-hand operand
is an instance of a class that is a subclass of the left-hand operand's class
and overrides the reflected dunder. In the following example, because of the
unknowable nature of `Any`/`Unknown`, we must consider both possibilities:
`Any`/`Unknown` might resolve to an unknown third class that inherits from `X`
and overrides `__radd__`; but it also might not. Thus, the correct answer here
for the `reveal_type` is `int | Unknown`.
```py
from does_not_exist import Foo # error: [unresolved-import]
reveal_type(Foo) # revealed: Unknown
class X:
def __add__(self, other: object) -> int:
return 42
class Y(Foo): ...
# TODO: Should be `int | Unknown`; see above discussion.
reveal_type(X() + Y()) # revealed: int
```
## Unsupported
### Dunder as instance attribute
The magic method must exist on the class, not just on the instance:
```py
def add_impl(self, other) -> int:
return 1
class A:
def __init__(self):
self.__add__ = add_impl
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `A` and `A`"
# revealed: Unknown
reveal_type(A() + A())
```
### Missing dunder
```py
class A: ...
# error: [unsupported-operator]
# revealed: Unknown
reveal_type(A() + A())
```
### Wrong position
A left-hand dunder method doesn't apply for the right-hand operand, or vice versa:
```py
class A:
def __add__(self, other) -> int: ...
class B:
def __radd__(self, other) -> int: ...
class C: ...
# error: [unsupported-operator]
# revealed: Unknown
reveal_type(C() + A())
# error: [unsupported-operator]
# revealed: Unknown
reveal_type(B() + C())
```
### Reflected dunder is not tried between two objects of the same type
For the specific case where the left-hand operand is the exact same type as the
right-hand operand, the reflected dunder of the right-hand operand is not
tried; the runtime short-circuits after trying the unreflected dunder of the
left-hand operand. For context, see
[this mailing list discussion](https://mail.python.org/archives/list/python-dev@python.org/thread/7NZUCODEAPQFMRFXYRMGJXDSIS3WJYIV/).
```py
class Foo:
def __radd__(self, other: Foo) -> Foo:
return self
# error: [unsupported-operator]
# revealed: Unknown
reveal_type(Foo() + Foo())
```
### Wrong type
TODO: check signature and error if `other` is the wrong type

View File

@@ -1,70 +0,0 @@
# Binary operations on integers
## Basic Arithmetic
```py
reveal_type(2 + 1) # revealed: Literal[3]
reveal_type(3 - 4) # revealed: Literal[-1]
reveal_type(3 * -1) # revealed: Literal[-3]
reveal_type(-3 // 3) # revealed: Literal[-1]
reveal_type(-3 / 3) # revealed: float
reveal_type(5 % 3) # revealed: Literal[2]
```
## Power
For power, if the result fits in the int literal type, it will be a `Literal` type. Otherwise the
outcome is `int`.
```py
largest_u32 = 4_294_967_295
reveal_type(2**2) # revealed: Literal[4]
reveal_type(1 ** (largest_u32 + 1)) # revealed: int
reveal_type(2**largest_u32) # revealed: int
```
## Division by Zero
This error is really outside the current Python type system, because e.g. `int.__truediv__` and
friends are not annotated to indicate that it's an error, and we don't even have a facility to
permit such an annotation. So arguably divide-by-zero should be a lint error rather than a type
checker error. But we choose to go ahead and error in the cases that are very likely to be an error:
dividing something typed as `int` or `float` by something known to be `Literal[0]`.
This isn't _definitely_ an error, because the object typed as `int` or `float` could be an instance
of a custom subclass which overrides division behavior to handle zero without error. But if this
unusual case occurs, the error can be avoided by explicitly typing the dividend as that safe custom
subclass; we only emit the error if the LHS type is exactly `int` or `float`, not if it's a subclass.
```py
a = 1 / 0 # error: "Cannot divide object of type `Literal[1]` by zero"
reveal_type(a) # revealed: float
b = 2 // 0 # error: "Cannot floor divide object of type `Literal[2]` by zero"
reveal_type(b) # revealed: int
c = 3 % 0 # error: "Cannot reduce object of type `Literal[3]` modulo zero"
reveal_type(c) # revealed: int
# error: "Cannot divide object of type `int` by zero"
# revealed: float
reveal_type(int() / 0)
# error: "Cannot divide object of type `Literal[1]` by zero"
# revealed: float
reveal_type(1 / False)
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
True / False
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
bool(1) / False
# error: "Cannot divide object of type `float` by zero"
# revealed: float
reveal_type(1.0 / 0)
class MyInt(int): ...
# No error for a subclass of int
# revealed: float
reveal_type(MyInt(3) / 0)
```

View File

@@ -1,20 +0,0 @@
# Callable instance
## Dunder call
```py
class Multiplier:
def __init__(self, factor: float):
self.factor = factor
def __call__(self, number: float) -> float:
return number * self.factor
a = Multiplier(2.0)(3.0)
reveal_type(a) # revealed: float
class Unit: ...
b = Unit()(3.0) # error: "Object of type `Unit` is not callable"
reveal_type(b) # revealed: Unknown
```

View File

@@ -1,7 +0,0 @@
# Constructor
```py
class Foo: ...
reveal_type(Foo()) # revealed: Foo
```

View File

@@ -1,46 +0,0 @@
# Call expression
## Simple
```py
def get_int() -> int:
return 42
reveal_type(get_int()) # revealed: int
```
## Async
```py
async def get_int_async() -> int:
return 42
# TODO: we don't yet support `types.CoroutineType`, should be generic `Coroutine[Any, Any, int]`
reveal_type(get_int_async()) # revealed: @Todo
```
## Decorated
```py
from typing import Callable
def foo() -> int:
return 42
def decorator(func) -> Callable[[], int]:
return foo
@decorator
def bar() -> str:
return "bar"
# TODO: should reveal `int`, as the decorator replaces `bar` with `foo`
reveal_type(bar()) # revealed: @Todo
```
## Invalid callable
```py
nonsense = 123
x = nonsense() # error: "Object of type `Literal[123]` is not callable"
```

View File

@@ -1,79 +0,0 @@
# Unions in calls
## Union of return types
```py
if flag:
def f() -> int:
return 1
else:
def f() -> str:
return "foo"
reveal_type(f()) # revealed: int | str
```
## Calling with an unknown union
```py
from nonexistent import f # error: [unresolved-import] "Cannot resolve import `nonexistent`"
if flag:
def f() -> int:
return 1
reveal_type(f()) # revealed: Unknown | int
```
## Non-callable elements in a union
Calling a union with a non-callable element should emit a diagnostic.
```py
if flag:
f = 1
else:
def f() -> int:
return 1
x = f() # error: "Object of type `Literal[1] | Literal[f]` is not callable (due to union element `Literal[1]`)"
reveal_type(x) # revealed: Unknown | int
```
## Multiple non-callable elements in a union
Calling a union with multiple non-callable elements should mention all of them in the diagnostic.
```py
if flag:
f = 1
elif flag2:
f = "foo"
else:
def f() -> int:
return 1
# error: "Object of type `Literal[1] | Literal["foo"] | Literal[f]` is not callable (due to union elements Literal[1], Literal["foo"])"
# revealed: Unknown | int
reveal_type(f())
```
## All non-callable union elements
Calling a union with no callable elements can emit a simpler diagnostic.
```py
if flag:
f = 1
else:
f = "foo"
x = f() # error: "Object of type `Literal[1] | Literal["foo"]` is not callable"
reveal_type(x) # revealed: Unknown
```

View File

@@ -1,43 +0,0 @@
# Comparison: Byte literals
These tests assert that we infer precise `Literal` types for comparisons between objects
inferred as having `Literal` bytes types:
```py
reveal_type(b"abc" == b"abc") # revealed: Literal[True]
reveal_type(b"abc" == b"ab") # revealed: Literal[False]
reveal_type(b"abc" != b"abc") # revealed: Literal[False]
reveal_type(b"abc" != b"ab") # revealed: Literal[True]
reveal_type(b"abc" < b"abd") # revealed: Literal[True]
reveal_type(b"abc" < b"abb") # revealed: Literal[False]
reveal_type(b"abc" <= b"abc") # revealed: Literal[True]
reveal_type(b"abc" <= b"abb") # revealed: Literal[False]
reveal_type(b"abc" > b"abd") # revealed: Literal[False]
reveal_type(b"abc" > b"abb") # revealed: Literal[True]
reveal_type(b"abc" >= b"abc") # revealed: Literal[True]
reveal_type(b"abc" >= b"abd") # revealed: Literal[False]
reveal_type(b"" in b"") # revealed: Literal[True]
reveal_type(b"" in b"abc") # revealed: Literal[True]
reveal_type(b"abc" in b"") # revealed: Literal[False]
reveal_type(b"ab" in b"abc") # revealed: Literal[True]
reveal_type(b"abc" in b"abc") # revealed: Literal[True]
reveal_type(b"d" in b"abc") # revealed: Literal[False]
reveal_type(b"ac" in b"abc") # revealed: Literal[False]
reveal_type(b"\x81\x82" in b"\x80\x81\x82") # revealed: Literal[True]
reveal_type(b"\x82\x83" in b"\x80\x81\x82") # revealed: Literal[False]
reveal_type(b"ab" not in b"abc") # revealed: Literal[False]
reveal_type(b"ac" not in b"abc") # revealed: Literal[True]
reveal_type(b"abc" is b"abc") # revealed: bool
reveal_type(b"abc" is b"ab") # revealed: Literal[False]
reveal_type(b"abc" is not b"abc") # revealed: bool
reveal_type(b"abc" is not b"ab") # revealed: Literal[True]
```

View File

@@ -1,27 +0,0 @@
# Comparison: Integers
## Integer literals
```py
reveal_type(1 == 1 == True) # revealed: Literal[True]
reveal_type(1 == 1 == 2 == 4) # revealed: Literal[False]
reveal_type(False < True <= 2 < 3 != 6) # revealed: Literal[True]
reveal_type(1 < 1) # revealed: Literal[False]
reveal_type(1 > 1) # revealed: Literal[False]
reveal_type(1 is 1) # revealed: bool
reveal_type(1 is not 1) # revealed: bool
reveal_type(1 is 2) # revealed: Literal[False]
reveal_type(1 is not 7) # revealed: Literal[True]
reveal_type(1 <= "" and 0 < 1) # revealed: @Todo | Literal[True]
```
## Integer instance
```py
# TODO: implement lookup of `__eq__` on typeshed `int` stub.
def int_instance() -> int: ...
reveal_type(1 == int_instance()) # revealed: @Todo
reveal_type(9 < int_instance()) # revealed: bool
reveal_type(int_instance() < int_instance()) # revealed: bool
```

View File

@@ -1,41 +0,0 @@
# Comparison: Non boolean returns
Walking through examples:
- `a = A() < B() < C()`
1. `A() < B() and B() < C()` - split in N comparison
1. `A()` and `B()` - evaluate outcome types
1. `bool` and `bool` - evaluate truthiness
1. `A | B` - union of "first true" types
- `b = 0 < 1 < A() < 3`
1. `0 < 1 and 1 < A() and A() < 3` - split in N comparison
1. `True` and `bool` and `A` - evaluate outcome types
1. `True` and `bool` and `bool` - evaluate truthiness
1. `bool | A` - union of "true" types
- `c = 10 < 0 < A() < B() < C()` short-circuits to `False`
```py
from __future__ import annotations
class A:
def __lt__(self, other) -> A: ...
class B:
def __lt__(self, other) -> B: ...
class C:
def __lt__(self, other) -> C: ...
x = A() < B() < C()
reveal_type(x) # revealed: A | B
y = 0 < 1 < A() < 3
reveal_type(y) # revealed: bool | A
z = 10 < 0 < A() < B() < C()
reveal_type(z) # revealed: Literal[False]
```

View File

@@ -1,20 +0,0 @@
# Comparison: Strings
## String literals
```py
def str_instance() -> str: ...
reveal_type("abc" == "abc") # revealed: Literal[True]
reveal_type("ab_cd" <= "ab_ce") # revealed: Literal[True]
reveal_type("abc" in "ab cd") # revealed: Literal[False]
reveal_type("" not in "hello") # revealed: Literal[False]
reveal_type("--" is "--") # revealed: bool
reveal_type("A" is "B") # revealed: Literal[False]
reveal_type("--" is not "--") # revealed: bool
reveal_type("A" is not "B") # revealed: Literal[True]
reveal_type(str_instance() < "...") # revealed: bool
# ensure we're not comparing the interned salsa symbols, which compare by order of declaration.
reveal_type("ab" < "ab_cd") # revealed: Literal[True]
```

View File

@@ -1,205 +0,0 @@
# Comparison: Tuples
## Heterogeneous
For tuples like `tuple[int, str, Literal[1]]`
### Value Comparisons
"Value Comparisons" refers to the operators: `==`, `!=`, `<`, `<=`, `>`, `>=`
#### Results without Ambiguity
Cases where the result can be definitively inferred as a `BooleanLiteral`.
```py
a = (1, "test", (3, 13), True)
b = (1, "test", (3, 14), False)
reveal_type(a == a) # revealed: Literal[True]
reveal_type(a != a) # revealed: Literal[False]
reveal_type(a < a) # revealed: Literal[False]
reveal_type(a <= a) # revealed: Literal[True]
reveal_type(a > a) # revealed: Literal[False]
reveal_type(a >= a) # revealed: Literal[True]
reveal_type(a == b) # revealed: Literal[False]
reveal_type(a != b) # revealed: Literal[True]
reveal_type(a < b) # revealed: Literal[True]
reveal_type(a <= b) # revealed: Literal[True]
reveal_type(a > b) # revealed: Literal[False]
reveal_type(a >= b) # revealed: Literal[False]
```
Even when tuples have different lengths, comparisons should be handled appropriately.
```py path=different_length.py
a = (1, 2, 3)
b = (1, 2, 3, 4)
reveal_type(a == b) # revealed: Literal[False]
reveal_type(a != b) # revealed: Literal[True]
reveal_type(a < b) # revealed: Literal[True]
reveal_type(a <= b) # revealed: Literal[True]
reveal_type(a > b) # revealed: Literal[False]
reveal_type(a >= b) # revealed: Literal[False]
c = ("a", "b", "c", "d")
d = ("a", "b", "c")
reveal_type(c == d) # revealed: Literal[False]
reveal_type(c != d) # revealed: Literal[True]
reveal_type(c < d) # revealed: Literal[False]
reveal_type(c <= d) # revealed: Literal[False]
reveal_type(c > d) # revealed: Literal[True]
reveal_type(c >= d) # revealed: Literal[True]
```
#### Results with Ambiguity
```py
def bool_instance() -> bool: ...
def int_instance() -> int: ...
a = (bool_instance(),)
b = (int_instance(),)
# TODO: All @Todo should be `bool`
reveal_type(a == a) # revealed: @Todo
reveal_type(a != a) # revealed: @Todo
reveal_type(a < a) # revealed: @Todo
reveal_type(a <= a) # revealed: @Todo
reveal_type(a > a) # revealed: @Todo
reveal_type(a >= a) # revealed: @Todo
reveal_type(a == b) # revealed: @Todo
reveal_type(a != b) # revealed: @Todo
reveal_type(a < b) # revealed: @Todo
reveal_type(a <= b) # revealed: @Todo
reveal_type(a > b) # revealed: @Todo
reveal_type(a >= b) # revealed: @Todo
```
#### Comparison Unsupported
If two tuples contain types that do not support comparison, the result may be `Unknown`.
However, `==` and `!=` are exceptions and can still provide definite results.
```py
a = (1, 2)
b = (1, "hello")
# TODO: should be Literal[False]
reveal_type(a == b) # revealed: @Todo
# TODO: should be Literal[True]
reveal_type(a != b) # revealed: @Todo
# TODO: should be Unknown and add more informative diagnostics
reveal_type(a < b) # revealed: @Todo
reveal_type(a <= b) # revealed: @Todo
reveal_type(a > b) # revealed: @Todo
reveal_type(a >= b) # revealed: @Todo
```
However, if the lexicographic comparison is decided before it reaches the point where a `str` and an `int` would be compared, Python still produces a result based on the earlier elements.
```py path=short_circuit.py
a = (1, 2)
b = (999999, "hello")
reveal_type(a == b) # revealed: Literal[False]
reveal_type(a != b) # revealed: Literal[True]
reveal_type(a < b) # revealed: Literal[True]
reveal_type(a <= b) # revealed: Literal[True]
reveal_type(a > b) # revealed: Literal[False]
reveal_type(a >= b) # revealed: Literal[False]
```
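A quick runtime check of the same behaviour (illustrative only, not part of the assertions above): the first elements already differ, so the `int`-vs-`str` pair is never compared and no `TypeError` is raised:
```py
a = (1, 2)
b = (999999, "hello")
assert (a < b) is True    # decided by `1 < 999999`; the `str` element is never reached
assert (a == b) is False  # the tuples differ at the first position
```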
#### Matryoshka Tuples
```py
a = (1, True, "Hello")
b = (a, a, a)
c = (b, b, b)
reveal_type(c == c) # revealed: Literal[True]
reveal_type(c != c) # revealed: Literal[False]
reveal_type(c < c) # revealed: Literal[False]
reveal_type(c <= c) # revealed: Literal[True]
reveal_type(c > c) # revealed: Literal[False]
reveal_type(c >= c) # revealed: Literal[True]
```
#### Non Boolean Rich Comparisons
```py
class A:
def __eq__(self, o) -> str: ...
def __ne__(self, o) -> int: ...
def __lt__(self, o) -> float: ...
def __le__(self, o) -> object: ...
def __gt__(self, o) -> tuple: ...
def __ge__(self, o) -> list: ...
a = (A(), A())
# TODO: All @Todo should be bool
reveal_type(a == a) # revealed: @Todo
reveal_type(a != a) # revealed: @Todo
reveal_type(a < a) # revealed: @Todo
reveal_type(a <= a) # revealed: @Todo
reveal_type(a > a) # revealed: @Todo
reveal_type(a >= a) # revealed: @Todo
```
### Membership Test Comparisons
"Membership Test Comparisons" refers to the operators `in` and `not in`.
```py
def int_instance() -> int: ...
a = (1, 2)
b = ((3, 4), (1, 2))
c = ((1, 2, 3), (4, 5, 6))
d = ((int_instance(), int_instance()), (int_instance(), int_instance()))
reveal_type(a in b) # revealed: Literal[True]
reveal_type(a not in b) # revealed: Literal[False]
reveal_type(a in c) # revealed: Literal[False]
reveal_type(a not in c) # revealed: Literal[True]
# TODO: All @Todo should be bool
reveal_type(a in d) # revealed: @Todo
reveal_type(a not in d) # revealed: @Todo
```
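For reference, a runtime illustration (not part of the checked assertions) of why the first four results are literals: `in` on a tuple compares the candidate against each element with `==`, so nested tuples compare by value:
```py
assert ((1, 2) in ((3, 4), (1, 2))) is True
assert ((1, 2) in ((1, 2, 3), (4, 5, 6))) is False
```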
### Identity Comparisons
"Identity Comparisons" refers to `is` and `is not`.
```py
a = (1, 2)
b = ("a", "b")
c = (1, 2, 3)
reveal_type(a is (1, 2)) # revealed: bool
reveal_type(a is not (1, 2)) # revealed: bool
# TODO: Update to Literal[False] once str == int comparison is implemented
reveal_type(a is b) # revealed: @Todo
# TODO: Update to Literal[True] once str == int comparison is implemented
reveal_type(a is not b) # revealed: @Todo
reveal_type(a is c) # revealed: Literal[False]
reveal_type(a is not c) # revealed: Literal[True]
```
## Homogeneous
For tuples like `tuple[int, ...]`, `tuple[Any, ...]`
// TODO

View File

@@ -1,76 +0,0 @@
# Comparison: Unions
## Union on one side of the comparison
Comparisons on union types need to consider all possible cases:
```py
one_or_two = 1 if flag else 2
reveal_type(one_or_two <= 2) # revealed: Literal[True]
reveal_type(one_or_two <= 1) # revealed: bool
reveal_type(one_or_two <= 0) # revealed: Literal[False]
reveal_type(2 >= one_or_two) # revealed: Literal[True]
reveal_type(1 >= one_or_two) # revealed: bool
reveal_type(0 >= one_or_two) # revealed: Literal[False]
reveal_type(one_or_two < 1) # revealed: Literal[False]
reveal_type(one_or_two < 2) # revealed: bool
reveal_type(one_or_two < 3) # revealed: Literal[True]
reveal_type(one_or_two > 0) # revealed: Literal[True]
reveal_type(one_or_two > 1) # revealed: bool
reveal_type(one_or_two > 2) # revealed: Literal[False]
reveal_type(one_or_two == 3) # revealed: Literal[False]
reveal_type(one_or_two == 1) # revealed: bool
reveal_type(one_or_two != 3) # revealed: Literal[True]
reveal_type(one_or_two != 1) # revealed: bool
a_or_ab = "a" if flag else "ab"
reveal_type(a_or_ab in "ab") # revealed: Literal[True]
reveal_type("a" in a_or_ab) # revealed: Literal[True]
reveal_type("c" not in a_or_ab) # revealed: Literal[True]
reveal_type("a" not in a_or_ab) # revealed: Literal[False]
reveal_type("b" in a_or_ab) # revealed: bool
reveal_type("b" not in a_or_ab) # revealed: bool
one_or_none = 1 if flag else None
reveal_type(one_or_none is None) # revealed: bool
reveal_type(one_or_none is not None) # revealed: bool
```
## Union on both sides of the comparison
With unions on both sides, we need to consider the full cross product of
options when building the resulting (union) type:
```py
small = 1 if flag_s else 2
large = 2 if flag_l else 3
reveal_type(small <= large) # revealed: Literal[True]
reveal_type(small >= large) # revealed: bool
reveal_type(small < large) # revealed: bool
reveal_type(small > large) # revealed: Literal[False]
```
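A minimal sketch of that cross-product idea (hypothetical helper, not red-knot's actual implementation): collect the comparison outcome for every pair of members and collapse to a literal only when all pairs agree:
```py
from itertools import product

def compare_union(lhs_members, rhs_members, op):
    outcomes = {op(lhs, rhs) for lhs, rhs in product(lhs_members, rhs_members)}
    # One distinct outcome corresponds to a boolean literal; mixed outcomes fall back to `bool`.
    return outcomes.pop() if len(outcomes) == 1 else "bool"

assert compare_union({1, 2}, {2, 3}, lambda a, b: a <= b) is True
assert compare_union({1, 2}, {2, 3}, lambda a, b: a > b) is False
assert compare_union({1, 2}, {2, 3}, lambda a, b: a < b) == "bool"
```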
## Unsupported operations
Make sure we emit a diagnostic if *any* of the possible comparisons is
unsupported. For now, we fall back to `bool` for the result type instead of
trying to infer something more precise from the other (supported) variants:
```py
x = [1, 2] if flag else 1
result = 1 in x # error: "Operator `in` is not supported"
reveal_type(result) # revealed: bool
```

View File

@@ -1,27 +0,0 @@
# Comparison: Unsupported operators
```py
a = 1 in 7 # error: "Operator `in` is not supported for types `Literal[1]` and `Literal[7]`"
reveal_type(a) # revealed: bool
b = 0 not in 10 # error: "Operator `not in` is not supported for types `Literal[0]` and `Literal[10]`"
reveal_type(b) # revealed: bool
c = object() < 5 # error: "Operator `<` is not supported for types `object` and `int`"
reveal_type(c) # revealed: Unknown
# TODO should error, need to check if __lt__ signature is valid for right operand
d = 5 < object()
# TODO: should be `Unknown`
reveal_type(d) # revealed: bool
int_literal_or_str_literal = 1 if flag else "foo"
# error: "Operator `in` is not supported for types `Literal[42]` and `Literal[1]`, in comparing `Literal[42]` with `Literal[1] | Literal["foo"]`"
e = 42 in int_literal_or_str_literal
reveal_type(e) # revealed: bool
# TODO: should error, need to check if __lt__ signature is valid for right operand
# error may be "Operator `<` is not supported for types `int` and `str`, in comparing `tuple[Literal[1], Literal[2]]` with `tuple[Literal[1], Literal["hello"]]`"
f = (1, 2) < (1, "hello")
reveal_type(f) # revealed: @Todo
```

View File

@@ -1,33 +0,0 @@
# If expressions
## Simple if-expression
```py
x = 1 if flag else 2
reveal_type(x) # revealed: Literal[1, 2]
```
## If-expression with walrus operator
```py
y = 0
z = 0
x = (y := 1) if flag else (z := 2)
reveal_type(x) # revealed: Literal[1, 2]
reveal_type(y) # revealed: Literal[0, 1]
reveal_type(z) # revealed: Literal[0, 2]
```
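A runtime illustration (not one of the checked assertions) of why `y` and `z` each keep their initial value in one branch of the union: only the branch that is actually taken executes its walrus assignment:
```py
flag = True
y = 0
z = 0
x = (y := 1) if flag else (z := 2)
# With a truthy `flag`, only `y := 1` ran; `z` still holds its initial value.
assert (x, y, z) == (1, 1, 0)
```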
## Nested if-expression
```py
x = 1 if flag else 2 if flag2 else 3
reveal_type(x) # revealed: Literal[1, 2, 3]
```
## None
```py
x = 1 if flag else None
reveal_type(x) # revealed: Literal[1] | None
```

View File

@@ -1,95 +0,0 @@
# If statements
## Simple if
```py
y = 1
y = 2
if flag:
y = 3
reveal_type(y) # revealed: Literal[2, 3]
```
## Simple if-elif-else
```py
y = 1
y = 2
if flag:
y = 3
elif flag2:
y = 4
else:
r = y
y = 5
s = y
x = y
reveal_type(x) # revealed: Literal[3, 4, 5]
reveal_type(r) # revealed: Unbound | Literal[2]
reveal_type(s) # revealed: Unbound | Literal[5]
```
## Single symbol across if-elif-else
```py
if flag:
y = 1
elif flag2:
y = 2
else:
y = 3
reveal_type(y) # revealed: Literal[1, 2, 3]
```
## if-elif-else without else assignment
```py
y = 0
if flag:
y = 1
elif flag2:
y = 2
else:
pass
reveal_type(y) # revealed: Literal[0, 1, 2]
```
## if-elif-else with intervening assignment
```py
y = 0
if flag:
y = 1
z = 3
elif flag2:
y = 2
else:
pass
reveal_type(y) # revealed: Literal[0, 1, 2]
```
## Nested if statement
```py
y = 0
if flag:
if flag2:
y = 1
reveal_type(y) # revealed: Literal[0, 1]
```
## if-elif without else
```py
y = 1
y = 2
if flag:
y = 3
elif flag2:
y = 4
reveal_type(y) # revealed: Literal[2, 3, 4]
```

View File

@@ -1,39 +0,0 @@
# Pattern matching
## With wildcard
```py
match 0:
case 1:
y = 2
case _:
y = 3
reveal_type(y) # revealed: Literal[2, 3]
```
## Without wildcard
```py
match 0:
case 1:
y = 2
case 2:
y = 3
reveal_type(y) # revealed: Unbound | Literal[2, 3]
```
## Basic match
```py
y = 1
y = 2
match 0:
case 1:
y = 3
case 2:
y = 4
reveal_type(y) # revealed: Literal[2, 3, 4]
```

View File

@@ -1,39 +0,0 @@
# Errors while declaring
## Violates previous assignment
```py
x = 1
x: str # error: [invalid-declaration] "Cannot declare type `str` for inferred type `Literal[1]`"
```
## Incompatible declarations
```py
if flag:
x: str
else:
x: int
x = 1 # error: [conflicting-declarations] "Conflicting declared types for `x`: str, int"
```
## Partial declarations
```py
if flag:
x: int
x = 1 # error: [conflicting-declarations] "Conflicting declared types for `x`: Unknown, int"
```
## Incompatible declarations with bad assignment
```py
if flag:
x: str
else:
x: int
# error: [conflicting-declarations]
# error: [invalid-assignment]
x = b"foo"
```

View File

@@ -1,56 +0,0 @@
# Exception Handling
## Single Exception
```py
import re
try:
x
except NameError as e:
reveal_type(e) # revealed: NameError
except re.error as f:
reveal_type(f) # revealed: error
```
## Unknown type in except handler does not cause spurious diagnostic
```py
from nonexistent_module import foo # error: [unresolved-import]
try:
x
except foo as e:
reveal_type(foo) # revealed: Unknown
reveal_type(e) # revealed: Unknown
```
## Multiple Exceptions in a Tuple
```py
EXCEPTIONS = (AttributeError, TypeError)
try:
x
except (RuntimeError, OSError) as e:
reveal_type(e) # revealed: RuntimeError | OSError
except EXCEPTIONS as f:
reveal_type(f) # revealed: AttributeError | TypeError
```
## Dynamic exception types
```py
def foo(x: type[AttributeError], y: tuple[type[OSError], type[RuntimeError]], z: tuple[type[BaseException], ...]):
try:
w
except x as e:
# TODO: should be `AttributeError`
reveal_type(e) # revealed: @Todo
except y as f:
# TODO: should be `OSError | RuntimeError`
reveal_type(f) # revealed: @Todo
except z as g:
# TODO: should be `BaseException`
reveal_type(g) # revealed: @Todo
```

View File

@@ -1,641 +0,0 @@
# Control flow for exception handlers
These tests assert that we understand the possible "definition states" (which
symbols might or might not be defined) in the various branches of a
`try`/`except`/`else`/`finally` block.
For a full writeup on the semantics of exception handlers,
see [this document][1].
The tests throughout this Markdown document use functions with names starting
with `could_raise_*` to mark definitions that might or might not succeed
(as the function could raise an exception). A type checker must assume that any
arbitrary function call could raise an exception in Python; this is just a
naming convention used in these tests for clarity, and to future-proof the
tests against possible future improvements whereby certain statements or
expressions could potentially be inferred as being incapable of causing an
exception to be raised.
## A single bare `except`
Consider the following `try`/`except` block, with a single bare `except:`.
There are different types for the variable `x` in the two branches of this
block, and we can't determine which branch might have been taken from the
perspective of code following this block. The inferred type after the block's
conclusion is therefore the union of the type at the end of the `try` suite
(`str`) and the type at the end of the `except` suite (`Literal[2]`).
*Within* the `except` suite, we must infer a union of all possible "definition
states" we could have been in at any point during the `try` suite. This is
because control flow could have jumped to the `except` suite without any of the
`try`-suite definitions successfully completing, with only *some* of the
`try`-suite definitions successfully completing, or indeed with *all* of them
successfully completing. The type of `x` at the beginning of the `except` suite
in this example is therefore `Literal[1] | str`, taking into account that we
might have jumped to the `except` suite before the
`x = could_raise_returns_str()` redefinition, but we *also* could have jumped
to the `except` suite *after* that redefinition.
```py path=union_type_inferred.py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except:
reveal_type(x) # revealed: Literal[1] | str
x = 2
reveal_type(x) # revealed: Literal[2]
reveal_type(x) # revealed: str | Literal[2]
```
If `x` has the same type at the end of both branches, however, the branches
unify and `x` is not inferred as having a union type following the
`try`/`except` block:
```py path=branches_unify_to_non_union_type.py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
x = could_raise_returns_str()
except:
x = could_raise_returns_str()
reveal_type(x) # revealed: str
```
## A non-bare `except`
For simple `try`/`except` blocks, an `except TypeError:` handler has the same
control flow semantics as an `except:` handler. An `except TypeError:` handler
will not catch *all* exceptions: if this is the only handler, it opens up the
possibility that an exception might occur that would not be handled. However,
as described in [the document on exception-handling semantics][1], that would
lead to termination of the scope. It's therefore irrelevant to consider this
possibility when it comes to control-flow analysis.
```py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = 2
reveal_type(x) # revealed: Literal[2]
reveal_type(x) # revealed: str | Literal[2]
```
## Multiple `except` branches
If the scope reaches the final `reveal_type` call in this example,
either the `try`-block suite of statements was executed in its entirety,
or exactly one `except` suite was executed in its entirety.
The inferred type of `x` at this point is the union of the types at the end of
the three suites:
- At the end of `try`, `type(x) == str`
- At the end of `except TypeError`, `x == 2`
- At the end of `except ValueError`, `x == 3`
```py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = 2
reveal_type(x) # revealed: Literal[2]
except ValueError:
reveal_type(x) # revealed: Literal[1] | str
x = 3
reveal_type(x) # revealed: Literal[3]
reveal_type(x) # revealed: str | Literal[2, 3]
```
## Exception handlers with `else` branches (but no `finally`)
If we reach the `reveal_type` call at the end of this scope,
either the `try` and `else` suites were both executed in their entireties,
or the `except` suite was executed in its entirety. The type of `x` at this
point is the union of the type at the end of the `else` suite and the type at
the end of the `except` suite:
- At the end of `else`, `x == 3`
- At the end of `except`, `x == 2`
```py path=single_except.py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = 2
reveal_type(x) # revealed: Literal[2]
else:
reveal_type(x) # revealed: str
x = 3
reveal_type(x) # revealed: Literal[3]
reveal_type(x) # revealed: Literal[2, 3]
```
For a block that has multiple `except` branches and an `else` branch, the same
principle applies. In order to reach the final `reveal_type` call,
either exactly one of the `except` suites must have been executed in its
entirety, or the `try` suite and the `else` suite must both have been executed
in their entireties:
```py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = 2
reveal_type(x) # revealed: Literal[2]
except ValueError:
reveal_type(x) # revealed: Literal[1] | str
x = 3
reveal_type(x) # revealed: Literal[3]
else:
reveal_type(x) # revealed: str
x = 4
reveal_type(x) # revealed: Literal[4]
reveal_type(x) # revealed: Literal[2, 3, 4]
```
## Exception handlers with `finally` branches (but no `except` branches)
A `finally` suite is *always* executed. As such, if we reach the `reveal_type`
call at the end of this example, we know that `x` *must* have been reassigned
to `2` during the `finally` suite. The type of `x` at the end of the example is
therefore `Literal[2]`:
```py path=redef_in_finally.py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
finally:
x = 2
reveal_type(x) # revealed: Literal[2]
reveal_type(x) # revealed: Literal[2]
```
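A small runtime sketch of that claim (the `events` list and `might_raise` helper are hypothetical): the `finally` suite runs on both the normal and the exceptional path:
```py
events = []
def might_raise(do_raise: bool) -> None:
    try:
        events.append("try")
        if do_raise:
            raise ValueError("boom")
    finally:
        events.append("finally")

might_raise(False)
try:
    might_raise(True)
except ValueError:
    pass
assert events == ["try", "finally", "try", "finally"]
```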
If `x` was *not* redefined in the `finally` suite, however, things are somewhat
more complicated. If we reach the final `reveal_type` call, we know that the
`try` suite ran to completion, which is not something we can assume while
visiting the `finally` suite itself. This means that there are fewer possible
states at this point than there were inside the `finally` block.
(Our current model does *not* correctly infer the types *inside* `finally`
suites, however; this is still a TODO item for us.)
```py path=no_redef_in_finally.py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
finally:
# TODO: should be Literal[1] | str
reveal_type(x) # revealed: str
reveal_type(x) # revealed: str
```
## Combining an `except` branch with a `finally` branch
As previously stated, we do not yet have accurate inference for types *inside*
`finally` suites. When we do, however, we will have to take account of the
following possibilities inside `finally` suites:
- The `try` suite could have run to completion
- Or we could have jumped from halfway through the `try` suite to an `except`
suite, and the `except` suite ran to completion
- Or we could have jumped from halfway through the `try` suite straight to the
`finally` suite due to an unhandled exception
- Or we could have jumped from halfway through the `try` suite to an
`except` suite, only for an exception raised in the `except` suite to cause
us to jump to the `finally` suite before the `except` suite ran to completion
```py path=redef_in_finally.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_bool() -> bool:
return True
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
finally:
# TODO: should be `Literal[1] | str | bytes | bool`
reveal_type(x) # revealed: str | bool
x = 2
reveal_type(x) # revealed: Literal[2]
reveal_type(x) # revealed: Literal[2]
```
Now for an example without a redefinition in the `finally` suite.
As before, there *should* be fewer possibilities after completion of the
`finally` suite than there were during the `finally` suite itself.
(In some control-flow possibilities, some exceptions were merely *suspended*
during the `finally` suite; these lead to the scope's termination following the
conclusion of the `finally` suite.)
```py path=no_redef_in_finally.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_bool() -> bool:
return True
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
finally:
# TODO: should be `Literal[1] | str | bytes | bool`
reveal_type(x) # revealed: str | bool
reveal_type(x) # revealed: str | bool
```
An example with multiple `except` branches and a `finally` branch:
```py path=multiple_except_branches.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_bool() -> bool:
return True
def could_raise_returns_memoryview() -> memoryview:
return memoryview(b"")
def could_raise_returns_float() -> float:
return 3.14
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
except ValueError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_memoryview()
reveal_type(x) # revealed: memoryview
x = could_raise_returns_float()
reveal_type(x) # revealed: float
finally:
# TODO: should be `Literal[1] | str | bytes | bool | memoryview | float`
reveal_type(x) # revealed: str | bool | float
reveal_type(x) # revealed: str | bool | float
```
## Combining `except`, `else` and `finally` branches
If the exception handler has an `else` branch, we must also take into account
the possibility that control flow could have jumped to the `finally` suite from
partway through the `else` suite due to an exception raised *there*.
```py path=single_except_branch.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_bool() -> bool:
return True
def could_raise_returns_memoryview() -> memoryview:
return memoryview(b"")
def could_raise_returns_float() -> float:
return 3.14
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
else:
reveal_type(x) # revealed: str
x = could_raise_returns_memoryview()
reveal_type(x) # revealed: memoryview
x = could_raise_returns_float()
reveal_type(x) # revealed: float
finally:
# TODO: should be `Literal[1] | str | bytes | bool | memoryview | float`
reveal_type(x) # revealed: bool | float
reveal_type(x) # revealed: bool | float
```
The same again, this time with multiple `except` branches:
```py path=multiple_except_branches.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_bool() -> bool:
return True
def could_raise_returns_memoryview() -> memoryview:
return memoryview(b"")
def could_raise_returns_float() -> float:
return 3.14
def could_raise_returns_range() -> range:
return range(42)
def could_raise_returns_slice() -> slice:
return slice(None)
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
except ValueError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_memoryview()
reveal_type(x) # revealed: memoryview
x = could_raise_returns_float()
reveal_type(x) # revealed: float
else:
reveal_type(x) # revealed: str
x = could_raise_returns_range()
reveal_type(x) # revealed: range
x = could_raise_returns_slice()
reveal_type(x) # revealed: slice
finally:
# TODO: should be `Literal[1] | str | bytes | bool | memoryview | float | range | slice`
reveal_type(x) # revealed: bool | float | slice
reveal_type(x) # revealed: bool | float | slice
```
## Nested `try`/`except` blocks
It would take more advanced analysis than we are currently capable of to
determine that an exception handler always suppresses all exceptions. This
is partly because it is possible for statements in `except`, `else` and
`finally` suites to raise exceptions as well as statements in `try` suites.
This means that if an exception handler is nested inside the `try` statement of
an enclosing exception handler, it should (at least for now) be treated the
same as any other node: as a suite containing statements that could possibly
raise exceptions, which would lead to control flow jumping out of that suite
prior to the suite running to completion.
```py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_bool() -> bool:
return True
def could_raise_returns_memoryview() -> memoryview:
return memoryview(b"")
def could_raise_returns_float() -> float:
return 3.14
def could_raise_returns_range() -> range:
return range(42)
def could_raise_returns_slice() -> slice:
return slice(None)
def could_raise_returns_complex() -> complex:
return 3j
def could_raise_returns_bytearray() -> bytearray:
return bytearray()
class Foo: ...
class Bar: ...
def could_raise_returns_Foo() -> Foo:
return Foo()
def could_raise_returns_Bar() -> Bar:
return Bar()
x = 1
try:
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
except ValueError:
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_memoryview()
reveal_type(x) # revealed: memoryview
x = could_raise_returns_float()
reveal_type(x) # revealed: float
else:
reveal_type(x) # revealed: str
x = could_raise_returns_range()
reveal_type(x) # revealed: range
x = could_raise_returns_slice()
reveal_type(x) # revealed: slice
finally:
# TODO: should be `Literal[1] | str | bytes | bool | memoryview | float | range | slice`
reveal_type(x) # revealed: bool | float | slice
x = 2
reveal_type(x) # revealed: Literal[2]
reveal_type(x) # revealed: Literal[2]
except:
reveal_type(x) # revealed: Literal[1, 2] | str | bytes | bool | memoryview | float | range | slice
x = could_raise_returns_complex()
reveal_type(x) # revealed: complex
x = could_raise_returns_bytearray()
reveal_type(x) # revealed: bytearray
else:
reveal_type(x) # revealed: Literal[2]
x = could_raise_returns_Foo()
reveal_type(x) # revealed: Foo
x = could_raise_returns_Bar()
reveal_type(x) # revealed: Bar
finally:
# TODO: should be `Literal[1, 2] | str | bytes | bool | memoryview | float | range | slice | complex | bytearray | Foo | Bar`
reveal_type(x) # revealed: bytearray | Bar
# Either one `except` branch or the `else`
# must have been taken and completed to get here:
reveal_type(x) # revealed: bytearray | Bar
```
## Nested scopes inside `try` blocks
Shadowing a variable in an inner scope has no effect on type inference of the
variable by that name in the outer scope:
```py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_range() -> range:
return range(42)
def could_raise_returns_bytearray() -> bytearray:
return bytearray()
def could_raise_returns_float() -> float:
return 3.14
x = 1
try:
def foo(param=could_raise_returns_str()):
x = could_raise_returns_str()
try:
reveal_type(x) # revealed: str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
except:
reveal_type(x) # revealed: str | bytes
x = could_raise_returns_bytearray()
reveal_type(x) # revealed: bytearray
x = could_raise_returns_float()
reveal_type(x) # revealed: float
finally:
# TODO: should be `str | bytes | bytearray | float`
reveal_type(x) # revealed: bytes | float
reveal_type(x) # revealed: bytes | float
x = foo
reveal_type(x) # revealed: Literal[foo]
except:
reveal_type(x) # revealed: Literal[1] | Literal[foo]
class Bar:
x = could_raise_returns_range()
reveal_type(x) # revealed: range
x = Bar
reveal_type(x) # revealed: Literal[Bar]
finally:
# TODO: should be `Literal[1] | Literal[foo] | Literal[Bar]`
reveal_type(x) # revealed: Literal[foo] | Literal[Bar]
reveal_type(x) # revealed: Literal[foo] | Literal[Bar]
```
[1]: https://astral-sh.notion.site/Exception-handler-control-flow-11348797e1ca80bb8ce1e9aedbbe439d

View File

@@ -1,30 +0,0 @@
# Except star
## Except\* with BaseException
```py
try:
x
except* BaseException as e:
reveal_type(e) # revealed: BaseExceptionGroup
```
## Except\* with specific exception
```py
try:
x
except* OSError as e:
# TODO(Alex): more precise would be `ExceptionGroup[OSError]`
reveal_type(e) # revealed: BaseExceptionGroup
```
## Except\* with multiple exceptions
```py
try:
x
except* (TypeError, AttributeError) as e:
# TODO(Alex): more precise would be `ExceptionGroup[TypeError | AttributeError]`.
reveal_type(e) # revealed: BaseExceptionGroup
```
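For context, a runtime illustration (requires Python 3.11+; not part of the checked assertions) of why the bound name is a *group*: each `except*` clause receives a sub-group containing only the matching exceptions:
```py
try:
    raise ExceptionGroup("demo", [TypeError("t"), AttributeError("a"), ValueError("v")])
except* (TypeError, AttributeError) as e:
    assert isinstance(e, ExceptionGroup)
    assert {type(exc) for exc in e.exceptions} == {TypeError, AttributeError}
except* ValueError as e:
    assert {type(exc) for exc in e.exceptions} == {ValueError}
```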

View File

@@ -1,110 +0,0 @@
# Expressions
## OR
```py
def foo() -> str:
pass
reveal_type(True or False) # revealed: Literal[True]
reveal_type("x" or "y" or "z") # revealed: Literal["x"]
reveal_type("" or "y" or "z") # revealed: Literal["y"]
reveal_type(False or "z") # revealed: Literal["z"]
reveal_type(False or True) # revealed: Literal[True]
reveal_type(False or False) # revealed: Literal[False]
reveal_type(foo() or False) # revealed: str | Literal[False]
reveal_type(foo() or True) # revealed: str | Literal[True]
```
## AND
```py
def foo() -> str:
pass
reveal_type(True and False) # revealed: Literal[False]
reveal_type(False and True) # revealed: Literal[False]
reveal_type(foo() and False) # revealed: str | Literal[False]
reveal_type(foo() and True) # revealed: str | Literal[True]
reveal_type("x" and "y" and "z") # revealed: Literal["z"]
reveal_type("x" and "y" and "") # revealed: Literal[""]
reveal_type("" and "y") # revealed: Literal[""]
```
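A runtime reminder (illustrative only) of why literal operand types flow through `and` and `or`: both operators return one of their operands unchanged rather than coercing the result to `bool`:
```py
assert ("x" or "y") == "x"   # truthy left operand is returned as-is
assert ("" or "y") == "y"    # falsy left operand defers to the right one
assert ("x" and "y") == "y"  # truthy left operand defers to the right one
assert ("" and "y") == ""    # falsy left operand is returned as-is
```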
## Simple function calls to bool
```py
def returns_bool() -> bool:
return True
if returns_bool():
x = True
else:
x = False
reveal_type(x) # revealed: bool
```
## Complex
```py
def foo() -> str:
pass
reveal_type("x" and "y" or "z") # revealed: Literal["y"]
reveal_type("x" or "y" and "z") # revealed: Literal["x"]
reveal_type("" and "y" or "z") # revealed: Literal["z"]
reveal_type("" or "y" and "z") # revealed: Literal["z"]
reveal_type("x" and "y" or "") # revealed: Literal["y"]
reveal_type("x" or "y" and "") # revealed: Literal["x"]
```
## `bool()` function
## Evaluates to builtin
```py path=a.py
redefined_builtin_bool = bool
def my_bool(x) -> bool:
return True
```
```py
from a import redefined_builtin_bool, my_bool
reveal_type(redefined_builtin_bool(0)) # revealed: Literal[False]
reveal_type(my_bool(0)) # revealed: bool
```
## Truthy values
```py
reveal_type(bool(1)) # revealed: Literal[True]
reveal_type(bool((0,))) # revealed: Literal[True]
reveal_type(bool("NON EMPTY")) # revealed: Literal[True]
reveal_type(bool(True)) # revealed: Literal[True]
def foo(): ...
reveal_type(bool(foo)) # revealed: Literal[True]
```
## Falsy values
```py
reveal_type(bool(0)) # revealed: Literal[False]
reveal_type(bool(())) # revealed: Literal[False]
reveal_type(bool(None)) # revealed: Literal[False]
reveal_type(bool("")) # revealed: Literal[False]
reveal_type(bool(False)) # revealed: Literal[False]
reveal_type(bool()) # revealed: Literal[False]
```
## Ambiguous values
```py
reveal_type(bool([])) # revealed: bool
reveal_type(bool({})) # revealed: bool
reveal_type(bool(set())) # revealed: bool
```
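For context (illustrative only): the empty displays above are falsy at runtime, but their static types (`list`, `dict`, `set`) say nothing about their contents, so the inferred result is `bool` rather than a literal:
```py
assert bool([]) is False
assert bool([1]) is True  # same static type `list`, different truthiness
```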

View File

@@ -1,56 +0,0 @@
# PEP 695 Generics
## Class Declarations
Basic PEP 695 generics
```py
class MyBox[T]:
data: T
box_model_number = 695
def __init__(self, data: T):
self.data = data
# TODO not error (should be subscriptable)
box: MyBox[int] = MyBox(5) # error: [non-subscriptable]
# TODO error differently (str and int don't unify)
wrong_innards: MyBox[int] = MyBox("five") # error: [non-subscriptable]
# TODO reveal int
reveal_type(box.data) # revealed: @Todo
reveal_type(MyBox.box_model_number) # revealed: Literal[695]
```
## Subclassing
```py
class MyBox[T]:
data: T
def __init__(self, data: T):
self.data = data
# TODO not error on the subscripting
class MySecureBox[T](MyBox[T]): ... # error: [non-subscriptable]
secure_box: MySecureBox[int] = MySecureBox(5)
reveal_type(secure_box) # revealed: MySecureBox
# TODO reveal int
reveal_type(secure_box.data) # revealed: @Todo
```
## Cyclical class definition
In type stubs, classes can reference themselves in their base class definitions. For example, in `typeshed`, we have `class str(Sequence[str]): ...`.
This should hold true even with generics at play.
```py path=a.pyi
class Seq[T]: ...
# TODO not error on the subscripting
class S[T](Seq[S]): ... # error: [non-subscriptable]
reveal_type(S) # revealed: Literal[S]
```

View File

@@ -1,27 +0,0 @@
# Structures
## Class import following
```py
from b import C as D
E = D
reveal_type(E) # revealed: Literal[C]
```
```py path=b.py
class C: ...
```
## Module member resolution
```py
import b
D = b.C
reveal_type(D) # revealed: Literal[C]
```
```py path=b.py
class C: ...
```

View File

@@ -1,8 +0,0 @@
# Importing builtin module
```py
import builtins
x = builtins.copyright
reveal_type(x) # revealed: Literal[copyright]
```

View File

@@ -1,80 +0,0 @@
# Conditional imports
## Maybe unbound
```py path=maybe_unbound.py
if flag:
y = 3
x = y
reveal_type(x) # revealed: Unbound | Literal[3]
reveal_type(y) # revealed: Unbound | Literal[3]
```
```py
from maybe_unbound import x, y
reveal_type(x) # revealed: Literal[3]
reveal_type(y) # revealed: Literal[3]
```
## Maybe unbound annotated
```py path=maybe_unbound_annotated.py
if flag:
y: int = 3
x = y
reveal_type(x) # revealed: Unbound | Literal[3]
reveal_type(y) # revealed: Unbound | Literal[3]
```
Importing an annotated name prefers the declared type over the inferred type:
```py
from maybe_unbound_annotated import x, y
reveal_type(x) # revealed: Literal[3]
reveal_type(y) # revealed: int
```
## Reimport
```py path=c.py
def f(): ...
```
```py path=b.py
if flag:
from c import f
else:
def f(): ...
```
```py
from b import f
# TODO: We should disambiguate in such cases, showing `Literal[b.f, c.f]`.
reveal_type(f) # revealed: Literal[f, f]
```
## Reimport with stub declaration
When we have a declared type in one path and only an inferred-from-definition type in the other, we
should still be able to unify those:
```py path=c.pyi
x: int
```
```py path=b.py
if flag:
from c import x
else:
x = 1
```
```py
from b import x
reveal_type(x) # revealed: int
```

View File

@@ -1,57 +0,0 @@
# Unresolved Imports
## Unresolved import statement
```py
import bar # error: "Cannot resolve import `bar`"
reveal_type(bar) # revealed: Unknown
```
## Unresolved import from statement
```py
from bar import baz # error: "Cannot resolve import `bar`"
reveal_type(baz) # revealed: Unknown
```
## Unresolved import from resolved module
```py path=a.py
```
```py
from a import thing # error: "Module `a` has no member `thing`"
reveal_type(thing) # revealed: Unknown
```
## Resolved import of symbol from unresolved import
```py path=a.py
import foo as foo # error: "Cannot resolve import `foo`"
reveal_type(foo) # revealed: Unknown
```
Importing the unresolved import into a second file should not trigger an additional "unresolved
import" violation:
```py
from a import foo
reveal_type(foo) # revealed: Unknown
```
## No implicit shadowing
```py path=b.py
x: int
```
```py
from b import x
x = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]"
```

View File

@@ -1,143 +0,0 @@
# Relative
## Non-existent
```py path=package/__init__.py
```
```py path=package/bar.py
from .foo import X # error: [unresolved-import]
reveal_type(X) # revealed: Unknown
```
## Simple
```py path=package/__init__.py
```
```py path=package/foo.py
X = 42
```
```py path=package/bar.py
from .foo import X
reveal_type(X) # revealed: Literal[42]
```
## Dotted
```py path=package/__init__.py
```
```py path=package/foo/bar/baz.py
X = 42
```
```py path=package/bar.py
from .foo.bar.baz import X
reveal_type(X) # revealed: Literal[42]
```
## Bare to package
```py path=package/__init__.py
X = 42
```
```py path=package/bar.py
from . import X
reveal_type(X) # revealed: Literal[42]
```
## Non-existent + bare to package
```py path=package/bar.py
from . import X # error: [unresolved-import]
reveal_type(X) # revealed: Unknown
```
## Dunder init
```py path=package/__init__.py
from .foo import X
reveal_type(X) # revealed: Literal[42]
```
```py path=package/foo.py
X = 42
```
## Non-existent + dunder init
```py path=package/__init__.py
from .foo import X # error: [unresolved-import]
reveal_type(X) # revealed: Unknown
```
## Long relative import
```py path=package/__init__.py
```
```py path=package/foo.py
X = 42
```
```py path=package/subpackage/subsubpackage/bar.py
from ...foo import X
reveal_type(X) # revealed: Literal[42]
```
## Unbound symbol
```py path=package/__init__.py
```
```py path=package/foo.py
x
```
```py path=package/bar.py
from .foo import x # error: [unresolved-import]
reveal_type(x) # revealed: Unknown
```
## Bare to module
```py path=package/__init__.py
```
```py path=package/foo.py
X = 42
```
```py path=package/bar.py
# TODO: support submodule imports
from . import foo # error: [unresolved-import]
y = foo.X
# TODO: should be `Literal[42]`
reveal_type(y) # revealed: Unknown
```
## Non-existent + bare to module
```py path=package/__init__.py
```
```py path=package/bar.py
# TODO: support submodule imports
from . import foo # error: [unresolved-import]
reveal_type(foo) # revealed: Unknown
```

View File

@@ -1,27 +0,0 @@
# Stubs
## Import from stub declaration
```py
from b import x
y = x
reveal_type(y) # revealed: int
```
```py path=b.pyi
x: int
```
## Import from non-stub with declaration and definition
```py
from b import x
y = x
reveal_type(y) # revealed: int
```
```py path=b.py
x: int = 1
```

View File

@@ -1,6 +0,0 @@
# Boolean literals
```py
reveal_type(True) # revealed: Literal[True]
reveal_type(False) # revealed: Literal[False]
```

View File

@@ -1,10 +0,0 @@
# Bytes literals
## Simple
```py
reveal_type(b"red" b"knot") # revealed: Literal[b"redknot"]
reveal_type(b"hello") # revealed: Literal[b"hello"]
reveal_type(b"world" + b"!") # revealed: Literal[b"world!"]
reveal_type(b"\xff\x00") # revealed: Literal[b"\xff\x00"]
```

View File

@@ -1,7 +0,0 @@
# Dictionaries
## Empty dictionary
```py
reveal_type({}) # revealed: dict
```

View File

@@ -1,7 +0,0 @@
# Lists
## Empty list
```py
reveal_type([]) # revealed: list
```

View File

@@ -1,7 +0,0 @@
# Sets
## Basic set
```py
reveal_type({1, 2}) # revealed: set
```

View File

@@ -1,17 +0,0 @@
# Tuples
## Empty tuple
```py
reveal_type(()) # revealed: tuple[()]
```
## Heterogeneous tuple
```py
reveal_type((1, "a")) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type((1, (2, 3))) # revealed: tuple[Literal[1], tuple[Literal[2], Literal[3]]]
reveal_type(((1, "a"), 2)) # revealed: tuple[tuple[Literal[1], Literal["a"]], Literal[2]]
```

View File

@@ -1,7 +0,0 @@
# Complex literals
## Complex numbers
```py
reveal_type(2j) # revealed: complex
```

View File

@@ -1,33 +0,0 @@
# f-strings
## Expression
```py
x = 0
y = str()
z = False
reveal_type(f"hello") # revealed: Literal["hello"]
reveal_type(f"h {x}") # revealed: Literal["h 0"]
reveal_type("one " f"single " f"literal") # revealed: Literal["one single literal"]
reveal_type("first " f"second({x})" f" third") # revealed: Literal["first second(0) third"]
reveal_type(f"-{y}-") # revealed: str
reveal_type(f"-{y}-" f"--" "--") # revealed: str
reveal_type(f"{z} == {False} is {True}") # revealed: Literal["False == False is True"]
```
## Conversion Flags
```py
string = "hello"
# TODO: should be `Literal["'hello'"]`
reveal_type(f"{string!r}") # revealed: str
```
## Format Specifiers
```py
# TODO: should be `Literal["01"]`
reveal_type(f"{1:02}") # revealed: str
```
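The literal values mentioned in the TODOs match CPython's runtime behaviour, shown here for reference (not part of the checked assertions):
```py
assert f"{'hello'!r}" == "'hello'"  # `!r` applies `repr()` to the value
assert f"{1:02}" == "01"            # the format spec zero-pads to width 2
```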

View File

@@ -1,7 +0,0 @@
# Float literals
## Basic
```py
reveal_type(1.0) # revealed: float
```

View File

@@ -1,56 +0,0 @@
# Integer literals
## Literals
We can infer an integer literal type:
```py
reveal_type(1) # revealed: Literal[1]
```
## Variable
```py
x = 1
reveal_type(x) # revealed: Literal[1]
```
## Overflow
We only track integer literals within the range of an i64:
```py
reveal_type(9223372036854775808) # revealed: int
```
## Big int
We don't support big integer literals; we just infer `int` type instead:
```py
x = 10_000_000_000_000_000_000
reveal_type(x) # revealed: int
```
## Negated
```py
x = -1
y = -1234567890987654321
z = --987
reveal_type(x) # revealed: Literal[-1]
reveal_type(y) # revealed: Literal[-1234567890987654321]
reveal_type(z) # revealed: Literal[987]
```
## Floats
```py
reveal_type(1.0) # revealed: float
```
## Complex
```py
reveal_type(2j) # revealed: complex
```

View File

@@ -1,21 +0,0 @@
# String literals
## Simple
```py
reveal_type("Hello") # revealed: Literal["Hello"]
reveal_type("world") # revealed: Literal["world"]
reveal_type("Guten " + "Tag") # revealed: Literal["Guten Tag"]
reveal_type("bon " + "jour") # revealed: Literal["bon jour"]
```
## Nested Quotes
```py
reveal_type('I say "hello" to you') # revealed: Literal["I say \"hello\" to you"]
# revealed: Literal["You say \"hey\" back"]
reveal_type("You say \"hey\" back") # fmt: skip
reveal_type('No "closure here') # revealed: Literal["No \"closure here"]
```

View File

@@ -1,41 +0,0 @@
# Async
Async `for` loops do not work according to the synchronous iteration protocol.
## Invalid async for loop
```py
async def foo():
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator:
return Iterator()
async for x in Iterator():
pass
# TODO
reveal_type(x) # revealed: Unbound | @Todo
```
## Basic async for loop
```py
async def foo():
class IntAsyncIterator:
async def __anext__(self) -> int:
return 42
class IntAsyncIterable:
def __aiter__(self) -> IntAsyncIterator:
return IntAsyncIterator()
# TODO(Alex): async iterables/iterators!
async for x in IntAsyncIterable():
pass
reveal_type(x) # revealed: Unbound | @Todo
```

View File

@@ -1,133 +0,0 @@
# For loops
## Basic `for` loop
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
for x in IntIterable():
pass
reveal_type(x) # revealed: Unbound | int
```
## With previous definition
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
x = "foo"
for x in IntIterable():
pass
reveal_type(x) # revealed: Literal["foo"] | int
```
## With `else` (no break)
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
for x in IntIterable():
pass
else:
x = "foo"
reveal_type(x) # revealed: Literal["foo"]
```
## May `break`
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
for x in IntIterable():
if x > 5:
break
else:
x = "foo"
reveal_type(x) # revealed: int | Literal["foo"]
```
## With old-style iteration protocol
```py
class OldStyleIterable:
def __getitem__(self, key: int) -> int:
return 42
for x in OldStyleIterable():
pass
reveal_type(x) # revealed: Unbound | int
```
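For reference, a runtime sketch of the legacy protocol (the `Legacy` class is hypothetical): when no `__iter__` is defined, `iter()` falls back to calling `__getitem__` with 0, 1, 2, … until `IndexError` is raised:
```py
class Legacy:
    def __getitem__(self, index: int) -> int:
        if index >= 3:
            raise IndexError(index)
        return index * 10

assert list(Legacy()) == [0, 10, 20]
```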
## With heterogeneous tuple
```py
for x in (1, "a", b"foo"):
pass
reveal_type(x) # revealed: Unbound | Literal[1] | Literal["a"] | Literal[b"foo"]
```
## With non-callable iterator
```py
class NotIterable:
if flag:
__iter__ = 1
else:
__iter__ = None
for x in NotIterable(): # error: "Object of type `NotIterable` is not iterable"
pass
reveal_type(x) # revealed: Unbound | Unknown
```
## Invalid iterable
```py
nonsense = 123
for x in nonsense: # error: "Object of type `Literal[123]` is not iterable"
pass
```
## New over old style iteration protocol
```py
class NotIterable:
def __getitem__(self, key: int) -> int:
return 42
__iter__ = None
for x in NotIterable(): # error: "Object of type `NotIterable` is not iterable"
pass
```

View File

@@ -1,18 +0,0 @@
# Iterators
## Yield must be iterable
```py
class NotIterable: ...
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator: ...
def generator_function():
yield from Iterable()
yield from NotIterable() # error: "Object of type `NotIterable` is not iterable"
```

View File

@@ -1,42 +0,0 @@
# While loops
## Basic While Loop
```py
x = 1
while flag:
x = 2
reveal_type(x) # revealed: Literal[1, 2]
```
## While with else (no break)
```py
x = 1
while flag:
x = 2
else:
reveal_type(x) # revealed: Literal[1, 2]
x = 3
reveal_type(x) # revealed: Literal[3]
```
## While with Else (may break)
```py
x = 1
y = 0
while flag:
x = 2
if flag2:
y = 4
break
else:
y = x
x = 3
reveal_type(x) # revealed: Literal[2, 3]
reveal_type(y) # revealed: Literal[1, 2, 4]
```

View File

@@ -1,40 +0,0 @@
# Narrowing for `is` conditionals
## `is None`
```py
x = None if flag else 1
if x is None:
reveal_type(x) # revealed: None
reveal_type(x) # revealed: None | Literal[1]
```
## `is` for other types
```py
class A: ...
x = A()
y = x if flag else None
if y is x:
reveal_type(y) # revealed: A
reveal_type(y) # revealed: A | None
```
## `is` in chained comparisons
```py
x = True if x_flag else False
y = True if y_flag else False
reveal_type(x) # revealed: bool
reveal_type(y) # revealed: bool
if y is x is False: # Interpreted as `(y is x) and (x is False)`
reveal_type(x) # revealed: Literal[False]
reveal_type(y) # revealed: bool
```

View File

@@ -1,54 +0,0 @@
# Narrowing for `is not` conditionals
## `is not None`
The type guard removes `None` from the union type:
```py
x = None if flag else 1
if x is not None:
reveal_type(x) # revealed: Literal[1]
reveal_type(x) # revealed: None | Literal[1]
```
## `is not` for other singleton types
```py
x = True if flag else False
reveal_type(x) # revealed: bool
if x is not False:
reveal_type(x) # revealed: Literal[True]
```
## `is not` for non-singleton types
Non-singleton types should *not* narrow the type: two instances of a
non-singleton class may occupy different addresses in memory even if
they compare equal.
```py
x = 345
y = 345
if x is not y:
reveal_type(x) # revealed: Literal[345]
```
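A runtime illustration of the point above (the `Point` class is purely hypothetical): two values of a non-singleton type can compare equal while being distinct objects, so `is not` tells us nothing about their values:
```py
class Point:
    def __init__(self, x: int):
        self.x = x
    def __eq__(self, other) -> bool:
        return isinstance(other, Point) and self.x == other.x

p = Point(1)
q = Point(1)
assert p == q      # equal by value...
assert p is not q  # ...but distinct objects
```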
## `is not` in chained comparisons
The type guard removes `False` from the union type of the tested value only.
```py
x = True if x_flag else False
y = True if y_flag else False
reveal_type(x) # revealed: bool
reveal_type(y) # revealed: bool
if y is not x is not False: # Interpreted as `(y is not x) and (x is not False)`
reveal_type(x) # revealed: Literal[True]
reveal_type(y) # revealed: bool
```

View File

@@ -1,25 +0,0 @@
# Narrowing for nested conditionals
## Multiple negative contributions
```py
def int_instance() -> int: ...
x = int_instance()
if x != 1:
if x != 2:
if x != 3:
reveal_type(x) # revealed: int & ~Literal[1] & ~Literal[2] & ~Literal[3]
```
## Multiple negative contributions with simplification
```py
x = 1 if flag1 else 2 if flag2 else 3
if x != 1:
reveal_type(x) # revealed: Literal[2, 3]
if x != 2:
reveal_type(x) # revealed: Literal[3]
```

View File

@@ -1,54 +0,0 @@
# Narrowing for `!=` conditionals
## `x != None`
```py
x = None if flag else 1
if x != None:
reveal_type(x) # revealed: Literal[1]
```
## `!=` for other singleton types
```py
x = True if flag else False
if x != False:
reveal_type(x) # revealed: Literal[True]
```
## `x != y` where `y` is of literal type
```py
x = 1 if flag else 2
if x != 1:
reveal_type(x) # revealed: Literal[2]
```
## `x != y` where `y` is a single-valued type
```py
class A: ...
class B: ...
C = A if flag else B
if C != A:
reveal_type(C) # revealed: Literal[B]
```
## `!=` for non-single-valued types
Only single-valued types should narrow the type:
```py
def int_instance() -> int: ...
x = int_instance() if flag else None
y = int_instance()
if x != y:
reveal_type(x) # revealed: int | None
```

View File

@@ -1,150 +0,0 @@
# Narrowing for `isinstance` checks
Narrowing for `isinstance(object, classinfo)` expressions.
## `classinfo` is a single type
```py
x = 1 if flag else "a"
if isinstance(x, int):
reveal_type(x) # revealed: Literal[1]
if isinstance(x, str):
reveal_type(x) # revealed: Literal["a"]
if isinstance(x, int):
reveal_type(x) # revealed: Never
if isinstance(x, (int, object)):
reveal_type(x) # revealed: Literal[1] | Literal["a"]
```
## `classinfo` is a tuple of types
Note: `isinstance(x, (int, str))` should not be confused with
`isinstance(x, tuple[(int, str)])`. The former is equivalent to
`isinstance(x, int | str)`:
```py
x = 1 if flag else "a"
if isinstance(x, (int, str)):
reveal_type(x) # revealed: Literal[1] | Literal["a"]
if isinstance(x, (int, bytes)):
reveal_type(x) # revealed: Literal[1]
if isinstance(x, (bytes, str)):
reveal_type(x) # revealed: Literal["a"]
# No narrowing should occur if a larger type is also
# one of the possibilities:
if isinstance(x, (int, object)):
reveal_type(x) # revealed: Literal[1] | Literal["a"]
y = 1 if flag1 else "a" if flag2 else b"b"
if isinstance(y, (int, str)):
reveal_type(y) # revealed: Literal[1] | Literal["a"]
if isinstance(y, (int, bytes)):
reveal_type(y) # revealed: Literal[1] | Literal[b"b"]
if isinstance(y, (str, bytes)):
reveal_type(y) # revealed: Literal["a"] | Literal[b"b"]
```
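A runtime reminder (illustrative only): a tuple as the second argument means "an instance of any of these classes", and since Python 3.10 the same check can be spelled with a union type:
```py
assert isinstance(1, (int, str))
assert isinstance("a", (int, str))
assert not isinstance(b"b", (int, str))
assert isinstance(1, int | str)  # equivalent spelling on Python 3.10+
```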
## `classinfo` is a nested tuple of types
```py
x = 1 if flag else "a"
if isinstance(x, (bool, (bytes, int))):
reveal_type(x) # revealed: Literal[1]
```
## Class types
```py
class A: ...
class B: ...
def get_object() -> object: ...
x = get_object()
if isinstance(x, A):
reveal_type(x) # revealed: A
if isinstance(x, B):
reveal_type(x) # revealed: A & B
```
## No narrowing for instances of `builtins.type`
```py
t = type("t", (), {})
# This isn't testing what we want it to test if we infer anything more precise here:
reveal_type(t) # revealed: type
x = 1 if flag else "foo"
if isinstance(x, t):
reveal_type(x) # revealed: Literal[1] | Literal["foo"]
```
## Do not use custom `isinstance` for narrowing
```py
def isinstance(x, t):
return True
x = 1 if flag else "a"
if isinstance(x, int):
reveal_type(x) # revealed: Literal[1] | Literal["a"]
```
## Do support narrowing if `isinstance` is aliased
```py
isinstance_alias = isinstance
x = 1 if flag else "a"
if isinstance_alias(x, int):
reveal_type(x) # revealed: Literal[1]
```
## Do support narrowing if `isinstance` is imported
```py
from builtins import isinstance as imported_isinstance
x = 1 if flag else "a"
if imported_isinstance(x, int):
reveal_type(x) # revealed: Literal[1]
```
## Do not narrow if second argument is not a type
```py
x = 1 if flag else "a"
# TODO: this should cause us to emit a diagnostic during
# type checking
if isinstance(x, "a"):
reveal_type(x) # revealed: Literal[1] | Literal["a"]
# TODO: this should cause us to emit a diagnostic during
# type checking
if isinstance(x, "int"):
reveal_type(x) # revealed: Literal[1] | Literal["a"]
```
## Do not narrow if there are keyword arguments
```py
x = 1 if flag else "a"
# TODO: this should cause us to emit a diagnostic
# (`isinstance` has no `foo` parameter)
if isinstance(x, int, foo="bar"):
reveal_type(x) # revealed: Literal[1] | Literal["a"]
```

View File

@@ -1,16 +0,0 @@
# Narrowing for `match` statements
## Single `match` pattern
```py
x = None if flag else 1
reveal_type(x) # revealed: None | Literal[1]
y = 0
match x:
case None:
y = x
reveal_type(y) # revealed: Literal[0] | None
```

View File

@@ -1,32 +0,0 @@
# Builtin scope
## Conditionally global or builtin
If a builtin name is conditionally defined as a global, a name lookup should union the builtin type
with the conditionally-defined type:
```py
def returns_bool() -> bool:
return True
if returns_bool():
copyright = 1
def f():
reveal_type(copyright) # revealed: Literal[copyright] | Literal[1]
```
## Conditionally global or builtin, with annotation
Same is true if the name is annotated:
```py
def returns_bool() -> bool:
return True
if returns_bool():
copyright: int = 1
def f():
reveal_type(copyright) # revealed: Literal[copyright] | int
```

View File

@@ -1,19 +0,0 @@
# Classes shadowing
## Implicit error
```py
class C: ...
C = 1 # error: "Implicit shadowing of class `C`; annotate to make it explicit if this is intentional"
```
## Explicit
No diagnostic is raised in the case of explicit shadowing:
```py
class C: ...
C: int = 1
```

View File

@@ -1,26 +0,0 @@
# Function shadowing
## Parameter
Parameter `x` of type `str` is shadowed and reassigned with a new `int` value inside the function. No diagnostics should be generated.
```py path=a.py
def f(x: str):
x: int = int(x)
```
## Implicit error
```py path=a.py
def f(): ...
f = 1 # error: "Implicit shadowing of function `f`; annotate to make it explicit if this is intentional"
```
## Explicit shadowing
```py path=a.py
def f(): ...
f: int = 1
```

View File

@@ -1,11 +0,0 @@
# Shadowing declaration
## Shadow after incompatible declarations is OK
```py
if flag:
x: str
else:
x: int
x: bytes = b"foo"
```

View File

@@ -1,11 +0,0 @@
# Class definitions in stubs
## Cyclical class definition
In type stubs, classes can reference themselves in their base class definitions. For example, in `typeshed`, we have `class str(Sequence[str]): ...`.
```py path=a.pyi
class C(C): ...
reveal_type(C) # revealed: Literal[C]
```

View File

@@ -1,34 +0,0 @@
# Bytes subscript
## Simple
```py
b = b"\x00abc\xff"
reveal_type(b[0]) # revealed: Literal[b"\x00"]
reveal_type(b[1]) # revealed: Literal[b"a"]
reveal_type(b[4]) # revealed: Literal[b"\xff"]
reveal_type(b[-1]) # revealed: Literal[b"\xff"]
reveal_type(b[-2]) # revealed: Literal[b"c"]
reveal_type(b[-5]) # revealed: Literal[b"\x00"]
reveal_type(b[False]) # revealed: Literal[b"\x00"]
reveal_type(b[True]) # revealed: Literal[b"a"]
x = b[5] # error: [index-out-of-bounds] "Index 5 is out of bounds for bytes literal `Literal[b"\x00abc\xff"]` with length 5"
reveal_type(x) # revealed: Unknown
y = b[-6] # error: [index-out-of-bounds] "Index -6 is out of bounds for bytes literal `Literal[b"\x00abc\xff"]` with length 5"
reveal_type(y) # revealed: Unknown
```
## Function return
```py
def int_instance() -> int: ...
a = b"abcde"[int_instance()]
# TODO: Support overloads... Should be `bytes`
reveal_type(a) # revealed: @Todo
```

View File

@@ -1,93 +0,0 @@
# Class subscript
## Class getitem unbound
```py
class NotSubscriptable: ...
a = NotSubscriptable[0] # error: "Cannot subscript object of type `Literal[NotSubscriptable]` with no `__class_getitem__` method"
```
## Class getitem
```py
class Identity:
def __class_getitem__(cls, item: int) -> str:
return item
reveal_type(Identity[0]) # revealed: str
```
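For context, a runtime sketch (the `Registry` class is hypothetical): subscripting a class object dispatches to `__class_getitem__`, which is why its return type becomes the type of the subscript expression:
```py
class Registry:
    def __class_getitem__(cls, item: int) -> str:
        return f"{cls.__name__}[{item!r}]"

assert Registry[0] == "Registry[0]"
```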
## Class getitem union
```py
flag = True
class UnionClassGetItem:
if flag:
def __class_getitem__(cls, item: int) -> str:
return item
else:
def __class_getitem__(cls, item: int) -> int:
return item
reveal_type(UnionClassGetItem[0]) # revealed: str | int
```
## Class getitem with class union
```py
flag = True
class A:
def __class_getitem__(cls, item: int) -> str:
return item
class B:
def __class_getitem__(cls, item: int) -> int:
return item
x = A if flag else B
reveal_type(x) # revealed: Literal[A, B]
reveal_type(x[0]) # revealed: str | int
```
## Class getitem with unbound method union
```py
flag = True
if flag:
class Spam:
def __class_getitem__(self, x: int) -> str:
return "foo"
else:
class Spam: ...
# error: [call-non-callable] "Method `__class_getitem__` of type `Literal[__class_getitem__] | Unbound` is not callable on object of type `Literal[Spam, Spam]`"
# revealed: str | Unknown
reveal_type(Spam[42])
```
## TODO: Class getitem non-class union
```py
flag = True
if flag:
class Eggs:
def __class_getitem__(self, x: int) -> str:
return "foo"
else:
Eggs = 1
a = Eggs[42] # error: "Cannot subscript object of type `Literal[Eggs] | Literal[1]` with no `__getitem__` method"
# TODO: should _probably_ emit `str | Unknown`
reveal_type(a) # revealed: Unknown
```

View File

@@ -1,46 +0,0 @@
# Instance subscript
## Getitem unbound
```py
class NotSubscriptable: ...
a = NotSubscriptable()[0] # error: "Cannot subscript object of type `NotSubscriptable` with no `__getitem__` method"
```
## Getitem not callable
```py
class NotSubscriptable:
__getitem__ = None
a = NotSubscriptable()[0] # error: "Method `__getitem__` of type `None` is not callable on object of type `NotSubscriptable`"
```
## Valid getitem
```py
class Identity:
def __getitem__(self, index: int) -> int:
return index
reveal_type(Identity()[0]) # revealed: int
```
## Getitem union
```py
flag = True
class Identity:
if flag:
def __getitem__(self, index: int) -> int:
return index
else:
def __getitem__(self, index: int) -> str:
return str(index)
reveal_type(Identity()[0]) # revealed: int | str
```

View File

@@ -1,38 +0,0 @@
# List subscripts
## Indexing into lists
A list can be indexed into with:
- numbers
- slices
```py
x = [1, 2, 3]
reveal_type(x) # revealed: list
# TODO reveal int
reveal_type(x[0]) # revealed: @Todo
# TODO reveal list
reveal_type(x[0:1]) # revealed: @Todo
# TODO error
reveal_type(x["a"]) # revealed: @Todo
```
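For reference, a minimal plain-Python sketch (not an mdtest assertion) of the runtime behaviour these tests aim to model:
```py
x = [1, 2, 3]

# An integer index yields a single element; a slice yields a new list.
first = x[0]      # 1
prefix = x[0:2]   # [1, 2]

# A key that is neither an integer nor a slice fails at runtime:
# x["a"]  raises  TypeError: list indices must be integers or slices, not str
```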
## Assignments within list assignment
In a subscript assignment, the index expression might itself contain a named expression.
This should also get type checked.
```py
x = [1, 2, 3]
x[0 if (y := 2) else 1] = 5
# TODO error? (indeterminate index type)
x["a" if (y := 2) else 1] = 6
# TODO error (can't index via string)
x["a" if (y := 2) else "b"] = 6
```
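As a minimal runtime sketch (plain Python, assuming standard CPython semantics) of the walrus binding inside a subscript target:
```py
x = [1, 2, 3]

# The named expression binds `y` while the index is evaluated, so both the
# element assignment and the new binding take effect.
x[0 if (y := 2) else 1] = 5

assert y == 2
assert x == [5, 2, 3]
```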

View File

@@ -1,31 +0,0 @@
# Subscript on strings
## Simple
```py
s = "abcde"
reveal_type(s[0]) # revealed: Literal["a"]
reveal_type(s[1]) # revealed: Literal["b"]
reveal_type(s[-1]) # revealed: Literal["e"]
reveal_type(s[-2]) # revealed: Literal["d"]
reveal_type(s[False]) # revealed: Literal["a"]
reveal_type(s[True]) # revealed: Literal["b"]
a = s[8] # error: [index-out-of-bounds] "Index 8 is out of bounds for string `Literal["abcde"]` with length 5"
reveal_type(a) # revealed: Unknown
b = s[-8] # error: [index-out-of-bounds] "Index -8 is out of bounds for string `Literal["abcde"]` with length 5"
reveal_type(b) # revealed: Unknown
```
## Function return
```py
def int_instance() -> int: ...
a = "abcde"[int_instance()]
# TODO: Support overloads... Should be `str`
reveal_type(a) # revealed: @Todo
```

View File

@@ -1,18 +0,0 @@
# Tuple subscripts
## Basic
```py
t = (1, "a", "b")
reveal_type(t[0]) # revealed: Literal[1]
reveal_type(t[1]) # revealed: Literal["a"]
reveal_type(t[-1]) # revealed: Literal["b"]
reveal_type(t[-2]) # revealed: Literal["a"]
a = t[4] # error: [index-out-of-bounds]
reveal_type(a) # revealed: Unknown
b = t[-4] # error: [index-out-of-bounds]
reveal_type(b) # revealed: Unknown
```

View File

@@ -1,29 +0,0 @@
# Unary Operations
```py
class Number:
def __init__(self, value: int):
self.value = 1
def __pos__(self) -> int:
return +self.value
def __neg__(self) -> int:
return -self.value
def __invert__(self) -> Literal[True]:
return True
a = Number()
reveal_type(+a) # revealed: int
reveal_type(-a) # revealed: int
reveal_type(~a) # revealed: @Todo
class NoDunder: ...
b = NoDunder()
+b # error: [unsupported-operator] "Unary operator `+` is unsupported for type `NoDunder`"
-b # error: [unsupported-operator] "Unary operator `-` is unsupported for type `NoDunder`"
~b # error: [unsupported-operator] "Unary operator `~` is unsupported for type `NoDunder`"
```

View File

@@ -1,25 +0,0 @@
# Unary Operations
## Unary Addition
```py
reveal_type(+0) # revealed: Literal[0]
reveal_type(+1) # revealed: Literal[1]
reveal_type(+True) # revealed: Literal[1]
```
## Unary Subtraction
```py
reveal_type(-0) # revealed: Literal[0]
reveal_type(-1) # revealed: Literal[-1]
reveal_type(-True) # revealed: Literal[-1]
```
## Unary Bitwise Inversion
```py
reveal_type(~0) # revealed: Literal[-1]
reveal_type(~1) # revealed: Literal[-2]
reveal_type(~True) # revealed: Literal[-2]
```

View File

@@ -1,112 +0,0 @@
# Unary not
## None
```py
reveal_type(not None) # revealed: Literal[True]
reveal_type(not not None) # revealed: Literal[False]
```
## Function
```py
from typing import reveal_type
def f():
return 1
reveal_type(not f) # revealed: Literal[False]
# TODO Unknown should not be part of the type of typing.reveal_type
# reveal_type(not reveal_type) revealed: Literal[False]
```
## Module
```py
import b
import warnings
reveal_type(not b) # revealed: Literal[False]
reveal_type(not warnings) # revealed: Literal[False]
```
```py path=b.py
y = 1
```
## Union
```py
if flag:
p = 1
q = 3.3
r = "hello"
s = "world"
t = 0
else:
p = "hello"
q = 4
r = ""
s = 0
t = ""
reveal_type(not p) # revealed: Literal[False]
reveal_type(not q) # revealed: bool
reveal_type(not r) # revealed: bool
reveal_type(not s) # revealed: bool
reveal_type(not t) # revealed: Literal[True]
```
## Integer literal
```py
reveal_type(not 1) # revealed: Literal[False]
reveal_type(not 1234567890987654321) # revealed: Literal[False]
reveal_type(not 0) # revealed: Literal[True]
reveal_type(not -1) # revealed: Literal[False]
reveal_type(not -1234567890987654321) # revealed: Literal[False]
reveal_type(not --987) # revealed: Literal[False]
```
## Boolean literal
```py
w = True
reveal_type(w) # revealed: Literal[True]
x = False
reveal_type(x) # revealed: Literal[False]
reveal_type(not w) # revealed: Literal[False]
reveal_type(not x) # revealed: Literal[True]
```
## String literal
```py
reveal_type(not "hello") # revealed: Literal[False]
reveal_type(not "") # revealed: Literal[True]
reveal_type(not "0") # revealed: Literal[False]
reveal_type(not "hello" + "world") # revealed: Literal[False]
```
## Bytes literal
```py
reveal_type(not b"hello") # revealed: Literal[False]
reveal_type(not b"") # revealed: Literal[True]
reveal_type(not b"0") # revealed: Literal[False]
reveal_type(not b"hello" + b"world") # revealed: Literal[False]
```
## Tuple
```py
reveal_type(not (1,)) # revealed: Literal[False]
reveal_type(not (1, 2)) # revealed: Literal[False]
reveal_type(not (1, 2, 3)) # revealed: Literal[False]
reveal_type(not ()) # revealed: Literal[True]
reveal_type(not ("hello",)) # revealed: Literal[False]
reveal_type(not (1, "hello")) # revealed: Literal[False]
```

View File

@@ -1,269 +0,0 @@
# Unpacking
## Tuple
### Simple tuple
```py
(a, b, c) = (1, 2, 3)
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
reveal_type(c) # revealed: Literal[3]
```
### Simple list
```py
[a, b, c] = (1, 2, 3)
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
reveal_type(c) # revealed: Literal[3]
```
### Simple mixed
```py
[a, (b, c), d] = (1, (2, 3), 4)
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
reveal_type(c) # revealed: Literal[3]
reveal_type(d) # revealed: Literal[4]
```
### Multiple assignment
```py
a, b = c = 1, 2
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
reveal_type(c) # revealed: tuple[Literal[1], Literal[2]]
```
### Nested tuple with unpacking
```py
(a, (b, c), d) = (1, (2, 3), 4)
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
reveal_type(c) # revealed: Literal[3]
reveal_type(d) # revealed: Literal[4]
```
### Nested tuple without unpacking
```py
(a, b, c) = (1, (2, 3), 4)
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: tuple[Literal[2], Literal[3]]
reveal_type(c) # revealed: Literal[4]
```
### Uneven unpacking (1)
```py
# TODO: Add diagnostic (there aren't enough values to unpack)
(a, b, c) = (1, 2)
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
reveal_type(c) # revealed: Unknown
```
### Uneven unpacking (2)
```py
# TODO: Add diagnostic (too many values to unpack)
(a, b) = (1, 2, 3)
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
```
### Starred expression (1)
```py
# TODO: Add diagnostic (need more values to unpack)
# TODO: Remove 'not-iterable' diagnostic
[a, *b, c, d] = (1, 2) # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: Literal[1]
# TODO: Should be list[Any] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(c) # revealed: Literal[2]
reveal_type(d) # revealed: Unknown
```
### Starred expression (2)
```py
[a, *b, c] = (1, 2) # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: Literal[1]
# TODO: Should be list[Any] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(c) # revealed: Literal[2]
```
### Starred expression (3)
```py
# TODO: Remove 'not-iterable' diagnostic
[a, *b, c] = (1, 2, 3) # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: Literal[1]
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(c) # revealed: Literal[3]
```
### Starred expression (4)
```py
# TODO: Remove 'not-iterable' diagnostic
[a, *b, c, d] = (1, 2, 3, 4, 5, 6) # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: Literal[1]
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(c) # revealed: Literal[5]
reveal_type(d) # revealed: Literal[6]
```
### Starred expression (5)
```py
# TODO: Remove 'not-iterable' diagnostic
[a, b, *c] = (1, 2, 3, 4) # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(c) # revealed: @Todo
```
### Non-iterable unpacking
TODO: Remove duplicate diagnostics. This happens because, for a sequence-like
assignment target, multiple definitions are created and the inference engine runs
on each of them, which results in duplicate diagnostics.
```py
# error: "Object of type `Literal[1]` is not iterable"
# error: "Object of type `Literal[1]` is not iterable"
a, b = 1
reveal_type(a) # revealed: Unknown
reveal_type(b) # revealed: Unknown
```
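For context, a minimal sketch of the runtime behaviour the `not-iterable` diagnostic models: unpacking a non-iterable value raises `TypeError`.
```py
try:
    a, b = 1  # int provides no iteration protocol
except TypeError as exc:
    print(exc)  # cannot unpack non-iterable int object
```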
### Custom iterator unpacking
```py
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator:
return Iterator()
(a, b) = Iterable()
reveal_type(a) # revealed: int
reveal_type(b) # revealed: int
```
### Custom iterator unpacking nested
```py
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator:
return Iterator()
(a, (b, c), d) = (1, Iterable(), 2)
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: int
reveal_type(c) # revealed: int
reveal_type(d) # revealed: Literal[2]
```
## String
### Simple unpacking
```py
a, b = "ab"
reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: LiteralString
```
### Uneven unpacking (1)
```py
# TODO: Add diagnostic (there aren't enough values to unpack)
a, b, c = "ab"
reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: LiteralString
reveal_type(c) # revealed: Unknown
```
### Uneven unpacking (2)
```py
# TODO: Add diagnostic (too many values to unpack)
a, b = "abc"
reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: LiteralString
```
### Starred expression (1)
```py
# TODO: Add diagnostic (need more values to unpack)
# TODO: Remove 'not-iterable' diagnostic
(a, *b, c, d) = "ab" # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: LiteralString
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(c) # revealed: LiteralString
reveal_type(d) # revealed: Unknown
```
### Starred expression (2)
```py
(a, *b, c) = "ab" # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: LiteralString
# TODO: Should be list[Any] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(c) # revealed: LiteralString
```
### Starred expression (3)
```py
# TODO: Remove 'not-iterable' diagnostic
(a, *b, c) = "abc" # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: LiteralString
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(c) # revealed: LiteralString
```
### Starred expression (4)
```py
# TODO: Remove 'not-iterable' diagnostic
(a, *b, c, d) = "abcdef" # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: LiteralString
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(c) # revealed: LiteralString
reveal_type(d) # revealed: LiteralString
```
### Starred expression (5)
```py
# TODO: Remove 'not-iterable' diagnostic
(a, b, *c) = "abcd" # error: "Object of type `None` is not iterable"
reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: LiteralString
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(c) # revealed: @Todo
```

View File

@@ -31,10 +31,10 @@ impl<T> AstNodeRef<T> {
/// which the `AstNodeRef` belongs.
///
/// ## Safety
///
/// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
/// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
/// the invariant `node belongs to parsed` is upheld.
pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
Self {
_parsed: parsed,

View File

@@ -0,0 +1,16 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;
/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
let builtins_name =
ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
let builtins_file = resolve_module(db, builtins_name)?.file();
Some(global_scope(db, builtins_file))
}

View File

@@ -11,6 +11,7 @@ pub trait Db: SourceDb + Upcast<dyn SourceDb> {
pub(crate) mod tests {
use std::sync::Arc;
use crate::module_resolver::vendored_typeshed_stubs;
use ruff_db::files::{File, Files};
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
@@ -32,7 +33,7 @@ pub(crate) mod tests {
Self {
storage: salsa::Storage::default(),
system: TestSystem::default(),
vendored: red_knot_vendored::file_system().clone(),
vendored: vendored_typeshed_stubs().clone(),
events: std::sync::Arc::default(),
files: Files::default(),
}

View File

@@ -4,12 +4,13 @@ use rustc_hash::FxHasher;
pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, Module};
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
pub use program::{Program, ProgramSettings, SearchPathSettings};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};
pub mod ast_node_ref;
mod builtins;
mod db;
mod module_name;
mod module_resolver;
@@ -18,9 +19,6 @@ mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub(crate) mod site_packages;
mod stdlib;
pub mod types;
mod util;
type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;

Some files were not shown because too many files have changed in this diff.