Compare commits


12 Commits

| Author | SHA1 | Message | Date |
|---|---|---|---|
| Dhruv Manilawala | c76e15a45d | Check context parameters directly from function definition | 2025-01-23 14:13:29 +05:30 |
| Ankit Chaurasia | d0aff2bbff | Add find_parameter | 2025-01-23 12:19:31 +05:45 |
| Ankit Chaurasia | 65db31f0e1 | highlights the parameter itself | 2025-01-22 12:59:39 +05:45 |
| Wei Lee | c2c37b8052 | test: update test fixture | 2025-01-22 10:06:15 +08:00 |
| Ankit Chaurasia | 51613d9107 | Add lint error for removed context variables for get_current_context | 2025-01-22 09:57:26 +08:00 |
| Ankit Chaurasia | f20e70cd62 | remove use of vectors | 2025-01-22 09:57:25 +08:00 |
| Ankit Chaurasia | 4737824345 | fix the logic for lint error message | 2025-01-22 09:57:25 +08:00 |
| Ankit Chaurasia | 1961b76d03 | Refactor functions to use ExprSubscript | 2025-01-22 09:57:25 +08:00 |
| Ankit Chaurasia | 62a1e55705 | refactor rename functions | 2025-01-22 09:57:25 +08:00 |
| Ankit Chaurasia | 8a7ec4c0a3 | Fix PR comments | 2025-01-22 09:57:25 +08:00 |
| Ankit Chaurasia | 9b9540c3cd | Check arguments and function decorated with @task | 2025-01-22 09:57:25 +08:00 |
| Ankit Chaurasia | ccafaf8e30 | Add more checks for removed context variables (add lint rule to show error for removed context variables in airflow) | 2025-01-22 09:57:25 +08:00 |
1447 changed files with 21966 additions and 65046 deletions
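The commit series above builds out a lint check for Airflow context variables that have been removed in newer Airflow releases, flagging them both when they are read from `get_current_context()` and when they are declared directly as parameters of a function decorated with `@task` (per the changelog entries further down, this work falls under the `AIR302` rule). As a rough, hypothetical illustration only, since the exact set of flagged keys and the diagnostic wording live in the rule implementation rather than on this page, the kind of code such a check targets looks like the sketch below, with `execution_date` standing in for a removed context key:

```python
# Hypothetical sketch of code the new Airflow context-variable checks target.
# Assumes Apache Airflow 2.x is installed; `execution_date` stands in for a
# removed/deprecated context key (the rule itself defines the real key list).
from airflow.decorators import task
from airflow.operators.python import get_current_context


@task
def via_parameter(execution_date=None):
    # The deprecated key is requested directly in the function definition,
    # so a lint can highlight the parameter itself.
    return str(execution_date)


@task
def via_lookup():
    # The same key fetched through get_current_context() can also be flagged.
    context = get_current_context()
    return str(context["execution_date"])
```

Switching to a supported key (typically `logical_date` in current Airflow) would be the expected fix in both cases.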

View File

@@ -8,7 +8,3 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]
[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']

View File

@@ -6,10 +6,3 @@ failure-output = "immediate-final"
fail-fast = false
status-level = "skip"
# Mark tests that take longer than 1s as slow.
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "1s", terminate-after = 60 }
# Show slow jobs in the final summary
final-status-level = "slow"

View File

@@ -1,31 +0,0 @@
name: Bug report
description: Report an error or unexpected behavior
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
**Before reporting, please make sure to search through [existing issues](https://github.com/astral-sh/ruff/issues?q=is:issue+is:open+label:bug) (including [closed](https://github.com/astral-sh/ruff/issues?q=is:issue%20state:closed%20label:bug)).**
- type: textarea
attributes:
label: Summary
description: |
A clear and concise description of the bug, including a minimal reproducible example.
Be sure to include the command you invoked (e.g., `ruff check /path/to/file.py --fix`), ideally including the `--isolated` flag and
the current Ruff settings (e.g., relevant sections from your `pyproject.toml`).
If possible, try to include the [playground](https://play.ruff.rs) link that reproduces this issue.
validations:
required: true
- type: input
attributes:
label: Version
description: What version of ruff are you using? (see `ruff version`)
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
validations:
required: false

View File

@@ -1,10 +0,0 @@
name: Rule request
description: Anything related to lint rules (proposing new rules, changes to existing rules, auto-fixes, etc.)
body:
- type: textarea
attributes:
label: Summary
description: |
A clear and concise description of the relevant request. If applicable, please describe the current behavior as well.
validations:
required: true

View File

@@ -1,18 +0,0 @@
name: Question
description: Ask a question about Ruff
labels: ["question"]
body:
- type: textarea
attributes:
label: Question
description: Describe your question in detail.
validations:
required: true
- type: input
attributes:
label: Version
description: What version of ruff are you using? (see `ruff version`)
placeholder: e.g., ruff 0.9.3 (90589372d 2025-01-23)
validations:
required: false

View File

@@ -1,8 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: Documentation
url: https://docs.astral.sh/ruff
about: Please consult the documentation before creating an issue.
- name: Community
url: https://discord.com/invite/astral-sh
about: Join our Discord community to ask questions and collaborate.

View File

@@ -58,12 +58,6 @@
description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
enabled: false,
},
{
// TODO: Remove this once the codebase is upgrade to v4 (https://github.com/astral-sh/ruff/pull/16069)
matchPackageNames: ["tailwindcss"],
matchManagers: ["npm"],
enabled: false,
},
{
// Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
// See: https://github.com/astral-sh/uv/issues/3642

View File

@@ -23,8 +23,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
env:
PACKAGE_NAME: ruff
MODULE_NAME: ruff

View File

@@ -51,7 +51,7 @@ jobs:
env:
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
run: |
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${TAG}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${TAG}" >&2

View File

@@ -1,7 +1,5 @@
name: CI
permissions: {}
on:
push:
branches: [main]
@@ -61,7 +59,6 @@ jobs:
- Cargo.toml
- Cargo.lock
- crates/**
- "!crates/red_knot*/**"
- "!crates/ruff_python_formatter/**"
- "!crates/ruff_formatter/**"
- "!crates/ruff_dev/**"
@@ -119,11 +116,11 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: |
rustup component add clippy
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
- name: "Clippy (wasm)"
@@ -133,13 +130,12 @@ jobs:
name: "cargo test (linux)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -152,6 +148,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -179,13 +176,12 @@ jobs:
name: "cargo test (linux, release)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -198,6 +194,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -208,23 +205,22 @@ jobs:
name: "cargo test (windows)"
runs-on: github-windows-2025-x86_64-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
run: |
@@ -235,13 +231,12 @@ jobs:
name: "cargo test (wasm)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
@@ -252,6 +247,7 @@ jobs:
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- uses: Swatinem/rust-cache@v2
- name: "Test ruff_wasm"
run: |
cd crates/ruff_wasm
@@ -270,19 +266,19 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: "Build"
run: cargo build --release --locked
cargo-build-msrv:
name: "cargo build (msrv)"
runs-on: depot-ubuntu-latest-8
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
@@ -293,7 +289,6 @@ jobs:
with:
file: "Cargo.toml"
field: "workspace.package.rust-version"
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
env:
MSRV: ${{ steps.msrv.outputs.value }}
@@ -308,6 +303,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -325,11 +321,11 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@main
with:
@@ -345,7 +341,7 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
timeout-minutes: 20
env:
FORCE_COLOR: 1
@@ -381,15 +377,15 @@ jobs:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- uses: Swatinem/rust-cache@v2
# Run all code generation scripts, and verify that the current output is
# already checked into git.
- run: python crates/ruff_python_ast/generate.py
@@ -413,7 +409,7 @@ jobs:
- determine_changes
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
@@ -430,7 +426,7 @@ jobs:
name: ruff
path: target/debug
- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v7
name: Download baseline Ruff binary
with:
name: ruff
@@ -547,7 +543,6 @@ jobs:
name: "python package"
runs-on: ubuntu-latest
timeout-minutes: 20
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
steps:
- uses: actions/checkout@v4
with:
@@ -582,9 +577,9 @@ jobs:
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install pre-commit"
run: pip install pre-commit
- name: "Cache pre-commit"
@@ -616,7 +611,6 @@ jobs:
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- uses: Swatinem/rust-cache@v2
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
@@ -626,6 +620,7 @@ jobs:
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@v5
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@@ -649,15 +644,16 @@ jobs:
name: "formatter instabilities and black similarity"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Cache rust"
uses: Swatinem/rust-cache@v2
- name: "Run checks"
run: scripts/formatter_ecosystem_checks.sh
- name: "Github step summary"
@@ -672,7 +668,7 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@v2
env:
@@ -712,9 +708,9 @@ jobs:
just test
benchmarks:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- name: "Checkout Branch"
@@ -722,8 +718,6 @@ jobs:
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
@@ -732,6 +726,8 @@ jobs:
with:
tool: cargo-codspeed
- uses: Swatinem/rust-cache@v2
- name: "Build benchmarks"
run: cargo codspeed build --features codspeed -p ruff_benchmark

View File

@@ -16,7 +16,7 @@ jobs:
permissions:
pull-requests: write
steps:
- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v7
name: Download pull request number
with:
name: pr-number
@@ -32,7 +32,7 @@ jobs:
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
fi
- uses: dawidd6/action-download-artifact@v8
- uses: dawidd6/action-download-artifact@v7
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number

View File

@@ -35,8 +35,6 @@ jobs:
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack"
run: wasm-pack build --target web --out-dir ../../playground/src/pkg crates/ruff_wasm
@@ -51,7 +49,7 @@ jobs:
working-directory: playground
- name: "Deploy to Cloudflare Pages"
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
uses: cloudflare/wrangler-action@v3.14.0
uses: cloudflare/wrangler-action@v3.13.1
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}

View File

@@ -35,8 +35,6 @@ jobs:
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- uses: jetli/wasm-bindgen-action@v0.2.0
- name: "Run wasm-pack build"
run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm

.github/zizmor.yml (vendored, 7 changed lines)
View File

@@ -10,10 +10,3 @@ rules:
ignore:
- build-docker.yml
- publish-playground.yml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...
ignore:
- build-docker.yml
- publish-playground.yml
- publish-docs.yml

.gitignore (vendored, 4 changed lines)
View File

@@ -29,10 +29,6 @@ tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg
# insta
*.rs.pending-snap
###
# Rust.gitignore
###

View File

@@ -5,7 +5,6 @@ exclude: |
.github/workflows/release.yml|
crates/red_knot_vendored/vendor/.*|
crates/red_knot_project/resources/.*|
crates/ruff_benchmark/resources/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff_notebook/resources/.*|
@@ -24,7 +23,7 @@ repos:
- id: validate-pyproject
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.22
rev: 0.7.21
hooks:
- id: mdformat
additional_dependencies:
@@ -37,7 +36,7 @@ repos:
)$
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.44.0
rev: v0.43.0
hooks:
- id: markdownlint-fix
exclude: |
@@ -57,10 +56,10 @@ repos:
.*?invalid(_.+)*_syntax\.md
)$
additional_dependencies:
- black==25.1.0
- black==24.10.0
- repo: https://github.com/crate-ci/typos
rev: v1.29.7
rev: v1.29.4
hooks:
- id: typos
@@ -74,7 +73,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.6
rev: v0.9.2
hooks:
- id: ruff-format
- id: ruff
@@ -84,7 +83,7 @@ repos:
# Prettier
- repo: https://github.com/rbubley/mirrors-prettier
rev: v3.5.1
rev: v3.4.2
hooks:
- id: prettier
types: [yaml]
@@ -92,12 +91,12 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.3.1
rev: v1.1.1
hooks:
- id: zizmor
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.31.1
rev: 0.31.0
hooks:
- id: check-github-workflows

View File

@@ -209,8 +209,8 @@ This change only affects those using Ruff under its default rule set. Users that
### Remove support for emoji identifiers ([#7212](https://github.com/astral-sh/ruff/pull/7212))
Previously, Ruff supported non-standards-compliant emoji identifiers such as `📦 = 1`.
We decided to remove this non-standard language extension. Ruff now reports syntax errors for invalid emoji identifiers in your code, the same as CPython.
Previously, Ruff supported the non-standard compliant emoji identifiers e.g. `📦 = 1`.
We decided to remove this non-standard language extension, and Ruff now reports syntax errors for emoji identifiers in your code, the same as CPython.
### Improved GitLab fingerprints ([#7203](https://github.com/astral-sh/ruff/pull/7203))

View File

@@ -1,287 +1,5 @@
# Changelog
## 0.9.8
### Preview features
- Start detecting version-related syntax errors in the parser ([#16090](https://github.com/astral-sh/ruff/pull/16090))
### Rule changes
- \[`pylint`\] Mark fix unsafe (`PLW1507`) ([#16343](https://github.com/astral-sh/ruff/pull/16343))
- \[`pylint`\] Catch `case np.nan`/`case math.nan` in `match` statements (`PLW0177`) ([#16378](https://github.com/astral-sh/ruff/pull/16378))
- \[`ruff`\] Add more Pydantic models variants to the list of default copy semantics (`RUF012`) ([#16291](https://github.com/astral-sh/ruff/pull/16291))
### Server
- Avoid indexing the project if `configurationPreference` is `editorOnly` ([#16381](https://github.com/astral-sh/ruff/pull/16381))
- Avoid unnecessary info at non-trace server log level ([#16389](https://github.com/astral-sh/ruff/pull/16389))
- Expand `ruff.configuration` to allow inline config ([#16296](https://github.com/astral-sh/ruff/pull/16296))
- Notify users for invalid client settings ([#16361](https://github.com/astral-sh/ruff/pull/16361))
### Configuration
- Add `per-file-target-version` option ([#16257](https://github.com/astral-sh/ruff/pull/16257))
### Bug fixes
- \[`refurb`\] Do not consider docstring(s) (`FURB156`) ([#16391](https://github.com/astral-sh/ruff/pull/16391))
- \[`flake8-self`\] Ignore attribute accesses on instance-like variables (`SLF001`) ([#16149](https://github.com/astral-sh/ruff/pull/16149))
- \[`pylint`\] Fix false positives, add missing methods, and support positional-only parameters (`PLE0302`) ([#16263](https://github.com/astral-sh/ruff/pull/16263))
- \[`flake8-pyi`\] Mark `PYI030` fix unsafe when comments are deleted ([#16322](https://github.com/astral-sh/ruff/pull/16322))
### Documentation
- Fix example for `S611` ([#16316](https://github.com/astral-sh/ruff/pull/16316))
- Normalize inconsistent markdown headings in docstrings ([#16364](https://github.com/astral-sh/ruff/pull/16364))
- Document MSRV policy ([#16384](https://github.com/astral-sh/ruff/pull/16384))
## 0.9.7
### Preview features
- Consider `__new__` methods as special function type for enforcing class method or static method rules ([#13305](https://github.com/astral-sh/ruff/pull/13305))
- \[`airflow`\] Improve the internal logic to differentiate deprecated symbols (`AIR303`) ([#16013](https://github.com/astral-sh/ruff/pull/16013))
- \[`refurb`\] Manual timezone monkeypatching (`FURB162`) ([#16113](https://github.com/astral-sh/ruff/pull/16113))
- \[`ruff`\] Implicit class variable in dataclass (`RUF045`) ([#14349](https://github.com/astral-sh/ruff/pull/14349))
- \[`ruff`\] Skip singleton starred expressions for `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#16083](https://github.com/astral-sh/ruff/pull/16083))
- \[`refurb`\] Check for subclasses includes subscript expressions (`FURB189`) ([#16155](https://github.com/astral-sh/ruff/pull/16155))
### Rule changes
- \[`flake8-debugger`\] Also flag `sys.breakpointhook` and `sys.__breakpointhook__` (`T100`) ([#16191](https://github.com/astral-sh/ruff/pull/16191))
- \[`pycodestyle`\] Exempt `site.addsitedir(...)` calls (`E402`) ([#16251](https://github.com/astral-sh/ruff/pull/16251))
### Formatter
- Fix unstable formatting of trailing end-of-line comments of parenthesized attribute values ([#16187](https://github.com/astral-sh/ruff/pull/16187))
### Server
- Fix handling of requests received after shutdown message ([#16262](https://github.com/astral-sh/ruff/pull/16262))
- Ignore `source.organizeImports.ruff` and `source.fixAll.ruff` code actions for a notebook cell ([#16154](https://github.com/astral-sh/ruff/pull/16154))
- Include document specific debug info for `ruff.printDebugInformation` ([#16215](https://github.com/astral-sh/ruff/pull/16215))
- Update server to return the debug info as string with `ruff.printDebugInformation` ([#16214](https://github.com/astral-sh/ruff/pull/16214))
### CLI
- Warn on invalid `noqa` even when there are no diagnostics ([#16178](https://github.com/astral-sh/ruff/pull/16178))
- Better error messages while loading configuration `extend`s ([#15658](https://github.com/astral-sh/ruff/pull/15658))
### Bug fixes
- \[`flake8-comprehensions`\] Handle trailing comma in `C403` fix ([#16110](https://github.com/astral-sh/ruff/pull/16110))
- \[`flake8-pyi`\] Avoid flagging `custom-typevar-for-self` on metaclass methods (`PYI019`) ([#16141](https://github.com/astral-sh/ruff/pull/16141))
- \[`pydocstyle`\] Handle arguments with the same names as sections (`D417`) ([#16011](https://github.com/astral-sh/ruff/pull/16011))
- \[`pylint`\] Correct ordering of arguments in fix for `if-stmt-min-max` (`PLR1730`) ([#16080](https://github.com/astral-sh/ruff/pull/16080))
- \[`pylint`\] Do not offer fix for raw strings (`PLE251`) ([#16132](https://github.com/astral-sh/ruff/pull/16132))
- \[`pyupgrade`\] Do not upgrade functional `TypedDicts` with private field names to the class-based syntax (`UP013`) ([#16219](https://github.com/astral-sh/ruff/pull/16219))
- \[`pyupgrade`\] Handle micro version numbers correctly (`UP036`) ([#16091](https://github.com/astral-sh/ruff/pull/16091))
- \[`pyupgrade`\] Unwrap unary expressions correctly (`UP018`) ([#15919](https://github.com/astral-sh/ruff/pull/15919))
- \[`refurb`\] Correctly handle lengths of literal strings in `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#16237](https://github.com/astral-sh/ruff/pull/16237))
- \[`ruff`\] Skip `RUF001` diagnostics when visiting string type definitions ([#16122](https://github.com/astral-sh/ruff/pull/16122))
### Documentation
- Add FAQ entry for `source.*` code actions in Notebook ([#16212](https://github.com/astral-sh/ruff/pull/16212))
- Add `SECURITY.md` ([#16224](https://github.com/astral-sh/ruff/pull/16224))
## 0.9.6
### Preview features
- \[`airflow`\] Add `external_task.{ExternalTaskMarker, ExternalTaskSensor}` for `AIR302` ([#16014](https://github.com/astral-sh/ruff/pull/16014))
- \[`flake8-builtins`\] Make strict module name comparison optional (`A005`) ([#15951](https://github.com/astral-sh/ruff/pull/15951))
- \[`flake8-pyi`\] Extend fix to Python \<= 3.9 for `redundant-none-literal` (`PYI061`) ([#16044](https://github.com/astral-sh/ruff/pull/16044))
- \[`pylint`\] Also report when the object isn't a literal (`PLE1310`) ([#15985](https://github.com/astral-sh/ruff/pull/15985))
- \[`ruff`\] Implement `indented-form-feed` (`RUF054`) ([#16049](https://github.com/astral-sh/ruff/pull/16049))
- \[`ruff`\] Skip type definitions for `missing-f-string-syntax` (`RUF027`) ([#16054](https://github.com/astral-sh/ruff/pull/16054))
### Rule changes
- \[`flake8-annotations`\] Correct syntax for `typing.Union` in suggested return type fixes for `ANN20x` rules ([#16025](https://github.com/astral-sh/ruff/pull/16025))
- \[`flake8-builtins`\] Match upstream module name comparison (`A005`) ([#16006](https://github.com/astral-sh/ruff/pull/16006))
- \[`flake8-comprehensions`\] Detect overshadowed `list`/`set`/`dict`, ignore variadics and named expressions (`C417`) ([#15955](https://github.com/astral-sh/ruff/pull/15955))
- \[`flake8-pie`\] Remove following comma correctly when the unpacked dictionary is empty (`PIE800`) ([#16008](https://github.com/astral-sh/ruff/pull/16008))
- \[`flake8-simplify`\] Only trigger `SIM401` on known dictionaries ([#15995](https://github.com/astral-sh/ruff/pull/15995))
- \[`pylint`\] Do not report calls when object type and argument type mismatch, remove custom escape handling logic (`PLE1310`) ([#15984](https://github.com/astral-sh/ruff/pull/15984))
- \[`pyupgrade`\] Comments within parenthesized value ranges should not affect applicability (`UP040`) ([#16027](https://github.com/astral-sh/ruff/pull/16027))
- \[`pyupgrade`\] Don't introduce invalid syntax when upgrading old-style type aliases with parenthesized multiline values (`UP040`) ([#16026](https://github.com/astral-sh/ruff/pull/16026))
- \[`pyupgrade`\] Ensure we do not rename two type parameters to the same name (`UP049`) ([#16038](https://github.com/astral-sh/ruff/pull/16038))
- \[`pyupgrade`\] \[`ruff`\] Don't apply renamings if the new name is shadowed in a scope of one of the references to the binding (`UP049`, `RUF052`) ([#16032](https://github.com/astral-sh/ruff/pull/16032))
- \[`ruff`\] Update `RUF009` to behave similar to `B008` and ignore attributes with immutable types ([#16048](https://github.com/astral-sh/ruff/pull/16048))
### Server
- Root exclusions in the server to project root ([#16043](https://github.com/astral-sh/ruff/pull/16043))
### Bug fixes
- \[`flake8-datetime`\] Ignore `.replace()` calls while looking for `.astimezone` ([#16050](https://github.com/astral-sh/ruff/pull/16050))
- \[`flake8-type-checking`\] Avoid `TC004` false positive where the runtime definition is provided by `__getattr__` ([#16052](https://github.com/astral-sh/ruff/pull/16052))
### Documentation
- Improve `ruff-lsp` migration document ([#16072](https://github.com/astral-sh/ruff/pull/16072))
- Undeprecate `ruff.nativeServer` ([#16039](https://github.com/astral-sh/ruff/pull/16039))
## 0.9.5
### Preview features
- Recognize all symbols named `TYPE_CHECKING` for `in_type_checking_block` ([#15719](https://github.com/astral-sh/ruff/pull/15719))
- \[`flake8-comprehensions`\] Handle builtins at top of file correctly for `unnecessary-dict-comprehension-for-iterable` (`C420`) ([#15837](https://github.com/astral-sh/ruff/pull/15837))
- \[`flake8-logging`\] `.exception()` and `exc_info=` outside exception handlers (`LOG004`, `LOG014`) ([#15799](https://github.com/astral-sh/ruff/pull/15799))
- \[`flake8-pyi`\] Fix incorrect behaviour of `custom-typevar-return-type` preview-mode autofix if `typing` was already imported (`PYI019`) ([#15853](https://github.com/astral-sh/ruff/pull/15853))
- \[`flake8-pyi`\] Fix more complex cases (`PYI019`) ([#15821](https://github.com/astral-sh/ruff/pull/15821))
- \[`flake8-pyi`\] Make `PYI019` autofixable for `.py` files in preview mode as well as stubs ([#15889](https://github.com/astral-sh/ruff/pull/15889))
- \[`flake8-pyi`\] Remove type parameter correctly when it is the last (`PYI019`) ([#15854](https://github.com/astral-sh/ruff/pull/15854))
- \[`pylint`\] Fix missing parens in unsafe fix for `unnecessary-dunder-call` (`PLC2801`) ([#15762](https://github.com/astral-sh/ruff/pull/15762))
- \[`pyupgrade`\] Better messages and diagnostic range (`UP015`) ([#15872](https://github.com/astral-sh/ruff/pull/15872))
- \[`pyupgrade`\] Rename private type parameters in PEP 695 generics (`UP049`) ([#15862](https://github.com/astral-sh/ruff/pull/15862))
- \[`refurb`\] Also report non-name expressions (`FURB169`) ([#15905](https://github.com/astral-sh/ruff/pull/15905))
- \[`refurb`\] Mark fix as unsafe if there are comments (`FURB171`) ([#15832](https://github.com/astral-sh/ruff/pull/15832))
- \[`ruff`\] Classes with mixed type variable style (`RUF053`) ([#15841](https://github.com/astral-sh/ruff/pull/15841))
- \[`airflow`\] `BashOperator` has been moved to `airflow.providers.standard.operators.bash.BashOperator` (`AIR302`) ([#15922](https://github.com/astral-sh/ruff/pull/15922))
- \[`flake8-pyi`\] Add autofix for unused-private-type-var (`PYI018`) ([#15999](https://github.com/astral-sh/ruff/pull/15999))
- \[`flake8-pyi`\] Significantly improve accuracy of `PYI019` if preview mode is enabled ([#15888](https://github.com/astral-sh/ruff/pull/15888))
### Rule changes
- Preserve triple quotes and prefixes for strings ([#15818](https://github.com/astral-sh/ruff/pull/15818))
- \[`flake8-comprehensions`\] Skip when `TypeError` present from too many (kw)args for `C410`,`C411`, and `C418` ([#15838](https://github.com/astral-sh/ruff/pull/15838))
- \[`flake8-pyi`\] Rename `PYI019` and improve its diagnostic message ([#15885](https://github.com/astral-sh/ruff/pull/15885))
- \[`pep8-naming`\] Ignore `@override` methods (`N803`) ([#15954](https://github.com/astral-sh/ruff/pull/15954))
- \[`pyupgrade`\] Reuse replacement logic from `UP046` and `UP047` to preserve more comments (`UP040`) ([#15840](https://github.com/astral-sh/ruff/pull/15840))
- \[`ruff`\] Analyze deferred annotations before enforcing `mutable-(data)class-default` and `function-call-in-dataclass-default-argument` (`RUF008`,`RUF009`,`RUF012`) ([#15921](https://github.com/astral-sh/ruff/pull/15921))
- \[`pycodestyle`\] Exempt `sys.path += ...` calls (`E402`) ([#15980](https://github.com/astral-sh/ruff/pull/15980))
### Configuration
- Config error only when `flake8-import-conventions` alias conflicts with `isort.required-imports` bound name ([#15918](https://github.com/astral-sh/ruff/pull/15918))
- Workaround Even Better TOML crash related to `allOf` ([#15992](https://github.com/astral-sh/ruff/pull/15992))
### Bug fixes
- \[`flake8-comprehensions`\] Unnecessary `list` comprehension (rewrite as a `set` comprehension) (`C403`) - Handle extraneous parentheses around list comprehension ([#15877](https://github.com/astral-sh/ruff/pull/15877))
- \[`flake8-comprehensions`\] Handle trailing comma in fixes for `unnecessary-generator-list/set` (`C400`,`C401`) ([#15929](https://github.com/astral-sh/ruff/pull/15929))
- \[`flake8-pyi`\] Fix several correctness issues with `custom-type-var-return-type` (`PYI019`) ([#15851](https://github.com/astral-sh/ruff/pull/15851))
- \[`pep8-naming`\] Consider any number of leading underscore for `N801` ([#15988](https://github.com/astral-sh/ruff/pull/15988))
- \[`pyflakes`\] Visit forward annotations in `TypeAliasType` as types (`F401`) ([#15829](https://github.com/astral-sh/ruff/pull/15829))
- \[`pylint`\] Correct min/max auto-fix and suggestion for (`PL1730`) ([#15930](https://github.com/astral-sh/ruff/pull/15930))
- \[`refurb`\] Handle unparenthesized tuples correctly (`FURB122`, `FURB142`) ([#15953](https://github.com/astral-sh/ruff/pull/15953))
- \[`refurb`\] Avoid `None | None` as well as better detection and fix (`FURB168`) ([#15779](https://github.com/astral-sh/ruff/pull/15779))
### Documentation
- Add deprecation warning for `ruff-lsp` related settings ([#15850](https://github.com/astral-sh/ruff/pull/15850))
- Docs (`linter.md`): clarify that Python files are always searched for in subdirectories ([#15882](https://github.com/astral-sh/ruff/pull/15882))
- Fix a typo in `non_pep695_generic_class.rs` ([#15946](https://github.com/astral-sh/ruff/pull/15946))
- Improve Docs: Pylint subcategories' codes ([#15909](https://github.com/astral-sh/ruff/pull/15909))
- Remove non-existing `lint.extendIgnore` editor setting ([#15844](https://github.com/astral-sh/ruff/pull/15844))
- Update black deviations ([#15928](https://github.com/astral-sh/ruff/pull/15928))
- Mention `UP049` in `UP046` and `UP047`, add `See also` section to `UP040` ([#15956](https://github.com/astral-sh/ruff/pull/15956))
- Add instance variable examples to `RUF012` ([#15982](https://github.com/astral-sh/ruff/pull/15982))
- Explain precedence for `ignore` and `select` config ([#15883](https://github.com/astral-sh/ruff/pull/15883))
## 0.9.4
### Preview features
- \[`airflow`\] Extend airflow context parameter check for `BaseOperator.execute` (`AIR302`) ([#15713](https://github.com/astral-sh/ruff/pull/15713))
- \[`airflow`\] Update `AIR302` to check for deprecated context keys ([#15144](https://github.com/astral-sh/ruff/pull/15144))
- \[`flake8-bandit`\] Permit suspicious imports within stub files (`S4`) ([#15822](https://github.com/astral-sh/ruff/pull/15822))
- \[`pylint`\] Do not trigger `PLR6201` on empty collections ([#15732](https://github.com/astral-sh/ruff/pull/15732))
- \[`refurb`\] Do not emit diagnostic when loop variables are used outside loop body (`FURB122`) ([#15757](https://github.com/astral-sh/ruff/pull/15757))
- \[`ruff`\] Add support for more `re` patterns (`RUF055`) ([#15764](https://github.com/astral-sh/ruff/pull/15764))
- \[`ruff`\] Check for shadowed `map` before suggesting fix (`RUF058`) ([#15790](https://github.com/astral-sh/ruff/pull/15790))
- \[`ruff`\] Do not emit diagnostic when all arguments to `zip()` are variadic (`RUF058`) ([#15744](https://github.com/astral-sh/ruff/pull/15744))
- \[`ruff`\] Parenthesize fix when argument spans multiple lines for `unnecessary-round` (`RUF057`) ([#15703](https://github.com/astral-sh/ruff/pull/15703))
### Rule changes
- Preserve quote style in generated code ([#15726](https://github.com/astral-sh/ruff/pull/15726), [#15778](https://github.com/astral-sh/ruff/pull/15778), [#15794](https://github.com/astral-sh/ruff/pull/15794))
- \[`flake8-bugbear`\] Exempt `NewType` calls where the original type is immutable (`B008`) ([#15765](https://github.com/astral-sh/ruff/pull/15765))
- \[`pylint`\] Honor banned top-level imports by `TID253` in `PLC0415`. ([#15628](https://github.com/astral-sh/ruff/pull/15628))
- \[`pyupgrade`\] Ignore `is_typeddict` and `TypedDict` for `deprecated-import` (`UP035`) ([#15800](https://github.com/astral-sh/ruff/pull/15800))
### CLI
- Fix formatter warning message for `flake8-quotes` option ([#15788](https://github.com/astral-sh/ruff/pull/15788))
- Implement tab autocomplete for `ruff config` ([#15603](https://github.com/astral-sh/ruff/pull/15603))
### Bug fixes
- \[`flake8-comprehensions`\] Do not emit `unnecessary-map` diagnostic when lambda has different arity (`C417`) ([#15802](https://github.com/astral-sh/ruff/pull/15802))
- \[`flake8-comprehensions`\] Parenthesize `sorted` when needed for `unnecessary-call-around-sorted` (`C413`) ([#15825](https://github.com/astral-sh/ruff/pull/15825))
- \[`pyupgrade`\] Handle end-of-line comments for `quoted-annotation` (`UP037`) ([#15824](https://github.com/astral-sh/ruff/pull/15824))
### Documentation
- Add missing config docstrings ([#15803](https://github.com/astral-sh/ruff/pull/15803))
- Add references to `trio.run_process` and `anyio.run_process` ([#15761](https://github.com/astral-sh/ruff/pull/15761))
- Use `uv init --lib` in tutorial ([#15718](https://github.com/astral-sh/ruff/pull/15718))
## 0.9.3
### Preview features
- \[`airflow`\] Argument `fail_stop` in DAG has been renamed as `fail_fast` (`AIR302`) ([#15633](https://github.com/astral-sh/ruff/pull/15633))
- \[`airflow`\] Extend `AIR303` with more symbols ([#15611](https://github.com/astral-sh/ruff/pull/15611))
- \[`flake8-bandit`\] Report all references to suspicious functions (`S3`) ([#15541](https://github.com/astral-sh/ruff/pull/15541))
- \[`flake8-pytest-style`\] Do not emit diagnostics for empty `for` loops (`PT012`, `PT031`) ([#15542](https://github.com/astral-sh/ruff/pull/15542))
- \[`flake8-simplify`\] Avoid double negations (`SIM103`) ([#15562](https://github.com/astral-sh/ruff/pull/15562))
- \[`pyflakes`\] Fix infinite loop with unused local import in `__init__.py` (`F401`) ([#15517](https://github.com/astral-sh/ruff/pull/15517))
- \[`pylint`\] Do not report methods with only one `EM101`-compatible `raise` (`PLR6301`) ([#15507](https://github.com/astral-sh/ruff/pull/15507))
- \[`pylint`\] Implement `redefined-slots-in-subclass` (`W0244`) ([#9640](https://github.com/astral-sh/ruff/pull/9640))
- \[`pyupgrade`\] Add rules to use PEP 695 generics in classes and functions (`UP046`, `UP047`) ([#15565](https://github.com/astral-sh/ruff/pull/15565), [#15659](https://github.com/astral-sh/ruff/pull/15659))
- \[`refurb`\] Implement `for-loop-writes` (`FURB122`) ([#10630](https://github.com/astral-sh/ruff/pull/10630))
- \[`ruff`\] Implement `needless-else` clause (`RUF047`) ([#15051](https://github.com/astral-sh/ruff/pull/15051))
- \[`ruff`\] Implement `starmap-zip` (`RUF058`) ([#15483](https://github.com/astral-sh/ruff/pull/15483))
### Rule changes
- \[`flake8-bugbear`\] Do not raise error if keyword argument is present and target-python version is less or equals than 3.9 (`B903`) ([#15549](https://github.com/astral-sh/ruff/pull/15549))
- \[`flake8-comprehensions`\] strip parentheses around generators in `unnecessary-generator-set` (`C401`) ([#15553](https://github.com/astral-sh/ruff/pull/15553))
- \[`flake8-pytest-style`\] Rewrite references to `.exception` (`PT027`) ([#15680](https://github.com/astral-sh/ruff/pull/15680))
- \[`flake8-simplify`\] Mark fixes as unsafe (`SIM201`, `SIM202`) ([#15626](https://github.com/astral-sh/ruff/pull/15626))
- \[`flake8-type-checking`\] Fix some safe fixes being labeled unsafe (`TC006`,`TC008`) ([#15638](https://github.com/astral-sh/ruff/pull/15638))
- \[`isort`\] Omit trailing whitespace in `unsorted-imports` (`I001`) ([#15518](https://github.com/astral-sh/ruff/pull/15518))
- \[`pydoclint`\] Allow ignoring one line docstrings for `DOC` rules ([#13302](https://github.com/astral-sh/ruff/pull/13302))
- \[`pyflakes`\] Apply redefinition fixes by source code order (`F811`) ([#15575](https://github.com/astral-sh/ruff/pull/15575))
- \[`pyflakes`\] Avoid removing too many imports in `redefined-while-unused` (`F811`) ([#15585](https://github.com/astral-sh/ruff/pull/15585))
- \[`pyflakes`\] Group redefinition fixes by source statement (`F811`) ([#15574](https://github.com/astral-sh/ruff/pull/15574))
- \[`pylint`\] Include name of base class in message for `redefined-slots-in-subclass` (`W0244`) ([#15559](https://github.com/astral-sh/ruff/pull/15559))
- \[`ruff`\] Update fix for `RUF055` to use `var == value` ([#15605](https://github.com/astral-sh/ruff/pull/15605))
### Formatter
- Fix bracket spacing for single-element tuples in f-string expressions ([#15537](https://github.com/astral-sh/ruff/pull/15537))
- Fix unstable f-string formatting for expressions containing a trailing comma ([#15545](https://github.com/astral-sh/ruff/pull/15545))
### Performance
- Avoid quadratic membership check in import fixes ([#15576](https://github.com/astral-sh/ruff/pull/15576))
### Server
- Allow `unsafe-fixes` settings for code actions ([#15666](https://github.com/astral-sh/ruff/pull/15666))
### Bug fixes
- \[`flake8-bandit`\] Add missing single-line/dotall regex flag (`S608`) ([#15654](https://github.com/astral-sh/ruff/pull/15654))
- \[`flake8-import-conventions`\] Fix infinite loop between `ICN001` and `I002` (`ICN001`) ([#15480](https://github.com/astral-sh/ruff/pull/15480))
- \[`flake8-simplify`\] Do not emit diagnostics for expressions inside string type annotations (`SIM222`, `SIM223`) ([#15405](https://github.com/astral-sh/ruff/pull/15405))
- \[`pyflakes`\] Treat arguments passed to the `default=` parameter of `TypeVar` as type expressions (`F821`) ([#15679](https://github.com/astral-sh/ruff/pull/15679))
- \[`pyupgrade`\] Avoid syntax error when the iterable is a non-parenthesized tuple (`UP028`) ([#15543](https://github.com/astral-sh/ruff/pull/15543))
- \[`ruff`\] Exempt `NewType` calls where the original type is immutable (`RUF009`) ([#15588](https://github.com/astral-sh/ruff/pull/15588))
- Preserve raw string prefix and escapes in all codegen fixes ([#15694](https://github.com/astral-sh/ruff/pull/15694))
### Documentation
- Generate documentation redirects for lowercase rule codes ([#15564](https://github.com/astral-sh/ruff/pull/15564))
- `TRY300`: Add some extra notes on not catching exceptions you didn't expect ([#15036](https://github.com/astral-sh/ruff/pull/15036))
## 0.9.2
### Preview features

View File

@@ -526,7 +526,7 @@ cargo benchmark
#### Benchmark-driven Development
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
`--save-baseline=<name>` to store an initial baseline benchmark (e.g., on `main`) and then use
`--save-baseline=<name>` to store an initial baseline benchmark (e.g. on `main`) and then use
`--benchmark=<name>` to compare against that benchmark. Criterion will print a message telling you
if the benchmark improved/regressed compared to that baseline.
@@ -678,9 +678,9 @@ utils with it:
23 Newline 24
```
- `cargo dev print-cst <file>`: Print the CST of a Python file using
- `cargo dev print-cst <file>`: Print the CST of a python file using
[LibCST](https://github.com/Instagram/LibCST), which is used in addition to the RustPython parser
in Ruff. For example, for `if True: pass # comment`, everything, including the whitespace, is represented:
in Ruff. E.g. for `if True: pass # comment` everything including the whitespace is represented:
```text
Module {

Cargo.lock (generated, 1152 changed lines)

File diff suppressed because it is too large.

View File

@@ -4,7 +4,7 @@ resolver = "2"
[workspace.package]
edition = "2021"
rust-version = "1.83"
rust-version = "1.80"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -74,13 +74,11 @@ env_logger = { version = "0.11.0" }
etcetera = { version = "0.8.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
globwalk = { version = "0.9.1" }
hashbrown = { version = "0.15.0", default-features = false, features = [
"raw-entry",
"equivalent",
"inline-more",
] }
ignore = { version = "0.4.22" }
@@ -118,12 +116,12 @@ proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.13.4" }
quick-junit = { version = "0.5.0" }
quote = { version = "1.0.23" }
rand = { version = "0.9.0" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "99be5d9917c3dd88e19735a82ef6bf39ba84bd7e" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "88a1d7774d78f048fbd77d40abca9ebd729fd1f0" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -136,15 +134,10 @@ serde_with = { version = "3.6.0", default-features = false, features = [
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
"diff",
"term-svg",
"cmd",
"examples",
] }
snapbox = { version = "0.6.0", features = ["diff", "term-svg", "cmd", "examples"] }
static_assertions = "1.1.0"
strum = { version = "0.27.0", features = ["strum_macros"] }
strum_macros = { version = "0.27.0" }
strum = { version = "0.26.0", features = ["strum_macros"] }
strum_macros = { version = "0.26.0" }
syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
@@ -166,6 +159,7 @@ unicode-ident = { version = "1.0.12" }
unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
"v4",
@@ -179,10 +173,6 @@ wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
[workspace.metadata.cargo-shear]
ignored = ["getrandom"]
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
@@ -315,11 +305,7 @@ local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Post-announce jobs to run in CI
post-announce-jobs = [
"./notify-dependents",
"./publish-docs",
"./publish-playground",
]
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program

View File

@@ -149,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.9.8/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.9.8/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.9.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.9.2/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -183,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.9.8
rev: v0.9.2
hooks:
# Run the linter.
- id: ruff
@@ -452,7 +452,6 @@ Ruff is used by a number of major open-source projects and companies, including:
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
- [JAX](https://github.com/jax-ml/jax)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Kraken Tech](https://kraken.tech/)
- [LangChain](https://github.com/hwchase17/langchain)

View File

@@ -1,15 +0,0 @@
# Security policy
## Reporting a vulnerability
If you have found a possible vulnerability, please email `security at astral dot sh`.
## Bug bounties
While we sincerely appreciate and encourage reports of suspected security problems, please note that
Astral does not currently run any bug bounty programs.
## Vulnerability disclosures
Critical vulnerabilities will be disclosed via GitHub's
[security advisory](https://github.com/astral-sh/ruff/security) system.

View File

@@ -16,10 +16,8 @@ red_knot_python_semantic = { workspace = true }
red_knot_project = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }
anyhow = { workspace = true }
argfile = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
@@ -32,11 +30,9 @@ tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing-tree = { workspace = true }
wild = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_trivia = { workspace = true }
insta = { workspace = true, features = ["filters"] }
insta-cmd = { workspace = true }

View File

@@ -1,25 +0,0 @@
# Red Knot
Red Knot is an extremely fast type checker.
Currently, it is a work-in-progress and not ready for user testing.
Red Knot is designed to prioritize good type inference, even in unannotated code,
and aims to avoid false positives.
While Red Knot will produce similar results to mypy and pyright on many codebases,
100% compatibility with these tools is a non-goal.
On some codebases, Red Knot's design decisions lead to different outcomes
than you would get from running one of these more established tools.
## Contributing
Core type checking tests are written as Markdown code blocks.
They can be found in [`red_knot_python_semantic/resources/mdtest`][resources-mdtest].
See [`red_knot_test/README.md`][mdtest-readme] for more information
on the test framework itself.
The list of open issues can be found [here][open-issues].
[mdtest-readme]: ../red_knot_test/README.md
[open-issues]: https://github.com/astral-sh/ruff/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20label%3Ared-knot
[resources-mdtest]: ../red_knot_python_semantic/resources/mdtest

View File

@@ -1,104 +0,0 @@
use std::{
fs,
path::{Path, PathBuf},
process::Command,
};
fn main() {
// The workspace root directory is not available without walking up the tree
// https://github.com/rust-lang/cargo/issues/3946
let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
.join("..")
.join("..");
commit_info(&workspace_root);
#[allow(clippy::disallowed_methods)]
let target = std::env::var("TARGET").unwrap();
println!("cargo::rustc-env=RUST_HOST_TARGET={target}");
}
fn commit_info(workspace_root: &Path) {
// If not in a git repository, do not attempt to retrieve commit information
let git_dir = workspace_root.join(".git");
if !git_dir.exists() {
return;
}
if let Some(git_head_path) = git_head(&git_dir) {
println!("cargo:rerun-if-changed={}", git_head_path.display());
let git_head_contents = fs::read_to_string(git_head_path);
if let Ok(git_head_contents) = git_head_contents {
// The contents are either a commit or a reference in the following formats
// - "<commit>" when the head is detached
// - "ref <ref>" when working on a branch
// If a commit, checking if the HEAD file has changed is sufficient
// If a ref, we need to add the head file for that ref to rebuild on commit
let mut git_ref_parts = git_head_contents.split_whitespace();
git_ref_parts.next();
if let Some(git_ref) = git_ref_parts.next() {
let git_ref_path = git_dir.join(git_ref);
println!("cargo:rerun-if-changed={}", git_ref_path.display());
}
}
}
let output = match Command::new("git")
.arg("log")
.arg("-1")
.arg("--date=short")
.arg("--abbrev=9")
.arg("--format=%H %h %cd %(describe)")
.output()
{
Ok(output) if output.status.success() => output,
_ => return,
};
let stdout = String::from_utf8(output.stdout).unwrap();
let mut parts = stdout.split_whitespace();
let mut next = || parts.next().unwrap();
let _commit_hash = next();
println!("cargo::rustc-env=RED_KNOT_COMMIT_SHORT_HASH={}", next());
println!("cargo::rustc-env=RED_KNOT_COMMIT_DATE={}", next());
// Describe can fail for some commits
// https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
if let Some(describe) = parts.next() {
let mut describe_parts = describe.split('-');
let _last_tag = describe_parts.next().unwrap();
// If this is the tagged commit, this component will be missing
println!(
"cargo::rustc-env=RED_KNOT_LAST_TAG_DISTANCE={}",
describe_parts.next().unwrap_or("0")
);
}
}
fn git_head(git_dir: &Path) -> Option<PathBuf> {
// The typical case is a standard git repository.
let git_head_path = git_dir.join("HEAD");
if git_head_path.exists() {
return Some(git_head_path);
}
if !git_dir.is_file() {
return None;
}
// If `.git/HEAD` doesn't exist and `.git` is actually a file,
// then let's try to attempt to read it as a worktree. If it's
// a worktree, then its contents will look like this, e.g.:
//
// gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
//
// And the HEAD file we want to watch will be at:
//
// /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
let contents = fs::read_to_string(git_dir).ok()?;
let (label, worktree_path) = contents.split_once(':')?;
if label != "gitdir" {
return None;
}
let worktree_path = worktree_path.trim();
Some(PathBuf::from(worktree_path))
}

View File

@@ -1,206 +0,0 @@
use crate::logging::Verbosity;
use crate::python_version::PythonVersion;
use clap::{ArgAction, ArgMatches, Error, Parser};
use red_knot_project::metadata::options::{EnvironmentOptions, Options, TerminalOptions};
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
use red_knot_python_semantic::lint;
use ruff_db::system::SystemPathBuf;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
)]
#[command(version)]
pub(crate) struct Args {
#[command(subcommand)]
pub(crate) command: Command,
}
#[derive(Debug, clap::Subcommand)]
pub(crate) enum Command {
/// Check a project for type errors.
Check(CheckCommand),
/// Start the language server
Server,
/// Display Red Knot's version
Version,
}
#[derive(Debug, Parser)]
pub(crate) struct CheckCommand {
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
///
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
#[arg(long, value_name = "PROJECT")]
pub(crate) project: Option<SystemPathBuf>,
/// Path to the Python installation from which Red Knot resolves type information and third-party dependencies.
///
/// Red Knot will search in the path's `site-packages` directories for type information and
/// third-party imports.
///
/// This option is commonly used to specify the path to a virtual environment.
#[arg(long, value_name = "PATH")]
pub(crate) python: Option<SystemPathBuf>,
/// Custom directory to use for stdlib typeshed stubs.
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
pub(crate) typeshed: Option<SystemPathBuf>,
/// Additional path to use as a module-resolution source (can be passed multiple times).
#[arg(long, value_name = "PATH")]
pub(crate) extra_search_path: Option<Vec<SystemPathBuf>>,
/// Python version to assume when resolving types.
#[arg(long, value_name = "VERSION", alias = "target-version")]
pub(crate) python_version: Option<PythonVersion>,
#[clap(flatten)]
pub(crate) verbosity: Verbosity,
#[clap(flatten)]
pub(crate) rules: RulesArg,
/// Use exit code 1 if there are any warning-level diagnostics.
#[arg(long, conflicts_with = "exit_zero", default_missing_value = "true", num_args=0..1)]
pub(crate) error_on_warning: Option<bool>,
/// Always use exit code 0, even when there are error-level diagnostics.
#[arg(long)]
pub(crate) exit_zero: bool,
/// Run in watch mode by re-running whenever files change.
#[arg(long, short = 'W')]
pub(crate) watch: bool,
}
impl CheckCommand {
pub(crate) fn into_options(self) -> Options {
let rules = if self.rules.is_empty() {
None
} else {
Some(
self.rules
.into_iter()
.map(|(rule, level)| (RangedValue::cli(rule), RangedValue::cli(level)))
.collect(),
)
};
Options {
environment: Some(EnvironmentOptions {
python_version: self
.python_version
.map(|version| RangedValue::cli(version.into())),
python: self.python.map(RelativePathBuf::cli),
typeshed: self.typeshed.map(RelativePathBuf::cli),
extra_paths: self.extra_search_path.map(|extra_search_paths| {
extra_search_paths
.into_iter()
.map(RelativePathBuf::cli)
.collect()
}),
..EnvironmentOptions::default()
}),
terminal: Some(TerminalOptions {
error_on_warning: self.error_on_warning,
}),
rules,
..Default::default()
}
}
}
/// A list of rules to enable or disable with a given severity.
///
/// This type is used to parse the `--error`, `--warn`, and `--ignore` arguments
/// while preserving the order in which they were specified (later arguments override earlier severities).
#[derive(Debug)]
pub(crate) struct RulesArg(Vec<(String, lint::Level)>);
impl RulesArg {
fn is_empty(&self) -> bool {
self.0.is_empty()
}
fn into_iter(self) -> impl Iterator<Item = (String, lint::Level)> {
self.0.into_iter()
}
}
impl clap::FromArgMatches for RulesArg {
fn from_arg_matches(matches: &ArgMatches) -> Result<Self, Error> {
let mut rules = Vec::new();
for (level, arg_id) in [
(lint::Level::Ignore, "ignore"),
(lint::Level::Warn, "warn"),
(lint::Level::Error, "error"),
] {
let indices = matches.indices_of(arg_id).into_iter().flatten();
let levels = matches.get_many::<String>(arg_id).into_iter().flatten();
rules.extend(
indices
.zip(levels)
.map(|(index, rule)| (index, rule, level)),
);
}
// Sort by their index so that values specified later override earlier ones.
rules.sort_by_key(|(index, _, _)| *index);
Ok(Self(
rules
.into_iter()
.map(|(_, rule, level)| (rule.to_owned(), level))
.collect(),
))
}
fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), Error> {
self.0 = Self::from_arg_matches(matches)?.0;
Ok(())
}
}
impl clap::Args for RulesArg {
fn augment_args(cmd: clap::Command) -> clap::Command {
const HELP_HEADING: &str = "Enabling / disabling rules";
cmd.arg(
clap::Arg::new("error")
.long("error")
.action(ArgAction::Append)
.help("Treat the given rule as having severity 'error'. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
.arg(
clap::Arg::new("warn")
.long("warn")
.action(ArgAction::Append)
.help("Treat the given rule as having severity 'warn'. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
.arg(
clap::Arg::new("ignore")
.long("ignore")
.action(ArgAction::Append)
.help("Disables the rule. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
}
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
Self::augment_args(cmd)
}
}
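A hedged sketch (not from this diff) of the precedence that the index-sorted `(rule, level)` pairs above produce once a consumer folds them into a map: the last occurrence of a rule on the command line wins, so `--error possibly-unresolved-reference ... --ignore possibly-unresolved-reference` ends up ignoring the rule. The helper name and signature below are illustrative assumptions.

use std::collections::HashMap;

fn resolve_precedence(ordered: &[(&str, &str)]) -> HashMap<String, String> {
    let mut resolved = HashMap::new();
    for (rule, level) in ordered {
        // A later CLI occurrence overwrites an earlier one for the same rule.
        resolved.insert((*rule).to_string(), (*level).to_string());
    }
    resolved
}

// For example, resolve_precedence(&[("division-by-zero", "error"), ("division-by-zero", "ignore")])
// maps "division-by-zero" to "ignore".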

View File

@@ -1,28 +1,104 @@
use std::io::{self, stdout, BufWriter, Write};
use std::process::{ExitCode, Termination};
use anyhow::Result;
use std::sync::Mutex;
use crate::args::{Args, CheckCommand, Command};
use crate::logging::setup_tracing;
use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use red_knot_project::metadata::options::Options;
use python_version::PythonVersion;
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::watch;
use red_knot_project::watch::ProjectWatcher;
use red_knot_project::{watch, Db};
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_server::run_server;
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, Severity};
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::diagnostic::Diagnostic;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;
mod args;
use crate::logging::{setup_tracing, Verbosity};
mod logging;
mod python_version;
mod version;
mod verbosity;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
)]
#[command(version)]
struct Args {
#[command(subcommand)]
pub(crate) command: Option<Command>,
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
///
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
#[arg(long, value_name = "PROJECT")]
project: Option<SystemPathBuf>,
/// Path to the virtual environment the project uses.
///
/// If provided, red-knot will use the `site-packages` directory of this virtual environment
/// to resolve type information for the project's third-party dependencies.
#[arg(long, value_name = "PATH")]
venv_path: Option<SystemPathBuf>,
/// Custom directory to use for stdlib typeshed stubs.
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
typeshed: Option<SystemPathBuf>,
/// Additional path to use as a module-resolution source (can be passed multiple times).
#[arg(long, value_name = "PATH")]
extra_search_path: Option<Vec<SystemPathBuf>>,
/// Python version to assume when resolving types.
#[arg(long, value_name = "VERSION", alias = "target-version")]
python_version: Option<PythonVersion>,
#[clap(flatten)]
verbosity: Verbosity,
/// Run in watch mode by re-running whenever files change.
#[arg(long, short = 'W')]
watch: bool,
}
impl Args {
fn to_options(&self, cli_cwd: &SystemPath) -> Options {
Options {
environment: Some(EnvironmentOptions {
python_version: self.python_version.map(Into::into),
venv_path: self
.venv_path
.as_ref()
.map(|venv_path| SystemPath::absolute(venv_path, cli_cwd)),
typeshed: self
.typeshed
.as_ref()
.map(|typeshed| SystemPath::absolute(typeshed, cli_cwd)),
extra_paths: self.extra_search_path.as_ref().map(|extra_search_paths| {
extra_search_paths
.iter()
.map(|path| SystemPath::absolute(path, cli_cwd))
.collect()
}),
..EnvironmentOptions::default()
}),
..Default::default()
}
}
}
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Start the language server
Server,
}
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
@@ -39,15 +115,6 @@ pub fn main() -> ExitStatus {
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
// "failed to read file: subdir/pyproject.toml")
for cause in error.chain() {
// Exit "gracefully" on broken pipe errors.
//
// See: https://github.com/BurntSushi/ripgrep/blob/bf63fe8f258afc09bae6caa48f0ae35eaf115005/crates/core/main.rs#L47C1-L61C14
if let Some(ioerr) = cause.downcast_ref::<io::Error>() {
if ioerr.kind() == io::ErrorKind::BrokenPipe {
return ExitStatus::Success;
}
}
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
}
@@ -56,32 +123,18 @@ pub fn main() -> ExitStatus {
}
fn run() -> anyhow::Result<ExitStatus> {
let args = wild::args_os();
let args = argfile::expand_args_from(args, argfile::parse_fromfile, argfile::PREFIX)
.context("Failed to read CLI arguments from file")?;
let args = Args::parse_from(args);
let args = Args::parse_from(std::env::args());
match args.command {
Command::Server => run_server().map(|()| ExitStatus::Success),
Command::Check(check_args) => run_check(check_args),
Command::Version => version().map(|()| ExitStatus::Success),
if matches!(args.command, Some(Command::Server)) {
return run_server().map(|()| ExitStatus::Success);
}
}
pub(crate) fn version() -> Result<()> {
let mut stdout = BufWriter::new(io::stdout().lock());
let version_info = crate::version::version();
writeln!(stdout, "red knot {}", &version_info)?;
Ok(())
}
fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
let verbosity = args.verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
// The base path to which all CLI arguments are relative to.
let cwd = {
let cli_base_path = {
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
SystemPathBuf::from_path_buf(cwd)
.map_err(|path| {
@@ -92,31 +145,25 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
})?
};
let project_path = args
let cwd = args
.project
.as_ref()
.map(|project| {
if project.as_std_path().is_dir() {
Ok(SystemPath::absolute(project, &cwd))
.map(|cwd| {
if cwd.as_std_path().is_dir() {
Ok(SystemPath::absolute(cwd, &cli_base_path))
} else {
Err(anyhow!(
"Provided project path `{project}` is not a directory"
))
Err(anyhow!("Provided project path `{cwd}` is not a directory"))
}
})
.transpose()?
.unwrap_or_else(|| cwd.clone());
.unwrap_or_else(|| cli_base_path.clone());
let system = OsSystem::new(cwd);
let watch = args.watch;
let exit_zero = args.exit_zero;
let system = OsSystem::new(cwd.clone());
let cli_options = args.to_options(&cwd);
let mut workspace_metadata = ProjectMetadata::discover(system.current_directory(), &system)?;
workspace_metadata.apply_cli_options(cli_options.clone());
let cli_options = args.into_options();
let mut project_metadata = ProjectMetadata::discover(&project_path, &system)?;
project_metadata.apply_cli_options(cli_options.clone());
project_metadata.apply_configuration_files(&system)?;
let mut db = ProjectDatabase::new(project_metadata, system)?;
let mut db = ProjectDatabase::new(workspace_metadata, system)?;
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options);
@@ -130,21 +177,17 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
}
})?;
let exit_status = if watch {
let exit_status = if args.watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)?
main_loop.run(&mut db)
};
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
std::mem::forget(db);
if exit_zero {
Ok(ExitStatus::Success)
} else {
Ok(exit_status)
}
Ok(exit_status)
}
#[derive(Copy, Clone)]
@@ -193,7 +236,7 @@ impl MainLoop {
)
}
fn watch(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
fn watch(mut self, db: &mut ProjectDatabase) -> anyhow::Result<ExitStatus> {
tracing::debug!("Starting watch mode");
let sender = self.sender.clone();
let watcher = watch::directory_watcher(move |event| {
@@ -202,12 +245,12 @@ impl MainLoop {
self.watcher = Some(ProjectWatcher::new(watcher, db));
self.run(db)?;
self.run(db);
Ok(ExitStatus::Success)
}
fn run(mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
fn run(mut self, db: &mut ProjectDatabase) -> ExitStatus {
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
let result = self.main_loop(db);
@@ -217,7 +260,7 @@ impl MainLoop {
result
}
fn main_loop(&mut self, db: &mut ProjectDatabase) -> Result<ExitStatus> {
fn main_loop(&mut self, db: &mut ProjectDatabase) -> ExitStatus {
// Schedule the first check.
tracing::debug!("Starting main loop");
@@ -245,24 +288,11 @@ impl MainLoop {
result,
revision: check_revision,
} => {
let display_config = DisplayDiagnosticConfig::default()
.color(colored::control::SHOULD_COLORIZE.should_colorize());
let min_error_severity =
if db.project().settings(db).terminal().error_on_warning {
Severity::Warning
} else {
Severity::Error
};
let failed = result
.iter()
.any(|diagnostic| diagnostic.severity() >= min_error_severity);
let has_diagnostics = !result.is_empty();
if check_revision == revision {
let mut stdout = stdout().lock();
#[allow(clippy::print_stdout)]
for diagnostic in result {
writeln!(stdout, "{}", diagnostic.display(db, &display_config))?;
println!("{}", diagnostic.display(db));
}
} else {
tracing::debug!(
@@ -271,11 +301,11 @@ impl MainLoop {
}
if self.watcher.is_none() {
return Ok(if failed {
return if has_diagnostics {
ExitStatus::Failure
} else {
ExitStatus::Success
});
};
}
tracing::trace!("Counts after last check:\n{}", countme::get_all());
@@ -295,14 +325,14 @@ impl MainLoop {
// TODO: Don't use Salsa internal APIs
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
let _ = db.zalsa_mut();
return Ok(ExitStatus::Success);
return ExitStatus::Success;
}
}
tracing::debug!("Waiting for next main loop message.");
}
Ok(ExitStatus::Success)
ExitStatus::Success
}
}

View File

@@ -40,7 +40,7 @@ impl std::fmt::Display for PythonVersion {
}
}
impl From<PythonVersion> for ruff_python_ast::PythonVersion {
impl From<PythonVersion> for red_knot_python_semantic::PythonVersion {
fn from(value: PythonVersion) -> Self {
match value {
PythonVersion::Py37 => Self::PY37,
@@ -61,8 +61,8 @@ mod tests {
#[test]
fn same_default_as_python_version() {
assert_eq!(
ruff_python_ast::PythonVersion::from(PythonVersion::default()),
ruff_python_ast::PythonVersion::default()
red_knot_python_semantic::PythonVersion::from(PythonVersion::default()),
red_knot_python_semantic::PythonVersion::default()
);
}
}

View File

@@ -0,0 +1 @@

View File

@@ -1,105 +0,0 @@
//! Code for representing Red Knot's release version number.
use std::fmt;
/// Information about the git repository where Red Knot was built from.
pub(crate) struct CommitInfo {
short_commit_hash: String,
commit_date: String,
commits_since_last_tag: u32,
}
/// Red Knot's version.
pub(crate) struct VersionInfo {
/// Red Knot's version, such as "0.5.1"
version: String,
/// Information about the git commit we may have been built from.
///
/// `None` if not built from a git repo or if retrieval failed.
commit_info: Option<CommitInfo>,
}
impl fmt::Display for VersionInfo {
/// Formatted version information: `<version>[+<commits>] (<commit> <date>)`
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.version)?;
if let Some(ref ci) = self.commit_info {
if ci.commits_since_last_tag > 0 {
write!(f, "+{}", ci.commits_since_last_tag)?;
}
write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
}
Ok(())
}
}
/// Returns information about Red Knot's version.
pub(crate) fn version() -> VersionInfo {
// Environment variables are only read at compile-time
macro_rules! option_env_str {
($name:expr) => {
option_env!($name).map(|s| s.to_string())
};
}
// This version is pulled from Cargo.toml and set by Cargo
let version = option_env_str!("CARGO_PKG_VERSION").unwrap();
// Commit info is pulled from git and set by `build.rs`
let commit_info =
option_env_str!("RED_KNOT_COMMIT_SHORT_HASH").map(|short_commit_hash| CommitInfo {
short_commit_hash,
commit_date: option_env_str!("RED_KNOT_COMMIT_DATE").unwrap(),
commits_since_last_tag: option_env_str!("RED_KNOT_LAST_TAG_DISTANCE")
.as_deref()
.map_or(0, |value| value.parse::<u32>().unwrap_or(0)),
});
VersionInfo {
version,
commit_info,
}
}
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use super::{CommitInfo, VersionInfo};
#[test]
fn version_formatting() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: None,
};
assert_snapshot!(version, @"0.0.0");
}
#[test]
fn version_formatting_with_commit_info() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 0,
}),
};
assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
}
#[test]
fn version_formatting_with_commits_since_last_tag() {
let version = VersionInfo {
version: "0.0.0".to_string(),
commit_info: Some(CommitInfo {
short_commit_hash: "53b0f5d92".to_string(),
commit_date: "2023-10-19".to_string(),
commits_since_last_tag: 24,
}),
};
assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
}
}

View File

@@ -1,903 +1,60 @@
use anyhow::Context;
use insta::internals::SettingsBindDropGuard;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use std::path::{Path, PathBuf};
use std::process::Command;
use tempfile::TempDir;
/// Specifying an option on the CLI should take precedence over the same setting in the
/// project's configuration.
#[test]
fn config_override() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
),
(
"test.py",
r#"
import sys
fn test_config_override() -> anyhow::Result<()> {
let tempdir = TempDir::new()?;
# Access `sys.last_exc` that was only added in Python 3.12
print(sys.last_exc)
"#,
),
])?;
std::fs::write(
tempdir.path().join("pyproject.toml"),
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
)
.context("Failed to write settings")?;
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:unresolved-attribute
--> <temp_dir>/test.py:5:7
|
4 | # Access `sys.last_exc` that was only added in Python 3.12
5 | print(sys.last_exc)
| ^^^^^^^^^^^^ Type `<module 'sys'>` has no attribute `last_exc`
|
std::fs::write(
tempdir.path().join("test.py"),
r#"
import sys
# Access `sys.last_exc` that was only added in Python 3.12
print(sys.last_exc)
"#,
)
.context("Failed to write test.py")?;
----- stderr -----
"###);
assert_cmd_snapshot!(case.command().arg("--python-version").arg("3.12"), @r"
success: true
exit_code: 0
insta::with_settings!({filters => vec![(&*tempdir_filter(&tempdir), "<temp_dir>/")]}, {
assert_cmd_snapshot!(knot().arg("--project").arg(tempdir.path()), @r"
success: false
exit_code: 1
----- stdout -----
error[lint:unresolved-attribute] <temp_dir>/test.py:5:7 Type `<module 'sys'>` has no attribute `last_exc`
----- stderr -----
");
});
assert_cmd_snapshot!(knot().arg("--project").arg(tempdir.path()).arg("--python-version").arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
}
/// Paths specified on the CLI are relative to the current working directory and not the project root.
///
/// We test this by adding an extra search path from the CLI to the libs directory when
/// running the CLI from the child directory (using relative paths).
///
/// Project layout:
/// ```
/// - libs
/// |- utils.py
/// - child
/// | - test.py
/// - pyproject.toml
/// ```
///
/// And the command is run in the `child` directory.
#[test]
fn cli_arguments_are_relative_to_the_current_directory() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
),
(
"libs/utils.py",
r#"
def add(a: int, b: int) -> int:
a + b
"#,
),
(
"child/test.py",
r#"
from utils import add
stat = add(10, 15)
"#,
),
])?;
// Make sure that the CLI fails when the `libs` directory is not in the search path.
assert_cmd_snapshot!(case.command().current_dir(case.root().join("child")), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:unresolved-import
--> <temp_dir>/child/test.py:2:6
|
2 | from utils import add
| ^^^^^ Cannot resolve import `utils`
3 |
4 | stat = add(10, 15)
|
----- stderr -----
"###);
assert_cmd_snapshot!(case.command().current_dir(case.root().join("child")).arg("--extra-search-path").arg("../libs"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
fn knot() -> Command {
Command::new(get_cargo_bin("red_knot"))
}
/// Paths specified in a configuration file are relative to the project root.
///
/// We test this by adding `libs` (as a relative path) to the extra search path in the configuration and running
/// the CLI from a subdirectory.
///
/// Project layout:
/// ```
/// - libs
/// |- utils.py
/// - child
/// | - test.py
/// - pyproject.toml
/// ```
#[test]
fn paths_in_configuration_files_are_relative_to_the_project_root() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
extra-paths = ["libs"]
"#,
),
(
"libs/utils.py",
r#"
def add(a: int, b: int) -> int:
a + b
"#,
),
(
"child/test.py",
r#"
from utils import add
stat = add(10, 15)
"#,
),
])?;
assert_cmd_snapshot!(case.command().current_dir(case.root().join("child")), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
}
/// The rule severity can be changed in the configuration file
#[test]
fn configuration_rule_severity() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:division-by-zero
--> <temp_dir>/test.py:2:5
|
2 | y = 4 / 0
| ^^^^^ Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
warning: lint:possibly-unresolved-reference
--> <temp_dir>/test.py:7:7
|
5 | x = a
6 |
7 | print(x) # possibly-unresolved-reference
| - Name `x` used when possibly not defined
|
----- stderr -----
"###);
case.write_file(
"pyproject.toml",
r#"
[tool.knot.rules]
division-by-zero = "warn" # demote to warn
possibly-unresolved-reference = "ignore"
"#,
)?;
assert_cmd_snapshot!(case.command(), @r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:division-by-zero
--> <temp_dir>/test.py:2:5
|
2 | y = 4 / 0
| ----- Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
----- stderr -----
"###);
Ok(())
}
/// The rule severity can be changed using `--ignore`, `--warn`, and `--error`
#[test]
fn cli_rule_severity() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
import does_not_exit
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:unresolved-import
--> <temp_dir>/test.py:2:8
|
2 | import does_not_exit
| ^^^^^^^^^^^^^ Cannot resolve import `does_not_exit`
3 |
4 | y = 4 / 0
|
error: lint:division-by-zero
--> <temp_dir>/test.py:4:5
|
2 | import does_not_exit
3 |
4 | y = 4 / 0
| ^^^^^ Cannot divide object of type `Literal[4]` by zero
5 |
6 | for a in range(0, y):
|
warning: lint:possibly-unresolved-reference
--> <temp_dir>/test.py:9:7
|
7 | x = a
8 |
9 | print(x) # possibly-unresolved-reference
| - Name `x` used when possibly not defined
|
----- stderr -----
"###);
assert_cmd_snapshot!(
case
.command()
.arg("--ignore")
.arg("possibly-unresolved-reference")
.arg("--warn")
.arg("division-by-zero")
.arg("--warn")
.arg("unresolved-import"),
@r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:unresolved-import
--> <temp_dir>/test.py:2:8
|
2 | import does_not_exit
| ------------- Cannot resolve import `does_not_exit`
3 |
4 | y = 4 / 0
|
warning: lint:division-by-zero
--> <temp_dir>/test.py:4:5
|
2 | import does_not_exit
3 |
4 | y = 4 / 0
| ----- Cannot divide object of type `Literal[4]` by zero
5 |
6 | for a in range(0, y):
|
----- stderr -----
"###
);
Ok(())
}
/// The rule severity can be changed using `--ignore`, `--warn`, and `--error`, and
/// values specified last override previous severities.
#[test]
fn cli_rule_severity_precedence() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
error: lint:division-by-zero
--> <temp_dir>/test.py:2:5
|
2 | y = 4 / 0
| ^^^^^ Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
warning: lint:possibly-unresolved-reference
--> <temp_dir>/test.py:7:7
|
5 | x = a
6 |
7 | print(x) # possibly-unresolved-reference
| - Name `x` used when possibly not defined
|
----- stderr -----
"###);
assert_cmd_snapshot!(
case
.command()
.arg("--error")
.arg("possibly-unresolved-reference")
.arg("--warn")
.arg("division-by-zero")
// Override the error severity with `--ignore`
.arg("--ignore")
.arg("possibly-unresolved-reference"),
@r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:division-by-zero
--> <temp_dir>/test.py:2:5
|
2 | y = 4 / 0
| ----- Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
----- stderr -----
"###
);
Ok(())
}
/// Red Knot warns about unknown rules specified in a configuration file
#[test]
fn configuration_unknown_rules() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.rules]
division-by-zer = "warn" # incorrect rule name
"#,
),
("test.py", "print(10)"),
])?;
assert_cmd_snapshot!(case.command(), @r###"
success: true
exit_code: 0
----- stdout -----
warning: unknown-rule
--> <temp_dir>/pyproject.toml:3:1
|
2 | [tool.knot.rules]
3 | division-by-zer = "warn" # incorrect rule name
| --------------- Unknown lint rule `division-by-zer`
|
----- stderr -----
"###);
Ok(())
}
/// Red Knot warns about unknown rules specified in a CLI argument
#[test]
fn cli_unknown_rules() -> anyhow::Result<()> {
let case = TestCase::with_file("test.py", "print(10)")?;
assert_cmd_snapshot!(case.command().arg("--ignore").arg("division-by-zer"), @r###"
success: true
exit_code: 0
----- stdout -----
warning: unknown-rule: Unknown lint rule `division-by-zer`
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_only_warnings() -> anyhow::Result<()> {
let case = TestCase::with_file("test.py", r"print(x) # [unresolved-reference]")?;
assert_cmd_snapshot!(case.command(), @r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:1:7
|
1 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_only_info() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
from typing_extensions import reveal_type
reveal_type(1)
"#,
)?;
assert_cmd_snapshot!(case.command(), @r###"
success: true
exit_code: 0
----- stdout -----
info: revealed-type
--> <temp_dir>/test.py:3:1
|
2 | from typing_extensions import reveal_type
3 | reveal_type(1)
| -------------- info: Revealed type is `Literal[1]`
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_only_info_and_error_on_warning_is_true() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
from typing_extensions import reveal_type
reveal_type(1)
"#,
)?;
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
success: true
exit_code: 0
----- stdout -----
info: revealed-type
--> <temp_dir>/test.py:3:1
|
2 | from typing_extensions import reveal_type
3 | reveal_type(1)
| -------------- info: Revealed type is `Literal[1]`
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_no_errors_but_error_on_warning_is_true() -> anyhow::Result<()> {
let case = TestCase::with_file("test.py", r"print(x) # [unresolved-reference]")?;
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
success: false
exit_code: 1
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:1:7
|
1 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_no_errors_but_error_on_warning_is_enabled_in_configuration() -> anyhow::Result<()> {
let case = TestCase::with_files([
("test.py", r"print(x) # [unresolved-reference]"),
(
"knot.toml",
r#"
[terminal]
error-on-warning = true
"#,
),
])?;
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:1:7
|
1 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_both_warnings_and_errors() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
print(x) # [unresolved-reference]
print(4[1]) # [non-subscriptable]
"#,
)?;
assert_cmd_snapshot!(case.command(), @r###"
success: false
exit_code: 1
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:2:7
|
2 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
3 | print(4[1]) # [non-subscriptable]
|
error: lint:non-subscriptable
--> <temp_dir>/test.py:3:7
|
2 | print(x) # [unresolved-reference]
3 | print(4[1]) # [non-subscriptable]
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_both_warnings_and_errors_and_error_on_warning_is_true() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r###"
print(x) # [unresolved-reference]
print(4[1]) # [non-subscriptable]
"###,
)?;
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
success: false
exit_code: 1
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:2:7
|
2 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
3 | print(4[1]) # [non-subscriptable]
|
error: lint:non-subscriptable
--> <temp_dir>/test.py:3:7
|
2 | print(x) # [unresolved-reference]
3 | print(4[1]) # [non-subscriptable]
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn exit_code_exit_zero_is_true() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
print(x) # [unresolved-reference]
print(4[1]) # [non-subscriptable]
"#,
)?;
assert_cmd_snapshot!(case.command().arg("--exit-zero"), @r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:unresolved-reference
--> <temp_dir>/test.py:2:7
|
2 | print(x) # [unresolved-reference]
| - Name `x` used when not defined
3 | print(4[1]) # [non-subscriptable]
|
error: lint:non-subscriptable
--> <temp_dir>/test.py:3:7
|
2 | print(x) # [unresolved-reference]
3 | print(4[1]) # [non-subscriptable]
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
----- stderr -----
"###);
Ok(())
}
#[test]
fn user_configuration() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"project/knot.toml",
r#"
[rules]
division-by-zero = "warn"
"#,
),
(
"project/main.py",
r#"
y = 4 / 0
for a in range(0, y):
x = a
print(x)
"#,
),
])?;
let config_directory = case.root().join("home/.config");
let config_env_var = if cfg!(windows) {
"APPDATA"
} else {
"XDG_CONFIG_HOME"
};
assert_cmd_snapshot!(
case.command().current_dir(case.root().join("project")).env(config_env_var, config_directory.as_os_str()),
@r###"
success: true
exit_code: 0
----- stdout -----
warning: lint:division-by-zero
--> <temp_dir>/project/main.py:2:5
|
2 | y = 4 / 0
| ----- Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
warning: lint:possibly-unresolved-reference
--> <temp_dir>/project/main.py:7:7
|
5 | x = a
6 |
7 | print(x)
| - Name `x` used when possibly not defined
|
----- stderr -----
"###
);
// The user-level configuration promotes `possibly-unresolved-reference` to an error.
// Changing the level for `division-by-zero` has no effect, because the project-level configuration
// has higher precedence.
case.write_file(
config_directory.join("knot/knot.toml"),
r#"
[rules]
division-by-zero = "error"
possibly-unresolved-reference = "error"
"#,
)?;
assert_cmd_snapshot!(
case.command().current_dir(case.root().join("project")).env(config_env_var, config_directory.as_os_str()),
@r###"
success: false
exit_code: 1
----- stdout -----
warning: lint:division-by-zero
--> <temp_dir>/project/main.py:2:5
|
2 | y = 4 / 0
| ----- Cannot divide object of type `Literal[4]` by zero
3 |
4 | for a in range(0, y):
|
error: lint:possibly-unresolved-reference
--> <temp_dir>/project/main.py:7:7
|
5 | x = a
6 |
7 | print(x)
| ^ Name `x` used when possibly not defined
|
----- stderr -----
"###
);
Ok(())
}
struct TestCase {
_temp_dir: TempDir,
_settings_scope: SettingsBindDropGuard,
project_dir: PathBuf,
}
impl TestCase {
fn new() -> anyhow::Result<Self> {
let temp_dir = TempDir::new()?;
// Canonicalize the tempdir path because macOS uses symlinks for tempdirs
// and that doesn't play well with our snapshot filtering.
let project_dir = temp_dir
.path()
.canonicalize()
.context("Failed to canonicalize project path")?;
let mut settings = insta::Settings::clone_current();
settings.add_filter(&tempdir_filter(&project_dir), "<temp_dir>/");
settings.add_filter(r#"\\(\w\w|\s|\.|")"#, "/$1");
let settings_scope = settings.bind_to_scope();
Ok(Self {
project_dir,
_temp_dir: temp_dir,
_settings_scope: settings_scope,
})
}
fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
let case = Self::new()?;
case.write_files(files)?;
Ok(case)
}
fn with_file(path: impl AsRef<Path>, content: &str) -> anyhow::Result<Self> {
let case = Self::new()?;
case.write_file(path, content)?;
Ok(case)
}
fn write_files<'a>(
&self,
files: impl IntoIterator<Item = (&'a str, &'a str)>,
) -> anyhow::Result<()> {
for (path, content) in files {
self.write_file(path, content)?;
}
Ok(())
}
fn write_file(&self, path: impl AsRef<Path>, content: &str) -> anyhow::Result<()> {
let path = path.as_ref();
let path = self.project_dir.join(path);
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)
.with_context(|| format!("Failed to create directory `{}`", parent.display()))?;
}
std::fs::write(&path, &*ruff_python_trivia::textwrap::dedent(content))
.with_context(|| format!("Failed to write file `{path}`", path = path.display()))?;
Ok(())
}
fn root(&self) -> &Path {
&self.project_dir
}
fn command(&self) -> Command {
let mut command = Command::new(get_cargo_bin("red_knot"));
command.current_dir(&self.project_dir).arg("check");
command
}
}
fn tempdir_filter(path: &Path) -> String {
format!(r"{}\\?/?", regex::escape(path.to_str().unwrap()))
fn tempdir_filter(tempdir: &TempDir) -> String {
format!(r"{}\\?/?", regex::escape(tempdir.path().to_str().unwrap()))
}

View File

@@ -6,17 +6,13 @@ use std::time::{Duration, Instant};
use anyhow::{anyhow, Context};
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::metadata::pyproject::{PyProject, Tool};
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
use red_knot_project::watch::{directory_watcher, ChangeEvent, ProjectWatcher};
use red_knot_project::{Db, ProjectDatabase, ProjectMetadata};
use red_knot_python_semantic::{resolve_module, ModuleName, PythonPlatform};
use red_knot_python_semantic::{resolve_module, ModuleName, PythonPlatform, PythonVersion};
use ruff_db::files::{system_path_to_file, File, FileError};
use ruff_db::source::source_text;
use ruff_db::system::{
OsSystem, System, SystemPath, SystemPathBuf, UserConfigDirectoryOverrideGuard,
};
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::Upcast;
use ruff_python_ast::PythonVersion;
struct TestCase {
db: ProjectDatabase,
@@ -50,7 +46,7 @@ impl TestCase {
#[track_caller]
fn panic_with_formatted_events(events: Vec<ChangeEvent>) -> Vec<ChangeEvent> {
panic!(
"Didn't observe the expected event. The following events occurred:\n{}",
"Didn't observe expected change:\n{}",
events
.into_iter()
.map(|event| format!(" - {event:?}"))
@@ -223,44 +219,17 @@ where
}
trait SetupFiles {
fn setup(self, context: &SetupContext) -> anyhow::Result<()>;
}
struct SetupContext<'a> {
system: &'a OsSystem,
root_path: &'a SystemPath,
}
impl<'a> SetupContext<'a> {
fn system(&self) -> &'a OsSystem {
self.system
}
fn join_project_path(&self, relative: impl AsRef<SystemPath>) -> SystemPathBuf {
self.project_path().join(relative)
}
fn project_path(&self) -> &SystemPath {
self.system.current_directory()
}
fn root_path(&self) -> &'a SystemPath {
self.root_path
}
fn join_root_path(&self, relative: impl AsRef<SystemPath>) -> SystemPathBuf {
self.root_path().join(relative)
}
fn setup(self, root_path: &SystemPath, project_path: &SystemPath) -> anyhow::Result<()>;
}
impl<const N: usize, P> SetupFiles for [(P, &'static str); N]
where
P: AsRef<SystemPath>,
{
fn setup(self, context: &SetupContext) -> anyhow::Result<()> {
fn setup(self, _root_path: &SystemPath, project_path: &SystemPath) -> anyhow::Result<()> {
for (relative_path, content) in self {
let relative_path = relative_path.as_ref();
let absolute_path = context.join_project_path(relative_path);
let absolute_path = project_path.join(relative_path);
if let Some(parent) = absolute_path.parent() {
std::fs::create_dir_all(parent).with_context(|| {
format!("Failed to create parent directory for file `{relative_path}`")
@@ -280,10 +249,10 @@ where
impl<F> SetupFiles for F
where
F: FnOnce(&SetupContext) -> anyhow::Result<()>,
F: FnOnce(&SystemPath, &SystemPath) -> anyhow::Result<()>,
{
fn setup(self, context: &SetupContext) -> anyhow::Result<()> {
self(context)
fn setup(self, root_path: &SystemPath, project_path: &SystemPath) -> anyhow::Result<()> {
self(root_path, project_path)
}
}
@@ -291,12 +260,13 @@ fn setup<F>(setup_files: F) -> anyhow::Result<TestCase>
where
F: SetupFiles,
{
setup_with_options(setup_files, |_context| None)
setup_with_options(setup_files, |_root, _project_path| None)
}
// TODO: Replace with configuration?
fn setup_with_options<F>(
setup_files: F,
create_options: impl FnOnce(&SetupContext) -> Option<Options>,
create_options: impl FnOnce(&SystemPath, &SystemPath) -> Option<Options>,
) -> anyhow::Result<TestCase>
where
F: SetupFiles,
@@ -324,17 +294,13 @@ where
std::fs::create_dir_all(project_path.as_std_path())
.with_context(|| format!("Failed to create project directory `{project_path}`"))?;
let system = OsSystem::new(&project_path);
let setup_context = SetupContext {
system: &system,
root_path: &root_path,
};
setup_files
.setup(&setup_context)
.setup(&root_path, &project_path)
.context("Failed to setup test files")?;
if let Some(options) = create_options(&setup_context) {
let system = OsSystem::new(&project_path);
if let Some(options) = create_options(&root_path, &project_path) {
std::fs::write(
project_path.join("pyproject.toml").as_std_path(),
toml::to_string(&PyProject {
@@ -348,16 +314,14 @@ where
.context("Failed to write configuration")?;
}
let mut project = ProjectMetadata::discover(&project_path, &system)?;
project.apply_configuration_files(&system)?;
let project = ProjectMetadata::discover(&project_path, &system)?;
let program_settings = project.to_program_settings(&system);
for path in program_settings
.search_paths
.extra_paths
.iter()
.chain(program_settings.search_paths.custom_typeshed.as_ref())
.chain(program_settings.search_paths.typeshed.as_ref())
{
std::fs::create_dir_all(path.as_std_path())
.with_context(|| format!("Failed to create search path `{path}`"))?;
@@ -462,41 +426,6 @@ fn new_ignored_file() -> anyhow::Result<()> {
Ok(())
}
#[test]
fn new_non_project_file() -> anyhow::Result<()> {
let mut case = setup_with_options([("bar.py", "")], |context| {
Some(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli(
context.join_root_path("site_packages"),
)]),
..EnvironmentOptions::default()
}),
..Options::default()
})
})?;
let bar_path = case.project_path("bar.py");
let bar_file = case.system_file(&bar_path).unwrap();
assert_eq!(&case.collect_project_files(), &[bar_file]);
// Add a file to site packages
let black_path = case.root_path().join("site_packages/black.py");
std::fs::write(black_path.as_std_path(), "print('Hello')")?;
let changes = case.stop_watch(event_for_file("black.py"));
case.apply_changes(changes);
assert!(case.system_file(&black_path).is_ok());
// The file should not have been added to the project files
assert_eq!(&case.collect_project_files(), &[bar_file]);
Ok(())
}
#[test]
fn changed_file() -> anyhow::Result<()> {
let foo_source = "print('Hello, world!')";
@@ -859,12 +788,10 @@ fn directory_deleted() -> anyhow::Result<()> {
#[test]
fn search_path() -> anyhow::Result<()> {
let mut case = setup_with_options([("bar.py", "import sub.a")], |context| {
let mut case = setup_with_options([("bar.py", "import sub.a")], |root_path, _project_path| {
Some(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli(
context.join_root_path("site_packages"),
)]),
extra_paths: Some(vec![root_path.join("site_packages")]),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -905,7 +832,7 @@ fn add_search_path() -> anyhow::Result<()> {
// Register site-packages as a search path.
case.update_options(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli("site_packages")]),
extra_paths: Some(vec![site_packages.clone()]),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -925,12 +852,10 @@ fn add_search_path() -> anyhow::Result<()> {
#[test]
fn remove_search_path() -> anyhow::Result<()> {
let mut case = setup_with_options([("bar.py", "import sub.a")], |context| {
let mut case = setup_with_options([("bar.py", "import sub.a")], |root_path, _project_path| {
Some(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli(
context.join_root_path("site_packages"),
)]),
extra_paths: Some(vec![root_path.join("site_packages")]),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -968,13 +893,11 @@ import os
print(sys.last_exc, os.getegid())
"#,
)],
|_context| {
|_root_path, _project_path| {
Some(Options {
environment: Some(EnvironmentOptions {
python_version: Some(RangedValue::cli(PythonVersion::PY311)),
python_platform: Some(RangedValue::cli(PythonPlatform::Identifier(
"win32".to_string(),
))),
python_version: Some(PythonVersion::PY311),
python_platform: Some(PythonPlatform::Identifier("win32".to_string())),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -997,10 +920,8 @@ print(sys.last_exc, os.getegid())
// Change the python version
case.update_options(Options {
environment: Some(EnvironmentOptions {
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
python_platform: Some(RangedValue::cli(PythonPlatform::Identifier(
"linux".to_string(),
))),
python_version: Some(PythonVersion::PY312),
python_platform: Some(PythonPlatform::Identifier("linux".to_string())),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -1016,31 +937,21 @@ print(sys.last_exc, os.getegid())
#[test]
fn changed_versions_file() -> anyhow::Result<()> {
let mut case = setup_with_options(
|context: &SetupContext| {
|root_path: &SystemPath, project_path: &SystemPath| {
std::fs::write(project_path.join("bar.py").as_std_path(), "import sub.a")?;
std::fs::create_dir_all(root_path.join("typeshed/stdlib").as_std_path())?;
std::fs::write(root_path.join("typeshed/stdlib/VERSIONS").as_std_path(), "")?;
std::fs::write(
context.join_project_path("bar.py").as_std_path(),
"import sub.a",
)?;
std::fs::create_dir_all(context.join_root_path("typeshed/stdlib").as_std_path())?;
std::fs::write(
context
.join_root_path("typeshed/stdlib/VERSIONS")
.as_std_path(),
"",
)?;
std::fs::write(
context
.join_root_path("typeshed/stdlib/os.pyi")
.as_std_path(),
root_path.join("typeshed/stdlib/os.pyi").as_std_path(),
"# not important",
)?;
Ok(())
},
|context| {
|root_path, _project_path| {
Some(Options {
environment: Some(EnvironmentOptions {
typeshed: Some(RelativePathBuf::cli(context.join_root_path("typeshed"))),
typeshed: Some(root_path.join("typeshed")),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -1091,12 +1002,12 @@ fn changed_versions_file() -> anyhow::Result<()> {
/// we're seeing is that Windows only emits a single event, similar to Linux.
#[test]
fn hard_links_in_project() -> anyhow::Result<()> {
let mut case = setup(|context: &SetupContext| {
let foo_path = context.join_project_path("foo.py");
let mut case = setup(|_root: &SystemPath, project: &SystemPath| {
let foo_path = project.join("foo.py");
std::fs::write(foo_path.as_std_path(), "print('Version 1')")?;
// Create a hardlink to `foo`
let bar_path = context.join_project_path("bar.py");
let bar_path = project.join("bar.py");
std::fs::hard_link(foo_path.as_std_path(), bar_path.as_std_path())
.context("Failed to create hard link from foo.py -> bar.py")?;
@@ -1110,7 +1021,6 @@ fn hard_links_in_project() -> anyhow::Result<()> {
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')");
assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 1')");
assert_eq!(case.collect_project_files(), &[bar, foo]);
// Write to the hard link target.
update_file(foo_path, "print('Version 2')").context("Failed to update foo.py")?;
@@ -1163,12 +1073,12 @@ fn hard_links_in_project() -> anyhow::Result<()> {
ignore = "windows doesn't support observing changes to hard linked files."
)]
fn hard_links_to_target_outside_project() -> anyhow::Result<()> {
let mut case = setup(|context: &SetupContext| {
let foo_path = context.join_root_path("foo.py");
let mut case = setup(|root: &SystemPath, project: &SystemPath| {
let foo_path = root.join("foo.py");
std::fs::write(foo_path.as_std_path(), "print('Version 1')")?;
// Create a hardlink to `foo`
let bar_path = context.join_project_path("bar.py");
let bar_path = project.join("bar.py");
std::fs::hard_link(foo_path.as_std_path(), bar_path.as_std_path())
.context("Failed to create hard link from foo.py -> bar.py")?;
@@ -1271,9 +1181,9 @@ mod unix {
ignore = "FSEvents doesn't emit change events for symlinked directories outside of the watched paths."
)]
fn symlink_target_outside_watched_paths() -> anyhow::Result<()> {
let mut case = setup(|context: &SetupContext| {
let mut case = setup(|root: &SystemPath, project: &SystemPath| {
// Set up the symlink target.
let link_target = context.join_root_path("bar");
let link_target = root.join("bar");
std::fs::create_dir_all(link_target.as_std_path())
.context("Failed to create link target directory")?;
let baz_original = link_target.join("baz.py");
@@ -1281,7 +1191,7 @@ mod unix {
.context("Failed to write link target file")?;
// Create a symlink inside the project
let bar = context.join_project_path("bar");
let bar = project.join("bar");
std::os::unix::fs::symlink(link_target.as_std_path(), bar.as_std_path())
.context("Failed to create symlink to bar package")?;
@@ -1352,9 +1262,9 @@ mod unix {
/// ```
#[test]
fn symlink_inside_project() -> anyhow::Result<()> {
let mut case = setup(|context: &SetupContext| {
let mut case = setup(|_root: &SystemPath, project: &SystemPath| {
// Set up the symlink target.
let link_target = context.join_project_path("patched/bar");
let link_target = project.join("patched/bar");
std::fs::create_dir_all(link_target.as_std_path())
.context("Failed to create link target directory")?;
let baz_original = link_target.join("baz.py");
@@ -1362,7 +1272,7 @@ mod unix {
.context("Failed to write link target file")?;
// Create a symlink inside site-packages
let bar_in_project = context.join_project_path("bar");
let bar_in_project = project.join("bar");
std::os::unix::fs::symlink(link_target.as_std_path(), bar_in_project.as_std_path())
.context("Failed to create symlink to bar package")?;
@@ -1390,8 +1300,6 @@ mod unix {
);
assert_eq!(baz.file().path(case.db()).as_system_path(), Some(&*bar_baz));
assert_eq!(case.collect_project_files(), &[patched_bar_baz_file]);
// Write to the symlink target.
update_file(&patched_bar_baz, "def baz(): print('Version 2')")
.context("Failed to update bar/baz.py")?;
@@ -1427,7 +1335,6 @@ mod unix {
bar_baz_text = bar_baz_text.as_str()
);
assert_eq!(case.collect_project_files(), &[patched_bar_baz_file]);
Ok(())
}
@@ -1446,9 +1353,9 @@ mod unix {
#[test]
fn symlinked_module_search_path() -> anyhow::Result<()> {
let mut case = setup_with_options(
|context: &SetupContext| {
|root: &SystemPath, project: &SystemPath| {
// Set up the symlink target.
let site_packages = context.join_root_path("site-packages");
let site_packages = root.join("site-packages");
let bar = site_packages.join("bar");
std::fs::create_dir_all(bar.as_std_path())
.context("Failed to create bar directory")?;
@@ -1457,8 +1364,7 @@ mod unix {
.context("Failed to write baz.py")?;
// Symlink the site packages in the venv to the global site packages
let venv_site_packages =
context.join_project_path(".venv/lib/python3.12/site-packages");
let venv_site_packages = project.join(".venv/lib/python3.12/site-packages");
std::fs::create_dir_all(venv_site_packages.parent().unwrap())
.context("Failed to create .venv directory")?;
std::os::unix::fs::symlink(
@@ -1469,13 +1375,11 @@ mod unix {
Ok(())
},
|_context| {
|_root, project| {
Some(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli(
".venv/lib/python3.12/site-packages",
)]),
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
extra_paths: Some(vec![project.join(".venv/lib/python3.12/site-packages")]),
python_version: Some(PythonVersion::PY312),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -1508,8 +1412,6 @@ mod unix {
Some(&*baz_original)
);
assert_eq!(case.collect_project_files(), &[]);
// Write to the symlink target.
update_file(&baz_original, "def baz(): print('Version 2')")
.context("Failed to update bar/baz.py")?;
@@ -1535,17 +1437,15 @@ mod unix {
"def baz(): print('Version 2')"
);
assert_eq!(case.collect_project_files(), &[]);
Ok(())
}
}
#[test]
fn nested_projects_delete_root() -> anyhow::Result<()> {
let mut case = setup(|context: &SetupContext| {
let mut case = setup(|root: &SystemPath, project_root: &SystemPath| {
std::fs::write(
context.join_project_path("pyproject.toml").as_std_path(),
project_root.join("pyproject.toml").as_std_path(),
r#"
[project]
name = "inner"
@@ -1555,7 +1455,7 @@ fn nested_projects_delete_root() -> anyhow::Result<()> {
)?;
std::fs::write(
context.join_root_path("pyproject.toml").as_std_path(),
root.join("pyproject.toml").as_std_path(),
r#"
[project]
name = "outer"
@@ -1580,79 +1480,3 @@ fn nested_projects_delete_root() -> anyhow::Result<()> {
Ok(())
}
#[test]
fn changes_to_user_configuration() -> anyhow::Result<()> {
let mut _config_dir_override: Option<UserConfigDirectoryOverrideGuard> = None;
let mut case = setup(|context: &SetupContext| {
std::fs::write(
context.join_project_path("pyproject.toml").as_std_path(),
r#"
[project]
name = "test"
"#,
)?;
std::fs::write(
context.join_project_path("foo.py").as_std_path(),
"a = 10 / 0",
)?;
let config_directory = context.join_root_path("home/.config");
std::fs::create_dir_all(config_directory.join("knot").as_std_path())?;
std::fs::write(
config_directory.join("knot/knot.toml").as_std_path(),
r#"
[rules]
division-by-zero = "ignore"
"#,
)?;
_config_dir_override = Some(
context
.system()
.with_user_config_directory(Some(config_directory)),
);
Ok(())
})?;
let foo = case
.system_file(case.project_path("foo.py"))
.expect("foo.py to exist");
let diagnostics = case
.db()
.check_file(foo)
.context("Failed to check project.")?;
assert!(
diagnostics.is_empty(),
"Expected no diagnostics but got: {diagnostics:#?}"
);
// Enable division-by-zero in the user configuration with warning severity
update_file(
case.root_path().join("home/.config/knot/knot.toml"),
r#"
[rules]
division-by-zero = "warn"
"#,
)?;
let changes = case.stop_watch(event_for_file("knot.toml"));
case.apply_changes(changes);
let diagnostics = case
.db()
.check_file(foo)
.context("Failed to check project.")?;
assert!(
diagnostics.len() == 1,
"Expected exactly one diagnostic but got: {diagnostics:#?}"
);
Ok(())
}

View File

@@ -13,7 +13,7 @@ license.workspace = true
[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["cache", "serde"] }
ruff_db = { workspace = true, features = ["os", "cache", "serde"] }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true, features = ["serde"] }
ruff_text_size = { workspace = true }
@@ -24,11 +24,10 @@ anyhow = { workspace = true }
crossbeam = { workspace = true }
glob = { workspace = true }
notify = { workspace = true }
pep440_rs = { workspace = true, features = ["version-ranges"] }
pep440_rs = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
@@ -41,9 +40,8 @@ insta = { workspace = true, features = ["redactions", "ron"] }
[features]
default = ["zstd"]
deflate = ["red_knot_vendored/deflate"]
schemars = ["dep:schemars", "ruff_db/schemars", "red_knot_python_semantic/schemars"]
zstd = ["red_knot_vendored/zstd"]
deflate = ["red_knot_vendored/deflate"]
[lints]
workspace = true

View File

@@ -1,8 +1,7 @@
use std::{collections::HashMap, hash::BuildHasher};
use red_knot_python_semantic::{PythonPath, PythonPlatform};
use red_knot_python_semantic::{PythonPlatform, PythonVersion, SitePackages};
use ruff_db::system::SystemPathBuf;
use ruff_python_ast::PythonVersion;
/// Combine two values, preferring the values in `self`.
///
@@ -128,7 +127,7 @@ macro_rules! impl_noop_combine {
impl_noop_combine!(SystemPathBuf);
impl_noop_combine!(PythonPlatform);
impl_noop_combine!(PythonPath);
impl_noop_combine!(SitePackages);
impl_noop_combine!(PythonVersion);
// std types
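As an illustrative aside (an assumption about the `Combine` semantics described above, where the values in `self` are preferred), the `Option` case reduces to `Option::or`:

fn combine_option<T>(this: Option<T>, other: Option<T>) -> Option<T> {
    // "Preferring the values in `self`": keep `this` when it is `Some`,
    // otherwise fall back to `other`.
    this.or(other)
}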

View File

@@ -2,7 +2,7 @@ use std::panic::RefUnwindSafe;
use std::sync::Arc;
use crate::DEFAULT_LINT_REGISTRY;
use crate::{Project, ProjectMetadata};
use crate::{check_file, Project, ProjectMetadata};
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
use red_knot_python_semantic::{Db as SemanticDb, Program};
use ruff_db::diagnostic::Diagnostic;
@@ -27,6 +27,7 @@ pub struct ProjectDatabase {
storage: salsa::Storage<ProjectDatabase>,
files: Files,
system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
rule_selection: Arc<RuleSelection>,
}
impl ProjectDatabase {
@@ -34,11 +35,14 @@ impl ProjectDatabase {
where
S: System + 'static + Send + Sync + RefUnwindSafe,
{
let rule_selection = RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY);
let mut db = Self {
project: None,
storage: salsa::Storage::default(),
files: Files::default(),
system: Arc::new(system),
rule_selection: Arc::new(rule_selection),
};
// TODO: Use the `program_settings` to compute the key for the database's persistent
@@ -62,7 +66,7 @@ impl ProjectDatabase {
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();
self.with_db(|db| self.project().check_file(db, file))
self.with_db(|db| check_file(db, file))
}
/// Returns a mutable reference to the system.
@@ -114,8 +118,8 @@ impl SemanticDb for ProjectDatabase {
project.is_file_open(self, file)
}
fn rule_selection(&self) -> Arc<RuleSelection> {
self.project().rules(self)
fn rule_selection(&self) -> &RuleSelection {
&self.rule_selection
}
fn lint_registry(&self) -> &LintRegistry {
@@ -186,6 +190,7 @@ pub(crate) mod tests {
files: Files,
system: TestSystem,
vendored: VendoredFileSystem,
rule_selection: RuleSelection,
project: Option<Project>,
}
@@ -197,6 +202,7 @@ pub(crate) mod tests {
vendored: red_knot_vendored::file_system().clone(),
files: Files::default(),
events: Arc::default(),
rule_selection: RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY),
project: None,
};
@@ -268,8 +274,8 @@ pub(crate) mod tests {
!file.path(self).is_vendored_path()
}
fn rule_selection(&self) -> Arc<RuleSelection> {
self.project().rules(self)
fn rule_selection(&self) -> &RuleSelection {
&self.rule_selection
}
fn lint_registry(&self) -> &LintRegistry {

View File

@@ -8,7 +8,6 @@ use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::SystemPath;
use ruff_db::Db as _;
use ruff_python_ast::PySourceType;
use rustc_hash::FxHashSet;
impl ProjectDatabase {
@@ -48,7 +47,7 @@ impl ProjectDatabase {
if let Some(path) = change.system_path() {
if matches!(
path.file_name(),
Some(".gitignore" | ".ignore" | "knot.toml" | "pyproject.toml")
Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
) {
// Changes to ignore files or settings can change the project structure or add/remove files.
project_changed = true;
@@ -145,12 +144,6 @@ impl ProjectDatabase {
metadata.apply_cli_options(cli_options.clone());
}
if let Err(error) = metadata.apply_configuration_files(self.system()) {
tracing::error!(
"Failed to apply configuration files, continuing without applying them: {error}"
);
}
let program_settings = metadata.to_program_settings(self.system());
let program = Program::get(self);
@@ -208,17 +201,9 @@ impl ProjectDatabase {
return WalkState::Continue;
}
if entry.path().starts_with(&project_path)
&& entry
.path()
.extension()
.and_then(PySourceType::try_from_extension)
.is_some()
{
let mut paths = added_paths.lock().unwrap();
let mut paths = added_paths.lock().unwrap();
paths.push(entry.into_path());
}
paths.push(entry.into_path());
WalkState::Continue
})

View File

@@ -1,26 +1,26 @@
#![allow(clippy::ref_option)]
use crate::metadata::options::OptionDiagnostic;
pub use db::{Db, ProjectDatabase};
use files::{Index, Indexed, IndexedFiles};
use metadata::settings::Settings;
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder, RuleSelection};
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder};
use red_knot_python_semantic::register_lints;
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity, Span};
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::source::{source_text, SourceTextError};
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::{FileType, SystemPath};
use ruff_python_ast::PySourceType;
use ruff_text_size::TextRange;
use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::Durability;
use salsa::Setter;
use std::borrow::Cow;
use std::sync::Arc;
pub use db::{Db, ProjectDatabase};
use files::{Index, Indexed, IndexedFiles};
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
pub mod combine;
mod db;
@@ -66,22 +66,11 @@ pub struct Project {
/// The metadata describing the project, including the unresolved options.
#[return_ref]
pub metadata: ProjectMetadata,
/// The resolved project settings.
#[return_ref]
pub settings: Settings,
/// Diagnostics that were generated when resolving the project settings.
#[return_ref]
settings_diagnostics: Vec<OptionDiagnostic>,
}
#[salsa::tracked]
impl Project {
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
let (settings, settings_diagnostics) = metadata.options().to_settings(db);
Project::builder(metadata, settings, settings_diagnostics)
Project::builder(metadata)
.durability(Durability::MEDIUM)
.open_fileset_durability(Durability::LOW)
.file_set_durability(Durability::LOW)
@@ -96,31 +85,11 @@ impl Project {
self.metadata(db).name()
}
/// Returns the resolved linter rules for the project.
///
/// This is a salsa query to prevent re-computing queries if other, unrelated
/// settings change. For example, we don't want a change to the terminal settings
/// to invalidate any type checking queries.
#[salsa::tracked]
pub fn rules(self, db: &dyn Db) -> Arc<RuleSelection> {
self.settings(db).to_rules()
}
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
tracing::debug!("Reloading project");
assert_eq!(self.root(db), metadata.root());
if &metadata != self.metadata(db) {
let (settings, settings_diagnostics) = metadata.options().to_settings(db);
if self.settings(db) != &settings {
self.set_settings(db).to(settings);
}
if self.settings_diagnostics(db) != &settings_diagnostics {
self.set_settings_diagnostics(db).to(settings_diagnostics);
}
self.set_metadata(db).to(metadata);
}
@@ -128,19 +97,12 @@ impl Project {
}
/// Checks all open files in the project and its dependencies.
pub(crate) fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
pub fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
let project_span = tracing::debug_span!("Project::check");
let _span = project_span.enter();
tracing::debug!("Checking project '{name}'", name = self.name(db));
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
diagnostics.extend(self.settings_diagnostics(db).iter().map(|diagnostic| {
let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
diagnostic
}));
let result = Arc::new(std::sync::Mutex::new(diagnostics));
let result = Arc::new(std::sync::Mutex::new(Vec::new()));
let inner_result = Arc::clone(&result);
let db = db.clone();
@@ -157,7 +119,7 @@ impl Project {
let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
let _entered = check_file_span.entered();
let file_diagnostics = check_file_impl(&db, file);
let file_diagnostics = check_file(&db, file);
result.lock().unwrap().extend(file_diagnostics);
});
}
@@ -166,22 +128,6 @@ impl Project {
Arc::into_inner(result).unwrap().into_inner().unwrap()
}
pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
let mut file_diagnostics: Vec<_> = self
.settings_diagnostics(db)
.iter()
.map(|diagnostic| {
let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
diagnostic
})
.collect();
let check_diagnostics = check_file_impl(db, file);
file_diagnostics.extend(check_diagnostics);
file_diagnostics
}
/// Opens a file in the project.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
@@ -319,9 +265,8 @@ impl Project {
}
}
fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
pub(crate) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
// Abort checking if there are IO errors.
let source = source_text(db.upcast(), file);
@@ -344,13 +289,7 @@ fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
boxed
}));
diagnostics.sort_unstable_by_key(|diagnostic| {
diagnostic
.span()
.and_then(|span| span.range())
.unwrap_or_default()
.start()
});
diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());
diagnostics
}
@@ -463,8 +402,12 @@ impl Diagnostic for IOErrorDiagnostic {
self.error.to_string().into()
}
fn span(&self) -> Option<Span> {
Some(Span::from(self.file))
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {
None
}
fn severity(&self) -> Severity {
@@ -475,7 +418,7 @@ impl Diagnostic for IOErrorDiagnostic {
#[cfg(test)]
mod tests {
use crate::db::tests::TestDb;
use crate::{check_file_impl, ProjectMetadata};
use crate::{check_file, ProjectMetadata};
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::system_path_to_file;
@@ -499,7 +442,7 @@ mod tests {
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(
check_file_impl(&db, file)
check_file(&db, file)
.into_iter()
.map(|diagnostic| diagnostic.message().into_owned())
.collect::<Vec<_>>(),
@@ -515,7 +458,7 @@ mod tests {
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(
check_file_impl(&db, file)
check_file(&db, file)
.into_iter()
.map(|diagnostic| diagnostic.message().into_owned())
.collect::<Vec<_>>(),

View File

@@ -1,21 +1,15 @@
use configuration_file::{ConfigurationFile, ConfigurationFileError};
use red_knot_python_semantic::ProgramSettings;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
use std::sync::Arc;
use thiserror::Error;
use crate::combine::Combine;
use crate::metadata::pyproject::{Project, PyProject, PyProjectError, ResolveRequiresPythonError};
use crate::metadata::value::ValueSource;
use crate::metadata::pyproject::{Project, PyProject, PyProjectError};
use options::KnotTomlError;
use options::Options;
mod configuration_file;
pub mod options;
pub mod pyproject;
pub mod settings;
pub mod value;
#[derive(Debug, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
@@ -26,15 +20,6 @@ pub struct ProjectMetadata {
/// The raw options
pub(super) options: Options,
/// Paths of configurations other than the project's configuration that were combined into [`Self::options`].
///
/// This field stores the paths of the configuration files, mainly for
/// knowing which files to watch for changes.
///
/// The path ordering doesn't imply precedence.
#[cfg_attr(test, serde(skip_serializing_if = "Vec::is_empty"))]
pub(super) extra_configuration_paths: Vec<SystemPathBuf>,
}
impl ProjectMetadata {
@@ -43,16 +28,12 @@ impl ProjectMetadata {
Self {
name,
root,
extra_configuration_paths: Vec::default(),
options: Options::default(),
}
}
/// Loads a project from a `pyproject.toml` file.
pub(crate) fn from_pyproject(
pyproject: PyProject,
root: SystemPathBuf,
) -> Result<Self, ResolveRequiresPythonError> {
pub(crate) fn from_pyproject(pyproject: PyProject, root: SystemPathBuf) -> Self {
Self::from_options(
pyproject
.tool
@@ -65,37 +46,21 @@ impl ProjectMetadata {
/// Loads a project from a set of options with an optional pyproject-project table.
pub(crate) fn from_options(
mut options: Options,
options: Options,
root: SystemPathBuf,
project: Option<&Project>,
) -> Result<Self, ResolveRequiresPythonError> {
) -> Self {
let name = project
.and_then(|project| project.name.as_deref())
.and_then(|project| project.name.as_ref())
.map(|name| Name::new(&**name))
.unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));
// If the `options` don't specify a python version but the `project.requires-python` field is set,
// use that as a lower bound instead.
if let Some(project) = project {
if options
.environment
.as_ref()
.is_none_or(|env| env.python_version.is_none())
{
if let Some(requires_python) = project.resolve_requires_python_lower_bound()? {
let mut environment = options.environment.unwrap_or_default();
environment.python_version = Some(requires_python);
options.environment = Some(environment);
}
}
}
Ok(Self {
// TODO(https://github.com/astral-sh/ruff/issues/15491): Respect requires-python
Self {
name,
root,
options,
extra_configuration_paths: Vec::new(),
})
}
}
/// Discovers the closest project at `path` and returns its metadata.
@@ -122,10 +87,7 @@ impl ProjectMetadata {
let pyproject_path = project_root.join("pyproject.toml");
let pyproject = if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
match PyProject::from_toml_str(
&pyproject_str,
ValueSource::File(Arc::new(pyproject_path.clone())),
) {
match PyProject::from_toml_str(&pyproject_str) {
Ok(pyproject) => Some(pyproject),
Err(error) => {
return Err(ProjectDiscoveryError::InvalidPyProject {
@@ -141,10 +103,7 @@ impl ProjectMetadata {
// A `knot.toml` takes precedence over a `pyproject.toml`.
let knot_toml_path = project_root.join("knot.toml");
if let Ok(knot_str) = system.read_to_string(&knot_toml_path) {
let options = match Options::from_toml_str(
&knot_str,
ValueSource::File(Arc::new(knot_toml_path.clone())),
) {
let options = match Options::from_toml_str(&knot_str) {
Ok(options) => options,
Err(error) => {
return Err(ProjectDiscoveryError::InvalidKnotToml {
@@ -163,34 +122,19 @@ impl ProjectMetadata {
}
tracing::debug!("Found project at '{}'", project_root);
let metadata = ProjectMetadata::from_options(
return Ok(ProjectMetadata::from_options(
options,
project_root.to_path_buf(),
pyproject
.as_ref()
.and_then(|pyproject| pyproject.project.as_ref()),
)
.map_err(|err| {
ProjectDiscoveryError::InvalidRequiresPythonConstraint {
source: err,
path: pyproject_path,
}
})?;
return Ok(metadata);
));
}
if let Some(pyproject) = pyproject {
let has_knot_section = pyproject.knot().is_some();
let metadata =
ProjectMetadata::from_pyproject(pyproject, project_root.to_path_buf())
.map_err(
|err| ProjectDiscoveryError::InvalidRequiresPythonConstraint {
source: err,
path: pyproject_path,
},
)?;
ProjectMetadata::from_pyproject(pyproject, project_root.to_path_buf());
if has_knot_section {
tracing::debug!("Found project at '{}'", project_root);
@@ -238,10 +182,6 @@ impl ProjectMetadata {
&self.options
}
pub fn extra_configuration_paths(&self) -> &[SystemPathBuf] {
&self.extra_configuration_paths
}
pub fn to_program_settings(&self, system: &dyn System) -> ProgramSettings {
self.options.to_program_settings(self.root(), system)
}
@@ -251,31 +191,9 @@ impl ProjectMetadata {
self.options = options.combine(std::mem::take(&mut self.options));
}
/// Applies the options from the configuration files to the project's options.
///
/// This includes:
///
/// * The user-level configuration
pub fn apply_configuration_files(
&mut self,
system: &dyn System,
) -> Result<(), ConfigurationFileError> {
if let Some(user) = ConfigurationFile::user(system)? {
tracing::debug!(
"Applying user-level configuration loaded from `{path}`.",
path = user.path()
);
self.apply_configuration_file(user);
}
Ok(())
}
/// Applies a lower-precedence configuration file to the project's options.
fn apply_configuration_file(&mut self, options: ConfigurationFile) {
self.extra_configuration_paths
.push(options.path().to_owned());
self.options.combine_with(options.into_options());
/// Combine the project options with the user options where project options take precedence.
pub fn apply_user_options(&mut self, options: Options) {
self.options.combine_with(options);
}
}
@@ -295,22 +213,16 @@ pub enum ProjectDiscoveryError {
source: Box<KnotTomlError>,
path: SystemPathBuf,
},
#[error("Invalid `requires-python` version specifier (`{path}`): {source}")]
InvalidRequiresPythonConstraint {
source: ResolveRequiresPythonError,
path: SystemPathBuf,
},
}
#[cfg(test)]
mod tests {
//! Integration tests for project discovery
use crate::snapshot_project;
use anyhow::{anyhow, Context};
use insta::assert_ron_snapshot;
use ruff_db::system::{SystemPathBuf, TestSystem};
use ruff_python_ast::PythonVersion;
use crate::{ProjectDiscoveryError, ProjectMetadata};
@@ -329,15 +241,7 @@ mod tests {
assert_eq!(project.root(), &*root);
with_escaped_paths(|| {
assert_ron_snapshot!(&project, @r#"
ProjectMetadata(
name: Name("app"),
root: "/app",
options: Options(),
)
"#);
});
snapshot_project!(project);
Ok(())
}
@@ -366,16 +270,7 @@ mod tests {
ProjectMetadata::discover(&root, &system).context("Failed to discover project")?;
assert_eq!(project.root(), &*root);
with_escaped_paths(|| {
assert_ron_snapshot!(&project, @r#"
ProjectMetadata(
name: Name("backend"),
root: "/app",
options: Options(),
)
"#);
});
snapshot_project!(project);
// Discovering the same package from a subdirectory should give the same result
let from_src = ProjectMetadata::discover(&root.join("db"), &system)
@@ -458,19 +353,7 @@ expected `.`, `]`
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(sub_project, @r#"
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(
src: Some(SrcOptions(
root: Some("src"),
)),
),
)
"#);
});
snapshot_project!(sub_project);
Ok(())
}
@@ -508,19 +391,7 @@ expected `.`, `]`
let root = ProjectMetadata::discover(&root, &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(root, @r#"
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
src: Some(SrcOptions(
root: Some("src"),
)),
),
)
"#);
});
snapshot_project!(root);
Ok(())
}
@@ -552,15 +423,7 @@ expected `.`, `]`
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(sub_project, @r#"
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(),
)
"#);
});
snapshot_project!(sub_project);
Ok(())
}
@@ -595,19 +458,7 @@ expected `.`, `]`
let root = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(root, @r#"
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
environment: Some(EnvironmentOptions(
r#python-version: Some("3.10"),
)),
),
)
"#);
});
snapshot_project!(root);
Ok(())
}
@@ -627,304 +478,27 @@ expected `.`, `]`
(
root.join("pyproject.toml"),
r#"
[project]
name = "super-app"
requires-python = ">=3.12"
[project]
name = "super-app"
requires-python = ">=3.12"
[tool.knot.src]
root = "this_option_is_ignored"
"#,
[tool.knot.src]
root = "this_option_is_ignored"
"#,
),
(
root.join("knot.toml"),
r#"
[src]
root = "src"
"#,
[src]
root = "src"
"#,
),
])
.context("Failed to write files")?;
let root = ProjectMetadata::discover(&root, &system)?;
with_escaped_paths(|| {
assert_ron_snapshot!(root, @r#"
ProjectMetadata(
name: Name("super-app"),
root: "/app",
options: Options(
environment: Some(EnvironmentOptions(
r#python-version: Some("3.12"),
)),
src: Some(SrcOptions(
root: Some("src"),
)),
),
)
"#);
});
Ok(())
}
#[test]
fn requires_python_major_minor() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=3.12"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY312)
);
Ok(())
}
#[test]
fn requires_python_major_only() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=3"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::from((3, 0)))
);
Ok(())
}
/// A `requires-python` constraint with major, minor and patch can be simplified
/// to major and minor (e.g. 3.12.1 -> 3.12).
#[test]
fn requires_python_major_minor_patch() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=3.12.8"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY312)
);
Ok(())
}
#[test]
fn requires_python_beta_version() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">= 3.13.0b0"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY313)
);
Ok(())
}
#[test]
fn requires_python_greater_than_major_minor() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
# This is somewhat nonsensical because 3.12.1 > 3.12 is true.
# That's why simplifying the constraint to >= 3.12 is correct
requires-python = ">3.12"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY312)
);
Ok(())
}
/// `python-version` takes precedence if both `requires-python` and `python-version` are configured.
#[test]
fn requires_python_and_python_version() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=3.12"
[tool.knot.environment]
python-version = "3.10"
"#,
)
.context("Failed to write file")?;
let root = ProjectMetadata::discover(&root, &system)?;
assert_eq!(
root.options
.environment
.unwrap_or_default()
.python_version
.as_deref(),
Some(&PythonVersion::PY310)
);
Ok(())
}
#[test]
fn requires_python_less_than() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
requires-python = "<3.12"
"#,
)
.context("Failed to write file")?;
let Err(error) = ProjectMetadata::discover(&root, &system) else {
return Err(anyhow!("Expected project discovery to fail because the `requires-python` doesn't specify a lower bound (it only specifies an upper bound)."));
};
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): value `<3.12` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.");
Ok(())
}
#[test]
fn requires_python_no_specifiers() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ""
"#,
)
.context("Failed to write file")?;
let Err(error) = ProjectMetadata::discover(&root, &system) else {
return Err(anyhow!("Expected project discovery to fail because the `requires-python` specifiers are empty and don't define a lower bound."));
};
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): value `` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.");
Ok(())
}
#[test]
fn requires_python_too_large_major_version() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
system
.memory_file_system()
.write_file(
root.join("pyproject.toml"),
r#"
[project]
requires-python = ">=999.0"
"#,
)
.context("Failed to write file")?;
let Err(error) = ProjectMetadata::discover(&root, &system) else {
return Err(anyhow!("Expected project discovery to fail because of the requires-python major version that is larger than 255."));
};
assert_error_eq(&error, "Invalid `requires-python` version specifier (`/app/pyproject.toml`): The major version `999` is larger than the maximum supported value 255");
snapshot_project!(root);
Ok(())
}
@@ -934,12 +508,15 @@ expected `.`, `]`
assert_eq!(error.to_string().replace('\\', "/"), message);
}
fn with_escaped_paths<R>(f: impl FnOnce() -> R) -> R {
let mut settings = insta::Settings::clone_current();
settings.add_dynamic_redaction(".root", |content, _path| {
content.as_str().unwrap().replace('\\', "/")
/// Snapshots a project but with all paths using unix separators.
#[macro_export]
macro_rules! snapshot_project {
($project:expr) => {{
assert_ron_snapshot!($project,{
".root" => insta::dynamic_redaction(|content, _content_path| {
content.as_str().unwrap().replace("\\", "/")
}),
});
settings.bind(f)
}
}};
}
}

View File

@@ -1,69 +0,0 @@
use std::sync::Arc;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use thiserror::Error;
use crate::metadata::value::ValueSource;
use super::options::{KnotTomlError, Options};
/// A `knot.toml` configuration file with the options it contains.
pub(crate) struct ConfigurationFile {
path: SystemPathBuf,
options: Options,
}
impl ConfigurationFile {
/// Loads the user-level configuration file if it exists.
///
/// Returns `None` if the file does not exist or if the concept of user-level configurations
/// doesn't exist on `system`.
pub(crate) fn user(system: &dyn System) -> Result<Option<Self>, ConfigurationFileError> {
let Some(configuration_directory) = system.user_config_directory() else {
return Ok(None);
};
let knot_toml_path = configuration_directory.join("knot").join("knot.toml");
tracing::debug!(
"Searching for a user-level configuration at `{path}`",
path = &knot_toml_path
);
let Ok(knot_toml_str) = system.read_to_string(&knot_toml_path) else {
return Ok(None);
};
match Options::from_toml_str(
&knot_toml_str,
ValueSource::File(Arc::new(knot_toml_path.clone())),
) {
Ok(options) => Ok(Some(Self {
path: knot_toml_path,
options,
})),
Err(error) => Err(ConfigurationFileError::InvalidKnotToml {
source: Box::new(error),
path: knot_toml_path,
}),
}
}
/// Returns the path to the configuration file.
pub(crate) fn path(&self) -> &SystemPath {
&self.path
}
pub(crate) fn into_options(self) -> Options {
self.options
}
}
#[derive(Debug, Error)]
pub enum ConfigurationFileError {
#[error("{path} is not a valid `knot.toml`: {source}")]
InvalidKnotToml {
source: Box<KnotTomlError>,
path: SystemPathBuf,
},
}

View File

@@ -1,43 +1,22 @@
use crate::metadata::value::{RangedValue, RelativePathBuf, ValueSource, ValueSourceGuard};
use crate::Db;
use red_knot_python_semantic::lint::{GetLintError, Level, LintSource, RuleSelection};
use red_knot_python_semantic::{ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings};
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity, Span};
use ruff_db::files::system_path_to_file;
use ruff_db::system::{System, SystemPath};
use red_knot_python_semantic::{
ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_macros::Combine;
use ruff_python_ast::PythonVersion;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::fmt::Debug;
use thiserror::Error;
use super::settings::{Settings, TerminalSettings};
/// The options for the project.
#[derive(Debug, Default, Clone, PartialEq, Eq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
/// Configures the type checking environment.
#[serde(skip_serializing_if = "Option::is_none")]
pub environment: Option<EnvironmentOptions>,
#[serde(skip_serializing_if = "Option::is_none")]
pub src: Option<SrcOptions>,
/// Configures the enabled lints and their severity.
#[serde(skip_serializing_if = "Option::is_none")]
pub rules: Option<Rules>,
#[serde(skip_serializing_if = "Option::is_none")]
pub terminal: Option<TerminalOptions>,
}
impl Options {
pub(crate) fn from_toml_str(content: &str, source: ValueSource) -> Result<Self, KnotTomlError> {
let _guard = ValueSourceGuard::new(source);
pub(crate) fn from_toml_str(content: &str) -> Result<Self, KnotTomlError> {
let options = toml::from_str(content)?;
Ok(options)
}
@@ -50,12 +29,7 @@ impl Options {
let (python_version, python_platform) = self
.environment
.as_ref()
.map(|env| {
(
env.python_version.as_deref().copied(),
env.python_platform.as_deref(),
)
})
.map(|env| (env.python_version, env.python_platform.as_ref()))
.unwrap_or_default();
ProgramSettings {
@@ -70,19 +44,19 @@ impl Options {
project_root: &SystemPath,
system: &dyn System,
) -> SearchPathSettings {
let src_roots = if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_ref())
{
vec![src_root.absolute(project_root, system)]
} else {
let src = project_root.join("src");
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
if system.is_directory(&src) {
vec![project_root.to_path_buf(), src]
let src_roots =
if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_deref()) {
vec![src_root.to_path_buf()]
} else {
vec![project_root.to_path_buf()]
}
};
let src = project_root.join("src");
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
if system.is_directory(&src) {
vec![project_root.to_path_buf(), src]
} else {
vec![project_root.to_path_buf()]
}
};
let (extra_paths, python, typeshed) = self
.environment
@@ -90,260 +64,50 @@ impl Options {
.map(|env| {
(
env.extra_paths.clone(),
env.python.clone(),
env.venv_path.clone(),
env.typeshed.clone(),
)
})
.unwrap_or_default();
SearchPathSettings {
extra_paths: extra_paths
.unwrap_or_default()
.into_iter()
.map(|path| path.absolute(project_root, system))
.collect(),
extra_paths: extra_paths.unwrap_or_default(),
src_roots,
custom_typeshed: typeshed.map(|path| path.absolute(project_root, system)),
python_path: python
.map(|python_path| {
PythonPath::SysPrefix(python_path.absolute(project_root, system))
})
.unwrap_or(PythonPath::KnownSitePackages(vec![])),
typeshed,
site_packages: python
.map(|venv_path| SitePackages::Derived { venv_path })
.unwrap_or(SitePackages::Known(vec![])),
}
}
#[must_use]
pub(crate) fn to_settings(&self, db: &dyn Db) -> (Settings, Vec<OptionDiagnostic>) {
let (rules, diagnostics) = self.to_rule_selection(db);
let mut settings = Settings::new(rules);
if let Some(terminal) = self.terminal.as_ref() {
settings.set_terminal(TerminalSettings {
error_on_warning: terminal.error_on_warning.unwrap_or_default(),
});
}
(settings, diagnostics)
}
#[must_use]
fn to_rule_selection(&self, db: &dyn Db) -> (RuleSelection, Vec<OptionDiagnostic>) {
let registry = db.lint_registry();
let mut diagnostics = Vec::new();
// Initialize the selection with the defaults
let mut selection = RuleSelection::from_registry(registry);
let rules = self
.rules
.as_ref()
.into_iter()
.flat_map(|rules| rules.inner.iter());
for (rule_name, level) in rules {
let source = rule_name.source();
match registry.get(rule_name) {
Ok(lint) => {
let lint_source = match source {
ValueSource::File(_) => LintSource::File,
ValueSource::Cli => LintSource::Cli,
};
if let Ok(severity) = Severity::try_from(**level) {
selection.enable(lint, severity, lint_source);
} else {
selection.disable(lint);
}
}
Err(error) => {
// `system_path_to_file` can return `Err` if the file was deleted since the configuration
// was read. This should be rare and it should be okay to default to not showing a configuration
// file in that case.
let file = source
.file()
.and_then(|path| system_path_to_file(db.upcast(), path).ok());
// TODO: Add a note if the value was configured on the CLI
let diagnostic = match error {
GetLintError::Unknown(_) => OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!("Unknown lint rule `{rule_name}`"),
Severity::Warning,
),
GetLintError::PrefixedWithCategory { suggestion, .. } => {
OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!(
"Unknown lint rule `{rule_name}`. Did you mean `{suggestion}`?"
),
Severity::Warning,
)
}
GetLintError::Removed(_) => OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!("Unknown lint rule `{rule_name}`"),
Severity::Warning,
),
};
let span = file.map(Span::from).map(|span| {
if let Some(range) = rule_name.range() {
span.with_range(range)
} else {
span
}
});
diagnostics.push(diagnostic.with_span(span));
}
}
}
(selection, diagnostics)
}
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct EnvironmentOptions {
/// Specifies the version of Python that will be used to execute the source code.
/// The version should be specified as a string in the format `M.m` where `M` is the major version
/// and `m` is the minor (e.g. "3.0" or "3.6").
/// If a version is provided, knot will generate errors if the source code makes use of language features
/// that are not supported in that version.
/// It will also tailor its use of type stub files, which conditionalizes type definitions based on the version.
#[serde(skip_serializing_if = "Option::is_none")]
pub python_version: Option<RangedValue<PythonVersion>>,
pub python_version: Option<PythonVersion>,
/// Specifies the target platform that will be used to execute the source code.
/// If specified, Red Knot will tailor its use of type stub files,
/// which conditionalize type definitions based on the platform.
///
/// If no platform is specified, knot will use `all` or the current platform in the LSP use case.
#[serde(skip_serializing_if = "Option::is_none")]
pub python_platform: Option<RangedValue<PythonPlatform>>,
pub python_platform: Option<PythonPlatform>,
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
/// or pyright's stubPath configuration setting.
#[serde(skip_serializing_if = "Option::is_none")]
pub extra_paths: Option<Vec<RelativePathBuf>>,
pub extra_paths: Option<Vec<SystemPathBuf>>,
/// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
/// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
/// bundled as a zip file in the binary.
#[serde(skip_serializing_if = "Option::is_none")]
pub typeshed: Option<RelativePathBuf>,
pub typeshed: Option<SystemPathBuf>,
/// Path to the Python installation from which Red Knot resolves type information and third-party dependencies.
///
/// Red Knot will search in the path's `site-packages` directories for type information and
/// third-party imports.
///
/// This option is commonly used to specify the path to a virtual environment.
#[serde(skip_serializing_if = "Option::is_none")]
pub python: Option<RelativePathBuf>,
// TODO: Rename to python, see https://github.com/astral-sh/ruff/issues/15530
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub venv_path: Option<SystemPathBuf>,
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SrcOptions {
/// The root of the project, used for finding first-party modules.
#[serde(skip_serializing_if = "Option::is_none")]
pub root: Option<RelativePathBuf>,
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", transparent)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Rules {
#[cfg_attr(feature = "schemars", schemars(with = "schema::Rules"))]
inner: FxHashMap<RangedValue<String>, RangedValue<Level>>,
}
impl FromIterator<(RangedValue<String>, RangedValue<Level>)> for Rules {
fn from_iter<T: IntoIterator<Item = (RangedValue<String>, RangedValue<Level>)>>(
iter: T,
) -> Self {
Self {
inner: iter.into_iter().collect(),
}
}
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct TerminalOptions {
/// Use exit code 1 if there are any warning-level diagnostics.
///
/// Defaults to `false`.
pub error_on_warning: Option<bool>,
}
#[cfg(feature = "schemars")]
mod schema {
use crate::DEFAULT_LINT_REGISTRY;
use red_knot_python_semantic::lint::Level;
use schemars::gen::SchemaGenerator;
use schemars::schema::{
InstanceType, Metadata, ObjectValidation, Schema, SchemaObject, SubschemaValidation,
};
use schemars::JsonSchema;
pub(super) struct Rules;
impl JsonSchema for Rules {
fn schema_name() -> String {
"Rules".to_string()
}
fn json_schema(gen: &mut SchemaGenerator) -> Schema {
let registry = &*DEFAULT_LINT_REGISTRY;
let level_schema = gen.subschema_for::<Level>();
let properties: schemars::Map<String, Schema> = registry
.lints()
.iter()
.map(|lint| {
(
lint.name().to_string(),
Schema::Object(SchemaObject {
metadata: Some(Box::new(Metadata {
title: Some(lint.summary().to_string()),
description: Some(lint.documentation()),
deprecated: lint.status.is_deprecated(),
default: Some(lint.default_level.to_string().into()),
..Metadata::default()
})),
subschemas: Some(Box::new(SubschemaValidation {
one_of: Some(vec![level_schema.clone()]),
..Default::default()
})),
..Default::default()
}),
)
})
.collect();
Schema::Object(SchemaObject {
instance_type: Some(InstanceType::Object.into()),
object: Some(Box::new(ObjectValidation {
properties,
// Allow unknown rules: Red Knot will warn about them.
// It gives a better experience when using an older Red Knot version because
// the schema will not deny rules that have been removed in newer versions.
additional_properties: Some(Box::new(level_schema)),
..ObjectValidation::default()
})),
..Default::default()
})
}
}
pub root: Option<SystemPathBuf>,
}
#[derive(Error, Debug)]
@@ -351,45 +115,3 @@ pub enum KnotTomlError {
#[error(transparent)]
TomlSyntax(#[from] toml::de::Error),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct OptionDiagnostic {
id: DiagnosticId,
message: String,
severity: Severity,
span: Option<Span>,
}
impl OptionDiagnostic {
pub fn new(id: DiagnosticId, message: String, severity: Severity) -> Self {
Self {
id,
message,
severity,
span: None,
}
}
#[must_use]
fn with_span(self, span: Option<Span>) -> Self {
OptionDiagnostic { span, ..self }
}
}
impl Diagnostic for OptionDiagnostic {
fn id(&self) -> DiagnosticId {
self.id
}
fn message(&self) -> Cow<str> {
Cow::Borrowed(&self.message)
}
fn span(&self) -> Option<Span> {
self.span.clone()
}
fn severity(&self) -> Severity {
self.severity
}
}

View File

@@ -1,12 +1,10 @@
use crate::metadata::options::Options;
use crate::metadata::value::{RangedValue, ValueSource, ValueSourceGuard};
use pep440_rs::{release_specifiers_to_ranges, Version, VersionSpecifiers};
use ruff_python_ast::PythonVersion;
use pep440_rs::{Version, VersionSpecifiers};
use serde::{Deserialize, Deserializer, Serialize};
use std::collections::Bound;
use std::ops::Deref;
use thiserror::Error;
use crate::metadata::options::Options;
/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
#[serde(rename_all = "kebab-case")]
@@ -30,11 +28,7 @@ pub enum PyProjectError {
}
impl PyProject {
pub(crate) fn from_toml_str(
content: &str,
source: ValueSource,
) -> Result<Self, PyProjectError> {
let _guard = ValueSourceGuard::new(source);
pub(crate) fn from_toml_str(content: &str) -> Result<Self, PyProjectError> {
toml::from_str(content).map_err(PyProjectError::TomlSyntax)
}
}
@@ -49,78 +43,11 @@ pub struct Project {
///
/// Note: Intentionally optional to be more permissive during deserialization.
/// `PackageMetadata::from_pyproject` reports missing names.
pub name: Option<RangedValue<PackageName>>,
pub name: Option<PackageName>,
/// The version of the project
pub version: Option<RangedValue<Version>>,
pub version: Option<Version>,
/// The Python versions this project is compatible with.
pub requires_python: Option<RangedValue<VersionSpecifiers>>,
}
impl Project {
pub(super) fn resolve_requires_python_lower_bound(
&self,
) -> Result<Option<RangedValue<PythonVersion>>, ResolveRequiresPythonError> {
let Some(requires_python) = self.requires_python.as_ref() else {
return Ok(None);
};
tracing::debug!("Resolving requires-python constraint: `{requires_python}`");
let ranges = release_specifiers_to_ranges((**requires_python).clone());
let Some((lower, _)) = ranges.bounding_range() else {
return Ok(None);
};
let version = match lower {
// Ex) `>=3.10.1` -> `>=3.10`
Bound::Included(version) => version,
// Ex) `>3.10.1` -> `>=3.10` or `>3.10` -> `>=3.10`
// The second example looks obscure at first but it is required because
// `3.10.1 > 3.10` is true but we only have two digits here. So including 3.10 is the
// right move. Overall, using `>` without a patch release is most likely bogus.
Bound::Excluded(version) => version,
// Ex) `<3.10` or ``
Bound::Unbounded => {
return Err(ResolveRequiresPythonError::NoLowerBound(
requires_python.to_string(),
))
}
};
// Take the major and minor version
let mut versions = version.release().iter().take(2);
let Some(major) = versions.next().copied() else {
return Ok(None);
};
let minor = versions.next().copied().unwrap_or_default();
tracing::debug!("Resolved requires-python constraint to: {major}.{minor}");
let major =
u8::try_from(major).map_err(|_| ResolveRequiresPythonError::TooLargeMajor(major))?;
let minor =
u8::try_from(minor).map_err(|_| ResolveRequiresPythonError::TooLargeMajor(minor))?;
Ok(Some(
requires_python
.clone()
.map_value(|_| PythonVersion::from((major, minor))),
))
}
}
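To make the bound handling above concrete, here is a small test-style sketch (it assumes `Project` has only the three fields shown in this hunk; the specifier string is illustrative):

```rust
#[test]
fn resolves_requires_python_to_major_minor() -> anyhow::Result<()> {
    // ">=3.12.8" is truncated to its major.minor lower bound, i.e. 3.12.
    let project = Project {
        name: None,
        version: None,
        requires_python: Some(RangedValue::cli(">=3.12.8".parse()?)),
    };
    assert_eq!(
        project.resolve_requires_python_lower_bound()?.as_deref(),
        Some(&PythonVersion::PY312)
    );
    // Specifiers without a lower bound (e.g. "<3.12" or "") return
    // `ResolveRequiresPythonError::NoLowerBound` instead.
    Ok(())
}
```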
#[derive(Debug, Error)]
pub enum ResolveRequiresPythonError {
#[error("The major version `{0}` is larger than the maximum supported value 255")]
TooLargeMajor(u64),
#[error("The minor version `{0}` is larger than the maximum supported value 255")]
TooLargeMinor(u64),
#[error("value `{0}` does not contain a lower bound. Add a lower bound to indicate the minimum compatible Python version (e.g., `>=3.13`) or specify a version in `environment.python-version`.")]
NoLowerBound(String),
pub requires_python: Option<VersionSpecifiers>,
}
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]

View File

@@ -1,53 +0,0 @@
use std::sync::Arc;
use red_knot_python_semantic::lint::RuleSelection;
/// The resolved [`super::Options`] for the project.
///
/// Unlike [`super::Options`], the struct has default values filled in and
/// uses representations that are optimized for reads (instead of preserving the source representation).
/// It's also not required that this structure precisely resembles the TOML schema, although
/// it's encouraged to use a similar structure.
///
/// It's worth considering adding a salsa query for specific settings to
/// limit the blast radius when only some settings change. For example,
/// changing the terminal settings shouldn't invalidate any core type-checking queries.
/// This can be achieved by adding a salsa query for the type checking specific settings.
///
/// Settings that are part of [`red_knot_python_semantic::ProgramSettings`] are not included here.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Settings {
rules: Arc<RuleSelection>,
terminal: TerminalSettings,
}
impl Settings {
pub fn new(rules: RuleSelection) -> Self {
Self {
rules: Arc::new(rules),
terminal: TerminalSettings::default(),
}
}
pub fn rules(&self) -> &RuleSelection {
&self.rules
}
pub fn to_rules(&self) -> Arc<RuleSelection> {
self.rules.clone()
}
pub fn terminal(&self) -> &TerminalSettings {
&self.terminal
}
pub fn set_terminal(&mut self, terminal: TerminalSettings) {
self.terminal = terminal;
}
}
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct TerminalSettings {
pub error_on_warning: bool,
}
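To sketch what the doc comment above suggests, a hypothetical per-settings-group salsa query could mirror the `Project::rules` query shown in `lib.rs` (the name `terminal_settings` is made up and not part of the code):

```rust
// Hypothetical sketch only: a narrow salsa query so that a change to the
// terminal settings does not invalidate unrelated type-checking queries.
#[salsa::tracked]
impl Project {
    #[salsa::tracked]
    pub fn terminal_settings(self, db: &dyn Db) -> TerminalSettings {
        self.settings(db).terminal().clone()
    }
}
```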

View File

@@ -1,339 +0,0 @@
use crate::combine::Combine;
use crate::Db;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_macros::Combine;
use ruff_text_size::{TextRange, TextSize};
use serde::{Deserialize, Deserializer};
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use toml::Spanned;
#[derive(Clone, Debug)]
pub enum ValueSource {
/// Value loaded from a project's configuration file.
///
/// Ideally, we'd use [`ruff_db::files::File`] but we can't because the database hasn't been
/// created when loading the configuration.
File(Arc<SystemPathBuf>),
/// The value comes from a CLI argument; whether it was specified using a short argument,
/// a long argument (`--extra-paths`), or `--config key=value` is left open.
Cli,
}
impl ValueSource {
pub fn file(&self) -> Option<&SystemPath> {
match self {
ValueSource::File(path) => Some(&**path),
ValueSource::Cli => None,
}
}
}
thread_local! {
/// Serde doesn't provide any easy means to pass a value to a [`Deserialize`] implementation,
/// but we want to associate each deserialized [`RelativePath`] with the source from
/// which it originated. We use a thread local variable to work around this limitation.
///
/// Use the [`ValueSourceGuard`] to initialize the thread local before calling into any
/// deserialization code. It ensures that the thread local variable gets cleaned up
/// once deserialization is done (once the guard gets dropped).
static VALUE_SOURCE: RefCell<Option<ValueSource>> = const { RefCell::new(None) };
}
/// Guard to safely change the [`VALUE_SOURCE`] for the current thread.
#[must_use]
pub(super) struct ValueSourceGuard {
prev_value: Option<ValueSource>,
}
impl ValueSourceGuard {
pub(super) fn new(source: ValueSource) -> Self {
let prev = VALUE_SOURCE.replace(Some(source));
Self { prev_value: prev }
}
}
impl Drop for ValueSourceGuard {
fn drop(&mut self) {
VALUE_SOURCE.set(self.prev_value.take());
}
}
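As a minimal usage sketch (this mirrors what `Options::from_toml_str` does above; the wrapper function name is made up):

```rust
// Install the CLI source for the current thread, then deserialize. The
// `Deserialize` impl of `RangedValue` reads the thread local and attaches
// this source to every value it produces.
fn options_from_cli_toml(content: &str) -> Result<Options, toml::de::Error> {
    let _guard = ValueSourceGuard::new(ValueSource::Cli);
    toml::from_str(content)
    // `_guard` drops when the function returns, restoring the previously
    // installed source (if any).
}
```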
/// A value that "remembers" where it comes from (source) and its range within that source.
///
/// ## Equality, Hash, and Ordering
/// The equality, hash, and ordering are solely based on the value. They disregard the value's range
/// or source.
///
/// This ensures that two resolved configurations are identical even if the position of a value has changed
/// or if the values were loaded from different sources.
#[derive(Clone, serde::Serialize)]
#[serde(transparent)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct RangedValue<T> {
value: T,
#[serde(skip)]
source: ValueSource,
/// The byte range of `value` in `source`.
///
/// Can be `None` because not all sources support a range.
/// For example, arguments provided on the CLI won't have a range attached.
#[serde(skip)]
range: Option<TextRange>,
}
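A small test-style illustration of these semantics (the value and the file path are illustrative):

```rust
#[test]
fn equality_ignores_source_and_range() {
    let from_cli = RangedValue::cli("3.12".to_string());
    let from_file = RangedValue::new(
        "3.12".to_string(),
        ValueSource::File(Arc::new(SystemPathBuf::from("/app/knot.toml"))),
    );
    // Same inner value, different sources (and ranges): still equal.
    assert_eq!(from_cli, from_file);
}
```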
impl<T> RangedValue<T> {
pub fn new(value: T, source: ValueSource) -> Self {
Self::with_range(value, source, TextRange::default())
}
pub fn cli(value: T) -> Self {
Self::with_range(value, ValueSource::Cli, TextRange::default())
}
pub fn with_range(value: T, source: ValueSource, range: TextRange) -> Self {
Self {
value,
range: Some(range),
source,
}
}
pub fn range(&self) -> Option<TextRange> {
self.range
}
pub fn source(&self) -> &ValueSource {
&self.source
}
#[must_use]
pub fn with_source(mut self, source: ValueSource) -> Self {
self.source = source;
self
}
#[must_use]
pub fn map_value<R>(self, f: impl FnOnce(T) -> R) -> RangedValue<R> {
RangedValue {
value: f(self.value),
source: self.source,
range: self.range,
}
}
pub fn into_inner(self) -> T {
self.value
}
}
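// Note: a `RangedValue` combines as an atomic unit. `combine` keeps `self` (the
// value with higher precedence) wholesale rather than merging the inner values,
// which is why `combine_with` below is intentionally a no-op.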
impl<T> Combine for RangedValue<T> {
fn combine(self, _other: Self) -> Self
where
Self: Sized,
{
self
}
fn combine_with(&mut self, _other: Self) {}
}
impl<T> IntoIterator for RangedValue<T>
where
T: IntoIterator,
{
type Item = T::Item;
type IntoIter = T::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
// The type already has an `iter` method thanks to `Deref`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a RangedValue<T>
where
&'a T: IntoIterator,
{
type Item = <&'a T as IntoIterator>::Item;
type IntoIter = <&'a T as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
// The type already has an `into_iter_mut` method thanks to `DerefMut`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a mut RangedValue<T>
where
&'a mut T: IntoIterator,
{
type Item = <&'a mut T as IntoIterator>::Item;
type IntoIter = <&'a mut T as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
impl<T> fmt::Debug for RangedValue<T>
where
T: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.value.fmt(f)
}
}
impl<T> fmt::Display for RangedValue<T>
where
T: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.value.fmt(f)
}
}
impl<T> Deref for RangedValue<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}
impl<T> DerefMut for RangedValue<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.value
}
}
impl<T, U: ?Sized> AsRef<U> for RangedValue<T>
where
T: AsRef<U>,
{
fn as_ref(&self) -> &U {
self.value.as_ref()
}
}
impl<T: PartialEq> PartialEq for RangedValue<T> {
fn eq(&self, other: &Self) -> bool {
self.value.eq(&other.value)
}
}
impl<T: PartialEq<T>> PartialEq<T> for RangedValue<T> {
fn eq(&self, other: &T) -> bool {
self.value.eq(other)
}
}
impl<T: Eq> Eq for RangedValue<T> {}
impl<T: Hash> Hash for RangedValue<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.value.hash(state);
}
}
impl<T: PartialOrd> PartialOrd for RangedValue<T> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.value.partial_cmp(&other.value)
}
}
impl<T: PartialOrd<T>> PartialOrd<T> for RangedValue<T> {
fn partial_cmp(&self, other: &T) -> Option<Ordering> {
self.value.partial_cmp(other)
}
}
impl<T: Ord> Ord for RangedValue<T> {
fn cmp(&self, other: &Self) -> Ordering {
self.value.cmp(&other.value)
}
}
impl<'de, T> Deserialize<'de> for RangedValue<T>
where
T: Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let spanned: Spanned<T> = Spanned::deserialize(deserializer)?;
let span = spanned.span();
let range = TextRange::new(
TextSize::try_from(span.start).expect("Configuration file to be smaller than 4GB"),
TextSize::try_from(span.end).expect("Configuration file to be smaller than 4GB"),
);
Ok(VALUE_SOURCE.with_borrow(|source| {
let source = source.clone().unwrap();
Self::with_range(spanned.into_inner(), source, range)
}))
}
}
/// A possibly relative path in a configuration file.
///
/// Relative paths in configuration files or from CLI options
/// require different anchoring:
///
/// * CLI: The path is relative to the current working directory
/// * Configuration file: The path is relative to the project's root.
#[derive(
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
PartialEq,
Eq,
PartialOrd,
Ord,
Hash,
Combine,
)]
#[serde(transparent)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct RelativePathBuf(RangedValue<SystemPathBuf>);
impl RelativePathBuf {
pub fn new(path: impl AsRef<SystemPath>, source: ValueSource) -> Self {
Self(RangedValue::new(path.as_ref().to_path_buf(), source))
}
pub fn cli(path: impl AsRef<SystemPath>) -> Self {
Self::new(path, ValueSource::Cli)
}
/// Returns the relative path as specified by the user.
pub fn path(&self) -> &SystemPath {
&self.0
}
/// Returns the owned relative path.
pub fn into_path_buf(self) -> SystemPathBuf {
self.0.into_inner()
}
/// Resolves the absolute path for `self` based on its origin.
pub fn absolute_with_db(&self, db: &dyn Db) -> SystemPathBuf {
self.absolute(db.project().root(db), db.system())
}
/// Resolves the absolute path for `self` based on its origin.
pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> SystemPathBuf {
let relative_to = match &self.0.source {
ValueSource::File(_) => project_root,
ValueSource::Cli => system.current_directory(),
};
SystemPath::absolute(&self.0, relative_to)
}
}
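As a rough sketch of these anchoring rules (the paths are illustrative; joining and normalization are delegated to `SystemPath::absolute`):

```rust
// Illustrative only: the same relative path resolves differently depending on
// where it was specified.
fn resolve_examples(system: &dyn System) {
    let project_root = SystemPathBuf::from("/app");

    // From a configuration file: anchored at the project root, so "src"
    // resolves to "/app/src".
    let from_file = RelativePathBuf::new(
        SystemPathBuf::from("src"),
        ValueSource::File(Arc::new(SystemPathBuf::from("/app/knot.toml"))),
    );
    let _in_project = from_file.absolute(&project_root, system);

    // From the CLI: anchored at the current working directory instead.
    let from_cli = RelativePathBuf::cli(SystemPathBuf::from("src"));
    let _in_cwd = from_cli.absolute(&project_root, system);
}
```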

View File

@@ -0,0 +1,14 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),
),
)

View File

@@ -0,0 +1,14 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: sub_project
---
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),
),
)

View File

@@ -0,0 +1,18 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
environment: Some(EnvironmentOptions(
r#python-version: Some("3.10"),
r#python-platform: None,
r#extra-paths: None,
typeshed: None,
r#venv-path: None,
)),
src: None,
),
)

View File

@@ -0,0 +1,12 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: sub_project
---
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(
environment: None,
src: None,
),
)

View File

@@ -0,0 +1,14 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: root
---
ProjectMetadata(
name: Name("super-app"),
root: "/app",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),
),
)

View File

@@ -0,0 +1,12 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: project
---
ProjectMetadata(
name: Name("backend"),
root: "/app",
options: Options(
environment: None,
src: None,
),
)

View File

@@ -0,0 +1,12 @@
---
source: crates/red_knot_project/src/metadata.rs
expression: project
---
ProjectMetadata(
name: Name("app"),
root: "/app",
options: Options(
environment: None,
src: None,
),
)

View File

@@ -73,13 +73,6 @@ impl ProjectWatcher {
.canonicalize_path(&project_path)
.unwrap_or(project_path);
let config_paths = db
.project()
.metadata(db)
.extra_configuration_paths()
.iter()
.cloned();
// Find the non-overlapping module search paths and filter out paths that are already covered by the project.
// Module search paths are already canonicalized.
let unique_module_paths = ruff_db::system::deduplicate_nested_paths(
@@ -90,11 +83,8 @@ impl ProjectWatcher {
.map(SystemPath::to_path_buf);
// Now add the new paths, first starting with the project path and then
// adding the library search paths, and finally the paths for configurations.
for path in std::iter::once(project_path)
.chain(unique_module_paths)
.chain(config_paths)
{
// adding the library search paths.
for path in std::iter::once(project_path).chain(unique_module_paths) {
// Log a warning. It's not worth aborting if registering a single folder fails because
// Ruff otherwise still works as expected.
if let Err(error) = self.watcher.watch(&path) {

View File

@@ -1,6 +1,6 @@
use anyhow::{anyhow, Context};
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_python_semantic::{HasType, SemanticModel};
use red_knot_python_semantic::{HasTy, SemanticModel};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
@@ -197,10 +197,10 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
fn visit_stmt(&mut self, stmt: &Stmt) {
match stmt {
Stmt::FunctionDef(function) => {
let _ty = function.inferred_type(&self.model);
let _ty = function.ty(&self.model);
}
Stmt::ClassDef(class) => {
let _ty = class.inferred_type(&self.model);
let _ty = class.ty(&self.model);
}
Stmt::Assign(assign) => {
for target in &assign.targets {
@@ -243,25 +243,25 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
}
fn visit_expr(&mut self, expr: &Expr) {
let _ty = expr.inferred_type(&self.model);
let _ty = expr.ty(&self.model);
source_order::walk_expr(self, expr);
}
fn visit_parameter(&mut self, parameter: &Parameter) {
let _ty = parameter.inferred_type(&self.model);
let _ty = parameter.ty(&self.model);
source_order::walk_parameter(self, parameter);
}
fn visit_parameter_with_default(&mut self, parameter_with_default: &ParameterWithDefault) {
let _ty = parameter_with_default.inferred_type(&self.model);
let _ty = parameter_with_default.ty(&self.model);
source_order::walk_parameter_with_default(self, parameter_with_default);
}
fn visit_alias(&mut self, alias: &Alias) {
let _ty = alias.inferred_type(&self.model);
let _ty = alias.ty(&self.model);
source_order::walk_alias(self, alias);
}
@@ -270,8 +270,6 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
/// Whether or not the .py/.pyi version of this file is expected to fail
#[rustfmt::skip]
const KNOWN_FAILURES: &[(&str, bool, bool)] = &[
// related to circular references in nested functions
("crates/ruff_linter/resources/test/fixtures/flake8_return/RET503.py", false, true),
// related to circular references in class definitions
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_26.py", true, true),
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_27.py", true, true),

View File

@@ -12,9 +12,9 @@ license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_index = { workspace = true, features = ["salsa"] }
ruff_index = { workspace = true }
ruff_macros = { workspace = true }
ruff_python_ast = { workspace = true, features = ["salsa"] }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
@@ -31,22 +31,19 @@ drop_bomb = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true, features = ["compact_str"] }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }
memchr = { workspace = true }
strum = { workspace = true}
strum_macros = { workspace = true}
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing", "os"] }
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }
red_knot_test = { workspace = true }
red_knot_vendored = { workspace = true }
@@ -59,7 +56,7 @@ quickcheck = { version = "1.0.3", default-features = false }
quickcheck_macros = { version = "1.0.0" }
[features]
serde = ["ruff_db/serde", "dep:serde", "ruff_python_ast/serde"]
serde = ["ruff_db/serde", "dep:serde"]
[lints]
workspace = true

View File

@@ -61,13 +61,7 @@ class MDTestRunner:
return False
# Run it again with 'json' format to find the mdtest executable:
try:
json_output = self._run_cargo_test(message_format="json")
except subprocess.CalledProcessError as _:
# `cargo test` can still fail if something changed in between the two runs.
# Here we don't have a human-readable output, so just show a generic message:
self.console.print("[red]Error[/red]: Failed to compile tests")
return False
json_output = self._run_cargo_test(message_format="json")
if json_output:
self._get_executable_path_from_json(json_output)

View File

@@ -1,46 +0,0 @@
# Deferred annotations
## Deferred annotations in stubs always resolve
`mod.pyi`:
```pyi
def get_foo() -> Foo: ...
class Foo: ...
```
```py
from mod import get_foo
reveal_type(get_foo()) # revealed: Foo
```
## Deferred annotations in regular code fail
In (regular) source files, annotations are *not* deferred. This also tests that imports from
`__future__` that are not `annotations` are ignored.
```py
from __future__ import with_statement as annotations
# error: [unresolved-reference]
def get_foo() -> Foo: ...
class Foo: ...
reveal_type(get_foo()) # revealed: Unknown
```
## Deferred annotations in regular code with `__future__.annotations`
If `__future__.annotations` is imported, annotations *are* deferred.
```py
from __future__ import annotations
def get_foo() -> Foo: ...
class Foo: ...
reveal_type(get_foo()) # revealed: Foo
```
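As a runnable aside (hypothetical module names, plain CPython semantics rather than anything checker-specific): the future import stores annotations as strings, which is what makes forward references like the one above resolvable.

```py
from __future__ import annotations

def get_widget() -> Widget:  # `Widget` is only defined below
    return Widget()

class Widget: ...

# With the future import, the annotation is kept as the string "Widget"
# and is only resolved on demand, so the forward reference works.
assert get_widget.__annotations__["return"] == "Widget"
assert isinstance(get_widget(), Widget)
```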

View File

@@ -1,90 +0,0 @@
# Special cases for int/float/complex in annotations
In order to support common use cases, an annotation of `float` actually means `int | float`, and an
annotation of `complex` actually means `int | float | complex`. See
[the specification](https://typing.readthedocs.io/en/latest/spec/special-types.html#special-cases-for-float-and-complex)
## float
An annotation of `float` means `int | float`, so `int` is assignable to it:
```py
def takes_float(x: float):
pass
def passes_int_to_float(x: int):
# no error!
takes_float(x)
```
It also applies to variable annotations:
```py
def assigns_int_to_float(x: int):
# no error!
y: float = x
```
It doesn't work the other way around:
```py
def takes_int(x: int):
pass
def passes_float_to_int(x: float):
# error: [invalid-argument-type]
takes_int(x)
def assigns_float_to_int(x: float):
# error: [invalid-assignment]
y: int = x
```
Unlike other type checkers, we choose not to obfuscate this special case by displaying `int | float`
as just `float`; we display the actual type:
```py
def f(x: float):
reveal_type(x) # revealed: int | float
```
## complex
An annotation of `complex` means `int | float | complex`, so `int` and `float` are both assignable
to it (but not the other way around):
```py
def takes_complex(x: complex):
pass
def passes_to_complex(x: float, y: int):
# no errors!
takes_complex(x)
takes_complex(y)
def assigns_to_complex(x: float, y: int):
# no errors!
a: complex = x
b: complex = y
def takes_int(x: int):
pass
def takes_float(x: float):
pass
def passes_complex(x: complex):
# error: [invalid-argument-type]
takes_int(x)
# error: [invalid-argument-type]
takes_float(x)
def assigns_complex(x: complex):
# error: [invalid-assignment]
y: int = x
# error: [invalid-assignment]
z: float = x
def f(x: complex):
reveal_type(x) # revealed: int | float | complex
```
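A minimal sketch of the promotion described above, using plain strings to stand in for annotations; the names here are illustrative only and do not correspond to the checker's internal API.

```py
# Hypothetical model of the special case: `float` and `complex` annotations
# expand to unions, everything else stands for itself.
PROMOTIONS = {
    "float": ("int", "float"),
    "complex": ("int", "float", "complex"),
}

def expand_annotation(name: str) -> tuple[str, ...]:
    return PROMOTIONS.get(name, (name,))

assert expand_annotation("float") == ("int", "float")
assert expand_annotation("complex") == ("int", "float", "complex")
assert expand_annotation("str") == ("str",)
```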

View File

@@ -36,7 +36,7 @@ def f():
reveal_type(a7) # revealed: None
reveal_type(a8) # revealed: Literal[1]
# TODO: This should be Color.RED
reveal_type(b1) # revealed: Unknown | Literal[0]
reveal_type(b1) # revealed: Literal[0]
# error: [invalid-type-form]
invalid1: Literal[3 + 4]
@@ -106,7 +106,7 @@ def union_example(
Literal["B"],
Literal[True],
None,
],
]
):
reveal_type(x) # revealed: Unknown | Literal[-1, "A", b"A", b"\x00", b"\x07", 0, 1, "B", "foo", "bar", True] | None
```
@@ -116,9 +116,7 @@ def union_example(
Only a Literal that is defined in the typing and typing_extensions modules is detected as the special
Literal.
`other.pyi`:
```pyi
```pyi path=other.pyi
from typing import _SpecialForm
Literal: _SpecialForm

View File

@@ -73,12 +73,12 @@ qux = (foo, bar)
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
# TODO: Infer "LiteralString"
reveal_type(foo.join(qux)) # revealed: @Todo(overloaded method)
reveal_type(foo.join(qux)) # revealed: @Todo(Attribute access on `StringLiteral` types)
template: LiteralString = "{}, {}"
reveal_type(template) # revealed: Literal["{}, {}"]
# TODO: Infer `LiteralString`
reveal_type(template.format(foo, bar)) # revealed: @Todo(overloaded method)
reveal_type(template.format(foo, bar)) # revealed: @Todo(Attribute access on `StringLiteral` types)
```
### Assignability

View File

@@ -116,8 +116,8 @@ MyType = int
class Aliases:
MyType = str
forward: "MyType" = "value"
not_forward: MyType = "value"
forward: "MyType"
not_forward: MyType
reveal_type(Aliases.forward) # revealed: str
reveal_type(Aliases.not_forward) # revealed: str

View File

@@ -9,9 +9,9 @@ from typing import Union
a: Union[int, str]
a1: Union[int, bool]
a2: Union[int, Union[bytes, str]]
a2: Union[int, Union[float, str]]
a3: Union[int, None]
a4: Union[Union[bytes, str]]
a4: Union[Union[float, str]]
a5: Union[int]
a6: Union[()]
@@ -21,11 +21,11 @@ def f():
# Since bool is a subtype of int we simplify to int here. But we do allow assigning boolean values (see below).
# revealed: int
reveal_type(a1)
# revealed: int | bytes | str
# revealed: int | float | str
reveal_type(a2)
# revealed: int | None
reveal_type(a3)
# revealed: bytes | str
# revealed: float | str
reveal_type(a4)
# revealed: int
reveal_type(a5)

View File

@@ -25,9 +25,7 @@ x = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]` is not
## Tuple annotations are understood
`module.py`:
```py
```py path=module.py
from typing_extensions import Unpack
a: tuple[()] = ()
@@ -42,9 +40,7 @@ i: tuple[str | int, str | int] = (42, 42)
j: tuple[str | int] = (42,)
```
`script.py`:
```py
```py path=script.py
from module import a, b, c, d, e, f, g, h, i, j
reveal_type(a) # revealed: tuple[()]
@@ -118,7 +114,7 @@ reveal_type(x) # revealed: Foo
## Annotations in stub files are deferred
```pyi
```pyi path=main.pyi
x: Foo
class Foo: ...
@@ -129,7 +125,7 @@ reveal_type(x) # revealed: Foo
## Annotated assignments in stub files are inferred correctly
```pyi
```pyi path=main.pyi
x: int = 1
reveal_type(x) # revealed: Literal[1]
```

View File

@@ -9,7 +9,7 @@ reveal_type(x) # revealed: Literal[2]
x = 1.0
x /= 2
reveal_type(x) # revealed: int | float
reveal_type(x) # revealed: float
```
## Dunder methods
@@ -24,12 +24,12 @@ x -= 1
reveal_type(x) # revealed: str
class C:
def __iadd__(self, other: str) -> int:
return 1
def __iadd__(self, other: str) -> float:
return 1.0
x = C()
x += "Hello"
reveal_type(x) # revealed: int
reveal_type(x) # revealed: float
```
## Unsupported types
@@ -40,7 +40,7 @@ class C:
return 42
x = C()
# error: [unsupported-operator] "Operator `-=` is unsupported between objects of type `C` and `Literal[1]`"
# error: [invalid-argument-type]
x -= 1
reveal_type(x) # revealed: int
@@ -130,10 +130,10 @@ def _(flag: bool):
if flag:
f = Foo()
else:
f = 42
f = 42.0
f += 12
reveal_type(f) # revealed: str | Literal[54]
reveal_type(f) # revealed: str | float
```
## Partially bound target union with `__add__`

View File

@@ -50,44 +50,46 @@ reveal_type(b | b) # revealed: Literal[False]
## Arithmetic with a variable
```py
def _(a: bool):
def lhs_is_int(x: int):
reveal_type(x + a) # revealed: int
reveal_type(x - a) # revealed: int
reveal_type(x * a) # revealed: int
reveal_type(x // a) # revealed: int
reveal_type(x / a) # revealed: int | float
reveal_type(x % a) # revealed: int
a = True
b = False
def rhs_is_int(x: int):
reveal_type(a + x) # revealed: int
reveal_type(a - x) # revealed: int
reveal_type(a * x) # revealed: int
reveal_type(a // x) # revealed: int
reveal_type(a / x) # revealed: int | float
reveal_type(a % x) # revealed: int
def lhs_is_int(x: int):
reveal_type(x + a) # revealed: int
reveal_type(x - a) # revealed: int
reveal_type(x * a) # revealed: int
reveal_type(x // a) # revealed: int
reveal_type(x / a) # revealed: float
reveal_type(x % a) # revealed: int
def lhs_is_bool(x: bool):
reveal_type(x + a) # revealed: int
reveal_type(x - a) # revealed: int
reveal_type(x * a) # revealed: int
reveal_type(x // a) # revealed: int
reveal_type(x / a) # revealed: int | float
reveal_type(x % a) # revealed: int
def rhs_is_int(x: int):
reveal_type(a + x) # revealed: int
reveal_type(a - x) # revealed: int
reveal_type(a * x) # revealed: int
reveal_type(a // x) # revealed: int
reveal_type(a / x) # revealed: float
reveal_type(a % x) # revealed: int
def rhs_is_bool(x: bool):
reveal_type(a + x) # revealed: int
reveal_type(a - x) # revealed: int
reveal_type(a * x) # revealed: int
reveal_type(a // x) # revealed: int
reveal_type(a / x) # revealed: int | float
reveal_type(a % x) # revealed: int
def lhs_is_bool(x: bool):
reveal_type(x + a) # revealed: int
reveal_type(x - a) # revealed: int
reveal_type(x * a) # revealed: int
reveal_type(x // a) # revealed: int
reveal_type(x / a) # revealed: float
reveal_type(x % a) # revealed: int
def both_are_bool(x: bool, y: bool):
reveal_type(x + y) # revealed: int
reveal_type(x - y) # revealed: int
reveal_type(x * y) # revealed: int
reveal_type(x // y) # revealed: int
reveal_type(x / y) # revealed: int | float
reveal_type(x % y) # revealed: int
def rhs_is_bool(x: bool):
reveal_type(a + x) # revealed: int
reveal_type(a - x) # revealed: int
reveal_type(a * x) # revealed: int
reveal_type(a // x) # revealed: int
reveal_type(a / x) # revealed: float
reveal_type(a % x) # revealed: int
def both_are_bool(x: bool, y: bool):
reveal_type(x + y) # revealed: int
reveal_type(x - y) # revealed: int
reveal_type(x * y) # revealed: int
reveal_type(x // y) # revealed: int
reveal_type(x / y) # revealed: float
reveal_type(x % y) # revealed: int
```

View File

@@ -3,8 +3,6 @@
## Class instances
```py
from typing import Literal
class Yes:
def __add__(self, other) -> Literal["+"]:
return "+"
@@ -138,8 +136,6 @@ reveal_type(No() // Yes()) # revealed: Unknown
## Subclass reflections override superclass dunders
```py
from typing import Literal
class Yes:
def __add__(self, other) -> Literal["+"]:
return "+"
@@ -298,8 +294,6 @@ itself. (For these operators to work on the class itself, they would have to be
class's type, i.e. `type`.)
```py
from typing import Literal
class Yes:
def __add__(self, other) -> Literal["+"]:
return "+"
@@ -318,8 +312,6 @@ reveal_type(No + No) # revealed: Unknown
## Subclass
```py
from typing import Literal
class Yes:
def __add__(self, other) -> Literal["+"]:
return "+"

View File

@@ -244,7 +244,10 @@ class B:
def __rsub__(self, other: A) -> B:
return B()
reveal_type(A() - B()) # revealed: B
# TODO: this should be `B` (the return annotation of `B.__rsub__`),
# because `A.__sub__` is annotated as only accepting `A`,
# but `B.__rsub__` will accept `A`.
reveal_type(A() - B()) # revealed: A
```
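The runtime protocol motivating this TODO can be sketched as follows (hypothetical classes; simplified in that at runtime the fallback is driven by `NotImplemented`, whereas the checker has to infer it from declared parameter types, and the subclass-priority rule is ignored here).

```py
class Lhs:
    def __sub__(self, other):
        if not isinstance(other, Lhs):
            return NotImplemented  # decline operands we don't understand
        return "Lhs.__sub__"

class Rhs:
    def __rsub__(self, other):
        return "Rhs.__rsub__"

# Lhs.__sub__ declines Rhs, so Python falls back to the reflected dunder.
assert Lhs() - Rhs() == "Rhs.__rsub__"
```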
## Callable instances as dunders
@@ -259,38 +262,31 @@ class A:
class B:
__add__ = A()
reveal_type(B() + B()) # revealed: Unknown | int
```
Note that we union with `Unknown` here because `__add__` is not declared. We do infer just `int` if
the callable is declared:
```py
class B2:
__add__: A = A()
reveal_type(B2() + B2()) # revealed: int
reveal_type(B() + B()) # revealed: int
```
## Integration test: numbers from typeshed
We get less precise results from binary operations on float/complex literals due to the special case
for annotations of `float` or `complex`, which applies also to return annotations for typeshed
dunder methods. Perhaps we could have a special-case on the special-case, to exclude these typeshed
return annotations from the widening, and preserve a bit more precision here?
```py
reveal_type(3j + 3.14) # revealed: int | float | complex
reveal_type(4.2 + 42) # revealed: int | float
reveal_type(3j + 3) # revealed: int | float | complex
reveal_type(3.14 + 3j) # revealed: int | float | complex
reveal_type(42 + 4.2) # revealed: int | float
reveal_type(3 + 3j) # revealed: int | float | complex
reveal_type(3j + 3.14) # revealed: complex
reveal_type(4.2 + 42) # revealed: float
reveal_type(3j + 3) # revealed: complex
# TODO should be complex, need to check arg type and fall back to `rhs.__radd__`
reveal_type(3.14 + 3j) # revealed: float
# TODO should be float, need to check arg type and fall back to `rhs.__radd__`
reveal_type(42 + 4.2) # revealed: int
# TODO should be complex, need to check arg type and fall back to `rhs.__radd__`
reveal_type(3 + 3j) # revealed: int
def _(x: bool, y: int):
reveal_type(x + y) # revealed: int
reveal_type(4.2 + x) # revealed: int | float
reveal_type(y + 4.12) # revealed: int | float
reveal_type(4.2 + x) # revealed: float
# TODO should be float, need to check arg type and fall back to `rhs.__radd__`
reveal_type(y + 4.12) # revealed: int
```
## With literal types
@@ -307,12 +303,13 @@ class A:
return self
reveal_type(A() + 1) # revealed: A
reveal_type(1 + A()) # revealed: A
# TODO should be `A` since `int.__add__` doesn't support `A` instances
reveal_type(1 + A()) # revealed: int
reveal_type(A() + "foo") # revealed: A
# TODO should be `A` since `str.__add__` doesn't support `A` instances
# TODO overloads
reveal_type("foo" + A()) # revealed: @Todo(return type of decorated function)
reveal_type("foo" + A()) # revealed: @Todo(return type)
reveal_type(A() + b"foo") # revealed: A
# TODO should be `A` since `bytes.__add__` doesn't support `A` instances
@@ -320,7 +317,7 @@ reveal_type(b"foo" + A()) # revealed: bytes
reveal_type(A() + ()) # revealed: A
# TODO this should be `A`, since `tuple.__add__` doesn't support `A` instances
reveal_type(() + A()) # revealed: @Todo(return type of decorated function)
reveal_type(() + A()) # revealed: @Todo(return type)
literal_string_instance = "foo" * 1_000_000_000
# the test is not testing what it's meant to be testing if this isn't a `LiteralString`:
@@ -329,7 +326,7 @@ reveal_type(literal_string_instance) # revealed: LiteralString
reveal_type(A() + literal_string_instance) # revealed: A
# TODO should be `A` since `str.__add__` doesn't support `A` instances
# TODO overloads
reveal_type(literal_string_instance + A()) # revealed: @Todo(return type of decorated function)
reveal_type(literal_string_instance + A()) # revealed: @Todo(return type)
```
## Operations involving instances of classes inheriting from `Any`
@@ -357,20 +354,6 @@ class Y(Foo): ...
reveal_type(X() + Y()) # revealed: int
```
## Operations involving types with invalid `__bool__` methods
<!-- snapshot-diagnostics -->
```py
class NotBoolable:
__bool__ = 3
a = NotBoolable()
# error: [unsupported-bool-conversion]
10 and a and True
```
## Unsupported
### Dunder as instance attribute

View File

@@ -10,16 +10,16 @@ reveal_type(-3 // 3) # revealed: Literal[-1]
reveal_type(-3 / 3) # revealed: float
reveal_type(5 % 3) # revealed: Literal[2]
# TODO: Should emit `unsupported-operator` but we don't understand the bases of `str`, so we think
# it inherits `Unknown`, so we think `str.__radd__` is `Unknown` instead of nonexistent.
reveal_type(2 + "f") # revealed: Unknown
# TODO: We don't currently verify that the actual parameter to int.__add__ matches the declared
# formal parameter type.
reveal_type(2 + "f") # revealed: int
def lhs(x: int):
reveal_type(x + 1) # revealed: int
reveal_type(x - 4) # revealed: int
reveal_type(x * -1) # revealed: int
reveal_type(x // 3) # revealed: int
reveal_type(x / 3) # revealed: int | float
reveal_type(x / 3) # revealed: float
reveal_type(x % 3) # revealed: int
def rhs(x: int):
@@ -27,7 +27,7 @@ def rhs(x: int):
reveal_type(3 - x) # revealed: int
reveal_type(3 * x) # revealed: int
reveal_type(-3 // x) # revealed: int
reveal_type(-3 / x) # revealed: int | float
reveal_type(-3 / x) # revealed: float
reveal_type(5 % x) # revealed: int
def both(x: int):
@@ -35,7 +35,7 @@ def both(x: int):
reveal_type(x - x) # revealed: int
reveal_type(x * x) # revealed: int
reveal_type(x // x) # revealed: int
reveal_type(x / x) # revealed: int | float
reveal_type(x / x) # revealed: float
reveal_type(x % x) # revealed: int
```
@@ -51,9 +51,9 @@ reveal_type(1 ** (largest_u32 + 1)) # revealed: int
reveal_type(2**largest_u32) # revealed: int
def variable(x: int):
reveal_type(x**2) # revealed: @Todo(return type of decorated function)
reveal_type(2**x) # revealed: @Todo(return type of decorated function)
reveal_type(x**x) # revealed: @Todo(return type of decorated function)
reveal_type(x**2) # revealed: @Todo(return type)
reveal_type(2**x) # revealed: @Todo(return type)
reveal_type(x**x) # revealed: @Todo(return type)
```
## Division by Zero
@@ -80,20 +80,24 @@ c = 3 % 0 # error: "Cannot reduce object of type `Literal[3]` modulo zero"
reveal_type(c) # revealed: int
# error: "Cannot divide object of type `int` by zero"
reveal_type(int() / 0) # revealed: int | float
# revealed: float
reveal_type(int() / 0)
# error: "Cannot divide object of type `Literal[1]` by zero"
reveal_type(1 / False) # revealed: float
# revealed: float
reveal_type(1 / False)
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
True / False
# error: [division-by-zero] "Cannot divide object of type `Literal[True]` by zero"
bool(1) / False
# error: "Cannot divide object of type `float` by zero"
reveal_type(1.0 / 0) # revealed: int | float
# revealed: float
reveal_type(1.0 / 0)
class MyInt(int): ...
# No error for a subclass of int
reveal_type(MyInt(3) / 0) # revealed: int | float
# revealed: float
reveal_type(MyInt(3) / 0)
```

View File

@@ -1,15 +1,10 @@
# Boundness and declaredness: public uses
This document demonstrates how type-inference and diagnostics work for *public* uses of a symbol,
This document demonstrates how type-inference and diagnostics works for *public* uses of a symbol,
that is, a use of a symbol from another scope. If a symbol has a declared type in its local scope
(e.g. `int`), we use that as the symbol's "public type" (the type of the symbol from the perspective
of other scopes) even if there is a more precise local inferred type for the symbol (`Literal[1]`).
If a symbol has no declared type, we use the union of `Unknown` with the inferred type as the public
type. If there is no declaration, then the symbol can be reassigned to any type from another scope;
the union with `Unknown` reflects that its type must at least be as large as the type of the
assigned value, but could be arbitrarily larger.
We test the whole matrix of possible boundness and declaredness states. The current behavior is
summarized in the following table, while the tests below demonstrate each case. Note that some of
this behavior is questionable and might change in the future. See the TODOs in `symbol_by_id`
@@ -17,11 +12,11 @@ this behavior is questionable and might change in the future. See the TODOs in `
In particular, we should raise errors in the "possibly-undeclared-and-unbound" as well as the
"undeclared-and-possibly-unbound" cases (marked with a "?").
| **Public type** | declared | possibly-undeclared | undeclared |
| ---------------- | ------------ | -------------------------- | ----------------------- |
| bound | `T_declared` | `T_declared \| T_inferred` | `Unknown \| T_inferred` |
| possibly-unbound | `T_declared` | `T_declared \| T_inferred` | `Unknown \| T_inferred` |
| unbound | `T_declared` | `T_declared` | `Unknown` |
| **Public type** | declared | possibly-undeclared | undeclared |
| ---------------- | ------------ | -------------------------- | ------------ |
| bound | `T_declared` | `T_declared \| T_inferred` | `T_inferred` |
| possibly-unbound | `T_declared` | `T_declared \| T_inferred` | `T_inferred` |
| unbound | `T_declared` | `T_declared` | `Unknown` |
| **Diagnostic** | declared | possibly-undeclared | undeclared |
| ---------------- | -------- | ------------------------- | ------------------- |
@@ -34,28 +29,20 @@ In particular, we should raise errors in the "possibly-undeclared-and-unbound" a
### Declared and bound
If a symbol has a declared type (`int`), we use that even if there is a more precise inferred type
(`Literal[1]`), or a conflicting inferred type (`str` vs. `Literal[2]` below):
(`Literal[1]`), or a conflicting inferred type (`Literal[2]`):
`mod.py`:
```py path=mod.py
x: int = 1
```py
from typing import Any
def any() -> Any: ...
a: int = 1
b: str = 2 # error: [invalid-assignment]
c: Any = 3
d: int = any()
# error: [invalid-assignment]
y: str = 2
```
```py
from mod import a, b, c, d
from mod import x, y
reveal_type(a) # revealed: int
reveal_type(b) # revealed: str
reveal_type(c) # revealed: Any
reveal_type(d) # revealed: int
reveal_type(x) # revealed: int
reveal_type(y) # revealed: str
```
### Declared and possibly unbound
@@ -63,33 +50,22 @@ reveal_type(d) # revealed: int
If a symbol is declared and *possibly* unbound, we trust that other module and use the declared type
without raising an error.
`mod.py`:
```py
from typing import Any
def any() -> Any: ...
```py path=mod.py
def flag() -> bool: ...
a: int
b: str
c: Any
d: int
x: int
y: str
if flag:
a = 1
b = 2 # error: [invalid-assignment]
c = 3
d = any()
x = 1
# error: [invalid-assignment]
y = 2
```
```py
from mod import a, b, c, d
from mod import x, y
reveal_type(a) # revealed: int
reveal_type(b) # revealed: str
reveal_type(c) # revealed: Any
reveal_type(d) # revealed: int
reveal_type(x) # revealed: int
reveal_type(y) # revealed: str
```
### Declared and unbound
@@ -97,20 +73,14 @@ reveal_type(d) # revealed: int
Similarly, if a symbol is declared but unbound, we do not raise an error. We trust that this symbol
is available somehow and simply use the declared type.
`mod.py`:
```py
from typing import Any
a: int
b: Any
```py path=mod.py
x: int
```
```py
from mod import a, b
from mod import x
reveal_type(a) # revealed: int
reveal_type(b) # revealed: Any
reveal_type(x) # revealed: int
```
## Possibly undeclared
@@ -120,70 +90,50 @@ reveal_type(b) # revealed: Any
If a symbol is possibly undeclared but definitely bound, we use the union of the declared and
inferred types:
`mod.py`:
```py
```py path=mod.py
from typing import Any
def any() -> Any: ...
def flag() -> bool: ...
a = 1
b = 2
c = 3
d = any()
x = 1
y = 2
if flag():
a: int
b: Any
c: str # error: [invalid-declaration]
d: int
x: Any
# error: [invalid-declaration]
y: str
```
```py
from mod import a, b, c, d
from mod import x, y
reveal_type(a) # revealed: int
reveal_type(b) # revealed: Literal[2] | Any
reveal_type(c) # revealed: Literal[3] | Unknown
reveal_type(d) # revealed: Any | int
# External modifications of `a` that violate the declared type are not allowed:
# error: [invalid-assignment]
a = None
reveal_type(x) # revealed: Literal[1] | Any
reveal_type(y) # revealed: Literal[2] | Unknown
```
### Possibly undeclared and possibly unbound
If a symbol is possibly undeclared and possibly unbound, we also use the union of the declared and
inferred types. This case is interesting because the "possibly declared" definition might not be the
same as the "possibly bound" definition (symbol `b`). Note that we raise a `possibly-unbound-import`
error for both `a` and `b`:
`mod.py`:
```py
from typing import Any
same as the "possibly bound" definition (symbol `y`). Note that we raise a `possibly-unbound-import`
error for both `x` and `y`:
```py path=mod.py
def flag() -> bool: ...
if flag():
a: Any = 1
b = 2
x: Any = 1
y = 2
else:
b: str
y: str
```
```py
# error: [possibly-unbound-import]
# error: [possibly-unbound-import]
from mod import a, b
from mod import x, y
reveal_type(a) # revealed: Literal[1] | Any
reveal_type(b) # revealed: Literal[2] | str
# External modifications of `b` that violate the declared type are not allowed:
# error: [invalid-assignment]
b = None
reveal_type(x) # revealed: Literal[1] | Any
reveal_type(y) # revealed: Literal[2] | str
```
### Possibly undeclared and unbound
@@ -191,53 +141,35 @@ b = None
If a symbol is possibly undeclared and definitely unbound, we currently do not raise an error. This
seems inconsistent when compared to the case just above.
`mod.py`:
```py
```py path=mod.py
def flag() -> bool: ...
if flag():
a: int
x: int
```
```py
# TODO: this should raise an error. Once we fix this, update the section description and the table
# on top of this document.
from mod import a
from mod import x
reveal_type(a) # revealed: int
# External modifications to `a` that violate the declared type are not allowed:
# error: [invalid-assignment]
a = None
reveal_type(x) # revealed: int
```
## Undeclared
### Undeclared but bound
If a symbol is *undeclared*, we use the union of `Unknown` with the inferred type. Note that we
treat this case differently from the case where a symbol is implicitly declared with `Unknown`,
possibly due to the usage of an unknown name in the annotation:
We use the inferred type as the public type, if a symbol has no declared type.
`mod.py`:
```py
# Undeclared:
a = 1
# Implicitly declared with `Unknown`, due to the usage of an unknown name in the annotation:
b: SomeUnknownName = 1 # error: [unresolved-reference]
```py path=mod.py
x = 1
```
```py
from mod import a, b
from mod import x
reveal_type(a) # revealed: Unknown | Literal[1]
reveal_type(b) # revealed: Unknown
# All external modifications of `a` are allowed:
a = None
reveal_type(x) # revealed: Literal[1]
```
### Undeclared and possibly unbound
@@ -245,45 +177,33 @@ a = None
If a symbol is undeclared and *possibly* unbound, we currently do not raise an error. This seems
inconsistent when compared to the "possibly-undeclared-and-possibly-unbound" case.
`mod.py`:
```py
```py path=mod.py
def flag() -> bool: ...
if flag:
a = 1
b: SomeUnknownName = 1 # error: [unresolved-reference]
x = 1
```
```py
# TODO: this should raise an error. Once we fix this, update the section description and the table
# on top of this document.
from mod import a, b
from mod import x
reveal_type(a) # revealed: Unknown | Literal[1]
reveal_type(b) # revealed: Unknown
# All external modifications of `a` are allowed:
a = None
reveal_type(x) # revealed: Literal[1]
```
### Undeclared and unbound
If a symbol is undeclared *and* unbound, we infer `Unknown` and raise an error.
`mod.py`:
```py
```py path=mod.py
if False:
a: int = 1
x: int = 1
```
```py
# error: [unresolved-import]
from mod import a
from mod import x
reveal_type(a) # revealed: Unknown
# Modifications allowed in this case:
a = None
reveal_type(x) # revealed: Unknown
```

View File

@@ -1,37 +0,0 @@
# Calling builtins
## `bool` with incorrect arguments
```py
class NotBool:
__bool__ = None
# TODO: We should emit an `invalid-argument` error here for `2` because `bool` only takes one argument.
bool(1, 2)
# TODO: We should emit an `unsupported-bool-conversion` error here because the argument doesn't implement `__bool__` correctly.
bool(NotBool())
```
## Calls to `type()`
A single-argument call to `type()` returns an object that has the argument's meta-type. (This is
tested more extensively in `crates/red_knot_python_semantic/resources/mdtest/attributes.md`,
alongside the tests for the `__class__` attribute.)
```py
reveal_type(type(1)) # revealed: Literal[int]
```
But a three-argument call to type creates a dynamic instance of the `type` class:
```py
reveal_type(type("Foo", (), {})) # revealed: type
```
Other numbers of arguments are invalid (TODO -- these should emit a diagnostic)
```py
type("Foo", ())
type("Foo", (), {}, weird_other_arg=42)
```
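For reference, a small runnable illustration of the runtime behaviour the two valid call forms rely on (plain CPython semantics; the names are made up for the example).

```py
# The three-argument form builds a new class object dynamically.
Foo = type("Foo", (), {"x": 1})
assert isinstance(Foo, type)
assert Foo().x == 1

# The single-argument form returns the argument's class (its meta-type).
assert type(1) is int
```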

View File

@@ -4,14 +4,14 @@
```py
class Multiplier:
def __init__(self, factor: int):
def __init__(self, factor: float):
self.factor = factor
def __call__(self, number: int) -> int:
def __call__(self, number: float) -> float:
return number * self.factor
a = Multiplier(2)(3)
reveal_type(a) # revealed: int
a = Multiplier(2.0)(3.0)
reveal_type(a) # revealed: float
class Unit: ...
@@ -52,7 +52,7 @@ class NonCallable:
__call__ = 1
a = NonCallable()
# error: [call-non-callable] "Object of type `Literal[1]` is not callable"
# error: "Object of type `NonCallable` is not callable"
reveal_type(a()) # revealed: Unknown
```
@@ -67,8 +67,8 @@ def _(flag: bool):
def __call__(self) -> int: ...
a = NonCallable()
# error: [call-non-callable] "Object of type `Literal[1]` is not callable"
reveal_type(a()) # revealed: int | Unknown
# error: "Object of type `Literal[1] | Literal[__call__]` is not callable (due to union element `Literal[1]`)"
reveal_type(a()) # revealed: Unknown | int
```
## Call binding errors
@@ -82,7 +82,7 @@ class C:
c = C()
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter 2 (`x`) of bound method `__call__`; expected type `int`"
# error: 15 [invalid-argument-type] "Object of type `Literal["foo"]` cannot be assigned to parameter 2 (`x`) of function `__call__`; expected type `int`"
reveal_type(c("foo")) # revealed: int
```
@@ -96,29 +96,6 @@ class C:
c = C()
# error: 13 [invalid-argument-type] "Object of type `C` cannot be assigned to parameter 1 (`self`) of bound method `__call__`; expected type `int`"
# error: 13 [invalid-argument-type] "Object of type `C` cannot be assigned to parameter 1 (`self`) of function `__call__`; expected type `int`"
reveal_type(c()) # revealed: int
```
## Union over callables
### Possibly unbound `__call__`
```py
def outer(cond1: bool):
class Test:
if cond1:
def __call__(self): ...
class Other:
def __call__(self): ...
def inner(cond2: bool):
if cond2:
a = Test()
else:
a = Other()
# error: [call-non-callable] "Object of type `Test` is not callable (possibly unbound `__call__` method)"
a()
```

View File

@@ -1,128 +0,0 @@
# Dunder calls
## Introduction
This test suite explains and documents how dunder methods are looked up and called. Throughout the
document, we use `__getitem__` as an example, but the same principles apply to other dunder methods.
Dunder methods are implicitly called when using certain syntax. For example, the index operator
`obj[key]` calls the `__getitem__` method under the hood. Exactly *how* a dunder method is looked up
and called works slightly different from regular methods. Dunder methods are not looked up on `obj`
directly, but rather on `type(obj)`. But in many ways, they still *act* as if they were called on
`obj` directly. If the `__getitem__` member of `type(obj)` is a descriptor, it is called with `obj`
as the `instance` argument to `__get__`. A desugared version of `obj[key]` is roughly equivalent to
`getitem_desugared(obj, key)` as defined below:
```py
from typing import Any
def find_name_in_mro(typ: type, name: str) -> Any:
# See implementation in https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
pass
def getitem_desugared(obj: object, key: object) -> object:
getitem_callable = find_name_in_mro(type(obj), "__getitem__")
if hasattr(getitem_callable, "__get__"):
getitem_callable = getitem_callable.__get__(obj, type(obj))
return getitem_callable(key)
```
In the following tests, we demonstrate that we implement this behavior correctly.
## Operating on class objects
If we invoke a dunder method on a class, it is looked up on the *meta* class, since any class is an
instance of its metaclass:
```py
class Meta(type):
def __getitem__(cls, key: int) -> str:
return str(key)
class DunderOnMetaClass(metaclass=Meta):
pass
reveal_type(DunderOnMetaClass[0]) # revealed: str
```
## Operating on instances
When invoking a dunder method on an instance of a class, it is looked up on the class:
```py
class ClassWithNormalDunder:
def __getitem__(self, key: int) -> str:
return str(key)
class_with_normal_dunder = ClassWithNormalDunder()
reveal_type(class_with_normal_dunder[0]) # revealed: str
```
This can be demonstrated by trying to attach a dunder method to an instance, which will not work:
```py
def external_getitem(instance, key: int) -> str:
return str(key)
class ThisFails:
def __init__(self):
self.__getitem__ = external_getitem
this_fails = ThisFails()
# error: [non-subscriptable] "Cannot subscript object of type `ThisFails` with no `__getitem__` method"
reveal_type(this_fails[0]) # revealed: Unknown
```
However, the attached dunder method *can* be called if accessed directly:
```py
# TODO: `this_fails.__getitem__` is incorrectly treated as a bound method. This
# should be fixed with https://github.com/astral-sh/ruff/issues/16367
# error: [too-many-positional-arguments]
# error: [invalid-argument-type]
reveal_type(this_fails.__getitem__(this_fails, 0)) # revealed: Unknown | str
```
## When the dunder is not a method
A dunder can also be a non-method callable:
```py
class SomeCallable:
def __call__(self, key: int) -> str:
return str(key)
class ClassWithNonMethodDunder:
__getitem__: SomeCallable = SomeCallable()
class_with_callable_dunder = ClassWithNonMethodDunder()
reveal_type(class_with_callable_dunder[0]) # revealed: str
```
## Dunders are looked up using the descriptor protocol
Here, we demonstrate that the descriptor protocol is invoked when looking up a dunder method. Note
that the `instance` argument is on object of type `ClassWithDescriptorDunder`:
```py
from __future__ import annotations
class SomeCallable:
def __call__(self, key: int) -> str:
return str(key)
class Descriptor:
def __get__(self, instance: ClassWithDescriptorDunder, owner: type[ClassWithDescriptorDunder]) -> SomeCallable:
return SomeCallable()
class ClassWithDescriptorDunder:
__getitem__: Descriptor = Descriptor()
class_with_descriptor_dunder = ClassWithDescriptorDunder()
reveal_type(class_with_descriptor_dunder[0]) # revealed: str
```

View File

@@ -44,7 +44,7 @@ def bar() -> str:
return "bar"
# TODO: should reveal `int`, as the decorator replaces `bar` with `foo`
reveal_type(bar()) # revealed: @Todo(return type of decorated function)
reveal_type(bar()) # revealed: @Todo(return type)
```
## Invalid callable
@@ -278,10 +278,10 @@ proper diagnostics in case of missing or superfluous arguments.
from typing_extensions import reveal_type
# error: [missing-argument] "No argument provided for required parameter `obj` of function `reveal_type`"
reveal_type()
reveal_type() # revealed: Unknown
# error: [too-many-positional-arguments] "Too many positional arguments to function `reveal_type`: expected 1, got 2"
reveal_type(1, 2)
reveal_type(1, 2) # revealed: Literal[1]
```
### `static_assert`
@@ -290,6 +290,7 @@ reveal_type(1, 2)
from knot_extensions import static_assert
# error: [missing-argument] "No argument provided for required parameter `condition` of function `static_assert`"
# error: [static-assert-error]
static_assert()
# error: [too-many-positional-arguments] "Too many positional arguments to function `static_assert`: expected 2, got 3"

View File

@@ -1,133 +0,0 @@
# `inspect.getattr_static`
## Basic usage
`inspect.getattr_static` is a function that returns attributes of an object without invoking the
descriptor protocol (for caveats, see the [official documentation]).
Consider the following example:
```py
import inspect
class Descriptor:
def __get__(self, instance, owner) -> str:
return "a"
class C:
normal: int = 1
descriptor: Descriptor = Descriptor()
```
If we access attributes on an instance of `C` as usual, the descriptor protocol is invoked, and we
get a type of `str` for the `descriptor` attribute:
```py
c = C()
reveal_type(c.normal) # revealed: int
reveal_type(c.descriptor) # revealed: str
```
However, if we use `inspect.getattr_static`, we can see the underlying `Descriptor` type:
```py
reveal_type(inspect.getattr_static(c, "normal")) # revealed: int
reveal_type(inspect.getattr_static(c, "descriptor")) # revealed: Descriptor
```
For non-existent attributes, a default value can be provided:
```py
reveal_type(inspect.getattr_static(C, "normal", "default-arg")) # revealed: int
reveal_type(inspect.getattr_static(C, "non_existent", "default-arg")) # revealed: Literal["default-arg"]
```
When a non-existent attribute is accessed without a default value, the runtime raises an
`AttributeError`. We could emit a diagnostic for this case, but that is currently not supported:
```py
# TODO: we could emit a diagnostic here
reveal_type(inspect.getattr_static(C, "non_existent")) # revealed: Never
```
We can access attributes on objects of all kinds:
```py
import sys
reveal_type(inspect.getattr_static(sys, "platform")) # revealed: LiteralString
reveal_type(inspect.getattr_static(inspect, "getattr_static")) # revealed: Literal[getattr_static]
reveal_type(inspect.getattr_static(1, "real")) # revealed: Literal[1]
```
(Implicit) instance attributes can also be accessed through `inspect.getattr_static`:
```py
class D:
def __init__(self) -> None:
self.instance_attr: int = 1
reveal_type(inspect.getattr_static(D(), "instance_attr")) # revealed: int
```
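A runnable counterpart of the contrast above, using asserts instead of `reveal_type` (hypothetical classes; this only shows the runtime behaviour that the static results mirror).

```py
import inspect

class Ten:
    def __get__(self, instance, owner):
        return 10

class Holder:
    value = Ten()

holder = Holder()
# Normal attribute access goes through the descriptor protocol...
assert holder.value == 10
# ...while getattr_static returns the raw descriptor stored on the class.
assert isinstance(inspect.getattr_static(holder, "value"), Ten)
```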
## Error cases
We can only infer precise types if the attribute is a literal string. In all other cases, we fall
back to `Any`:
```py
import inspect
class C:
x: int = 1
def _(attr_name: str):
reveal_type(inspect.getattr_static(C(), attr_name)) # revealed: Any
reveal_type(inspect.getattr_static(C(), attr_name, 1)) # revealed: Any
```
But we still detect errors in the number or type of arguments:
```py
# error: [missing-argument] "No arguments provided for required parameters `obj`, `attr` of function `getattr_static`"
inspect.getattr_static()
# error: [missing-argument] "No argument provided for required parameter `attr`"
inspect.getattr_static(C())
# error: [invalid-argument-type] "Object of type `Literal[1]` cannot be assigned to parameter 2 (`attr`) of function `getattr_static`; expected type `str`"
inspect.getattr_static(C(), 1)
# error: [too-many-positional-arguments] "Too many positional arguments to function `getattr_static`: expected 3, got 4"
inspect.getattr_static(C(), "x", "default-arg", "one too many")
```
## Possibly unbound attributes
```py
import inspect
def _(flag: bool):
class C:
if flag:
x: int = 1
reveal_type(inspect.getattr_static(C, "x", "default")) # revealed: int | Literal["default"]
```
## Gradual types
```py
import inspect
from typing import Any
def _(a: Any, tuple_of_any: tuple[Any]):
reveal_type(inspect.getattr_static(a, "x", "default")) # revealed: Any | Literal["default"]
# TODO: Ideally, this would just be `Literal[index]`
reveal_type(inspect.getattr_static(tuple_of_any, "index", "default")) # revealed: Literal[index] | Literal["default"]
```
[official documentation]: https://docs.python.org/3/library/inspect.html#inspect.getattr_static

View File

@@ -1,380 +0,0 @@
# Methods
## Background: Functions as descriptors
> Note: See also this related section in the descriptor guide: [Functions and methods].
Say we have a simple class `C` with a function definition `f` inside its body:
```py
class C:
def f(self, x: int) -> str:
return "a"
```
Whenever we access the `f` attribute through the class object itself (`C.f`) or through an instance
(`C().f`), this access happens via the descriptor protocol. Functions are (non-data) descriptors
because they implement a `__get__` method. This is crucial in making sure that method calls work as
expected. In general, the signature of the `__get__` method in the descriptor protocol is
`__get__(self, instance, owner)`. The `self` argument is the descriptor object itself (`f`). The
passed value for the `instance` argument depends on whether the attribute is accessed from the class
object (in which case it is `None`), or from an instance (in which case it is the instance of type
`C`). The `owner` argument is the class itself (`C` of type `Literal[C]`). To summarize:
- `C.f` is equivalent to `getattr_static(C, "f").__get__(None, C)`
- `C().f` is equivalent to `getattr_static(C, "f").__get__(C(), C)`
Here, `inspect.getattr_static` is used to bypass the descriptor protocol and directly access the
function attribute. The way the special `__get__` method *on functions* works is as follows. In the
former case, if the `instance` argument is `None`, `__get__` simply returns the function itself. In
the latter case, it returns a *bound method* object:
```py
from inspect import getattr_static
reveal_type(getattr_static(C, "f")) # revealed: Literal[f]
reveal_type(getattr_static(C, "f").__get__) # revealed: <method-wrapper `__get__` of `f`>
reveal_type(getattr_static(C, "f").__get__(None, C)) # revealed: Literal[f]
reveal_type(getattr_static(C, "f").__get__(C(), C)) # revealed: <bound method `f` of `C`>
```
In conclusion, this is why we see the following two types when accessing the `f` attribute on the
class object `C` and on an instance `C()`:
```py
reveal_type(C.f) # revealed: Literal[f]
reveal_type(C().f) # revealed: <bound method `f` of `C`>
```
A bound method is a callable object that contains a reference to the `instance` that it was called
on (can be inspected via `__self__`), and the function object that it refers to (can be inspected
via `__func__`):
```py
bound_method = C().f
reveal_type(bound_method.__self__) # revealed: C
reveal_type(bound_method.__func__) # revealed: Literal[f]
```
When we call the bound method, the `instance` is implicitly passed as the first argument (`self`):
```py
reveal_type(C().f(1)) # revealed: str
reveal_type(bound_method(1)) # revealed: str
```
When we call the function object itself, we need to pass the `instance` explicitly:
```py
C.f(1) # error: [missing-argument]
reveal_type(C.f(C(), 1)) # revealed: str
```
When we access methods from derived classes, they will be bound to instances of the derived class:
```py
class D(C):
pass
reveal_type(D().f) # revealed: <bound method `f` of `D`>
```
If we access an attribute on a bound method object itself, it will defer to `types.MethodType`:
```py
reveal_type(bound_method.__hash__) # revealed: <bound method `__hash__` of `MethodType`>
```
If an attribute is not available on the bound method object, it will be looked up on the underlying
function object. We model this explicitly, which means that we can access `__kwdefaults__` on bound
methods, even though it is not available on `types.MethodType`:
```py
reveal_type(bound_method.__kwdefaults__) # revealed: @Todo(generics) | None
```
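The equivalences described in this section can also be checked directly at runtime; the following is a small assert-based counterpart (hypothetical class, plain CPython semantics).

```py
from inspect import getattr_static

class Greeter:
    def greet(self, name: str) -> str:
        return f"hello {name}"

g = Greeter()
function = getattr_static(Greeter, "greet")

# Access through the class returns the plain function object.
assert Greeter.greet is function
assert function.__get__(None, Greeter) is function

# Access through an instance builds a bound method wrapping the same function.
bound = g.greet
assert bound.__func__ is function
assert bound.__self__ is g
assert bound("world") == Greeter.greet(g, "world") == "hello world"
```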
## Basic method calls on class objects and instances
```py
class Base:
def method_on_base(self, x: int | None) -> str:
return "a"
class Derived(Base):
def method_on_derived(self, x: bytes) -> tuple[int, str]:
return (1, "a")
reveal_type(Base().method_on_base(1)) # revealed: str
reveal_type(Base.method_on_base(Base(), 1)) # revealed: str
Base().method_on_base("incorrect") # error: [invalid-argument-type]
Base().method_on_base() # error: [missing-argument]
Base().method_on_base(1, 2) # error: [too-many-positional-arguments]
reveal_type(Derived().method_on_base(1)) # revealed: str
reveal_type(Derived().method_on_derived(b"abc")) # revealed: tuple[int, str]
reveal_type(Derived.method_on_base(Derived(), 1)) # revealed: str
reveal_type(Derived.method_on_derived(Derived(), b"abc")) # revealed: tuple[int, str]
```
## Method calls on literals
### Boolean literals
```py
reveal_type(True.bit_length()) # revealed: int
reveal_type(True.as_integer_ratio()) # revealed: tuple[int, Literal[1]]
```
### Integer literals
```py
reveal_type((42).bit_length()) # revealed: int
```
### String literals
```py
reveal_type("abcde".find("abc")) # revealed: int
reveal_type("foo".encode(encoding="utf-8")) # revealed: bytes
"abcde".find(123) # error: [invalid-argument-type]
```
### Bytes literals
```py
reveal_type(b"abcde".startswith(b"abc")) # revealed: bool
```
## Method calls on `LiteralString`
```py
from typing_extensions import LiteralString
def f(s: LiteralString) -> None:
reveal_type(s.find("a")) # revealed: int
```
## Method calls on `tuple`
```py
def f(t: tuple[int, str]) -> None:
reveal_type(t.index("a")) # revealed: int
```
## Method calls on unions
```py
from typing import Any
class A:
def f(self) -> int:
return 1
class B:
def f(self) -> str:
return "a"
def f(a_or_b: A | B, any_or_a: Any | A):
reveal_type(a_or_b.f) # revealed: <bound method `f` of `A`> | <bound method `f` of `B`>
reveal_type(a_or_b.f()) # revealed: int | str
reveal_type(any_or_a.f) # revealed: Any | <bound method `f` of `A`>
reveal_type(any_or_a.f()) # revealed: Any | int
```
## Method calls on `KnownInstance` types
```toml
[environment]
python-version = "3.12"
```
```py
type IntOrStr = int | str
reveal_type(IntOrStr.__or__) # revealed: <bound method `__or__` of `typing.TypeAliasType`>
```
## Error cases: Calling `__get__` for methods
The `__get__` method on `types.FunctionType` has the following overloaded signature in typeshed:
```py
from types import FunctionType, MethodType
from typing import overload
@overload
def __get__(self, instance: None, owner: type, /) -> FunctionType: ...
@overload
def __get__(self, instance: object, owner: type | None = None, /) -> MethodType: ...
```
Here, we test that this signature is enforced correctly:
```py
from inspect import getattr_static
class C:
def f(self, x: int) -> str:
return "a"
method_wrapper = getattr_static(C, "f").__get__
reveal_type(method_wrapper) # revealed: <method-wrapper `__get__` of `f`>
# All of these are fine:
method_wrapper(C(), C)
method_wrapper(C())
method_wrapper(C(), None)
method_wrapper(None, C)
# Passing `None` without an `owner` argument is an
# error: [missing-argument] "No argument provided for required parameter `owner`"
method_wrapper(None)
# Passing something that is not assignable to `type` as the `owner` argument is an
# error: [invalid-argument-type] "Object of type `Literal[1]` cannot be assigned to parameter 2 (`owner`) of method wrapper `__get__` of function `f`; expected type `type`"
method_wrapper(None, 1)
# Passing `None` as the `owner` argument when `instance` is `None` is an
# error: [invalid-argument-type] "Object of type `None` cannot be assigned to parameter 2 (`owner`) of method wrapper `__get__` of function `f`; expected type `type`"
method_wrapper(None, None)
# Calling `__get__` without any arguments is an
# error: [missing-argument] "No argument provided for required parameter `instance`"
method_wrapper()
# Calling `__get__` with too many positional arguments is an
# error: [too-many-positional-arguments] "Too many positional arguments to method wrapper `__get__` of function `f`: expected 2, got 3"
method_wrapper(C(), C, "one too many")
```
## `@classmethod`
### Basic
When a `@classmethod` attribute is accessed, it returns a bound method object, even when accessed on
the class object itself:
```py
from __future__ import annotations
class C:
@classmethod
def f(cls: type[C], x: int) -> str:
return "a"
reveal_type(C.f) # revealed: <bound method `f` of `Literal[C]`>
reveal_type(C().f) # revealed: <bound method `f` of `type[C]`>
```
The `cls` method argument is then implicitly passed as the first argument when calling the method:
```py
reveal_type(C.f(1)) # revealed: str
reveal_type(C().f(1)) # revealed: str
```
When the class method is called incorrectly, we detect it:
```py
C.f("incorrect") # error: [invalid-argument-type]
C.f() # error: [missing-argument]
C.f(1, 2) # error: [too-many-positional-arguments]
```
If the `cls` parameter is wrongly annotated, we emit an error at the call site:
```py
class D:
@classmethod
def f(cls: D):
# This function is wrongly annotated, it should be `type[D]` instead of `D`
pass
# error: [invalid-argument-type] "Object of type `Literal[D]` cannot be assigned to parameter 1 (`cls`) of bound method `f`; expected type `D`"
D.f()
```
When a class method is accessed on a derived class, it is bound to that derived class:
```py
class Derived(C):
pass
reveal_type(Derived.f) # revealed: <bound method `f` of `Literal[Derived]`>
reveal_type(Derived().f) # revealed: <bound method `f` of `type[Derived]`>
reveal_type(Derived.f(1)) # revealed: str
reveal_type(Derived().f(1)) # revealed: str
```
### Accessing the classmethod as a static member
Accessing a `@classmethod`-decorated function at runtime returns a `classmethod` object. We
currently don't model this explicitly:
```py
from inspect import getattr_static
class C:
@classmethod
def f(cls): ...
reveal_type(getattr_static(C, "f")) # revealed: Literal[f]
reveal_type(getattr_static(C, "f").__get__) # revealed: <method-wrapper `__get__` of `f`>
```
But we correctly model how the `classmethod` descriptor works:
```py
reveal_type(getattr_static(C, "f").__get__(None, C)) # revealed: <bound method `f` of `Literal[C]`>
reveal_type(getattr_static(C, "f").__get__(C(), C)) # revealed: <bound method `f` of `Literal[C]`>
reveal_type(getattr_static(C, "f").__get__(C())) # revealed: <bound method `f` of `type[C]`>
```
The `owner` argument takes precedence over the `instance` argument:
```py
reveal_type(getattr_static(C, "f").__get__("dummy", C)) # revealed: <bound method `f` of `Literal[C]`>
```
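The binding behaviour exercised above can also be observed at runtime; a small sketch with a hypothetical class.

```py
from inspect import getattr_static

class Factory:
    @classmethod
    def build(cls) -> str:
        return cls.__name__

raw = getattr_static(Factory, "build")  # the classmethod object itself
assert isinstance(raw, classmethod)

# However it is bound, the resulting method's `__self__` is the class.
bound_via_class = raw.__get__(None, Factory)
bound_via_instance = raw.__get__(Factory())
assert bound_via_class.__self__ is Factory
assert bound_via_instance.__self__ is Factory
assert bound_via_class() == bound_via_instance() == "Factory"
```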
### Classmethods mixed with other decorators
When a `@classmethod` is additionally decorated with another decorator, it is still treated as a
class method:
```py
from __future__ import annotations
def does_nothing[T](f: T) -> T:
return f
class C:
@classmethod
@does_nothing
def f1(cls: type[C], x: int) -> str:
return "a"
@does_nothing
@classmethod
def f2(cls: type[C], x: int) -> str:
return "a"
# TODO: We do not support decorators yet (only limited special cases). Eventually,
# these should all return `str`:
reveal_type(C.f1(1)) # revealed: @Todo(return type of decorated function)
reveal_type(C().f1(1)) # revealed: @Todo(decorated method)
reveal_type(C.f2(1)) # revealed: @Todo(return type of decorated function)
reveal_type(C().f2(1)) # revealed: @Todo(decorated method)
```
[functions and methods]: https://docs.python.org/3/howto/descriptor.html#functions-and-methods

View File

@@ -39,8 +39,8 @@ def _(flag: bool):
else:
def f() -> int:
return 1
x = f() # error: [call-non-callable] "Object of type `Literal[1]` is not callable"
reveal_type(x) # revealed: int | Unknown
x = f() # error: "Object of type `Literal[1] | Literal[f]` is not callable (due to union element `Literal[1]`)"
reveal_type(x) # revealed: Unknown | int
```
## Multiple non-callable elements in a union
@@ -56,9 +56,8 @@ def _(flag: bool, flag2: bool):
else:
def f() -> int:
return 1
# TODO we should mention all non-callable elements of the union
# error: [call-non-callable] "Object of type `Literal[1]` is not callable"
# revealed: int | Unknown
# error: "Object of type `Literal[1, "foo"] | Literal[f]` is not callable (due to union elements Literal[1], Literal["foo"])"
# revealed: Unknown | int
reveal_type(f())
```
@@ -73,74 +72,6 @@ def _(flag: bool):
else:
f = "foo"
x = f() # error: [call-non-callable] "Object of type `Literal[1, "foo"]` is not callable"
reveal_type(x) # revealed: Unknown
```
## Mismatching signatures
Calling a union where the arguments don't match the signature of all variants.
```py
def f1(a: int) -> int: ...
def f2(a: str) -> str: ...
def _(flag: bool):
if flag:
f = f1
else:
f = f2
# error: [invalid-argument-type] "Object of type `Literal[3]` cannot be assigned to parameter 1 (`a`) of function `f2`; expected type `str`"
x = f(3)
reveal_type(x) # revealed: int | str
```
## Any non-callable variant
```py
def f1(a: int): ...
def _(flag: bool):
if flag:
f = f1
else:
f = "This is a string literal"
# error: [call-non-callable] "Object of type `Literal["This is a string literal"]` is not callable"
x = f(3)
reveal_type(x) # revealed: Unknown
```
## Union of binding errors
```py
def f1(): ...
def f2(): ...
def _(flag: bool):
if flag:
f = f1
else:
f = f2
# TODO: we should show all errors from the union, not arbitrarily pick one union element
# error: [too-many-positional-arguments] "Too many positional arguments to function `f1`: expected 0, got 1"
x = f(3)
reveal_type(x) # revealed: Unknown
```
## One not-callable, one wrong argument
```py
class C: ...
def f1(): ...
def _(flag: bool):
if flag:
f = f1
else:
f = C()
# TODO: we should either show all union errors here, or prioritize the not-callable error
# error: [too-many-positional-arguments] "Too many positional arguments to function `f1`: expected 0, got 1"
x = f(3)
x = f() # error: "Object of type `Literal[1, "foo"]` is not callable"
reveal_type(x) # revealed: Unknown
```

View File

@@ -21,9 +21,8 @@ class A:
reveal_type("hello" in A()) # revealed: bool
reveal_type("hello" not in A()) # revealed: bool
# error: [unsupported-operator] "Operator `in` is not supported for types `int` and `A`, in comparing `Literal[42]` with `A`"
# TODO: should emit diagnostic, need to check arg type, will fail
reveal_type(42 in A()) # revealed: bool
# error: [unsupported-operator] "Operator `not in` is not supported for types `int` and `A`, in comparing `Literal[42]` with `A`"
reveal_type(42 not in A()) # revealed: bool
```
@@ -127,9 +126,9 @@ class A:
reveal_type(CheckContains() in A()) # revealed: bool
# error: [unsupported-operator] "Operator `in` is not supported for types `CheckIter` and `A`"
# TODO: should emit diagnostic, need to check arg type,
# should not fall back to __iter__ or __getitem__
reveal_type(CheckIter() in A()) # revealed: bool
# error: [unsupported-operator] "Operator `in` is not supported for types `CheckGetItem` and `A`"
reveal_type(CheckGetItem() in A()) # revealed: bool
class B:
@@ -155,50 +154,7 @@ class A:
def __getitem__(self, key: str) -> str:
return "foo"
# error: [unsupported-operator] "Operator `in` is not supported for types `int` and `A`, in comparing `Literal[42]` with `A`"
# TODO should emit a diagnostic
reveal_type(42 in A()) # revealed: bool
# error: [unsupported-operator] "Operator `in` is not supported for types `str` and `A`, in comparing `Literal["hello"]` with `A`"
reveal_type("hello" in A()) # revealed: bool
```
## Return type that doesn't implement `__bool__` correctly
`in` and `not in` operations will fail at runtime if the object on the right-hand side of the
operation has a `__contains__` method that returns a type which is not convertible to `bool`. This
is because of the way these operations are handled by the Python interpreter at runtime. If we
assume that `y` is an object that has a `__contains__` method, the Python expression `x in y`
desugars to a `contains(y, x)` call, where `contains` looks something like this:
```ignore
def contains(y, x):
return bool(type(y).__contains__(y, x))
```
where the `bool()` conversion itself implicitly calls `__bool__` under the hood.
TODO: Ideally the message would explain to the user what's wrong. E.g,
```ignore
error: [operator] cannot use `in` operator on object of type `WithContains`
note: This is because the `in` operator implicitly calls `WithContains.__contains__`, but `WithContains.__contains__` is invalidly defined
note: `WithContains.__contains__` is invalidly defined because it returns an instance of `NotBoolable`, which cannot be evaluated in a boolean context
note: `NotBoolable` cannot be evaluated in a boolean context because its `__bool__` attribute is not callable
```
It may also be more appropriate to use `unsupported-operator` as the error code.
<!-- snapshot-diagnostics -->
```py
class NotBoolable:
__bool__ = 3
class WithContains:
def __contains__(self, item) -> NotBoolable:
return NotBoolable()
# error: [unsupported-bool-conversion]
10 in WithContains()
# error: [unsupported-bool-conversion]
10 not in WithContains()
```
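A runnable demonstration of the failure mode being diagnosed (hypothetical classes; running this is expected to raise `TypeError`, because the implicit `bool()` conversion cannot call `__bool__`).

```py
class NoBool:
    __bool__ = None  # not callable, so bool() on instances fails

class Container:
    def __contains__(self, item) -> "NoBool":
        return NoBool()

try:
    1 in Container()  # desugars to bool(Container.__contains__(container, 1))
except TypeError as exc:
    print(f"TypeError: {exc}")
```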

View File

@@ -16,38 +16,31 @@ most common case involves implementing these methods for the same type:
```py
from __future__ import annotations
class EqReturnType: ...
class NeReturnType: ...
class LtReturnType: ...
class LeReturnType: ...
class GtReturnType: ...
class GeReturnType: ...
class A:
def __eq__(self, other: A) -> EqReturnType:
return EqReturnType()
def __eq__(self, other: A) -> int:
return 42
def __ne__(self, other: A) -> NeReturnType:
return NeReturnType()
def __ne__(self, other: A) -> float:
return 42.0
def __lt__(self, other: A) -> LtReturnType:
return LtReturnType()
def __lt__(self, other: A) -> str:
return "42"
def __le__(self, other: A) -> LeReturnType:
return LeReturnType()
def __le__(self, other: A) -> bytes:
return b"42"
def __gt__(self, other: A) -> GtReturnType:
return GtReturnType()
def __gt__(self, other: A) -> list:
return [42]
def __ge__(self, other: A) -> GeReturnType:
return GeReturnType()
def __ge__(self, other: A) -> set:
return {42}
reveal_type(A() == A()) # revealed: EqReturnType
reveal_type(A() != A()) # revealed: NeReturnType
reveal_type(A() < A()) # revealed: LtReturnType
reveal_type(A() <= A()) # revealed: LeReturnType
reveal_type(A() > A()) # revealed: GtReturnType
reveal_type(A() >= A()) # revealed: GeReturnType
reveal_type(A() == A()) # revealed: int
reveal_type(A() != A()) # revealed: float
reveal_type(A() < A()) # revealed: str
reveal_type(A() <= A()) # revealed: bytes
reveal_type(A() > A()) # revealed: list
reveal_type(A() >= A()) # revealed: set
```
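A minimal runtime counterpart of the case above (hypothetical classes): the comparison resolves to the left operand's dunder, which is why the revealed types follow the return annotations.

```py
class LtResult: ...

class Point:
    def __lt__(self, other: "Point") -> LtResult:
        return LtResult()

# `Point() < Point()` calls `type(lhs).__lt__(lhs, rhs)`.
assert isinstance(Point() < Point(), LtResult)
```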
## Rich Comparison Dunder Implementations for Other Class
@@ -58,40 +51,33 @@ type:
```py
from __future__ import annotations
class EqReturnType: ...
class NeReturnType: ...
class LtReturnType: ...
class LeReturnType: ...
class GtReturnType: ...
class GeReturnType: ...
class A:
def __eq__(self, other: B) -> EqReturnType:
return EqReturnType()
def __eq__(self, other: B) -> int:
return 42
def __ne__(self, other: B) -> NeReturnType:
return NeReturnType()
def __ne__(self, other: B) -> float:
return 42.0
def __lt__(self, other: B) -> LtReturnType:
return LtReturnType()
def __lt__(self, other: B) -> str:
return "42"
def __le__(self, other: B) -> LeReturnType:
return LeReturnType()
def __le__(self, other: B) -> bytes:
return b"42"
def __gt__(self, other: B) -> GtReturnType:
return GtReturnType()
def __gt__(self, other: B) -> list:
return [42]
def __ge__(self, other: B) -> GeReturnType:
return GeReturnType()
def __ge__(self, other: B) -> set:
return {42}
class B: ...
reveal_type(A() == B()) # revealed: EqReturnType
reveal_type(A() != B()) # revealed: NeReturnType
reveal_type(A() < B()) # revealed: LtReturnType
reveal_type(A() <= B()) # revealed: LeReturnType
reveal_type(A() > B()) # revealed: GtReturnType
reveal_type(A() >= B()) # revealed: GeReturnType
reveal_type(A() == B()) # revealed: int
reveal_type(A() != B()) # revealed: float
reveal_type(A() < B()) # revealed: str
reveal_type(A() <= B()) # revealed: bytes
reveal_type(A() > B()) # revealed: list
reveal_type(A() >= B()) # revealed: set
```
## Reflected Comparisons
@@ -103,64 +89,58 @@ these methods will be ignored here because they require a mismatched operand typ
```py
from __future__ import annotations
class EqReturnType: ...
class NeReturnType: ...
class LtReturnType: ...
class LeReturnType: ...
class GtReturnType: ...
class GeReturnType: ...
class A:
def __eq__(self, other: B) -> EqReturnType:
return EqReturnType()
def __eq__(self, other: B) -> int:
return 42
def __ne__(self, other: B) -> NeReturnType:
return NeReturnType()
def __ne__(self, other: B) -> float:
return 42.0
def __lt__(self, other: B) -> LtReturnType:
return LtReturnType()
def __lt__(self, other: B) -> str:
return "42"
def __le__(self, other: B) -> LeReturnType:
return LeReturnType()
def __le__(self, other: B) -> bytes:
return b"42"
def __gt__(self, other: B) -> GtReturnType:
return GtReturnType()
def __gt__(self, other: B) -> list:
return [42]
def __ge__(self, other: B) -> GeReturnType:
return GeReturnType()
class Unrelated: ...
def __ge__(self, other: B) -> set:
return {42}
class B:
# To override builtins.object.__eq__ and builtins.object.__ne__
# TODO these should emit an invalid override diagnostic
def __eq__(self, other: Unrelated) -> B:
def __eq__(self, other: str) -> B:
return B()
def __ne__(self, other: Unrelated) -> B:
def __ne__(self, other: str) -> B:
return B()
# TODO: should be `int` and `float`.
# Need to check arg type and fall back to `rhs.__eq__` and `rhs.__ne__`.
#
# Because `object.__eq__` and `object.__ne__` accept `object` in typeshed,
# this can only happen with an invalid override of these methods,
# but we still support it.
reveal_type(B() == A()) # revealed: EqReturnType
reveal_type(B() != A()) # revealed: NeReturnType
reveal_type(B() == A()) # revealed: B
reveal_type(B() != A()) # revealed: B
reveal_type(B() < A()) # revealed: GtReturnType
reveal_type(B() <= A()) # revealed: GeReturnType
reveal_type(B() < A()) # revealed: list
reveal_type(B() <= A()) # revealed: set
reveal_type(B() > A()) # revealed: LtReturnType
reveal_type(B() >= A()) # revealed: LeReturnType
reveal_type(B() > A()) # revealed: str
reveal_type(B() >= A()) # revealed: bytes
class C:
def __gt__(self, other: C) -> EqReturnType:
def __gt__(self, other: C) -> int:
return 42
def __ge__(self, other: C) -> NeReturnType:
return NeReturnType()
def __ge__(self, other: C) -> float:
return 42.0
reveal_type(C() < C()) # revealed: EqReturnType
reveal_type(C() <= C()) # revealed: NeReturnType
reveal_type(C() < C()) # revealed: int
reveal_type(C() <= C()) # revealed: float
```
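For context, these tests model Python's rich-comparison dispatch, which prefers the reflected method of the right-hand operand when that operand's type is a proper subclass that overrides it. A rough sketch of how `a < b` is resolved (the helper name `less_than` is illustrative, and CPython's real `do_richcompare` performs a stricter override check):
```py
def less_than(a, b):
    lhs, rhs = type(a), type(b)
    # Approximation of "rhs overrides the reflected method": it appears in
    # rhs's own class dict.
    reflected_first = issubclass(rhs, lhs) and rhs is not lhs and "__gt__" in vars(rhs)
    if reflected_first:
        result = rhs.__gt__(b, a)          # try the reflected method first
        if result is not NotImplemented:
            return result
    result = lhs.__lt__(a, b)              # then the left-hand method
    if result is not NotImplemented:
        return result
    if not reflected_first:
        result = rhs.__gt__(b, a)          # finally the reflected method
        if result is not NotImplemented:
            return result
    raise TypeError(f"'<' not supported between instances of {lhs.__name__!r} and {rhs.__name__!r}")
```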
## Reflected Comparisons with Subclasses
@@ -172,13 +152,6 @@ than `A`.
```py
from __future__ import annotations
class EqReturnType: ...
class NeReturnType: ...
class LtReturnType: ...
class LeReturnType: ...
class GtReturnType: ...
class GeReturnType: ...
class A:
def __eq__(self, other: A) -> A:
return A()
@@ -199,32 +172,32 @@ class A:
return A()
class B(A):
def __eq__(self, other: A) -> EqReturnType:
return EqReturnType()
def __eq__(self, other: A) -> int:
return 42
def __ne__(self, other: A) -> NeReturnType:
return NeReturnType()
def __ne__(self, other: A) -> float:
return 42.0
def __lt__(self, other: A) -> LtReturnType:
return LtReturnType()
def __lt__(self, other: A) -> str:
return "42"
def __le__(self, other: A) -> LeReturnType:
return LeReturnType()
def __le__(self, other: A) -> bytes:
return b"42"
def __gt__(self, other: A) -> GtReturnType:
return GtReturnType()
def __gt__(self, other: A) -> list:
return [42]
def __ge__(self, other: A) -> GeReturnType:
return GeReturnType()
def __ge__(self, other: A) -> set:
return {42}
reveal_type(A() == B()) # revealed: EqReturnType
reveal_type(A() != B()) # revealed: NeReturnType
reveal_type(A() == B()) # revealed: int
reveal_type(A() != B()) # revealed: float
reveal_type(A() < B()) # revealed: GtReturnType
reveal_type(A() <= B()) # revealed: GeReturnType
reveal_type(A() < B()) # revealed: list
reveal_type(A() <= B()) # revealed: set
reveal_type(A() > B()) # revealed: LtReturnType
reveal_type(A() >= B()) # revealed: LeReturnType
reveal_type(A() > B()) # revealed: str
reveal_type(A() >= B()) # revealed: bytes
```
## Reflected Comparisons with Subclass But Falls Back to LHS
@@ -249,8 +222,9 @@ class B(A):
def __gt__(self, other: int) -> B:
return B()
reveal_type(A() < B()) # revealed: A
reveal_type(A() > B()) # revealed: A
# TODO: should be `A`, need to check argument type and fall back to LHS method
reveal_type(A() < B()) # revealed: B
reveal_type(A() > B()) # revealed: B
```
## Operations involving instances of classes inheriting from `Any`
@@ -298,8 +272,9 @@ class A:
def __ne__(self, other: int) -> A:
return A()
reveal_type(A() == A()) # revealed: bool
reveal_type(A() != A()) # revealed: bool
# TODO: it should be `bool`, need to check arg type and fall back to `is` and `is not`
reveal_type(A() == A()) # revealed: A
reveal_type(A() != A()) # revealed: A
```
## Object Comparisons with Typeshed
@@ -330,14 +305,12 @@ reveal_type(1 >= 1.0) # revealed: bool
reveal_type(1 == 2j) # revealed: bool
reveal_type(1 != 2j) # revealed: bool
# error: [unsupported-operator] "Operator `<` is not supported for types `int` and `complex`, in comparing `Literal[1]` with `complex`"
reveal_type(1 < 2j) # revealed: Unknown
# error: [unsupported-operator] "Operator `<=` is not supported for types `int` and `complex`, in comparing `Literal[1]` with `complex`"
reveal_type(1 <= 2j) # revealed: Unknown
# error: [unsupported-operator] "Operator `>` is not supported for types `int` and `complex`, in comparing `Literal[1]` with `complex`"
reveal_type(1 > 2j) # revealed: Unknown
# error: [unsupported-operator] "Operator `>=` is not supported for types `int` and `complex`, in comparing `Literal[1]` with `complex`"
reveal_type(1 >= 2j) # revealed: Unknown
# TODO: should be Unknown and emit diagnostic,
# need to check arg type and should be failed
reveal_type(1 < 2j) # revealed: bool
reveal_type(1 <= 2j) # revealed: bool
reveal_type(1 > 2j) # revealed: bool
reveal_type(1 >= 2j) # revealed: bool
def f(x: bool, y: int):
reveal_type(x < y) # revealed: bool
@@ -345,47 +318,3 @@ def f(x: bool, y: int):
reveal_type(4.2 < x) # revealed: bool
reveal_type(x < 4.2) # revealed: bool
```
## Chained comparisons with objects that don't implement `__bool__` correctly
<!-- snapshot-diagnostics -->
Python implicitly calls `bool` on the comparison result of preceding elements (but not for the last
element) of a chained comparison.
```py
class NotBoolable:
__bool__ = 3
class Comparable:
def __lt__(self, item) -> NotBoolable:
return NotBoolable()
def __gt__(self, item) -> NotBoolable:
return NotBoolable()
# error: [unsupported-bool-conversion]
10 < Comparable() < 20
# error: [unsupported-bool-conversion]
10 < Comparable() < Comparable()
Comparable() < Comparable() # fine
```
## Callables as comparison dunders
```py
from typing import Literal
class AlwaysTrue:
def __call__(self, other: object) -> Literal[True]:
return True
class A:
__eq__: AlwaysTrue = AlwaysTrue()
__lt__: AlwaysTrue = AlwaysTrue()
reveal_type(A() == A()) # revealed: Literal[True]
reveal_type(A() < A()) # revealed: Literal[True]
reveal_type(A() > A()) # revealed: Literal[True]
```

View File

@@ -12,8 +12,8 @@ reveal_type(1 is 1) # revealed: bool
reveal_type(1 is not 1) # revealed: bool
reveal_type(1 is 2) # revealed: Literal[False]
reveal_type(1 is not 7) # revealed: Literal[True]
# error: [unsupported-operator] "Operator `<=` is not supported for types `int` and `str`, in comparing `Literal[1]` with `Literal[""]`"
reveal_type(1 <= "" and 0 < 1) # revealed: Unknown & ~AlwaysTruthy | Literal[True]
# TODO: should be Unknown, and emit diagnostic, once we check call argument types
reveal_type(1 <= "" and 0 < 1) # revealed: bool
```
## Integer instance

View File

@@ -6,11 +6,7 @@ If we have an intersection type `A & B` and we get a definitive true/false answe
types, we can infer that the result for the intersection type is also true/false:
```py
from typing import Literal
class Base:
def __gt__(self, other) -> bool:
return False
class Base: ...
class Child1(Base):
def __eq__(self, other) -> Literal[True]:

View File

@@ -23,7 +23,6 @@ from __future__ import annotations
class A:
def __lt__(self, other) -> A: ...
def __gt__(self, other) -> bool: ...
class B:
def __lt__(self, other) -> B: ...

View File

@@ -33,7 +33,7 @@ reveal_type(a >= b) # revealed: Literal[False]
Even when tuples have different lengths, comparisons should be handled appropriately.
```py
```py path=different_length.py
a = (1, 2, 3)
b = (1, 2, 3, 4)
@@ -92,20 +92,17 @@ reveal_type(a == b) # revealed: bool
# TODO: should be Literal[True], once we implement (in)equality for mismatched literals
reveal_type(a != b) # revealed: bool
# error: [unsupported-operator] "Operator `<` is not supported for types `int` and `str`, in comparing `tuple[Literal[1], Literal[2]]` with `tuple[Literal[1], Literal["hello"]]`"
reveal_type(a < b) # revealed: Unknown
# error: [unsupported-operator] "Operator `<=` is not supported for types `int` and `str`, in comparing `tuple[Literal[1], Literal[2]]` with `tuple[Literal[1], Literal["hello"]]`"
reveal_type(a <= b) # revealed: Unknown
# error: [unsupported-operator] "Operator `>` is not supported for types `int` and `str`, in comparing `tuple[Literal[1], Literal[2]]` with `tuple[Literal[1], Literal["hello"]]`"
reveal_type(a > b) # revealed: Unknown
# error: [unsupported-operator] "Operator `>=` is not supported for types `int` and `str`, in comparing `tuple[Literal[1], Literal[2]]` with `tuple[Literal[1], Literal["hello"]]`"
reveal_type(a >= b) # revealed: Unknown
# TODO: should be Unknown and add more informative diagnostics
reveal_type(a < b) # revealed: bool
reveal_type(a <= b) # revealed: bool
reveal_type(a > b) # revealed: bool
reveal_type(a >= b) # revealed: bool
```
However, if the lexicographic comparison completes without reaching a point where str and int are
compared, Python will still produce a result based on the prior elements.
```py
```py path=short_circuit.py
a = (1, 2)
b = (999999, "hello")
@@ -147,40 +144,33 @@ of the dunder methods.)
```py
from __future__ import annotations
class EqReturnType: ...
class NeReturnType: ...
class LtReturnType: ...
class LeReturnType: ...
class GtReturnType: ...
class GeReturnType: ...
class A:
def __eq__(self, o: object) -> EqReturnType:
return EqReturnType()
def __eq__(self, o: object) -> str:
return "hello"
def __ne__(self, o: object) -> NeReturnType:
return NeReturnType()
def __ne__(self, o: object) -> bytes:
return b"world"
def __lt__(self, o: A) -> LtReturnType:
return LtReturnType()
def __lt__(self, o: A) -> float:
return 3.14
def __le__(self, o: A) -> LeReturnType:
return LeReturnType()
def __le__(self, o: A) -> complex:
return complex(0.5, -0.5)
def __gt__(self, o: A) -> GtReturnType:
return GtReturnType()
def __gt__(self, o: A) -> tuple:
return (1, 2, 3)
def __ge__(self, o: A) -> GeReturnType:
return GeReturnType()
def __ge__(self, o: A) -> list:
return [1, 2, 3]
a = (A(), A())
reveal_type(a == a) # revealed: bool
reveal_type(a != a) # revealed: bool
reveal_type(a < a) # revealed: LtReturnType | Literal[False]
reveal_type(a <= a) # revealed: LeReturnType | Literal[True]
reveal_type(a > a) # revealed: GtReturnType | Literal[False]
reveal_type(a >= a) # revealed: GeReturnType | Literal[True]
reveal_type(a < a) # revealed: float | Literal[False]
reveal_type(a <= a) # revealed: complex | Literal[True]
reveal_type(a > a) # revealed: tuple | Literal[False]
reveal_type(a >= a) # revealed: list | Literal[True]
# If lexicographic comparison is finished before comparing A()
b = ("1_foo", A())
@@ -193,13 +183,11 @@ reveal_type(b <= c) # revealed: Literal[True]
reveal_type(b > c) # revealed: Literal[False]
reveal_type(b >= c) # revealed: Literal[False]
class LtReturnTypeOnB: ...
class B:
def __lt__(self, o: B) -> LtReturnTypeOnB:
def __lt__(self, o: B) -> set:
return set()
reveal_type((A(), B()) < (A(), B())) # revealed: LtReturnType | LtReturnTypeOnB | Literal[False]
reveal_type((A(), B()) < (A(), B())) # revealed: float | set | Literal[False]
```
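The unions revealed above (for example `float | Literal[False]`) follow from the lexicographic algorithm for tuple comparisons: elements are compared pairwise with `==`, and the first unequal pair decides the result via the requested operator. A rough sketch, with an illustrative helper name:
```py
def tuple_less_than(a: tuple, b: tuple):
    for x, y in zip(a, b):
        if x == y:            # the `==` result is implicitly converted to bool here
            continue
        return x < y          # first unequal pair decides; the dunder's result is returned as-is
    return len(a) < len(b)    # every compared pair was equal: the shorter tuple is smaller
```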
#### Special Handling of Eq and NotEq in Lexicographic Comparisons
@@ -334,61 +322,3 @@ reveal_type(a is not c) # revealed: Literal[True]
For tuples like `tuple[int, ...]`, `tuple[Any, ...]`
// TODO
## Chained comparisons with elements that incorrectly implement `__bool__`
<!-- snapshot-diagnostics -->
For an operation `A() < A()` to succeed at runtime, the `A.__lt__` method does not necessarily need
to return an object that is convertible to a `bool`. However, the return type _does_ need to be
convertible to a `bool` for the operation `A() < A() < A()` (a _chained_ comparison) to succeed.
This is because `A() < A() < A()` desugars to something like this, which involves several implicit
conversions to `bool`:
```ignore
def compute_chained_comparison():
a1 = A()
a2 = A()
first_comparison = a1 < a2
return first_comparison and (a2 < A())
```
```py
class NotBoolable:
__bool__ = 5
class Comparable:
def __lt__(self, other) -> NotBoolable:
return NotBoolable()
def __gt__(self, other) -> NotBoolable:
return NotBoolable()
a = (1, Comparable())
b = (1, Comparable())
# error: [unsupported-bool-conversion]
a < b < b
a < b # fine
```
## Equality with elements that incorrectly implement `__bool__`
<!-- snapshot-diagnostics -->
Python does not generally attempt to coerce the result of `==` and `!=` operations between two
arbitrary objects to a `bool`, but a comparison of tuples will fail if the result of comparing any
pair of elements at equivalent positions cannot be converted to a `bool`:
```py
class A:
def __eq__(self, other) -> NotBoolable:
return NotBoolable()
class NotBoolable:
__bool__ = None
# error: [unsupported-bool-conversion]
(A(),) == (A(),)
```

View File

@@ -9,22 +9,28 @@ def _(flag: bool, flag1: bool, flag2: bool):
b = 0 not in 10 # error: "Operator `not in` is not supported for types `Literal[0]` and `Literal[10]`"
reveal_type(b) # revealed: bool
# error: [unsupported-operator] "Operator `<` is not supported for types `object` and `int`, in comparing `object` with `Literal[5]`"
# TODO: should error, once operand type check is implemented
# ("Operator `<` is not supported for types `object` and `int`")
c = object() < 5
reveal_type(c) # revealed: Unknown
# TODO: should be Unknown, once operand type check is implemented
reveal_type(c) # revealed: bool
# error: [unsupported-operator] "Operator `<` is not supported for types `int` and `object`, in comparing `Literal[5]` with `object`"
# TODO: should error, once operand type check is implemented
# ("Operator `<` is not supported for types `int` and `object`")
d = 5 < object()
reveal_type(d) # revealed: Unknown
# TODO: should be Unknown, once operand type check is implemented
reveal_type(d) # revealed: bool
int_literal_or_str_literal = 1 if flag else "foo"
# error: "Operator `in` is not supported for types `Literal[42]` and `Literal[1]`, in comparing `Literal[42]` with `Literal[1, "foo"]`"
e = 42 in int_literal_or_str_literal
reveal_type(e) # revealed: bool
# error: [unsupported-operator] "Operator `<` is not supported for types `int` and `str`, in comparing `tuple[Literal[1], Literal[2]]` with `tuple[Literal[1], Literal["hello"]]`"
# TODO: should error, need to check if __lt__ signature is valid for right operand
# error may be "Operator `<` is not supported for types `int` and `str`, in comparing `tuple[Literal[1], Literal[2]]` with `tuple[Literal[1], Literal["hello"]]`
f = (1, 2) < (1, "hello")
reveal_type(f) # revealed: Unknown
# TODO: should be Unknown, once operand type check is implemented
reveal_type(f) # revealed: bool
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`, in comparing `tuple[bool, A]` with `tuple[bool, A]`"
g = (flag1, A()) < (flag2, A())

View File

@@ -1,149 +0,0 @@
# Comprehensions
## Basic comprehensions
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# revealed: int
[reveal_type(x) for x in IntIterable()]
class IteratorOfIterables:
def __next__(self) -> IntIterable:
return IntIterable()
class IterableOfIterables:
def __iter__(self) -> IteratorOfIterables:
return IteratorOfIterables()
# revealed: tuple[int, IntIterable]
[reveal_type((x, y)) for y in IterableOfIterables() for x in y]
# revealed: int
{reveal_type(x): 0 for x in IntIterable()}
# revealed: int
{0: reveal_type(x) for x in IntIterable()}
```
## Nested comprehension
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# revealed: tuple[int, int]
[[reveal_type((x, y)) for x in IntIterable()] for y in IntIterable()]
```
## Comprehension referencing outer comprehension
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
class IteratorOfIterables:
def __next__(self) -> IntIterable:
return IntIterable()
class IterableOfIterables:
def __iter__(self) -> IteratorOfIterables:
return IteratorOfIterables()
# revealed: tuple[int, IntIterable]
[[reveal_type((x, y)) for x in y] for y in IterableOfIterables()]
```
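For intuition, the nested comprehension above evaluates roughly like the following desugaring (plain lists stand in for the iterable classes defined above; this is only a sketch of the evaluation order, not how the type checker models it):
```py
outer = [[1, 2], [3, 4]]
result = []
for y in outer:              # `y` is bound by the outer `for` clause
    inner = []
    for x in y:              # the inner comprehension iterates over `y`
        inner.append((x, y))
    result.append(inner)
# result == [[(1, [1, 2]), (2, [1, 2])], [(3, [3, 4]), (4, [3, 4])]]
```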
## Comprehension with unbound iterable
Iterating over an unbound iterable yields `Unknown`:
```py
# error: [unresolved-reference] "Name `x` used when not defined"
# revealed: Unknown
[reveal_type(z) for z in x]
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# error: [not-iterable] "Object of type `int` is not iterable"
# revealed: tuple[int, Unknown]
[reveal_type((x, z)) for x in IntIterable() for z in x]
```
## Starred expressions
Starred expressions must be iterable:
```py
class NotIterable: ...
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator: ...
# This is fine:
x = [*Iterable()]
# error: [not-iterable] "Object of type `NotIterable` is not iterable"
y = [*NotIterable()]
```
## Async comprehensions
### Basic
```py
class AsyncIterator:
async def __anext__(self) -> int:
return 42
class AsyncIterable:
def __aiter__(self) -> AsyncIterator:
return AsyncIterator()
# revealed: @Todo(async iterables/iterators)
[reveal_type(x) async for x in AsyncIterable()]
```
### Invalid async comprehension
This tests that we understand that `async` comprehensions do *not* work according to the synchronous
iteration protocol.
```py
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator:
return Iterator()
# revealed: @Todo(async iterables/iterators)
[reveal_type(x) async for x in Iterable()]
```

View File

@@ -1,43 +0,0 @@
# Comprehensions with invalid syntax
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# Missing 'in' keyword.
# It's reasonably clear here what they *meant* to write,
# so we'll still infer the correct type:
# error: [invalid-syntax] "Expected 'in', found name"
# revealed: int
[reveal_type(a) for a IntIterable()]
# Missing iteration variable
# error: [invalid-syntax] "Expected an identifier, but found a keyword 'in' that cannot be used here"
# error: [invalid-syntax] "Expected 'in', found name"
# error: [unresolved-reference]
# revealed: Unknown
[reveal_type(b) for in IntIterable()]
# Missing iterable
# error: [invalid-syntax] "Expected an expression"
# revealed: Unknown
[reveal_type(c) for c in]
# Missing 'in' keyword and missing iterable
# error: [invalid-syntax] "Expected 'in', found ']'"
# revealed: Unknown
[reveal_type(d) for d]
```

View File

@@ -35,13 +35,3 @@ def _(flag: bool):
x = 1 if flag else None
reveal_type(x) # revealed: Literal[1] | None
```
## Condition with object that implements `__bool__` incorrectly
```py
class NotBoolable:
__bool__ = 3
# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`; its `__bool__` method isn't callable"
3 if NotBoolable() else 4
```

View File

@@ -147,17 +147,3 @@ def _(flag: bool):
reveal_type(y) # revealed: Literal[0, 1]
```
## Condition with object that implements `__bool__` incorrectly
```py
class NotBoolable:
__bool__ = 3
# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`; its `__bool__` method isn't callable"
if NotBoolable():
...
# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`; its `__bool__` method isn't callable"
elif NotBoolable():
...
```

View File

@@ -43,21 +43,3 @@ def _(target: int):
reveal_type(y) # revealed: Literal[2, 3, 4]
```
## Guard with object that implements `__bool__` incorrectly
```py
class NotBoolable:
__bool__ = 3
def _(target: int, flag: NotBoolable):
y = 1
match target:
# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`; its `__bool__` method isn't callable"
case 1 if flag:
y = 2
case 2:
y = 3
reveal_type(y) # revealed: Literal[1, 2, 3]
```

View File

@@ -1,441 +0,0 @@
# Descriptor protocol
[Descriptors] let objects customize attribute lookup, storage, and deletion.
A descriptor is an attribute value that has one of the methods in the descriptor protocol. Those
methods are `__get__()`, `__set__()`, and `__delete__()`. If any of those methods are defined for an
attribute, it is said to be a descriptor.
## Basic example
An introductory example, modeled after a [simple example] in the primer on descriptors, involving a
descriptor that returns a constant value:
```py
from typing import Literal
class Ten:
def __get__(self, instance: object, owner: type | None = None) -> Literal[10]:
return 10
def __set__(self, instance: object, value: Literal[10]) -> None:
pass
class C:
ten: Ten = Ten()
c = C()
reveal_type(c.ten) # revealed: Literal[10]
reveal_type(C.ten) # revealed: Literal[10]
# These are fine:
# TODO: This should not be an error
c.ten = 10 # error: [invalid-assignment]
C.ten = 10
# TODO: This should be an error (as the wrong type is being implicitly passed to `Ten.__set__`),
# but the error message is misleading.
# error: [invalid-assignment] "Object of type `Literal[11]` is not assignable to attribute `ten` of type `Ten`"
c.ten = 11
# TODO: same as above
# error: [invalid-assignment] "Object of type `Literal[11]` is not assignable to attribute `ten` of type `Literal[10]`"
C.ten = 11
```
## Different types for `__get__` and `__set__`
The return type of `__get__` and the value type of `__set__` can be different:
```py
class FlexibleInt:
def __init__(self):
self._value: int | None = None
def __get__(self, instance: object, owner: type | None = None) -> int | None:
return self._value
def __set__(self, instance: object, value: int | str) -> None:
self._value = int(value)
class C:
flexible_int: FlexibleInt = FlexibleInt()
c = C()
reveal_type(c.flexible_int) # revealed: int | None
# TODO: These should not be errors
# error: [invalid-assignment]
c.flexible_int = 42 # okay
# error: [invalid-assignment]
c.flexible_int = "42" # also okay!
reveal_type(c.flexible_int) # revealed: int | None
# TODO: This should be an error, but the message needs to be improved.
# error: [invalid-assignment] "Object of type `None` is not assignable to attribute `flexible_int` of type `FlexibleInt`"
c.flexible_int = None # not okay
reveal_type(c.flexible_int) # revealed: int | None
```
## Data and non-data descriptors
Descriptors that define `__set__` or `__delete__` are called *data descriptors*. An example of a
data descriptor is a `property` with a setter and/or a deleter. Descriptors that only define
`__get__`, meanwhile, are called *non-data descriptors*. Examples include functions, `classmethod`
and `staticmethod`.
The precedence chain for attribute access is (1) data descriptors, (2) instance attributes, and (3)
non-data descriptors.
```py
from typing import Literal
class DataDescriptor:
def __get__(self, instance: object, owner: type | None = None) -> Literal["data"]:
return "data"
def __set__(self, instance: int, value) -> None:
pass
class NonDataDescriptor:
def __get__(self, instance: object, owner: type | None = None) -> Literal["non-data"]:
return "non-data"
class C:
data_descriptor = DataDescriptor()
non_data_descriptor = NonDataDescriptor()
def f(self):
# This explains why data descriptors come first in the precedence chain. If
# instance attributes would take priority, we would override the descriptor
# here. Instead, this calls `DataDescriptor.__set__`, i.e. it does not affect
# the type of the `data_descriptor` attribute.
self.data_descriptor = 1
# However, for non-data descriptors, instance attributes do take precedence.
# So it is possible to override them.
self.non_data_descriptor = 1
c = C()
# TODO: This should ideally be `Unknown | Literal["data"]`.
#
# - Pyright also wrongly shows `int | Literal['data']` here
# - Mypy shows Literal["data"] here, but also shows Literal["non-data"] below.
#
reveal_type(c.data_descriptor) # revealed: Unknown | Literal["data", 1]
reveal_type(c.non_data_descriptor) # revealed: Unknown | Literal["non-data", 1]
reveal_type(C.data_descriptor) # revealed: Unknown | Literal["data"]
reveal_type(C.non_data_descriptor) # revealed: Unknown | Literal["non-data"]
# It is possible to override data descriptors via class objects. The following
# assignment does not call `DataDescriptor.__set__`. For this reason, we infer
# `Unknown | …` for all (descriptor) attributes.
C.data_descriptor = "something else" # This is okay
```
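A rough pure-Python model of this precedence chain, in the spirit of the CPython descriptor HOWTO (the helper name `lookup` is illustrative, and the real `object.__getattribute__` handles more edge cases such as `__getattr__` fallbacks and `__slots__`):
```py
def lookup(obj, name):
    missing = object()
    cls_value = missing
    for base in type(obj).__mro__:            # find the attribute on the class (or a base)
        if name in vars(base):
            cls_value = vars(base)[name]
            break
    descr_type = type(cls_value)
    has_get = hasattr(descr_type, "__get__")
    is_data = hasattr(descr_type, "__set__") or hasattr(descr_type, "__delete__")
    if cls_value is not missing and has_get and is_data:
        return descr_type.__get__(cls_value, obj, type(obj))   # (1) data descriptor
    if name in getattr(obj, "__dict__", {}):
        return vars(obj)[name]                                 # (2) instance attribute
    if cls_value is not missing and has_get:
        return descr_type.__get__(cls_value, obj, type(obj))   # (3) non-data descriptor
    if cls_value is not missing:
        return cls_value                                       # plain class attribute
    raise AttributeError(name)
```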
## Built-in `property` descriptor
The built-in `property` decorator creates a descriptor. The types for attribute reads and writes are
determined by the return type of the getter and the parameter type of the setter, respectively.
```py
class C:
_name: str | None = None
@property
def name(self) -> str:
return self._name or "Unset"
# TODO: No diagnostic should be emitted here
# error: [unresolved-attribute] "Type `Literal[name]` has no attribute `setter`"
@name.setter
def name(self, value: str | None) -> None:
self._value = value
c = C()
reveal_type(c._name) # revealed: str | None
# Should be `str`
reveal_type(c.name) # revealed: @Todo(decorated method)
# Should be `builtins.property`
reveal_type(C.name) # revealed: Literal[name]
# This is fine:
c.name = "new"
c.name = None
# TODO: this should be an error
c.name = 42
```
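For intuition, a stripped-down pure-Python model of `property` looks roughly like this (named `SimpleProperty` to make clear it is not the builtin; deleters and docstrings are omitted). Reads go through `__get__`, so their type is the getter's return type; writes go through `__set__`, so their type is the setter's value parameter:
```py
class SimpleProperty:
    def __init__(self, fget=None, fset=None):
        self.fget = fget
        self.fset = fset

    def __get__(self, instance, owner=None):
        if instance is None:
            return self                 # accessed on the class: return the descriptor itself
        return self.fget(instance)      # read type = getter's return type

    def __set__(self, instance, value):
        if self.fset is None:
            raise AttributeError("can't set attribute")
        self.fset(instance, value)      # write type = setter's value parameter type

    def setter(self, fset):
        return type(self)(self.fget, fset)
```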
## Built-in `classmethod` descriptor
Similarly to `property`, the `classmethod` decorator creates an implicit descriptor that binds the first
argument to the class instead of the instance.
```py
class C:
def __init__(self, value: str) -> None:
self._name: str = value
@classmethod
def factory(cls, value: str) -> "C":
return cls(value)
@classmethod
def get_name(cls) -> str:
return cls.__name__
c1 = C.factory("test") # okay
reveal_type(c1) # revealed: C
reveal_type(C.get_name()) # revealed: str
reveal_type(C("42").get_name()) # revealed: str
```
## Descriptors only work when used as class variables
From the descriptor guide:
> Descriptors only work when used as class variables. When put in instances, they have no effect.
```py
from typing import Literal
class Ten:
def __get__(self, instance: object, owner: type | None = None) -> Literal[10]:
return 10
class C:
def __init__(self):
self.ten: Ten = Ten()
# TODO: Should be Ten
reveal_type(C().ten) # revealed: Literal[10]
```
## Descriptors distinguishing between class and instance access
Overloads can be used to distinguish between when a descriptor is accessed on a class object and
when it is accessed on an instance. A real-world example of this is the `__get__` method on
`types.FunctionType`.
```py
from typing_extensions import Literal, LiteralString, overload
class Descriptor:
@overload
def __get__(self, instance: None, owner: type, /) -> Literal["called on class object"]: ...
@overload
def __get__(self, instance: object, owner: type | None = None, /) -> Literal["called on instance"]: ...
def __get__(self, instance, owner=None, /) -> LiteralString:
if instance:
return "called on instance"
else:
return "called on class object"
class C:
d: Descriptor = Descriptor()
# TODO: should be `Literal["called on class object"]`
reveal_type(C.d) # revealed: LiteralString
# TODO: should be `Literal["called on instance"]`
reveal_type(C().d) # revealed: LiteralString
```
## Undeclared descriptor arguments
If a descriptor attribute is not declared, we union with `Unknown`, just like for regular
attributes, since that attribute could be overwritten externally. Even a data descriptor with a
`__set__` method can be overwritten when accessed through a class object.
```py
class Descriptor:
def __get__(self, instance: object, owner: type | None = None) -> int:
return 1
def __set__(self, instance: object, value: int) -> None:
pass
class C:
descriptor = Descriptor()
C.descriptor = "something else"
# This could also be `Literal["something else"]` if we support narrowing of attribute types based on assignments
reveal_type(C.descriptor) # revealed: Unknown | int
```
## `__get__` is called with correct arguments
```py
from __future__ import annotations
class TailoredForClassObjectAccess:
def __get__(self, instance: None, owner: type[C]) -> int:
return 1
class TailoredForInstanceAccess:
def __get__(self, instance: C, owner: type[C] | None = None) -> str:
return "a"
class C:
class_object_access: TailoredForClassObjectAccess = TailoredForClassObjectAccess()
instance_access: TailoredForInstanceAccess = TailoredForInstanceAccess()
reveal_type(C.class_object_access) # revealed: int
reveal_type(C().instance_access) # revealed: str
# TODO: These should emit a diagnostic
reveal_type(C().class_object_access) # revealed: TailoredForClassObjectAccess
reveal_type(C.instance_access) # revealed: TailoredForInstanceAccess
```
## Descriptors with incorrect `__get__` signature
```py
class Descriptor:
# `__get__` method with missing parameters:
def __get__(self) -> int:
return 1
class C:
descriptor: Descriptor = Descriptor()
# TODO: This should be an error
reveal_type(C.descriptor) # revealed: Descriptor
```
## Possibly-unbound `__get__` method
```py
def _(flag: bool):
class MaybeDescriptor:
if flag:
def __get__(self, instance: object, owner: type | None = None) -> int:
return 1
class C:
descriptor: MaybeDescriptor = MaybeDescriptor()
# TODO: This should be `MaybeDescriptor | int`
reveal_type(C.descriptor) # revealed: int
```
## Dunder methods
Dunder methods are looked up on the meta type, but we still need to invoke the descriptor protocol:
```py
class SomeCallable:
def __call__(self, x: int) -> str:
return "a"
class Descriptor:
def __get__(self, instance: object, owner: type | None = None) -> SomeCallable:
return SomeCallable()
class B:
__call__: Descriptor = Descriptor()
b_instance = B()
reveal_type(b_instance(1)) # revealed: str
b_instance("bla") # error: [invalid-argument-type]
```
## Functions as descriptors
Functions are descriptors because they implement a `__get__` method. This is crucial in making sure
that method calls work as expected. See [this test suite](./call/methods.md) for more information.
Here, we only demonstrate how `__get__` works on functions:
```py
from inspect import getattr_static
def f(x: object) -> str:
return "a"
reveal_type(f) # revealed: Literal[f]
reveal_type(f.__get__) # revealed: <method-wrapper `__get__` of `f`>
reveal_type(f.__get__(None, type(f))) # revealed: Literal[f]
reveal_type(f.__get__(None, type(f))(1)) # revealed: str
wrapper_descriptor = getattr_static(f, "__get__")
reveal_type(wrapper_descriptor) # revealed: <wrapper-descriptor `__get__` of `function` objects>
reveal_type(wrapper_descriptor(f, None, type(f))) # revealed: Literal[f]
# Attribute access on the method-wrapper `f.__get__` falls back to `MethodWrapperType`:
reveal_type(f.__get__.__hash__) # revealed: <bound method `__hash__` of `MethodWrapperType`>
# Attribute access on the wrapper-descriptor falls back to `WrapperDescriptorType`:
reveal_type(wrapper_descriptor.__qualname__) # revealed: @Todo(@property)
```
We can also bind the free function `f` to an instance of a class `C`:
```py
class C: ...
bound_method = wrapper_descriptor(f, C(), C)
reveal_type(bound_method) # revealed: <bound method `f` of `C`>
```
We can then call it, and the instance of `C` is implicitly passed to the first parameter of `f`
(`x`):
```py
reveal_type(bound_method()) # revealed: str
```
Finally, we test some error cases for the call to the wrapper descriptor:
```py
# Calling the wrapper descriptor without any arguments is an
# error: [missing-argument] "No arguments provided for required parameters `self`, `instance`"
wrapper_descriptor()
# Calling it without the `instance` argument is also an
# error: [missing-argument] "No argument provided for required parameter `instance`"
wrapper_descriptor(f)
# Calling it without the `owner` argument if `instance` is not `None` is an
# error: [missing-argument] "No argument provided for required parameter `owner`"
wrapper_descriptor(f, None)
# But calling it with an instance is fine (in this case, the `owner` argument is optional):
wrapper_descriptor(f, C())
# Calling it with something that is not a `FunctionType` as the first argument is an
# error: [invalid-argument-type] "Object of type `Literal[1]` cannot be assigned to parameter 1 (`self`) of wrapper descriptor `FunctionType.__get__`; expected type `FunctionType`"
wrapper_descriptor(1, None, type(f))
# Calling it with something that is not a `type` as the `owner` argument is an
# error: [invalid-argument-type] "Object of type `Literal[f]` cannot be assigned to parameter 3 (`owner`) of wrapper descriptor `FunctionType.__get__`; expected type `type`"
wrapper_descriptor(f, None, f)
# Calling it with too many positional arguments is an
# error: [too-many-positional-arguments] "Too many positional arguments to wrapper descriptor `FunctionType.__get__`: expected 3, got 4"
wrapper_descriptor(f, None, type(f), "one too many")
```
[descriptors]: https://docs.python.org/3/howto/descriptor.html
[simple example]: https://docs.python.org/3/howto/descriptor.html#simple-example-a-descriptor-that-returns-a-constant

View File

@@ -1,197 +0,0 @@
# Invalid argument type diagnostics
<!-- snapshot-diagnostics -->
## Basic
This is a basic test demonstrating that a diagnostic points to the function definition corresponding
to the invalid argument.
```py
def foo(x: int) -> int:
return x * x
foo("hello") # error: [invalid-argument-type]
```
## Different source order
This is like the basic test, except we put the call site above the function definition.
```py
def bar():
foo("hello") # error: [invalid-argument-type]
def foo(x: int) -> int:
return x * x
```
## Different files
This tests that a diagnostic can point to a function definition in a different file from the one in
which the invalid call site was found.
`package.py`:
```py
def foo(x: int) -> int:
return x * x
```
```py
import package
package.foo("hello") # error: [invalid-argument-type]
```
## Many parameters
This checks that a diagnostic renders reasonably when there are multiple parameters.
```py
def foo(x: int, y: int, z: int) -> int:
return x * y * z
foo(1, "hello", 3) # error: [invalid-argument-type]
```
## Many parameters across multiple lines
This checks that a diagnostic renders reasonably when there are multiple parameters spread out
across multiple lines.
```py
def foo(
x: int,
y: int,
z: int,
) -> int:
return x * y * z
foo(1, "hello", 3) # error: [invalid-argument-type]
```
## Many parameters with multiple invalid arguments
This checks that a diagnostic renders reasonably when there are multiple parameters and multiple
invalid argument types.
```py
def foo(x: int, y: int, z: int) -> int:
return x * y * z
# error: [invalid-argument-type]
# error: [invalid-argument-type]
# error: [invalid-argument-type]
foo("a", "b", "c")
```
At present (2025-02-18), this renders three different diagnostic messages. But arguably, these could
all be folded into one diagnostic. Fixing this requires at least better support for multi-spans in
the diagnostic model, and possibly also changes to how diagnostics are emitted by the type checker itself.
## Test calling a function whose type is vendored from `typeshed`
This tests that diagnostic rendering is reasonable when the function being called is from the
standard library.
```py
import json
json.loads(5) # error: [invalid-argument-type]
```
## Tests for a variety of argument types
These tests check that diagnostic output is reasonable regardless of the kinds of arguments used in
a function definition.
### Only positional
Tests a function definition with only positional parameters.
```py
def foo(x: int, y: int, z: int, /) -> int:
return x * y * z
foo(1, "hello", 3) # error: [invalid-argument-type]
```
### Variadic arguments
Tests a function definition with variadic arguments.
```py
def foo(*numbers: int) -> int:
return len(numbers)
foo(1, 2, 3, "hello", 5) # error: [invalid-argument-type]
```
### Keyword only arguments
Tests a function definition with keyword-only arguments.
```py
def foo(x: int, y: int, *, z: int = 0) -> int:
return x * y * z
foo(1, 2, z="hello") # error: [invalid-argument-type]
```
### One keyword argument
Tests a function definition where one parameter has a default value.
```py
def foo(x: int, y: int, z: int = 0) -> int:
return x * y * z
foo(1, 2, "hello") # error: [invalid-argument-type]
```
### Variadic keyword arguments
```py
def foo(**numbers: int) -> int:
return len(numbers)
foo(a=1, b=2, c=3, d="hello", e=5) # error: [invalid-argument-type]
```
### Mix of arguments
Tests a function definition with multiple different kinds of arguments.
```py
def foo(x: int, /, y: int, *, z: int = 0) -> int:
return x * y * z
foo(1, 2, z="hello") # error: [invalid-argument-type]
```
### Synthetic arguments
Tests a function call with synthetic arguments.
```py
class C:
def __call__(self, x: int) -> int:
return 1
c = C()
c("wrong") # error: [invalid-argument-type]
```
## Calls to methods
Tests that we also see a reference to a function if the callable is a bound method.
```py
class C:
def square(self, x: int) -> int:
return x * x
c = C()
c.square("hello") # error: [invalid-argument-type]
```

View File

@@ -1,21 +0,0 @@
# Unpacking
<!-- snapshot-diagnostics -->
## Right hand side not iterable
```py
a, b = 1 # error: [not-iterable]
```
## Too many values to unpack
```py
a, b = (1, 2, 3) # error: [invalid-assignment]
```
## Too few values to unpack
```py
a, b = (1,) # error: [invalid-assignment]
```

View File

@@ -1,87 +0,0 @@
# Unresolved import diagnostics
<!-- snapshot-diagnostics -->
## Using `from` with an unresolvable module
This example demonstrates the diagnostic when a `from` style import is used with a module that could
not be found:
```py
from does_not_exist import add # error: [unresolved-import]
stat = add(10, 15)
```
## Using `from` with too many leading dots
This example demonstrates the diagnostic when a `from` style import is used with a presumptively
valid path, but where there are too many leading dots.
`package/__init__.py`:
```py
```
`package/foo.py`:
```py
def add(x, y):
return x + y
```
`package/subpackage/subsubpackage/__init__.py`:
```py
from ....foo import add # error: [unresolved-import]
stat = add(10, 15)
```
## Using `from` with an unknown current module
This is another case handled separately in Red Knot, where a `.` provokes relative module name
resolution, but where the module name is not resolvable.
```py
from .does_not_exist import add # error: [unresolved-import]
stat = add(10, 15)
```
## Using `from` with an unknown nested module
Like the previous test, but with sub-modules to ensure the span is correct.
```py
from .does_not_exist.foo.bar import add # error: [unresolved-import]
stat = add(10, 15)
```
## Using `from` with a resolvable module but unresolvable item
This ensures that diagnostics for an unresolvable item inside a resolvable import highlight the item
and not the entire `from ... import ...` statement.
`a.py`:
```py
does_exist1 = 1
does_exist2 = 2
```
```py
from a import does_exist1, does_not_exist, does_exist2 # error: [unresolved-import]
```
## An unresolvable import that does not use `from`
This ensures that an unresolvable `import ...` statement highlights just the module name and not the
entire statement.
```py
import does_not_exist # error: [unresolved-import]
x = does_not_exist.foo
```

View File

@@ -78,7 +78,7 @@ def _(a: type[Unknown], b: type[Any]):
Tuple types with the same elements are the same.
```py
from typing_extensions import Any, assert_type
from typing_extensions import assert_type
from knot_extensions import Unknown

View File

@@ -1,2 +0,0 @@
This directory contains user-facing documentation, but also doubles as an extended test suite that
makes sure that our documentation stays up to date.

View File

@@ -1,125 +0,0 @@
# Public type of undeclared symbols
## Summary
One major deviation from the behavior of existing Python type checkers is our handling of 'public'
types for undeclared symbols. This is best illustrated with an example:
```py
class Wrapper:
value = None
wrapper = Wrapper()
reveal_type(wrapper.value) # revealed: Unknown | None
wrapper.value = 1
```
Mypy and Pyright both infer a type of `None` for the type of `wrapper.value`. Consequently, both
tools emit an error when trying to assign `1` to `wrapper.value`. But there is nothing wrong with
this program. Emitting an error here violates the [gradual guarantee] which states that *"Removing
type annotations (making the program more dynamic) should not result in additional static type
errors."*: If `value` were annotated with `int | None` here, Mypy and Pyright would not emit any
errors.
By inferring `Unknown | None` instead, we allow arbitrary values to be assigned to `wrapper.value`.
This is a deliberate choice to prevent false positive errors on untyped code.
More generally, we infer `Unknown | T_inferred` for undeclared symbols, where `T_inferred` is the
inferred type of the right-hand side of the assignment. This gradual type represents an *unknown*
fully-static type that is *at least as large as* `T_inferred`. It accurately describes our static
knowledge about this type. In the example above, we don't know what values `wrapper.value` could
possibly contain, but we *do know* that `None` is a possibility. This allows us to catch errors
where `wrapper.value` is used in a way that is incompatible with `None`:
```py
def accepts_int(i: int) -> None:
pass
def f(w: Wrapper) -> None:
# This is fine
v: int | None = w.value
# This function call is incorrect, because `w.value` could be `None`. We therefore emit the following
# error: "`Unknown | None` cannot be assigned to parameter 1 (`i`) of function `accepts_int`; expected type `int`"
c = accepts_int(w.value)
```
## Explicit lack of knowledge
The following example demonstrates how Mypy and Pyright's type inference of fully-static types in
these situations can lead to false-negatives, even though everything appears to be (statically)
typed. To make this a bit more realistic, imagine that `OptionalInt` is imported from an external,
untyped module:
`optional_int.py`:
```py
class OptionalInt:
value = 10
def reset(o):
o.value = None
```
It is then used like this:
```py
from optional_int import OptionalInt, reset
o = OptionalInt()
reset(o) # Oh no...
# Mypy and Pyright infer a fully-static type of `int` here, which appears to make the
# subsequent division operation safe -- but it is not. We infer the following type:
reveal_type(o.value) # revealed: Unknown | Literal[10]
print(o.value // 2) # Runtime error!
```
We do not catch this mistake either, but we accurately reflect our lack of knowledge about
`o.value`. Together with a possible future type-checker mode that would detect the prevalence of
dynamic types, this could help developers catch such mistakes.
## Stricter behavior
Users can always opt in to stricter behavior by adding type annotations. For the `OptionalInt`
class, this would probably be:
```py
class OptionalInt:
value: int | None = 10
o = OptionalInt()
# The following public type is now
# revealed: int | None
reveal_type(o.value)
# Incompatible assignments are now caught:
# error: "Object of type `Literal["a"]` is not assignable to attribute `value` of type `int | None`"
o.value = "a"
```
## What is meant by 'public' type?
We apply different semantics depending on whether a symbol is accessed from the same scope in which
it was originally defined, or whether it is accessed from an external scope. External scopes will
see the symbol's "public type", which has been discussed above. But within the same scope the symbol
was defined in, we use a narrower type of `T_inferred` for undeclared symbols. This is because, from
the perspective of this scope, there is no way that the value of the symbol could have been
reassigned from external scopes. For example:
```py
class Wrapper:
value = None
# Type as seen from the same scope:
reveal_type(value) # revealed: None
# Type as seen from another scope:
reveal_type(Wrapper.value) # revealed: Unknown | None
```
[gradual guarantee]: https://typing.readthedocs.io/en/latest/spec/concepts.html#the-gradual-guarantee

View File

@@ -124,49 +124,42 @@ def _(e: Exception | type[Exception] | None):
## Exception cause is not an exception
```py
def _():
try:
raise EOFError() from GeneratorExit # fine
except:
...
try:
raise EOFError() from GeneratorExit # fine
except:
...
def _():
try:
raise StopIteration from MemoryError() # fine
except:
...
try:
raise StopIteration from MemoryError() # fine
except:
...
def _():
try:
raise BufferError() from None # fine
except:
...
try:
raise BufferError() from None # fine
except:
...
def _():
try:
raise ZeroDivisionError from False # error: [invalid-raise]
except:
...
try:
raise ZeroDivisionError from False # error: [invalid-raise]
except:
...
def _():
try:
raise SystemExit from bool() # error: [invalid-raise]
except:
...
try:
raise SystemExit from bool() # error: [invalid-raise]
except:
...
def _():
try:
raise
except KeyboardInterrupt as e: # fine
reveal_type(e) # revealed: KeyboardInterrupt
raise LookupError from e # fine
try:
raise
except KeyboardInterrupt as e: # fine
reveal_type(e) # revealed: KeyboardInterrupt
raise LookupError from e # fine
def _():
try:
raise
except int as e: # error: [invalid-exception-caught]
reveal_type(e) # revealed: Unknown
raise KeyError from e
try:
raise
except int as e: # error: [invalid-exception-caught]
reveal_type(e) # revealed: Unknown
raise KeyError from e
def _(e: Exception | type[Exception]):
raise ModuleNotFoundError from e # fine

View File

@@ -29,7 +29,7 @@ completing. The type of `x` at the beginning of the `except` suite in this examp
`x = could_raise_returns_str()` redefinition, but we *also* could have jumped to the `except` suite
*after* that redefinition.
```py
```py path=union_type_inferred.py
def could_raise_returns_str() -> str:
return "foo"
@@ -50,7 +50,10 @@ reveal_type(x) # revealed: str | Literal[2]
If `x` has the same type at the end of both branches, however, the branches unify and `x` is not
inferred as having a union type following the `try`/`except` block:
```py
```py path=branches_unify_to_non_union_type.py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
@@ -130,7 +133,7 @@ the `except` suite:
- At the end of `else`, `x == 3`
- At the end of `except`, `x == 2`
```py
```py path=single_except.py
def could_raise_returns_str() -> str:
return "foo"
@@ -158,6 +161,9 @@ been executed in its entirety, or the `try` suite and the `else` suite must both
in their entireties:
```py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
@@ -186,7 +192,7 @@ A `finally` suite is *always* executed. As such, if we reach the `reveal_type` c
this example, we know that `x` *must* have been reassigned to `2` during the `finally` suite. The
type of `x` at the end of the example is therefore `Literal[2]`:
```py
```py path=redef_in_finally.py
def could_raise_returns_str() -> str:
return "foo"
@@ -211,7 +217,10 @@ at this point than there were when we were inside the `finally` block.
(Our current model does *not* correctly infer the types *inside* `finally` suites, however; this is
still a TODO item for us.)
```py
```py path=no_redef_in_finally.py
def could_raise_returns_str() -> str:
return "foo"
x = 1
try:
@@ -240,35 +249,31 @@ suites:
exception raised in the `except` suite to cause us to jump to the `finally` suite before the
`except` suite ran to completion
```py
class A: ...
class B: ...
class C: ...
```py path=redef_in_finally.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_A() -> A:
return A()
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_B() -> B:
return B()
def could_raise_returns_C() -> C:
return C()
def could_raise_returns_bool() -> bool:
return True
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_A()
reveal_type(x) # revealed: A
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_B()
reveal_type(x) # revealed: B
x = could_raise_returns_C()
reveal_type(x) # revealed: C
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
finally:
# TODO: should be `Literal[1] | A | B | C`
reveal_type(x) # revealed: A | C
# TODO: should be `Literal[1] | str | bytes | bool`
reveal_type(x) # revealed: str | bool
x = 2
reveal_type(x) # revealed: Literal[2]
@@ -281,61 +286,76 @@ itself. (In some control-flow possibilities, some exceptions were merely *suspen
`finally` suite; these lead to the scope's termination following the conclusion of the `finally`
suite.)
```py
```py path=no_redef_in_finally.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_bool() -> bool:
return True
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_A()
reveal_type(x) # revealed: A
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_B()
reveal_type(x) # revealed: B
x = could_raise_returns_C()
reveal_type(x) # revealed: C
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
finally:
# TODO: should be `Literal[1] | A | B | C`
reveal_type(x) # revealed: A | C
# TODO: should be `Literal[1] | str | bytes | bool`
reveal_type(x) # revealed: str | bool
reveal_type(x) # revealed: A | C
reveal_type(x) # revealed: str | bool
```
An example with multiple `except` branches and a `finally` branch:
```py
class D: ...
class E: ...
```py path=multiple_except_branches.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_D() -> D:
return D()
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_E() -> E:
return E()
def could_raise_returns_bool() -> bool:
return True
def could_raise_returns_memoryview() -> memoryview:
return memoryview(b"")
def could_raise_returns_float() -> float:
return 3.14
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_A()
reveal_type(x) # revealed: A
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_B()
reveal_type(x) # revealed: B
x = could_raise_returns_C()
reveal_type(x) # revealed: C
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
except ValueError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_D()
reveal_type(x) # revealed: D
x = could_raise_returns_E()
reveal_type(x) # revealed: E
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_memoryview()
reveal_type(x) # revealed: memoryview
x = could_raise_returns_float()
reveal_type(x) # revealed: float
finally:
# TODO: should be `Literal[1] | A | B | C | D | E`
reveal_type(x) # revealed: A | C | E
# TODO: should be `Literal[1] | str | bytes | bool | memoryview | float`
reveal_type(x) # revealed: str | bool | float
reveal_type(x) # revealed: A | C | E
reveal_type(x) # revealed: str | bool | float
```
## Combining `except`, `else` and `finally` branches
@@ -344,94 +364,100 @@ If the exception handler has an `else` branch, we must also take into account th
control flow could have jumped to the `finally` suite from partway through the `else` suite due to
an exception raised *there*.
```py
class A: ...
class B: ...
class C: ...
class D: ...
class E: ...
```py path=single_except_branch.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_A() -> A:
return A()
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_B() -> B:
return B()
def could_raise_returns_bool() -> bool:
return True
def could_raise_returns_C() -> C:
return C()
def could_raise_returns_memoryview() -> memoryview:
return memoryview(b"")
def could_raise_returns_D() -> D:
return D()
def could_raise_returns_E() -> E:
return E()
def could_raise_returns_float() -> float:
return 3.14
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_A()
reveal_type(x) # revealed: A
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_B()
reveal_type(x) # revealed: B
x = could_raise_returns_C()
reveal_type(x) # revealed: C
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
else:
reveal_type(x) # revealed: A
x = could_raise_returns_D()
reveal_type(x) # revealed: D
x = could_raise_returns_E()
reveal_type(x) # revealed: E
reveal_type(x) # revealed: str
x = could_raise_returns_memoryview()
reveal_type(x) # revealed: memoryview
x = could_raise_returns_float()
reveal_type(x) # revealed: float
finally:
# TODO: should be `Literal[1] | A | B | C | D | E`
reveal_type(x) # revealed: C | E
# TODO: should be `Literal[1] | str | bytes | bool | memoryview | float`
reveal_type(x) # revealed: bool | float
reveal_type(x) # revealed: C | E
reveal_type(x) # revealed: bool | float
```
The same again, this time with multiple `except` branches:
```py
class F: ...
class G: ...
```py path=multiple_except_branches.py
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_F() -> F:
return F()
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_G() -> G:
return G()
def could_raise_returns_bool() -> bool:
return True
def could_raise_returns_memoryview() -> memoryview:
return memoryview(b"")
def could_raise_returns_float() -> float:
return 3.14
def could_raise_returns_range() -> range:
return range(42)
def could_raise_returns_slice() -> slice:
return slice(None)
x = 1
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_A()
reveal_type(x) # revealed: A
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_B()
reveal_type(x) # revealed: B
x = could_raise_returns_C()
reveal_type(x) # revealed: C
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
except ValueError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_D()
reveal_type(x) # revealed: D
x = could_raise_returns_E()
reveal_type(x) # revealed: E
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_memoryview()
reveal_type(x) # revealed: memoryview
x = could_raise_returns_float()
reveal_type(x) # revealed: float
else:
reveal_type(x) # revealed: A
x = could_raise_returns_F()
reveal_type(x) # revealed: F
x = could_raise_returns_G()
reveal_type(x) # revealed: G
reveal_type(x) # revealed: str
x = could_raise_returns_range()
reveal_type(x) # revealed: range
x = could_raise_returns_slice()
reveal_type(x) # revealed: slice
finally:
# TODO: should be `Literal[1] | A | B | C | D | E | F | G`
reveal_type(x) # revealed: C | E | G
# TODO: should be `Literal[1] | str | bytes | bool | memoryview | float | range | slice`
reveal_type(x) # revealed: bool | float | slice
reveal_type(x) # revealed: C | E | G
reveal_type(x) # revealed: bool | float | slice
```
## Nested `try`/`except` blocks
@@ -445,101 +471,92 @@ a suite containing statements that could possibly raise exceptions, which would
jumping out of that suite prior to the suite running to completion.
```py
class A: ...
class B: ...
class C: ...
class D: ...
class E: ...
class F: ...
class G: ...
class H: ...
class I: ...
class J: ...
class K: ...
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_A() -> A:
return A()
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_B() -> B:
return B()
def could_raise_returns_bool() -> bool:
return True
def could_raise_returns_C() -> C:
return C()
def could_raise_returns_memoryview() -> memoryview:
return memoryview(b"")
def could_raise_returns_D() -> D:
return D()
def could_raise_returns_float() -> float:
return 3.14
def could_raise_returns_E() -> E:
return E()
def could_raise_returns_range() -> range:
return range(42)
def could_raise_returns_F() -> F:
return F()
def could_raise_returns_slice() -> slice:
return slice(None)
def could_raise_returns_G() -> G:
return G()
def could_raise_returns_complex() -> complex:
return 3j
def could_raise_returns_H() -> H:
return H()
def could_raise_returns_bytearray() -> bytearray:
return bytearray()
def could_raise_returns_I() -> I:
return I()
class Foo: ...
class Bar: ...
def could_raise_returns_J() -> J:
return J()
def could_raise_returns_Foo() -> Foo:
return Foo()
def could_raise_returns_K() -> K:
return K()
def could_raise_returns_Bar() -> Bar:
return Bar()
x = 1
try:
try:
reveal_type(x) # revealed: Literal[1]
x = could_raise_returns_A()
reveal_type(x) # revealed: A
x = could_raise_returns_str()
reveal_type(x) # revealed: str
except TypeError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_B()
reveal_type(x) # revealed: B
x = could_raise_returns_C()
reveal_type(x) # revealed: C
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
x = could_raise_returns_bool()
reveal_type(x) # revealed: bool
except ValueError:
reveal_type(x) # revealed: Literal[1] | A
x = could_raise_returns_D()
reveal_type(x) # revealed: D
x = could_raise_returns_E()
reveal_type(x) # revealed: E
reveal_type(x) # revealed: Literal[1] | str
x = could_raise_returns_memoryview()
reveal_type(x) # revealed: memoryview
x = could_raise_returns_float()
reveal_type(x) # revealed: float
else:
reveal_type(x) # revealed: A
x = could_raise_returns_F()
reveal_type(x) # revealed: F
x = could_raise_returns_G()
reveal_type(x) # revealed: G
reveal_type(x) # revealed: str
x = could_raise_returns_range()
reveal_type(x) # revealed: range
x = could_raise_returns_slice()
reveal_type(x) # revealed: slice
finally:
# TODO: should be `Literal[1] | A | B | C | D | E | F | G`
reveal_type(x) # revealed: C | E | G
# TODO: should be `Literal[1] | str | bytes | bool | memoryview | float | range | slice`
reveal_type(x) # revealed: bool | float | slice
x = 2
reveal_type(x) # revealed: Literal[2]
reveal_type(x) # revealed: Literal[2]
except:
reveal_type(x) # revealed: Literal[1, 2] | A | B | C | D | E | F | G
x = could_raise_returns_H()
reveal_type(x) # revealed: H
x = could_raise_returns_I()
reveal_type(x) # revealed: I
reveal_type(x) # revealed: Literal[1, 2] | str | bytes | bool | memoryview | float | range | slice
x = could_raise_returns_complex()
reveal_type(x) # revealed: complex
x = could_raise_returns_bytearray()
reveal_type(x) # revealed: bytearray
else:
reveal_type(x) # revealed: Literal[2]
x = could_raise_returns_J()
reveal_type(x) # revealed: J
x = could_raise_returns_K()
reveal_type(x) # revealed: K
x = could_raise_returns_Foo()
reveal_type(x) # revealed: Foo
x = could_raise_returns_Bar()
reveal_type(x) # revealed: Bar
finally:
# TODO: should be `Literal[1, 2] | A | B | C | D | E | F | G | H | I | J | K`
reveal_type(x) # revealed: I | K
# TODO: should be `Literal[1, 2] | str | bytes | bool | memoryview | float | range | slice | complex | bytearray | Foo | Bar`
reveal_type(x) # revealed: bytearray | Bar
# Either one `except` branch or the `else`
# must have been taken and completed to get here:
reveal_type(x) # revealed: I | K
reveal_type(x) # revealed: bytearray | Bar
```
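As a rough runtime counterpart to the nested example above: the inner `finally` suite runs before the outer handler ever sees the exception, so each suite observes `x` at a different point. A small sketch with ad-hoc names:

```py
class H: ...

x: object = 1
try:
    try:
        x = "partway"
        raise KeyError  # escapes the inner suite before it completes
    finally:
        # the inner `finally` runs first, seeing whatever `x` held when
        # control left the inner suite
        print("inner finally:", type(x).__name__)  # str
except KeyError:
    # only afterwards does the outer handler observe the exception
    x = H()
finally:
    print("outer finally:", type(x).__name__)  # H
```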
## Nested scopes inside `try` blocks
@@ -548,56 +565,50 @@ Shadowing a variable in an inner scope has no effect on type inference of the variable by that name
in the outer scope:
```py
class A: ...
class B: ...
class C: ...
class D: ...
class E: ...
def could_raise_returns_str() -> str:
return "foo"
def could_raise_returns_A() -> A:
return A()
def could_raise_returns_bytes() -> bytes:
return b"foo"
def could_raise_returns_B() -> B:
return B()
def could_raise_returns_range() -> range:
return range(42)
def could_raise_returns_C() -> C:
return C()
def could_raise_returns_bytearray() -> bytearray:
return bytearray()
def could_raise_returns_D() -> D:
return D()
def could_raise_returns_E() -> E:
return E()
def could_raise_returns_float() -> float:
return 3.14
x = 1
try:
def foo(param=could_raise_returns_A()):
x = could_raise_returns_A()
def foo(param=could_raise_returns_str()):
x = could_raise_returns_str()
try:
reveal_type(x) # revealed: A
x = could_raise_returns_B()
reveal_type(x) # revealed: B
reveal_type(x) # revealed: str
x = could_raise_returns_bytes()
reveal_type(x) # revealed: bytes
except:
reveal_type(x) # revealed: A | B
x = could_raise_returns_C()
reveal_type(x) # revealed: C
x = could_raise_returns_D()
reveal_type(x) # revealed: D
reveal_type(x) # revealed: str | bytes
x = could_raise_returns_bytearray()
reveal_type(x) # revealed: bytearray
x = could_raise_returns_float()
reveal_type(x) # revealed: float
finally:
# TODO: should be `A | B | C | D`
reveal_type(x) # revealed: B | D
reveal_type(x) # revealed: B | D
# TODO: should be `str | bytes | bytearray | float`
reveal_type(x) # revealed: bytes | float
reveal_type(x) # revealed: bytes | float
x = foo
reveal_type(x) # revealed: Literal[foo]
except:
reveal_type(x) # revealed: Literal[1] | Literal[foo]
class Bar:
x = could_raise_returns_E()
reveal_type(x) # revealed: E
x = could_raise_returns_range()
reveal_type(x) # revealed: range
x = Bar
reveal_type(x) # revealed: Literal[Bar]
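The point of this section can also be checked at runtime: bindings made inside a function body or a class body live in their own scope and never rebind the enclosing `x`. A minimal sketch (names are illustrative only):

```py
x = 1

def foo() -> str:
    # local to `foo`; shadows the outer `x` without touching it
    x = "inner"
    return x

class Bar:
    # class bodies are separate scopes too; this binding becomes `Bar.x`
    x = b"class body"

foo()
print(x)      # 1
print(Bar.x)  # b'class body'
```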

View File

@@ -1,9 +0,0 @@
## Condition with object that implements `__bool__` incorrectly
```py
class NotBoolable:
__bool__ = 3
# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`; its `__bool__` method isn't callable"
assert NotBoolable()
```
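For context on the diagnostic above: truth-testing such an object also fails at runtime, because CPython looks up `__bool__` on the class and tries to call it. A small sketch of that behaviour (the exact error message is not guaranteed):

```py
class NotBoolable:
    __bool__ = 3  # not callable

try:
    assert NotBoolable()
except TypeError as exc:
    # the truth test tries to call the class-level `__bool__` and fails
    print(exc)
```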

View File

@@ -5,7 +5,7 @@
```py
def _(flag: bool):
class A:
always_bound: int = 1
always_bound = 1
if flag:
union = 1
@@ -13,21 +13,14 @@ def _(flag: bool):
union = "abc"
if flag:
union_declared: int = 1
else:
union_declared: str = "abc"
possibly_unbound = "abc"
if flag:
possibly_unbound: str = "abc"
reveal_type(A.always_bound) # revealed: Literal[1]
reveal_type(A.always_bound) # revealed: int
reveal_type(A.union) # revealed: Unknown | Literal[1, "abc"]
reveal_type(A.union_declared) # revealed: int | str
reveal_type(A.union) # revealed: Literal[1, "abc"]
# error: [possibly-unbound-attribute] "Attribute `possibly_unbound` on type `Literal[A]` is possibly unbound"
reveal_type(A.possibly_unbound) # revealed: str
reveal_type(A.possibly_unbound) # revealed: Literal["abc"]
# error: [unresolved-attribute] "Type `Literal[A]` has no attribute `non_existent`"
reveal_type(A.non_existent) # revealed: Unknown
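The `possibly-unbound-attribute` diagnostic above corresponds to a real runtime hazard: if the conditional branch is not taken, the class body never creates the attribute. A small sketch along the same lines as the fixture:

```py
def _(flag: bool) -> None:
    class A:
        if flag:
            possibly_unbound = "abc"

    try:
        print(A.possibly_unbound)
    except AttributeError as exc:
        # when `flag` is false, the class body never bound the attribute
        print(exc)

_(True)   # abc
_(False)  # AttributeError: the attribute was never bound
```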

View File

@@ -54,10 +54,8 @@ reveal_type("x" or "y" and "") # revealed: Literal["x"]
## Evaluates to builtin
`a.py`:
```py
redefined_builtin_bool: type[bool] = bool
```py path=a.py
redefined_builtin_bool = bool
def my_bool(x) -> bool:
return True
@@ -101,55 +99,3 @@ reveal_type(bool([])) # revealed: bool
reveal_type(bool({})) # revealed: bool
reveal_type(bool(set())) # revealed: bool
```
## `__bool__` returning `NoReturn`
```py
from typing import NoReturn
class NotBoolable:
def __bool__(self) -> NoReturn:
raise NotImplementedError("This object can't be converted to a boolean")
# TODO: This should emit an error that `NotBoolable` can't be converted to a bool but it currently doesn't
# because `Never` is assignable to `bool`. This probably requires dead code analysis to fix.
if NotBoolable():
...
```
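Runtime behaviour matches the comment above: the truth test calls `__bool__`, which unconditionally raises, so the `if` body can never execute. A minimal sketch:

```py
from typing import NoReturn

class NotBoolable:
    def __bool__(self) -> NoReturn:
        raise NotImplementedError("This object can't be converted to a boolean")

try:
    if NotBoolable():
        ...
except NotImplementedError as exc:
    # the `if` body is unreachable at runtime; the truth test always raises
    print(exc)
```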
## Not callable `__bool__`
```py
class NotBoolable:
__bool__ = None
# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`; its `__bool__` method isn't callable"
if NotBoolable():
...
```
## Not-boolable union
```py
def test(cond: bool):
class NotBoolable:
__bool__ = None if cond else 3
# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`; it incorrectly implements `__bool__`"
if NotBoolable():
...
```
## Union with some variants implementing `__bool__` incorrectly
```py
def test(cond: bool):
class NotBoolable:
__bool__ = None
a = 10 if cond else NotBoolable()
# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `Literal[10] | NotBoolable`; its `__bool__` method isn't callable"
if a:
...
```
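At runtime only the `NotBoolable` variant of the union is a problem, but a checker has to assume either variant may reach the truth test, which is why the union as a whole is flagged. A rough sketch, assuming CPython ends up trying to call the class-level `__bool__`:

```py
def test(cond: bool) -> None:
    class NotBoolable:
        __bool__ = None

    a = 10 if cond else NotBoolable()
    try:
        print("truthy" if a else "falsy")  # fine when `a` is 10
    except TypeError as exc:
        # the NotBoolable variant cannot be truth-tested
        print("no truth value:", exc)

test(True)
test(False)
```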

Some files were not shown because too many files have changed in this diff.