Compare commits: `0.9.3 ... deprecated`

12 commits
| Author | SHA1 | Date |
|---|---|---|
|  | c76e15a45d |  |
|  | d0aff2bbff |  |
|  | 65db31f0e1 |  |
|  | c2c37b8052 |  |
|  | 51613d9107 |  |
|  | f20e70cd62 |  |
|  | 4737824345 |  |
|  | 1961b76d03 |  |
|  | 62a1e55705 |  |
|  | 8a7ec4c0a3 |  |
|  | 9b9540c3cd |  |
|  | ccafaf8e30 |  |
```diff
@@ -6,10 +6,3 @@ failure-output = "immediate-final"
 fail-fast = false
 
-status-level = "skip"
-
-# Mark tests that take longer than 1s as slow.
-# Terminate after 60s as a stop-gap measure to terminate on deadlock.
-slow-timeout = { period = "1s", terminate-after = 60 }
-
-# Show slow jobs in the final summary
-final-status-level = "slow"
```
**.github/ISSUE_TEMPLATE/config.yml** (vendored, 2 changes)
```diff
@@ -1,2 +0,0 @@
-# This file cannot use the extension `.yaml`.
-blank_issues_enabled: false
```
**.github/ISSUE_TEMPLATE/issue.yaml** (vendored, 22 changes)
```diff
@@ -1,22 +0,0 @@
-name: New issue
-description: A generic issue
-
-body:
-  - type: markdown
-    attributes:
-      value: |
-        Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
-
-        If you're filing a bug report, please consider including the following information:
-
-        * List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
-          e.g. "RUF001", "unused variable", "Jupyter notebook"
-        * A minimal code snippet that reproduces the bug.
-        * The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
-        * The current Ruff settings (any relevant sections from your `pyproject.toml`).
-        * The current Ruff version (`ruff --version`).
-
-  - type: textarea
-    attributes:
-      label: Description
-      description: A description of the issue
```
**.github/workflows/build-binaries.yml** (vendored, 2 changes)
```diff
@@ -23,8 +23,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-permissions: {}
-
 env:
   PACKAGE_NAME: ruff
   MODULE_NAME: ruff
```
**.github/workflows/build-docker.yml** (vendored, 2 changes)

```diff
@@ -51,7 +51,7 @@ jobs:
       env:
         TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
       run: |
-        version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
+        version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
        if [ "${TAG}" != "${version}" ]; then
          echo "The input tag does not match the version from pyproject.toml:" >&2
          echo "${TAG}" >&2
```
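
The base side of this hunk anchors the lookup with `grep -m 1 "^version = "`, so only the first, line-initial `version` key can match; the older head side greps every occurrence of `version = `, which can also hit dependency pins or other keys later in `pyproject.toml`. A minimal, hypothetical Rust sketch of the same first-match rule (not part of the workflow; standard library only):

```rust
use std::fs;

/// Return the first line-initial `version = "..."` value, mirroring
/// `grep -m 1 "^version = " pyproject.toml | sed ...`.
fn crate_version(pyproject: &str) -> Option<String> {
    pyproject
        .lines()
        // `starts_with` plays the role of grep's `^` anchor, and `find`
        // stops at the first hit like `-m 1`.
        .find(|line| line.starts_with("version = "))
        .and_then(|line| line.split('"').nth(1))
        .map(str::to_owned)
}

fn main() -> std::io::Result<()> {
    let contents = fs::read_to_string("pyproject.toml")?;
    match crate_version(&contents) {
        Some(version) => println!("{version}"),
        None => eprintln!("no version field found"),
    }
    Ok(())
}
```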
**.github/workflows/ci.yaml** (vendored, 58 changes)
```diff
@@ -1,7 +1,5 @@
 name: CI
 
-permissions: {}
-
 on:
   push:
     branches: [main]
@@ -61,7 +59,6 @@ jobs:
             - Cargo.toml
             - Cargo.lock
             - crates/**
-            - "!crates/red_knot*/**"
             - "!crates/ruff_python_formatter/**"
             - "!crates/ruff_formatter/**"
             - "!crates/ruff_dev/**"
@@ -119,11 +116,11 @@ jobs:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: |
           rustup component add clippy
           rustup target add wasm32-unknown-unknown
+      - uses: Swatinem/rust-cache@v2
       - name: "Clippy"
         run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
       - name: "Clippy (wasm)"
@@ -133,13 +130,12 @@ jobs:
     name: "cargo test (linux)"
     runs-on: depot-ubuntu-22.04-16
     needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
@@ -152,6 +148,7 @@ jobs:
         uses: taiki-e/install-action@v2
         with:
          tool: cargo-insta
+      - uses: Swatinem/rust-cache@v2
       - name: "Run tests"
         shell: bash
         env:
@@ -179,13 +176,12 @@ jobs:
     name: "cargo test (linux, release)"
     runs-on: depot-ubuntu-22.04-16
     needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
@@ -198,6 +194,7 @@ jobs:
         uses: taiki-e/install-action@v2
         with:
           tool: cargo-insta
+      - uses: Swatinem/rust-cache@v2
       - name: "Run tests"
         shell: bash
         env:
@@ -208,23 +205,22 @@ jobs:
     name: "cargo test (windows)"
     runs-on: github-windows-2025-x86_64-16
     needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo nextest"
         uses: taiki-e/install-action@v2
         with:
           tool: cargo-nextest
+      - uses: Swatinem/rust-cache@v2
       - name: "Run tests"
         shell: bash
         env:
           NEXTEST_PROFILE: "ci"
           # Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
           RUSTUP_WINDOWS_PATH_ADD_BIN: 1
         run: |
@@ -235,13 +231,12 @@ jobs:
     name: "cargo test (wasm)"
     runs-on: ubuntu-latest
     needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     timeout-minutes: 10
     steps:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
       - uses: actions/setup-node@v4
@@ -252,6 +247,7 @@ jobs:
       - uses: jetli/wasm-pack-action@v0.4.0
         with:
           version: v0.13.1
+      - uses: Swatinem/rust-cache@v2
       - name: "Test ruff_wasm"
         run: |
           cd crates/ruff_wasm
@@ -270,11 +266,11 @@ jobs:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@v1
+      - uses: Swatinem/rust-cache@v2
       - name: "Build"
         run: cargo build --release --locked
 
@@ -282,7 +278,7 @@ jobs:
     name: "cargo build (msrv)"
     runs-on: ubuntu-latest
     needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4
@@ -293,7 +289,6 @@ jobs:
         with:
           file: "Cargo.toml"
           field: "workspace.package.rust-version"
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         env:
           MSRV: ${{ steps.msrv.outputs.value }}
@@ -308,6 +303,7 @@ jobs:
         uses: taiki-e/install-action@v2
         with:
           tool: cargo-insta
+      - uses: Swatinem/rust-cache@v2
       - name: "Run tests"
         shell: bash
         env:
@@ -325,11 +321,11 @@ jobs:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
+      - name: "Install Rust toolchain"
+        run: rustup show
       - uses: Swatinem/rust-cache@v2
         with:
           workspaces: "fuzz -> target"
-      - name: "Install Rust toolchain"
-        run: rustup show
       - name: "Install cargo-binstall"
         uses: cargo-bins/cargo-binstall@main
         with:
@@ -345,7 +341,7 @@ jobs:
     needs:
       - cargo-test-linux
       - determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
+    if: ${{ needs.determine_changes.outputs.parser == 'true' }}
     timeout-minutes: 20
     env:
       FORCE_COLOR: 1
@@ -381,15 +377,15 @@ jobs:
     name: "test scripts"
     runs-on: ubuntu-latest
     needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     timeout-minutes: 5
     steps:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: rustup component add rustfmt
+      - uses: Swatinem/rust-cache@v2
       # Run all code generation scripts, and verify that the current output is
       # already checked into git.
       - run: python crates/ruff_python_ast/generate.py
@@ -413,7 +409,7 @@ jobs:
       - determine_changes
     # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
     # Ecosystem check needs linter and/or formatter changes.
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
+    if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4
@@ -547,7 +543,6 @@ jobs:
     name: "python package"
     runs-on: ubuntu-latest
     timeout-minutes: 20
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -582,9 +577,9 @@ jobs:
       - uses: actions/setup-python@v5
         with:
           python-version: ${{ env.PYTHON_VERSION }}
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: rustup show
+      - uses: Swatinem/rust-cache@v2
       - name: "Install pre-commit"
         run: pip install pre-commit
       - name: "Cache pre-commit"
@@ -616,7 +611,6 @@ jobs:
       - uses: actions/setup-python@v5
         with:
           python-version: "3.13"
-      - uses: Swatinem/rust-cache@v2
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         uses: webfactory/ssh-agent@v0.9.0
@@ -626,6 +620,7 @@ jobs:
         run: rustup show
       - name: Install uv
         uses: astral-sh/setup-uv@v5
+      - uses: Swatinem/rust-cache@v2
       - name: "Install Insiders dependencies"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         run: uv pip install -r docs/requirements-insiders.txt --system
@@ -649,15 +644,16 @@ jobs:
     name: "formatter instabilities and black similarity"
     runs-on: ubuntu-latest
     needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
+    if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
     timeout-minutes: 10
     steps:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@v2
       - name: "Install Rust toolchain"
         run: rustup show
+      - name: "Cache rust"
+        uses: Swatinem/rust-cache@v2
       - name: "Run checks"
         run: scripts/formatter_ecosystem_checks.sh
       - name: "Github step summary"
@@ -672,7 +668,7 @@ jobs:
     needs:
       - cargo-test-linux
       - determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
     steps:
       - uses: extractions/setup-just@v2
         env:
@@ -714,7 +710,7 @@ jobs:
   benchmarks:
     runs-on: ubuntu-22.04
     needs: determine_changes
-    if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     timeout-minutes: 20
     steps:
       - name: "Checkout Branch"
@@ -722,8 +718,6 @@ jobs:
         with:
           persist-credentials: false
 
-      - uses: Swatinem/rust-cache@v2
-
       - name: "Install Rust toolchain"
         run: rustup show
 
@@ -732,6 +726,8 @@ jobs:
         with:
           tool: cargo-codspeed
 
+      - uses: Swatinem/rust-cache@v2
+
       - name: "Build benchmarks"
         run: cargo codspeed build --features codspeed -p ruff_benchmark
 
```
**.github/zizmor.yml** (vendored, 7 changes)
```diff
@@ -10,10 +10,3 @@ rules:
     ignore:
       - build-docker.yml
       - publish-playground.yml
-  excessive-permissions:
-    # it's hard to test what the impact of removing these ignores would be
-    # without actually running the release workflow...
-    ignore:
-      - build-docker.yml
-      - publish-playground.yml
-      - publish-docs.yml
```
**.gitignore** (vendored, 4 changes)
```diff
@@ -29,10 +29,6 @@ tracing.folded
 tracing-flamechart.svg
 tracing-flamegraph.svg
 
-# insta
-*.rs.pending-snap
-
-
 ###
 # Rust.gitignore
 ###
```
```diff
@@ -91,7 +91,7 @@ repos:
   # zizmor detects security vulnerabilities in GitHub Actions workflows.
   # Additional configuration for the tool is found in `.github/zizmor.yml`
   - repo: https://github.com/woodruffw/zizmor-pre-commit
-    rev: v1.2.2
+    rev: v1.1.1
     hooks:
       - id: zizmor
 
```
**CHANGELOG.md** (58 changes)
```diff
@@ -1,63 +1,5 @@
 # Changelog
 
-## 0.9.3
-
-### Preview features
-
-- \[`airflow`\] Argument `fail_stop` in DAG has been renamed as `fail_fast` (`AIR302`) ([#15633](https://github.com/astral-sh/ruff/pull/15633))
-- \[`airflow`\] Extend `AIR303` with more symbols ([#15611](https://github.com/astral-sh/ruff/pull/15611))
-- \[`flake8-bandit`\] Report all references to suspicious functions (`S3`) ([#15541](https://github.com/astral-sh/ruff/pull/15541))
-- \[`flake8-pytest-style`\] Do not emit diagnostics for empty `for` loops (`PT012`, `PT031`) ([#15542](https://github.com/astral-sh/ruff/pull/15542))
-- \[`flake8-simplify`\] Avoid double negations (`SIM103`) ([#15562](https://github.com/astral-sh/ruff/pull/15562))
-- \[`pyflakes`\] Fix infinite loop with unused local import in `__init__.py` (`F401`) ([#15517](https://github.com/astral-sh/ruff/pull/15517))
-- \[`pylint`\] Do not report methods with only one `EM101`-compatible `raise` (`PLR6301`) ([#15507](https://github.com/astral-sh/ruff/pull/15507))
-- \[`pylint`\] Implement `redefined-slots-in-subclass` (`W0244`) ([#9640](https://github.com/astral-sh/ruff/pull/9640))
-- \[`pyupgrade`\] Add rules to use PEP 695 generics in classes and functions (`UP046`, `UP047`) ([#15565](https://github.com/astral-sh/ruff/pull/15565), [#15659](https://github.com/astral-sh/ruff/pull/15659))
-- \[`refurb`\] Implement `for-loop-writes` (`FURB122`) ([#10630](https://github.com/astral-sh/ruff/pull/10630))
-- \[`ruff`\] Implement `needless-else` clause (`RUF047`) ([#15051](https://github.com/astral-sh/ruff/pull/15051))
-- \[`ruff`\] Implement `starmap-zip` (`RUF058`) ([#15483](https://github.com/astral-sh/ruff/pull/15483))
-
-### Rule changes
-
-- \[`flake8-bugbear`\] Do not raise error if keyword argument is present and target-python version is less or equals than 3.9 (`B903`) ([#15549](https://github.com/astral-sh/ruff/pull/15549))
-- \[`flake8-comprehensions`\] strip parentheses around generators in `unnecessary-generator-set` (`C401`) ([#15553](https://github.com/astral-sh/ruff/pull/15553))
-- \[`flake8-simplify`\] Mark fixes as unsafe (`SIM201`, `SIM202`) ([#15626](https://github.com/astral-sh/ruff/pull/15626))
-- \[`flake8-type-checking`\] Fix some safe fixes being labeled unsafe (`TC006`,`TC008`) ([#15638](https://github.com/astral-sh/ruff/pull/15638))
-- \[`isort`\] Omit trailing whitespace in `unsorted-imports` (`I001`) ([#15518](https://github.com/astral-sh/ruff/pull/15518))
-- \[`pydoclint`\] Allow ignoring one line docstrings for `DOC` rules ([#13302](https://github.com/astral-sh/ruff/pull/13302))
-- \[`pyflakes`\] Apply redefinition fixes by source code order (`F811`) ([#15575](https://github.com/astral-sh/ruff/pull/15575))
-- \[`pyflakes`\] Avoid removing too many imports in `redefined-while-unused` (`F811`) ([#15585](https://github.com/astral-sh/ruff/pull/15585))
-- \[`pyflakes`\] Group redefinition fixes by source statement (`F811`) ([#15574](https://github.com/astral-sh/ruff/pull/15574))
-- \[`pylint`\] Include name of base class in message for `redefined-slots-in-subclass` (`W0244`) ([#15559](https://github.com/astral-sh/ruff/pull/15559))
-- \[`ruff`\] Update fix for `RUF055` to use `var == value` ([#15605](https://github.com/astral-sh/ruff/pull/15605))
-
-### Formatter
-
-- Fix bracket spacing for single-element tuples in f-string expressions ([#15537](https://github.com/astral-sh/ruff/pull/15537))
-- Fix unstable f-string formatting for expressions containing a trailing comma ([#15545](https://github.com/astral-sh/ruff/pull/15545))
-
-### Performance
-
-- Avoid quadratic membership check in import fixes ([#15576](https://github.com/astral-sh/ruff/pull/15576))
-
-### Server
-
-- Allow `unsafe-fixes` settings for code actions ([#15666](https://github.com/astral-sh/ruff/pull/15666))
-
-### Bug fixes
-
-- \[`flake8-bandit`\] Add missing single-line/dotall regex flag (`S608`) ([#15654](https://github.com/astral-sh/ruff/pull/15654))
-- \[`flake8-simplify`\] Do not emit diagnostics for expressions inside string type annotations (`SIM222`, `SIM223`) ([#15405](https://github.com/astral-sh/ruff/pull/15405))
-- \[`pyflakes`\] Treat arguments passed to the `default=` parameter of `TypeVar` as type expressions (`F821`) ([#15679](https://github.com/astral-sh/ruff/pull/15679))
-- \[`pyupgrade`\] Avoid syntax error when the iterable is a non-parenthesized tuple (`UP028`) ([#15543](https://github.com/astral-sh/ruff/pull/15543))
-- \[`ruff`\] Exempt `NewType` calls where the original type is immutable (`RUF009`) ([#15588](https://github.com/astral-sh/ruff/pull/15588))
-- \[`unconventional-import-alias`\] Fix infinite loop between `ICN001` and `I002` (`ICN001`) ([#15480](https://github.com/astral-sh/ruff/pull/15480))
-
-### Documentation
-
-- Generate documentation redirects for lowercase rule codes ([#15564](https://github.com/astral-sh/ruff/pull/15564))
-- `TRY300`: Add some extra notes on not catching exceptions you didn't expect ([#15036](https://github.com/astral-sh/ruff/pull/15036))
-
 ## 0.9.2
 
 ### Preview features
```
**Cargo.lock** (generated, 7 changes)
```diff
@@ -2332,7 +2332,6 @@ dependencies = [
  "red_knot_server",
  "regex",
  "ruff_db",
- "ruff_python_trivia",
  "salsa",
  "tempfile",
  "toml",
@@ -2584,7 +2583,7 @@ dependencies = [
 
 [[package]]
 name = "ruff"
-version = "0.9.3"
+version = "0.9.2"
 dependencies = [
  "anyhow",
  "argfile",
@@ -2818,7 +2817,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_linter"
-version = "0.9.3"
+version = "0.9.2"
 dependencies = [
  "aho-corasick",
  "anyhow",
@@ -3135,7 +3134,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_wasm"
-version = "0.9.3"
+version = "0.9.2"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
```
````diff
@@ -149,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
 
 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.9.3/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.9.3/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.9.2/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.9.2/install.ps1 | iex"
 ```
 
@@ -183,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`]
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.9.3
+  rev: v0.9.2
   hooks:
     # Run the linter.
     - id: ruff
````
```diff
@@ -33,7 +33,6 @@ tracing-tree = { workspace = true }
 
 [dev-dependencies]
 ruff_db = { workspace = true, features = ["testing"] }
-ruff_python_trivia = { workspace = true }
 
 insta = { workspace = true, features = ["filters"] }
 insta-cmd = { workspace = true }
```
```diff
@@ -7,7 +7,6 @@ use colored::Colorize;
 use crossbeam::channel as crossbeam_channel;
 use python_version::PythonVersion;
 use red_knot_project::metadata::options::{EnvironmentOptions, Options};
-use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
 use red_knot_project::watch;
 use red_knot_project::watch::ProjectWatcher;
 use red_knot_project::{ProjectDatabase, ProjectMetadata};
@@ -70,18 +69,22 @@ struct Args {
 }
 
 impl Args {
-    fn to_options(&self) -> Options {
+    fn to_options(&self, cli_cwd: &SystemPath) -> Options {
         Options {
             environment: Some(EnvironmentOptions {
-                python_version: self
-                    .python_version
-                    .map(|version| RangedValue::cli(version.into())),
-                venv_path: self.venv_path.as_ref().map(RelativePathBuf::cli),
-                typeshed: self.typeshed.as_ref().map(RelativePathBuf::cli),
+                python_version: self.python_version.map(Into::into),
+                venv_path: self
+                    .venv_path
+                    .as_ref()
+                    .map(|venv_path| SystemPath::absolute(venv_path, cli_cwd)),
+                typeshed: self
+                    .typeshed
+                    .as_ref()
+                    .map(|typeshed| SystemPath::absolute(typeshed, cli_cwd)),
                 extra_paths: self.extra_search_path.as_ref().map(|extra_search_paths| {
                     extra_search_paths
                         .iter()
-                        .map(RelativePathBuf::cli)
+                        .map(|path| SystemPath::absolute(path, cli_cwd))
                         .collect()
                 }),
                 ..EnvironmentOptions::default()
@@ -155,8 +158,8 @@ fn run() -> anyhow::Result<ExitStatus> {
         .transpose()?
         .unwrap_or_else(|| cli_base_path.clone());
 
-    let system = OsSystem::new(cwd);
-    let cli_options = args.to_options();
+    let system = OsSystem::new(cwd.clone());
+    let cli_options = args.to_options(&cwd);
     let mut workspace_metadata = ProjectMetadata::discover(system.current_directory(), &system)?;
     workspace_metadata.apply_cli_options(cli_options.clone());
 
```
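
The signature change above is the heart of this hunk: paths passed on the CLI get resolved against the directory the command was invoked from (`cli_cwd`), not against the project root. A small sketch of that behavior using `std::path` (the real code uses `ruff_db`'s `SystemPath::absolute`; the helper below is a stand-in):

```rust
use std::path::{Path, PathBuf};

/// Resolve a possibly-relative CLI argument against the invocation directory.
fn absolute(path: &Path, cli_cwd: &Path) -> PathBuf {
    if path.is_absolute() {
        path.to_path_buf()
    } else {
        // `join` keeps `..` components; normalization is left to the caller.
        cli_cwd.join(path)
    }
}

fn main() {
    let cli_cwd = Path::new("/repo/child");
    // e.g. `red_knot --extra-search-path ../libs`, run from `/repo/child`:
    assert_eq!(
        absolute(Path::new("../libs"), cli_cwd),
        PathBuf::from("/repo/child/../libs")
    );
    assert_eq!(absolute(Path::new("/abs"), cli_cwd), PathBuf::from("/abs"));
}
```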
```diff
@@ -1,327 +1,60 @@
 use anyhow::Context;
-use insta::Settings;
 use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
-use std::path::{Path, PathBuf};
 use std::process::Command;
 use tempfile::TempDir;
 
-/// Specifying an option on the CLI should take precedence over the same setting in the
-/// project's configuration.
 #[test]
-fn config_override() -> anyhow::Result<()> {
-    let case = TestCase::with_files([
-        (
-            "pyproject.toml",
-            r#"
-            [tool.knot.environment]
-            python-version = "3.11"
-            "#,
-        ),
-        (
-            "test.py",
-            r#"
-            import sys
-
-            # Access `sys.last_exc` that was only added in Python 3.12
-            print(sys.last_exc)
-            "#,
-        ),
-    ])?;
-
-    case.insta_settings().bind(|| {
-        assert_cmd_snapshot!(case.command(), @r"
-        success: false
-        exit_code: 1
-        ----- stdout -----
-        error[lint:unresolved-attribute] <temp_dir>/test.py:5:7 Type `<module 'sys'>` has no attribute `last_exc`
-
-        ----- stderr -----
-        ");
-
-        assert_cmd_snapshot!(case.command().arg("--python-version").arg("3.12"), @r"
-        success: true
-        exit_code: 0
-        ----- stdout -----
-
-        ----- stderr -----
-        ");
-    });
-
-    Ok(())
-}
-
-/// Paths specified on the CLI are relative to the current working directory and not the project root.
-///
-/// We test this by adding an extra search path from the CLI to the libs directory when
-/// running the CLI from the child directory (using relative paths).
-///
-/// Project layout:
-/// ```
-///  - libs
-///    |- utils.py
-///  - child
-///    | - test.py
-///  - pyproject.toml
-/// ```
-///
-/// And the command is run in the `child` directory.
-#[test]
-fn cli_arguments_are_relative_to_the_current_directory() -> anyhow::Result<()> {
-    let case = TestCase::with_files([
-        (
-            "pyproject.toml",
-            r#"
-            [tool.knot.environment]
-            python-version = "3.11"
-            "#,
-        ),
-        (
-            "libs/utils.py",
-            r#"
-            def add(a: int, b: int) -> int:
-                a + b
-            "#,
-        ),
-        (
-            "child/test.py",
-            r#"
-            from utils import add
-
-            stat = add(10, 15)
-            "#,
-        ),
-    ])?;
-
-    case.insta_settings().bind(|| {
-        // Make sure that the CLI fails when the `libs` directory is not in the search path.
-        assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")), @r#"
-        success: false
-        exit_code: 1
-        ----- stdout -----
-        error[lint:unresolved-import] <temp_dir>/child/test.py:2:1 Cannot resolve import `utils`
-
-        ----- stderr -----
-        "#);
-
-        assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")).arg("--extra-search-path").arg("../libs"), @r"
-        success: true
-        exit_code: 0
-        ----- stdout -----
-
-        ----- stderr -----
-        ");
-    });
-
-    Ok(())
-}
-
-/// Paths specified in a configuration file are relative to the project root.
-///
-/// We test this by adding `libs` (as a relative path) to the extra search path in the configuration and run
-/// the CLI from a subdirectory.
-///
-/// Project layout:
-/// ```
-///  - libs
-///    |- utils.py
-///  - child
-///    | - test.py
-///  - pyproject.toml
-/// ```
-#[test]
-fn paths_in_configuration_files_are_relative_to_the_project_root() -> anyhow::Result<()> {
-    let case = TestCase::with_files([
-        (
-            "pyproject.toml",
-            r#"
-            [tool.knot.environment]
-            python-version = "3.11"
-            extra-paths = ["libs"]
-            "#,
-        ),
-        (
-            "libs/utils.py",
-            r#"
-            def add(a: int, b: int) -> int:
-                a + b
-            "#,
-        ),
-        (
-            "child/test.py",
-            r#"
-            from utils import add
-
-            stat = add(10, 15)
-            "#,
-        ),
-    ])?;
-
-    case.insta_settings().bind(|| {
-        assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")), @r"
-        success: true
-        exit_code: 0
-        ----- stdout -----
-
-        ----- stderr -----
-        ");
-    });
-
-    Ok(())
-}
-
-/// The rule severity can be changed in the configuration file
-#[test]
-fn rule_severity() -> anyhow::Result<()> {
-    let case = TestCase::with_file(
-        "test.py",
-        r#"
-        y = 4 / 0
-
-        for a in range(0, y):
-            x = a
-
-        print(x) # possibly-unresolved-reference
-        "#,
-    )?;
-
-    case.insta_settings().bind(|| {
-        // Assert that there's a possibly unresolved reference diagnostic
-        // and that division-by-zero has a severity of error by default.
-        assert_cmd_snapshot!(case.command(), @r"
-        success: false
-        exit_code: 1
-        ----- stdout -----
-        error[lint:division-by-zero] <temp_dir>/test.py:2:5 Cannot divide object of type `Literal[4]` by zero
-        warning[lint:possibly-unresolved-reference] <temp_dir>/test.py:7:7 Name `x` used when possibly not defined
-
-        ----- stderr -----
-        ");
-
-        case.write_file("pyproject.toml", r#"
-        [tool.knot.rules]
-        division-by-zero = "warn" # demote to warn
-        possibly-unresolved-reference = "ignore"
-        "#)?;
-
-        assert_cmd_snapshot!(case.command(), @r"
-        success: false
-        exit_code: 1
-        ----- stdout -----
-        warning[lint:division-by-zero] <temp_dir>/test.py:2:5 Cannot divide object of type `Literal[4]` by zero
-
-        ----- stderr -----
-        ");
-
-        Ok(())
-    })
-}
-
-/// Red Knot warns about unknown rules
-#[test]
-fn unknown_rules() -> anyhow::Result<()> {
-    let case = TestCase::with_files([
-        (
-            "pyproject.toml",
-            r#"
-            [tool.knot.rules]
-            division-by-zer = "warn" # incorrect rule name
-            "#,
-        ),
-        ("test.py", "print(10)"),
-    ])?;
-
-    case.insta_settings().bind(|| {
-        assert_cmd_snapshot!(case.command(), @r"
-        success: false
-        exit_code: 1
-        ----- stdout -----
-        warning[unknown-rule] <temp_dir>/pyproject.toml:3:1 Unknown lint rule `division-by-zer`
-
-        ----- stderr -----
-        ");
-    });
-
-    Ok(())
-}
-
-struct TestCase {
-    _temp_dir: TempDir,
-    project_dir: PathBuf,
-}
-
-impl TestCase {
-    fn new() -> anyhow::Result<Self> {
-        let temp_dir = TempDir::new()?;
-
-        // Canonicalize the tempdir path because macos uses symlinks for tempdirs
-        // and that doesn't play well with our snapshot filtering.
-        let project_dir = temp_dir
-            .path()
-            .canonicalize()
-            .context("Failed to canonicalize project path")?;
-
-        Ok(Self {
-            project_dir,
-            _temp_dir: temp_dir,
-        })
-    }
-
-    fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
-        let case = Self::new()?;
-        case.write_files(files)?;
-        Ok(case)
-    }
-
-    fn with_file(path: impl AsRef<Path>, content: &str) -> anyhow::Result<Self> {
-        let case = Self::new()?;
-        case.write_file(path, content)?;
-        Ok(case)
-    }
-
-    fn write_files<'a>(
-        &self,
-        files: impl IntoIterator<Item = (&'a str, &'a str)>,
-    ) -> anyhow::Result<()> {
-        for (path, content) in files {
-            self.write_file(path, content)?;
-        }
-
-        Ok(())
-    }
-
-    fn write_file(&self, path: impl AsRef<Path>, content: &str) -> anyhow::Result<()> {
-        let path = path.as_ref();
-        let path = self.project_dir.join(path);
-
-        if let Some(parent) = path.parent() {
-            std::fs::create_dir_all(parent)
-                .with_context(|| format!("Failed to create directory `{}`", parent.display()))?;
-        }
-        std::fs::write(&path, &*ruff_python_trivia::textwrap::dedent(content))
-            .with_context(|| format!("Failed to write file `{path}`", path = path.display()))?;
-
-        Ok(())
-    }
-
-    fn project_dir(&self) -> &Path {
-        &self.project_dir
-    }
-
-    // Returns the insta filters to escape paths in snapshots
-    fn insta_settings(&self) -> Settings {
-        let mut settings = insta::Settings::clone_current();
-        settings.add_filter(&tempdir_filter(&self.project_dir), "<temp_dir>/");
-        settings.add_filter(r#"\\(\w\w|\s|\.|")"#, "/$1");
-        settings
-    }
-
-    fn command(&self) -> Command {
-        let mut command = Command::new(get_cargo_bin("red_knot"));
-        command.current_dir(&self.project_dir);
-        command
-    }
-}
-
-fn tempdir_filter(path: &Path) -> String {
-    format!(r"{}\\?/?", regex::escape(path.to_str().unwrap()))
+fn test_config_override() -> anyhow::Result<()> {
+    let tempdir = TempDir::new()?;
+
+    std::fs::write(
+        tempdir.path().join("pyproject.toml"),
+        r#"
+        [tool.knot.environment]
+        python-version = "3.11"
+        "#,
+    )
+    .context("Failed to write settings")?;
+
+    std::fs::write(
+        tempdir.path().join("test.py"),
+        r#"
+        import sys
+
+        # Access `sys.last_exc` that was only added in Python 3.12
+        print(sys.last_exc)
+        "#,
+    )
+    .context("Failed to write test.py")?;
+
+    insta::with_settings!({filters => vec![(&*tempdir_filter(&tempdir), "<temp_dir>/")]}, {
+        assert_cmd_snapshot!(knot().arg("--project").arg(tempdir.path()), @r"
+        success: false
+        exit_code: 1
+        ----- stdout -----
+        error[lint:unresolved-attribute] <temp_dir>/test.py:5:7 Type `<module 'sys'>` has no attribute `last_exc`
+
+        ----- stderr -----
+        ");
+
+        assert_cmd_snapshot!(knot().arg("--project").arg(tempdir.path()).arg("--python-version").arg("3.12"), @r"
+        success: true
+        exit_code: 0
+        ----- stdout -----
+
+        ----- stderr -----
+        ");
+    });
+
+    Ok(())
+}
+
+fn knot() -> Command {
+    Command::new(get_cargo_bin("red_knot"))
+}
+
+fn tempdir_filter(tempdir: &TempDir) -> String {
+    format!(r"{}\\?/?", regex::escape(tempdir.path().to_str().unwrap()))
 }
```
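
Both sides of this test rewrite rely on the same snapshot-stabilization trick: the randomly named temp directory is regex-escaped and replaced with a fixed `<temp_dir>/` token before insta compares command output. A stripped-down sketch of that mechanism (using the `regex` crate, which the crate already depends on):

```rust
use std::path::Path;

// Build a pattern matching the temp dir plus an optional trailing separator.
fn tempdir_filter(path: &Path) -> String {
    format!(r"{}\\?/?", regex::escape(path.to_str().unwrap()))
}

fn main() {
    let filter = tempdir_filter(Path::new("/tmp/.tmpAbC123"));
    let re = regex::Regex::new(&filter).unwrap();
    let output = "error[lint:unresolved-import] /tmp/.tmpAbC123/child/test.py:2:1";
    // The random prefix collapses to a stable token, so snapshots reproduce.
    assert_eq!(
        re.replace_all(output, "<temp_dir>/"),
        "error[lint:unresolved-import] <temp_dir>/child/test.py:2:1"
    );
}
```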
```diff
@@ -6,7 +6,6 @@ use std::time::{Duration, Instant};
 use anyhow::{anyhow, Context};
 use red_knot_project::metadata::options::{EnvironmentOptions, Options};
 use red_knot_project::metadata::pyproject::{PyProject, Tool};
-use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
 use red_knot_project::watch::{directory_watcher, ChangeEvent, ProjectWatcher};
 use red_knot_project::{Db, ProjectDatabase, ProjectMetadata};
 use red_knot_python_semantic::{resolve_module, ModuleName, PythonPlatform, PythonVersion};
@@ -322,7 +321,7 @@ where
         .search_paths
         .extra_paths
         .iter()
-        .chain(program_settings.search_paths.custom_typeshed.as_ref())
+        .chain(program_settings.search_paths.typeshed.as_ref())
     {
         std::fs::create_dir_all(path.as_std_path())
             .with_context(|| format!("Failed to create search path `{path}`"))?;
@@ -792,7 +791,7 @@ fn search_path() -> anyhow::Result<()> {
     let mut case = setup_with_options([("bar.py", "import sub.a")], |root_path, _project_path| {
         Some(Options {
             environment: Some(EnvironmentOptions {
-                extra_paths: Some(vec![RelativePathBuf::cli(root_path.join("site_packages"))]),
+                extra_paths: Some(vec![root_path.join("site_packages")]),
                 ..EnvironmentOptions::default()
             }),
             ..Options::default()
@@ -833,7 +832,7 @@ fn add_search_path() -> anyhow::Result<()> {
     // Register site-packages as a search path.
     case.update_options(Options {
         environment: Some(EnvironmentOptions {
-            extra_paths: Some(vec![RelativePathBuf::cli("site_packages")]),
+            extra_paths: Some(vec![site_packages.clone()]),
             ..EnvironmentOptions::default()
         }),
         ..Options::default()
@@ -856,7 +855,7 @@ fn remove_search_path() -> anyhow::Result<()> {
     let mut case = setup_with_options([("bar.py", "import sub.a")], |root_path, _project_path| {
         Some(Options {
             environment: Some(EnvironmentOptions {
-                extra_paths: Some(vec![RelativePathBuf::cli(root_path.join("site_packages"))]),
+                extra_paths: Some(vec![root_path.join("site_packages")]),
                 ..EnvironmentOptions::default()
             }),
             ..Options::default()
@@ -897,10 +896,8 @@ print(sys.last_exc, os.getegid())
         |_root_path, _project_path| {
             Some(Options {
                 environment: Some(EnvironmentOptions {
-                    python_version: Some(RangedValue::cli(PythonVersion::PY311)),
-                    python_platform: Some(RangedValue::cli(PythonPlatform::Identifier(
-                        "win32".to_string(),
-                    ))),
+                    python_version: Some(PythonVersion::PY311),
+                    python_platform: Some(PythonPlatform::Identifier("win32".to_string())),
                     ..EnvironmentOptions::default()
                 }),
                 ..Options::default()
@@ -923,10 +920,8 @@ print(sys.last_exc, os.getegid())
     // Change the python version
     case.update_options(Options {
         environment: Some(EnvironmentOptions {
-            python_version: Some(RangedValue::cli(PythonVersion::PY312)),
-            python_platform: Some(RangedValue::cli(PythonPlatform::Identifier(
-                "linux".to_string(),
-            ))),
+            python_version: Some(PythonVersion::PY312),
+            python_platform: Some(PythonPlatform::Identifier("linux".to_string())),
             ..EnvironmentOptions::default()
         }),
        ..Options::default()
@@ -956,7 +951,7 @@ fn changed_versions_file() -> anyhow::Result<()> {
     |root_path, _project_path| {
         Some(Options {
             environment: Some(EnvironmentOptions {
-                typeshed: Some(RelativePathBuf::cli(root_path.join("typeshed"))),
+                typeshed: Some(root_path.join("typeshed")),
                 ..EnvironmentOptions::default()
             }),
             ..Options::default()
@@ -1380,13 +1375,11 @@ mod unix {
 
             Ok(())
         },
-        |_root, _project| {
+        |_root, project| {
             Some(Options {
                 environment: Some(EnvironmentOptions {
-                    extra_paths: Some(vec![RelativePathBuf::cli(
-                        ".venv/lib/python3.12/site-packages",
-                    )]),
-                    python_version: Some(RangedValue::cli(PythonVersion::PY312)),
+                    extra_paths: Some(vec![project.join(".venv/lib/python3.12/site-packages")]),
+                    python_version: Some(PythonVersion::PY312),
                     ..EnvironmentOptions::default()
                 }),
                 ..Options::default()
```
```diff
@@ -2,7 +2,7 @@ use std::panic::RefUnwindSafe;
 use std::sync::Arc;
 
 use crate::DEFAULT_LINT_REGISTRY;
-use crate::{Project, ProjectMetadata};
+use crate::{check_file, Project, ProjectMetadata};
 use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
 use red_knot_python_semantic::{Db as SemanticDb, Program};
 use ruff_db::diagnostic::Diagnostic;
@@ -27,6 +27,7 @@ pub struct ProjectDatabase {
     storage: salsa::Storage<ProjectDatabase>,
     files: Files,
     system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
+    rule_selection: Arc<RuleSelection>,
 }
 
 impl ProjectDatabase {
@@ -34,11 +35,14 @@ impl ProjectDatabase {
     where
         S: System + 'static + Send + Sync + RefUnwindSafe,
     {
+        let rule_selection = RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY);
+
         let mut db = Self {
             project: None,
             storage: salsa::Storage::default(),
             files: Files::default(),
             system: Arc::new(system),
+            rule_selection: Arc::new(rule_selection),
         };
 
         // TODO: Use the `program_settings` to compute the key for the database's persistent
@@ -62,7 +66,7 @@ impl ProjectDatabase {
     pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
         let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();
 
-        self.with_db(|db| self.project().check_file(db, file))
+        self.with_db(|db| check_file(db, file))
     }
 
     /// Returns a mutable reference to the system.
@@ -115,7 +119,7 @@ impl SemanticDb for ProjectDatabase {
     }
 
     fn rule_selection(&self) -> &RuleSelection {
-        self.project().rule_selection(self)
+        &self.rule_selection
     }
 
     fn lint_registry(&self) -> &LintRegistry {
```
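
The two designs visible in this hunk differ in where rule configuration lives: the older head side computes one `RuleSelection` from the default registry at construction time and shares it across database clones through an `Arc`, while the 0.9.3 side derives it from the project's options via a salsa-tracked query so configuration edits take effect. A stand-in sketch of the `Arc`-sharing variant (placeholder types, not red_knot's real API):

```rust
use std::sync::Arc;

#[derive(Debug, Default)]
struct RuleSelection; // placeholder for the real rule table

#[derive(Clone)]
struct ProjectDatabase {
    rule_selection: Arc<RuleSelection>,
}

impl ProjectDatabase {
    fn new() -> Self {
        // Computed once at construction; every clone shares the same table.
        Self {
            rule_selection: Arc::new(RuleSelection::default()),
        }
    }

    fn rule_selection(&self) -> &RuleSelection {
        &self.rule_selection
    }
}

fn main() {
    let db = ProjectDatabase::new();
    let worker_db = db.clone(); // cheap: bumps a refcount, no recompute
    assert!(Arc::ptr_eq(&db.rule_selection, &worker_db.rule_selection));
    println!("{:?}", worker_db.rule_selection());
}
```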
```diff
@@ -1,10 +1,6 @@
 #![allow(clippy::ref_option)]
 
-use crate::metadata::options::OptionDiagnostic;
-pub use db::{Db, ProjectDatabase};
-use files::{Index, Indexed, IndexedFiles};
-pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
-use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder, RuleSelection};
+use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder};
 use red_knot_python_semantic::register_lints;
 use red_knot_python_semantic::types::check_types;
 use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
@@ -21,6 +17,10 @@ use salsa::Setter;
 use std::borrow::Cow;
 use std::sync::Arc;
 
+pub use db::{Db, ProjectDatabase};
+use files::{Index, Indexed, IndexedFiles};
+pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
+
 pub mod combine;
 
 mod db;
@@ -68,7 +68,6 @@ pub struct Project {
     pub metadata: ProjectMetadata,
 }
 
-#[salsa::tracked]
 impl Project {
     pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
         Project::builder(metadata)
@@ -97,34 +96,13 @@ impl Project {
         self.reload_files(db);
     }
 
-    pub fn rule_selection(self, db: &dyn Db) -> &RuleSelection {
-        let (selection, _) = self.rule_selection_with_diagnostics(db);
-        selection
-    }
-
-    #[salsa::tracked(return_ref)]
-    fn rule_selection_with_diagnostics(
-        self,
-        db: &dyn Db,
-    ) -> (RuleSelection, Vec<OptionDiagnostic>) {
-        self.metadata(db).options().to_rule_selection(db)
-    }
-
     /// Checks all open files in the project and its dependencies.
-    pub(crate) fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
+    pub fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
         let project_span = tracing::debug_span!("Project::check");
         let _span = project_span.enter();
 
         tracing::debug!("Checking project '{name}'", name = self.name(db));
 
-        let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
-        let (_, options_diagnostics) = self.rule_selection_with_diagnostics(db);
-        diagnostics.extend(options_diagnostics.iter().map(|diagnostic| {
-            let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
-            diagnostic
-        }));
-
-        let result = Arc::new(std::sync::Mutex::new(diagnostics));
+        let result = Arc::new(std::sync::Mutex::new(Vec::new()));
         let inner_result = Arc::clone(&result);
 
         let db = db.clone();
@@ -141,7 +119,7 @@ impl Project {
             let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
             let _entered = check_file_span.entered();
 
-            let file_diagnostics = check_file_impl(&db, file);
+            let file_diagnostics = check_file(&db, file);
             result.lock().unwrap().extend(file_diagnostics);
         });
     }
@@ -150,23 +128,6 @@ impl Project {
         Arc::into_inner(result).unwrap().into_inner().unwrap()
     }
 
-    pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
-        let (_, options_diagnostics) = self.rule_selection_with_diagnostics(db);
-
-        let mut file_diagnostics: Vec<_> = options_diagnostics
-            .iter()
-            .map(|diagnostic| {
-                let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
-                diagnostic
-            })
-            .collect();
-
-        let check_diagnostics = check_file_impl(db, file);
-        file_diagnostics.extend(check_diagnostics);
-
-        file_diagnostics
-    }
-
     /// Opens a file in the project.
     ///
     /// This changes the behavior of `check` to only check the open files rather than all files in the project.
@@ -304,9 +265,8 @@ impl Project {
     }
 }
 
-fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
+pub(crate) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
     let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
 
     // Abort checking if there are IO errors.
     let source = source_text(db.upcast(), file);
@@ -442,8 +402,8 @@ impl Diagnostic for IOErrorDiagnostic {
         self.error.to_string().into()
     }
 
-    fn file(&self) -> Option<File> {
-        Some(self.file)
+    fn file(&self) -> File {
+        self.file
     }
 
     fn range(&self) -> Option<TextRange> {
@@ -458,7 +418,7 @@ impl Diagnostic for IOErrorDiagnostic {
 #[cfg(test)]
 mod tests {
     use crate::db::tests::TestDb;
-    use crate::{check_file_impl, ProjectMetadata};
+    use crate::{check_file, ProjectMetadata};
     use red_knot_python_semantic::types::check_types;
     use ruff_db::diagnostic::Diagnostic;
     use ruff_db::files::system_path_to_file;
@@ -482,7 +442,7 @@ mod tests {
 
         assert_eq!(source_text(&db, file).as_str(), "");
         assert_eq!(
-            check_file_impl(&db, file)
+            check_file(&db, file)
                 .into_iter()
                 .map(|diagnostic| diagnostic.message().into_owned())
                 .collect::<Vec<_>>(),
@@ -498,7 +458,7 @@ mod tests {
 
         assert_eq!(source_text(&db, file).as_str(), "");
         assert_eq!(
-            check_file_impl(&db, file)
+            check_file(&db, file)
                 .into_iter()
                 .map(|diagnostic| diagnostic.message().into_owned())
                 .collect::<Vec<_>>(),
```
```diff
@@ -1,18 +1,15 @@
 use red_knot_python_semantic::ProgramSettings;
 use ruff_db::system::{System, SystemPath, SystemPathBuf};
 use ruff_python_ast::name::Name;
-use std::sync::Arc;
 use thiserror::Error;
 
 use crate::combine::Combine;
 use crate::metadata::pyproject::{Project, PyProject, PyProjectError};
-use crate::metadata::value::ValueSource;
 use options::KnotTomlError;
 use options::Options;
 
 pub mod options;
 pub mod pyproject;
-pub mod value;
 
 #[derive(Debug, PartialEq, Eq)]
 #[cfg_attr(test, derive(serde::Serialize))]
@@ -55,7 +52,7 @@ impl ProjectMetadata {
     ) -> Self {
         let name = project
             .and_then(|project| project.name.as_ref())
-            .map(|name| Name::new(&***name))
+            .map(|name| Name::new(&**name))
             .unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));
 
         // TODO(https://github.com/astral-sh/ruff/issues/15491): Respect requires-python
@@ -90,10 +87,7 @@ impl ProjectMetadata {
         let pyproject_path = project_root.join("pyproject.toml");
 
         let pyproject = if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
-            match PyProject::from_toml_str(
-                &pyproject_str,
-                ValueSource::File(Arc::new(pyproject_path.clone())),
-            ) {
+            match PyProject::from_toml_str(&pyproject_str) {
                 Ok(pyproject) => Some(pyproject),
                 Err(error) => {
                     return Err(ProjectDiscoveryError::InvalidPyProject {
@@ -109,10 +103,7 @@ impl ProjectMetadata {
         // A `knot.toml` takes precedence over a `pyproject.toml`.
         let knot_toml_path = project_root.join("knot.toml");
         if let Ok(knot_str) = system.read_to_string(&knot_toml_path) {
-            let options = match Options::from_toml_str(
-                &knot_str,
-                ValueSource::File(Arc::new(knot_toml_path.clone())),
-            ) {
+            let options = match Options::from_toml_str(&knot_str) {
                Ok(options) => options,
                Err(error) => {
                    return Err(ProjectDiscoveryError::InvalidKnotToml {
```
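
The removed `ValueSource` argument feeds a pattern worth spelling out: because `serde::Deserialize` impls take no extra context, the 0.9.3 side parks the current source (config file vs. CLI) in scoped state that an RAII guard clears again, letting each deserialized value be tagged with its origin. A self-contained sketch of that guard pattern (names and the `String` source are illustrative stand-ins, not the real types):

```rust
use std::cell::RefCell;

thread_local! {
    // Ambient "where is this value coming from?" state for the current thread.
    static VALUE_SOURCE: RefCell<Option<String>> = RefCell::new(None);
}

struct ValueSourceGuard;

impl ValueSourceGuard {
    fn new(source: String) -> Self {
        VALUE_SOURCE.with(|s| *s.borrow_mut() = Some(source));
        ValueSourceGuard
    }
}

impl Drop for ValueSourceGuard {
    // Clear the ambient source when parsing finishes, even on early return.
    fn drop(&mut self) {
        VALUE_SOURCE.with(|s| *s.borrow_mut() = None);
    }
}

fn current_source() -> Option<String> {
    VALUE_SOURCE.with(|s| s.borrow().clone())
}

fn main() {
    {
        let _guard = ValueSourceGuard::new("pyproject.toml".into());
        // A Deserialize impl running here can read the ambient source:
        assert_eq!(current_source().as_deref(), Some("pyproject.toml"));
    }
    assert_eq!(current_source(), None);
}
```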
```diff
@@ -1,37 +1,22 @@
-use crate::metadata::value::{RangedValue, RelativePathBuf, ValueSource, ValueSourceGuard};
 use crate::Db;
-use red_knot_python_semantic::lint::{GetLintError, Level, LintSource, RuleSelection};
 use red_knot_python_semantic::{
     ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
 };
-use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity};
-use ruff_db::files::{system_path_to_file, File};
-use ruff_db::system::{System, SystemPath};
+use ruff_db::system::{System, SystemPath, SystemPathBuf};
 use ruff_macros::Combine;
-use ruff_text_size::TextRange;
-use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
-use std::borrow::Cow;
 use std::fmt::Debug;
 use thiserror::Error;
 
 /// The options for the project.
 #[derive(Debug, Default, Clone, PartialEq, Eq, Combine, Serialize, Deserialize)]
 #[serde(rename_all = "kebab-case", deny_unknown_fields)]
 pub struct Options {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub environment: Option<EnvironmentOptions>,
 
     #[serde(skip_serializing_if = "Option::is_none")]
     pub src: Option<SrcOptions>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub rules: Option<Rules>,
 }
 
 impl Options {
-    pub(crate) fn from_toml_str(content: &str, source: ValueSource) -> Result<Self, KnotTomlError> {
-        let _guard = ValueSourceGuard::new(source);
+    pub(crate) fn from_toml_str(content: &str) -> Result<Self, KnotTomlError> {
         let options = toml::from_str(content)?;
         Ok(options)
     }
@@ -44,12 +29,7 @@
         let (python_version, python_platform) = self
             .environment
             .as_ref()
-            .map(|env| {
-                (
-                    env.python_version.as_deref().copied(),
-                    env.python_platform.as_deref(),
-                )
-            })
+            .map(|env| (env.python_version, env.python_platform.as_ref()))
             .unwrap_or_default();
 
         ProgramSettings {
@@ -64,19 +44,19 @@
         project_root: &SystemPath,
         system: &dyn System,
     ) -> SearchPathSettings {
-        let src_roots = if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_ref())
-        {
-            vec![src_root.absolute(project_root, system)]
-        } else {
-            let src = project_root.join("src");
-
-            // Default to `src` and the project root if `src` exists and the root hasn't been specified.
-            if system.is_directory(&src) {
-                vec![project_root.to_path_buf(), src]
-            } else {
-                vec![project_root.to_path_buf()]
-            }
-        };
+        let src_roots =
+            if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_deref()) {
+                vec![src_root.to_path_buf()]
+            } else {
+                let src = project_root.join("src");
+
+                // Default to `src` and the project root if `src` exists and the root hasn't been specified.
+                if system.is_directory(&src) {
+                    vec![project_root.to_path_buf(), src]
+                } else {
+                    vec![project_root.to_path_buf()]
+                }
+            };
 
         let (extra_paths, python, typeshed) = self
             .environment
@@ -91,119 +71,43 @@
             .unwrap_or_default();
 
         SearchPathSettings {
-            extra_paths: extra_paths
-                .unwrap_or_default()
-                .into_iter()
-                .map(|path| path.absolute(project_root, system))
-                .collect(),
+            extra_paths: extra_paths.unwrap_or_default(),
             src_roots,
-            custom_typeshed: typeshed.map(|path| path.absolute(project_root, system)),
+            typeshed,
             site_packages: python
-                .map(|venv_path| SitePackages::Derived {
-                    venv_path: venv_path.absolute(project_root, system),
-                })
+                .map(|venv_path| SitePackages::Derived { venv_path })
                 .unwrap_or(SitePackages::Known(vec![])),
         }
     }
 
-    #[must_use]
-    pub(crate) fn to_rule_selection(&self, db: &dyn Db) -> (RuleSelection, Vec<OptionDiagnostic>) {
-        let registry = db.lint_registry();
-        let mut diagnostics = Vec::new();
-
-        // Initialize the selection with the defaults
-        let mut selection = RuleSelection::from_registry(registry);
-
-        let rules = self
-            .rules
-            .as_ref()
-            .into_iter()
-            .flat_map(|rules| rules.inner.iter());
-
-        for (rule_name, level) in rules {
-            let source = rule_name.source();
-            match registry.get(rule_name) {
-                Ok(lint) => {
-                    let lint_source = match source {
-                        ValueSource::File(_) => LintSource::File,
-                        ValueSource::Cli => LintSource::Cli,
-                    };
-                    if let Ok(severity) = Severity::try_from(**level) {
-                        selection.enable(lint, severity, lint_source);
-                    } else {
-                        selection.disable(lint);
-                    }
-                }
-                Err(error) => {
-                    // `system_path_to_file` can return `Err` if the file was deleted since the configuration
-                    // was read. This should be rare and it should be okay to default to not showing a configuration
-                    // file in that case.
-                    let file = source
-                        .file()
-                        .and_then(|path| system_path_to_file(db.upcast(), path).ok());
-
-                    // TODO: Add a note if the value was configured on the CLI
-                    let diagnostic = match error {
-                        GetLintError::Unknown(_) => OptionDiagnostic::new(
-                            DiagnosticId::UnknownRule,
-                            format!("Unknown lint rule `{rule_name}`"),
-                            Severity::Warning,
-                        ),
-                        GetLintError::Removed(_) => OptionDiagnostic::new(
-                            DiagnosticId::UnknownRule,
-                            format!("Unknown lint rule `{rule_name}`"),
-                            Severity::Warning,
-                        ),
-                    };
-
-                    diagnostics.push(diagnostic.with_file(file).with_range(rule_name.range()));
-                }
-            }
-        }
-
-        (selection, diagnostics)
-    }
 }
 
 #[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
 #[serde(rename_all = "kebab-case", deny_unknown_fields)]
 pub struct EnvironmentOptions {
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub python_version: Option<RangedValue<PythonVersion>>,
+    pub python_version: Option<PythonVersion>,
 
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub python_platform: Option<RangedValue<PythonPlatform>>,
+    pub python_platform: Option<PythonPlatform>,
 
     /// List of user-provided paths that should take first priority in the module resolution.
     /// Examples in other type checkers are mypy's MYPYPATH environment variable,
     /// or pyright's stubPath configuration setting.
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub extra_paths: Option<Vec<RelativePathBuf>>,
+    pub extra_paths: Option<Vec<SystemPathBuf>>,
 
     /// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
     /// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
     /// bundled as a zip file in the binary
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub typeshed: Option<RelativePathBuf>,
+    pub typeshed: Option<SystemPathBuf>,
 
     // TODO: Rename to python, see https://github.com/astral-sh/ruff/issues/15530
     /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub venv_path: Option<RelativePathBuf>,
+    pub venv_path: Option<SystemPathBuf>,
 }
 
 #[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
 #[serde(rename_all = "kebab-case", deny_unknown_fields)]
 pub struct SrcOptions {
     /// The root of the project, used for finding first-party modules.
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub root: Option<RelativePathBuf>,
-}
-
-#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
-#[serde(rename_all = "kebab-case", transparent)]
-pub struct Rules {
-    inner: FxHashMap<RangedValue<String>, RangedValue<Level>>,
+    pub root: Option<SystemPathBuf>,
 }
 
@@ -211,58 +115,3 @@ pub enum KnotTomlError {
     #[error(transparent)]
     TomlSyntax(#[from] toml::de::Error),
 }
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct OptionDiagnostic {
-    id: DiagnosticId,
-    message: String,
-    severity: Severity,
-    file: Option<File>,
-    range: Option<TextRange>,
-}
-
-impl OptionDiagnostic {
-    pub fn new(id: DiagnosticId, message: String, severity: Severity) -> Self {
-        Self {
-            id,
-            message,
-            severity,
-            file: None,
-            range: None,
-        }
-    }
-
-    #[must_use]
-    fn with_file(mut self, file: Option<File>) -> Self {
-        self.file = file;
-        self
-    }
-
-    #[must_use]
-    fn with_range(mut self, range: Option<TextRange>) -> Self {
-        self.range = range;
-        self
-    }
-}
-
-impl Diagnostic for OptionDiagnostic {
-    fn id(&self) -> DiagnosticId {
-        self.id
-    }
-
-    fn message(&self) -> Cow<str> {
-        Cow::Borrowed(&self.message)
-    }
-
-    fn file(&self) -> Option<File> {
-        self.file
-    }
-
-    fn range(&self) -> Option<TextRange> {
-        self.range
-    }
-
-    fn severity(&self) -> Severity {
-        self.severity
-    }
-}
```
@@ -4,7 +4,6 @@ use std::ops::Deref;
use thiserror::Error;

use crate::metadata::options::Options;
use crate::metadata::value::{RangedValue, ValueSource, ValueSourceGuard};

/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
@@ -29,11 +28,7 @@ pub enum PyProjectError {
}

impl PyProject {
    pub(crate) fn from_toml_str(
        content: &str,
        source: ValueSource,
    ) -> Result<Self, PyProjectError> {
        let _guard = ValueSourceGuard::new(source);
    pub(crate) fn from_toml_str(content: &str) -> Result<Self, PyProjectError> {
        toml::from_str(content).map_err(PyProjectError::TomlSyntax)
    }
}
@@ -48,11 +43,11 @@ pub struct Project {
    ///
    /// Note: Intentionally optional to be more permissive during deserialization.
    /// `PackageMetadata::from_pyproject` reports missing names.
    pub name: Option<RangedValue<PackageName>>,
    pub name: Option<PackageName>,
    /// The version of the project.
    pub version: Option<RangedValue<Version>>,
    pub version: Option<Version>,
    /// The Python versions this project is compatible with.
    pub requires_python: Option<RangedValue<VersionSpecifiers>>,
    pub requires_python: Option<VersionSpecifiers>,
}

#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]

@@ -1,337 +0,0 @@
use crate::combine::Combine;
use crate::Db;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_text_size::{TextRange, TextSize};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use toml::Spanned;

#[derive(Clone, Debug)]
pub enum ValueSource {
    /// Value loaded from a project's configuration file.
    ///
    /// Ideally, we'd use [`ruff_db::files::File`] but we can't because the database hasn't been
    /// created when loading the configuration.
    File(Arc<SystemPathBuf>),
    /// The value comes from a CLI argument; whether it was specified using a short argument, a
    /// long argument (`--extra-paths`), or `--config key=value` is left open.
    Cli,
}

impl ValueSource {
    pub fn file(&self) -> Option<&SystemPath> {
        match self {
            ValueSource::File(path) => Some(&**path),
            ValueSource::Cli => None,
        }
    }
}

thread_local! {
    /// Serde doesn't provide any easy means to pass a value to a [`Deserialize`] implementation,
    /// but we want to associate each deserialized [`RelativePath`] with the source from
    /// which it originated. We use a thread-local variable to work around this limitation.
    ///
    /// Use the [`ValueSourceGuard`] to initialize the thread local before calling into any
    /// deserialization code. It ensures that the thread-local variable gets cleaned up
    /// once deserialization is done (once the guard gets dropped).
    static VALUE_SOURCE: RefCell<Option<ValueSource>> = const { RefCell::new(None) };
}

/// Guard to safely change the [`VALUE_SOURCE`] for the current thread.
#[must_use]
pub(super) struct ValueSourceGuard {
    prev_value: Option<ValueSource>,
}

impl ValueSourceGuard {
    pub(super) fn new(source: ValueSource) -> Self {
        let prev = VALUE_SOURCE.replace(Some(source));
        Self { prev_value: prev }
    }
}

impl Drop for ValueSourceGuard {
    fn drop(&mut self) {
        VALUE_SOURCE.set(self.prev_value.take());
    }
}

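Note that the guard restores the *previous* value rather than clearing the slot, so nested deserializations behave like a stack. A minimal, self-contained sketch of the same RAII pattern (the `String`-typed thread local and the names here are illustrative only, not part of this diff):

```rust
use std::cell::RefCell;

thread_local! {
    static SOURCE: RefCell<Option<String>> = const { RefCell::new(None) };
}

/// Swaps a new value into the thread local and restores the previous
/// one on drop, so nested guards unwind in LIFO order.
struct Guard {
    prev: Option<String>,
}

impl Guard {
    fn new(source: String) -> Self {
        let prev = SOURCE.replace(Some(source));
        Self { prev }
    }
}

impl Drop for Guard {
    fn drop(&mut self) {
        SOURCE.set(self.prev.take());
    }
}

fn main() {
    let _outer = Guard::new("cli".to_string());
    {
        let _inner = Guard::new("file".to_string());
        SOURCE.with_borrow(|s| assert_eq!(s.as_deref(), Some("file")));
    } // `_inner` drops here and restores the outer value.
    SOURCE.with_borrow(|s| assert_eq!(s.as_deref(), Some("cli")));
}
```
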
/// A value that "remembers" where it comes from (source) and its range in source.
///
/// ## Equality, Hash, and Ordering
/// The equality, hash, and ordering are solely based on the value. They disregard the value's range
/// or source.
///
/// This ensures that two resolved configurations are identical even if the position of a value has changed
/// or if the values were loaded from different sources.
#[derive(Clone)]
pub struct RangedValue<T> {
    value: T,
    source: ValueSource,

    /// The byte range of `value` in `source`.
    ///
    /// Can be `None` because not all sources support a range.
    /// For example, arguments provided on the CLI won't have a range attached.
    range: Option<TextRange>,
}

impl<T> RangedValue<T> {
    pub fn new(value: T, source: ValueSource) -> Self {
        Self::with_range(value, source, TextRange::default())
    }

    pub fn cli(value: T) -> Self {
        Self::with_range(value, ValueSource::Cli, TextRange::default())
    }

    pub fn with_range(value: T, source: ValueSource, range: TextRange) -> Self {
        Self {
            value,
            range: Some(range),
            source,
        }
    }

    pub fn range(&self) -> Option<TextRange> {
        self.range
    }

    pub fn source(&self) -> &ValueSource {
        &self.source
    }

    #[must_use]
    pub fn with_source(mut self, source: ValueSource) -> Self {
        self.source = source;
        self
    }

    pub fn into_inner(self) -> T {
        self.value
    }
}

impl<T> Combine for RangedValue<T> {
    fn combine(self, _other: Self) -> Self
    where
        Self: Sized,
    {
        self
    }
    fn combine_with(&mut self, _other: Self) {}
}

impl<T> IntoIterator for RangedValue<T>
where
    T: IntoIterator,
{
    type Item = T::Item;
    type IntoIter = T::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.value.into_iter()
    }
}

// The type already has an `iter` method thanks to `Deref`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a RangedValue<T>
where
    &'a T: IntoIterator,
{
    type Item = <&'a T as IntoIterator>::Item;
    type IntoIter = <&'a T as IntoIterator>::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.value.into_iter()
    }
}

// The type already has an `into_iter_mut` method thanks to `DerefMut`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a mut RangedValue<T>
where
    &'a mut T: IntoIterator,
{
    type Item = <&'a mut T as IntoIterator>::Item;
    type IntoIter = <&'a mut T as IntoIterator>::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.value.into_iter()
    }
}

impl<T> fmt::Debug for RangedValue<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.value.fmt(f)
    }
}

impl<T> fmt::Display for RangedValue<T>
where
    T: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.value.fmt(f)
    }
}

impl<T> Deref for RangedValue<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        &self.value
    }
}

impl<T> DerefMut for RangedValue<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.value
    }
}

impl<T, U: ?Sized> AsRef<U> for RangedValue<T>
where
    T: AsRef<U>,
{
    fn as_ref(&self) -> &U {
        self.value.as_ref()
    }
}

impl<T: PartialEq> PartialEq for RangedValue<T> {
    fn eq(&self, other: &Self) -> bool {
        self.value.eq(&other.value)
    }
}

impl<T: PartialEq<T>> PartialEq<T> for RangedValue<T> {
    fn eq(&self, other: &T) -> bool {
        self.value.eq(other)
    }
}

impl<T: Eq> Eq for RangedValue<T> {}

impl<T: Hash> Hash for RangedValue<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.value.hash(state);
    }
}

impl<T: PartialOrd> PartialOrd for RangedValue<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.value.partial_cmp(&other.value)
    }
}

impl<T: PartialOrd<T>> PartialOrd<T> for RangedValue<T> {
    fn partial_cmp(&self, other: &T) -> Option<Ordering> {
        self.value.partial_cmp(other)
    }
}

impl<T: Ord> Ord for RangedValue<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.value.cmp(&other.value)
    }
}

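Concretely, these impls guarantee that provenance can never make two otherwise-identical configurations compare unequal. A hedged sketch of what that means in practice (the test name and literal values are made up):

```rust
#[test]
fn provenance_does_not_affect_comparisons() {
    let from_cli: RangedValue<u32> = RangedValue::cli(42);
    let from_file = RangedValue::with_range(
        42,
        ValueSource::File(Arc::new(SystemPathBuf::from("knot.toml"))),
        TextRange::new(TextSize::from(10u32), TextSize::from(12u32)),
    );

    // Equal despite differing source and range...
    assert_eq!(from_cli, from_file);
    // ...and `PartialEq<T>` also allows comparing against the bare value.
    assert_eq!(from_cli, 42u32);
}
```
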
impl<'de, T> Deserialize<'de> for RangedValue<T>
where
    T: Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let spanned: Spanned<T> = Spanned::deserialize(deserializer)?;
        let span = spanned.span();
        let range = TextRange::new(
            TextSize::try_from(span.start).expect("Configuration file to be smaller than 4GB"),
            TextSize::try_from(span.end).expect("Configuration file to be smaller than 4GB"),
        );

        Ok(VALUE_SOURCE.with_borrow(|source| {
            let source = source.clone().unwrap();

            Self::with_range(spanned.into_inner(), source, range)
        }))
    }
}

impl<T> Serialize for RangedValue<T>
where
    T: Serialize,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        self.value.serialize(serializer)
    }
}

/// A possibly relative path in a configuration file.
///
/// Relative paths in configuration files or from CLI options
/// require different anchoring:
///
/// * CLI: The path is relative to the current working directory.
/// * Configuration file: The path is relative to the project's root.
#[derive(
    Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash,
)]
#[serde(transparent)]
pub struct RelativePathBuf(RangedValue<SystemPathBuf>);

impl RelativePathBuf {
    pub fn new(path: impl AsRef<SystemPath>, source: ValueSource) -> Self {
        Self(RangedValue::new(path.as_ref().to_path_buf(), source))
    }

    pub fn cli(path: impl AsRef<SystemPath>) -> Self {
        Self::new(path, ValueSource::Cli)
    }

    /// Returns the relative path as specified by the user.
    pub fn path(&self) -> &SystemPath {
        &self.0
    }

    /// Returns the owned relative path.
    pub fn into_path_buf(self) -> SystemPathBuf {
        self.0.into_inner()
    }

    /// Resolves the absolute path for `self` based on its origin.
    pub fn absolute_with_db(&self, db: &dyn Db) -> SystemPathBuf {
        self.absolute(db.project().root(db), db.system())
    }

    /// Resolves the absolute path for `self` based on its origin.
    pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> SystemPathBuf {
        let relative_to = match &self.0.source {
            ValueSource::File(_) => project_root,
            ValueSource::Cli => system.current_directory(),
        };

        SystemPath::absolute(&self.0, relative_to)
    }
}

impl Combine for RelativePathBuf {
    fn combine(self, other: Self) -> Self {
        Self(self.0.combine(other.0))
    }

    fn combine_with(&mut self, other: Self) {
        self.0.combine_with(other.0);
    }
}

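To illustrate the anchoring rule, here is a hedged sketch of a test; the paths and the assumption that `TestSystem::default()` yields an in-memory system rooted at `/` are mine, not part of this diff:

```rust
#[test]
fn cli_and_config_paths_anchor_differently() {
    // Assumption: the default test system's current directory is `/`.
    let system = TestSystem::default();
    let project_root = SystemPath::new("/project");

    let from_cli = RelativePathBuf::cli("src");
    let from_config = RelativePathBuf::new(
        "src",
        ValueSource::File(Arc::new(SystemPathBuf::from("/project/knot.toml"))),
    );

    // CLI paths resolve against the current working directory...
    assert_eq!(
        from_cli.absolute(project_root, &system),
        SystemPathBuf::from("/src")
    );
    // ...while configuration-file paths resolve against the project root.
    assert_eq!(
        from_config.absolute(project_root, &system),
        SystemPathBuf::from("/project/src")
    );
}
```
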
@@ -6,6 +6,7 @@ ProjectMetadata(
    name: Name("project-root"),
    root: "/app",
    options: Options(
        environment: None,
        src: Some(SrcOptions(
            root: Some("src"),
        )),

@@ -6,6 +6,7 @@ ProjectMetadata(
    name: Name("nested-project"),
    root: "/app/packages/a",
    options: Options(
        environment: None,
        src: Some(SrcOptions(
            root: Some("src"),
        )),

@@ -8,6 +8,11 @@ ProjectMetadata(
    options: Options(
        environment: Some(EnvironmentOptions(
            r#python-version: Some("3.10"),
            r#python-platform: None,
            r#extra-paths: None,
            typeshed: None,
            r#venv-path: None,
        )),
        src: None,
    ),
)

@@ -5,5 +5,8 @@ expression: sub_project
ProjectMetadata(
    name: Name("nested-project"),
    root: "/app/packages/a",
    options: Options(),
    options: Options(
        environment: None,
        src: None,
    ),
)

@@ -6,6 +6,7 @@ ProjectMetadata(
    name: Name("super-app"),
    root: "/app",
    options: Options(
        environment: None,
        src: Some(SrcOptions(
            root: Some("src"),
        )),

@@ -5,5 +5,8 @@ expression: project
ProjectMetadata(
    name: Name("backend"),
    root: "/app",
    options: Options(),
    options: Options(
        environment: None,
        src: None,
    ),
)

@@ -5,5 +5,8 @@ expression: project
ProjectMetadata(
    name: Name("app"),
    root: "/app",
    options: Options(),
    options: Options(
        environment: None,
        src: None,
    ),
)

@@ -1,6 +1,6 @@
use anyhow::{anyhow, Context};
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_python_semantic::{HasType, SemanticModel};
use red_knot_python_semantic::{HasTy, SemanticModel};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
@@ -197,10 +197,10 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
    fn visit_stmt(&mut self, stmt: &Stmt) {
        match stmt {
            Stmt::FunctionDef(function) => {
                let _ty = function.inferred_type(&self.model);
                let _ty = function.ty(&self.model);
            }
            Stmt::ClassDef(class) => {
                let _ty = class.inferred_type(&self.model);
                let _ty = class.ty(&self.model);
            }
            Stmt::Assign(assign) => {
                for target in &assign.targets {
@@ -243,25 +243,25 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
    }

    fn visit_expr(&mut self, expr: &Expr) {
        let _ty = expr.inferred_type(&self.model);
        let _ty = expr.ty(&self.model);

        source_order::walk_expr(self, expr);
    }

    fn visit_parameter(&mut self, parameter: &Parameter) {
        let _ty = parameter.inferred_type(&self.model);
        let _ty = parameter.ty(&self.model);

        source_order::walk_parameter(self, parameter);
    }

    fn visit_parameter_with_default(&mut self, parameter_with_default: &ParameterWithDefault) {
        let _ty = parameter_with_default.inferred_type(&self.model);
        let _ty = parameter_with_default.ty(&self.model);

        source_order::walk_parameter_with_default(self, parameter_with_default);
    }

    fn visit_alias(&mut self, alias: &Alias) {
        let _ty = alias.inferred_type(&self.model);
        let _ty = alias.ty(&self.model);

        source_order::walk_alias(self, alias);
    }

@@ -61,13 +61,7 @@ class MDTestRunner:
        return False

        # Run it again with 'json' format to find the mdtest executable:
        try:
            json_output = self._run_cargo_test(message_format="json")
        except subprocess.CalledProcessError as _:
            # `cargo test` can still fail if something changed in between the two runs.
            # Here we don't have a human-readable output, so just show a generic message:
            self.console.print("[red]Error[/red]: Failed to compile tests")
            return False
        json_output = self._run_cargo_test(message_format="json")

        if json_output:
            self._get_executable_path_from_json(json_output)

@@ -1,44 +0,0 @@
# Deferred annotations

## Deferred annotations in stubs always resolve

```pyi path=mod.pyi
def get_foo() -> Foo: ...
class Foo: ...
```

```py
from mod import get_foo

reveal_type(get_foo()) # revealed: Foo
```

## Deferred annotations in regular code fail

In (regular) source files, annotations are *not* deferred. This also tests that imports from
`__future__` that are not `annotations` are ignored.

```py
from __future__ import with_statement as annotations

# error: [unresolved-reference]
def get_foo() -> Foo: ...

class Foo: ...

reveal_type(get_foo()) # revealed: Unknown
```

## Deferred annotations in regular code with `__future__.annotations`

If `__future__.annotations` is imported, annotations *are* deferred.

```py
from __future__ import annotations

def get_foo() -> Foo: ...

class Foo: ...

reveal_type(get_foo()) # revealed: Foo
```

|
||||
# and pyright allow this.
|
||||
C.pure_instance_variable = "overwritten on class"
|
||||
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `pure_instance_variable` of type `str`"
|
||||
# TODO: this should be an error (incompatible types in assignment)
|
||||
c_instance.pure_instance_variable = 1
|
||||
```
|
||||
|
||||
@@ -191,7 +191,7 @@ c_instance.pure_class_variable1 = "value set on instance"
|
||||
|
||||
C.pure_class_variable1 = "overwritten on class"
|
||||
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `pure_class_variable1` of type `str`"
|
||||
# TODO: should raise an error (incompatible types in assignment)
|
||||
C.pure_class_variable1 = 1
|
||||
|
||||
class Subclass(C):
|
||||
@@ -436,64 +436,6 @@ class Foo: ...
|
||||
reveal_type(Foo.__class__) # revealed: Literal[type]
|
||||
```
|
||||
|
||||
## Module attributes
|
||||
|
||||
```py path=mod.py
|
||||
global_symbol: str = "a"
|
||||
```
|
||||
|
||||
```py
|
||||
import mod
|
||||
|
||||
reveal_type(mod.global_symbol) # revealed: str
|
||||
mod.global_symbol = "b"
|
||||
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `global_symbol` of type `str`"
|
||||
mod.global_symbol = 1
|
||||
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `global_symbol` of type `str`"
|
||||
(_, mod.global_symbol) = (..., 1)
|
||||
|
||||
# TODO: this should be an error, but we do not understand list unpackings yet.
|
||||
[_, mod.global_symbol] = [1, 2]
|
||||
|
||||
class IntIterator:
|
||||
def __next__(self) -> int:
|
||||
return 42
|
||||
|
||||
class IntIterable:
|
||||
def __iter__(self) -> IntIterator:
|
||||
return IntIterator()
|
||||
|
||||
# error: [invalid-assignment] "Object of type `int` is not assignable to attribute `global_symbol` of type `str`"
|
||||
for mod.global_symbol in IntIterable():
|
||||
pass
|
||||
```
|
||||
|
||||
## Nested attributes
|
||||
|
||||
```py path=outer/__init__.py
|
||||
```
|
||||
|
||||
```py path=outer/nested/__init__.py
|
||||
```
|
||||
|
||||
```py path=outer/nested/inner.py
|
||||
class Outer:
|
||||
class Nested:
|
||||
class Inner:
|
||||
attr: int = 1
|
||||
```
|
||||
|
||||
```py
|
||||
import outer.nested.inner
|
||||
|
||||
reveal_type(outer.nested.inner.Outer.Nested.Inner.attr) # revealed: int
|
||||
|
||||
# error: [invalid-assignment]
|
||||
outer.nested.inner.Outer.Nested.Inner.attr = "a"
|
||||
```
|
||||
|
||||
## Literal types
|
||||
|
||||
### Function-literal attributes
|
||||
|
||||
@@ -50,44 +50,46 @@ reveal_type(b | b) # revealed: Literal[False]
|
||||
## Arithmetic with a variable
|
||||
|
||||
```py
|
||||
def _(a: bool):
|
||||
def lhs_is_int(x: int):
|
||||
reveal_type(x + a) # revealed: int
|
||||
reveal_type(x - a) # revealed: int
|
||||
reveal_type(x * a) # revealed: int
|
||||
reveal_type(x // a) # revealed: int
|
||||
reveal_type(x / a) # revealed: float
|
||||
reveal_type(x % a) # revealed: int
|
||||
a = True
|
||||
b = False
|
||||
|
||||
def rhs_is_int(x: int):
|
||||
reveal_type(a + x) # revealed: int
|
||||
reveal_type(a - x) # revealed: int
|
||||
reveal_type(a * x) # revealed: int
|
||||
reveal_type(a // x) # revealed: int
|
||||
reveal_type(a / x) # revealed: float
|
||||
reveal_type(a % x) # revealed: int
|
||||
def lhs_is_int(x: int):
|
||||
reveal_type(x + a) # revealed: int
|
||||
reveal_type(x - a) # revealed: int
|
||||
reveal_type(x * a) # revealed: int
|
||||
reveal_type(x // a) # revealed: int
|
||||
reveal_type(x / a) # revealed: float
|
||||
reveal_type(x % a) # revealed: int
|
||||
|
||||
def lhs_is_bool(x: bool):
|
||||
reveal_type(x + a) # revealed: int
|
||||
reveal_type(x - a) # revealed: int
|
||||
reveal_type(x * a) # revealed: int
|
||||
reveal_type(x // a) # revealed: int
|
||||
reveal_type(x / a) # revealed: float
|
||||
reveal_type(x % a) # revealed: int
|
||||
def rhs_is_int(x: int):
|
||||
reveal_type(a + x) # revealed: int
|
||||
reveal_type(a - x) # revealed: int
|
||||
reveal_type(a * x) # revealed: int
|
||||
reveal_type(a // x) # revealed: int
|
||||
reveal_type(a / x) # revealed: float
|
||||
reveal_type(a % x) # revealed: int
|
||||
|
||||
def rhs_is_bool(x: bool):
|
||||
reveal_type(a + x) # revealed: int
|
||||
reveal_type(a - x) # revealed: int
|
||||
reveal_type(a * x) # revealed: int
|
||||
reveal_type(a // x) # revealed: int
|
||||
reveal_type(a / x) # revealed: float
|
||||
reveal_type(a % x) # revealed: int
|
||||
def lhs_is_bool(x: bool):
|
||||
reveal_type(x + a) # revealed: int
|
||||
reveal_type(x - a) # revealed: int
|
||||
reveal_type(x * a) # revealed: int
|
||||
reveal_type(x // a) # revealed: int
|
||||
reveal_type(x / a) # revealed: float
|
||||
reveal_type(x % a) # revealed: int
|
||||
|
||||
def both_are_bool(x: bool, y: bool):
|
||||
reveal_type(x + y) # revealed: int
|
||||
reveal_type(x - y) # revealed: int
|
||||
reveal_type(x * y) # revealed: int
|
||||
reveal_type(x // y) # revealed: int
|
||||
reveal_type(x / y) # revealed: float
|
||||
reveal_type(x % y) # revealed: int
|
||||
def rhs_is_bool(x: bool):
|
||||
reveal_type(a + x) # revealed: int
|
||||
reveal_type(a - x) # revealed: int
|
||||
reveal_type(a * x) # revealed: int
|
||||
reveal_type(a // x) # revealed: int
|
||||
reveal_type(a / x) # revealed: float
|
||||
reveal_type(a % x) # revealed: int
|
||||
|
||||
def both_are_bool(x: bool, y: bool):
|
||||
reveal_type(x + y) # revealed: int
|
||||
reveal_type(x - y) # revealed: int
|
||||
reveal_type(x * y) # revealed: int
|
||||
reveal_type(x // y) # revealed: int
|
||||
reveal_type(x / y) # revealed: float
|
||||
reveal_type(x % y) # revealed: int
|
||||
```
|
||||
|
||||
@@ -1,149 +0,0 @@
# Comprehensions

## Basic comprehensions

```py
class IntIterator:
    def __next__(self) -> int:
        return 42

class IntIterable:
    def __iter__(self) -> IntIterator:
        return IntIterator()

# revealed: int
[reveal_type(x) for x in IntIterable()]

class IteratorOfIterables:
    def __next__(self) -> IntIterable:
        return IntIterable()

class IterableOfIterables:
    def __iter__(self) -> IteratorOfIterables:
        return IteratorOfIterables()

# revealed: tuple[int, IntIterable]
[reveal_type((x, y)) for y in IterableOfIterables() for x in y]

# revealed: int
{reveal_type(x): 0 for x in IntIterable()}

# revealed: int
{0: reveal_type(x) for x in IntIterable()}
```

## Nested comprehension

```py
class IntIterator:
    def __next__(self) -> int:
        return 42

class IntIterable:
    def __iter__(self) -> IntIterator:
        return IntIterator()

# revealed: tuple[int, int]
[[reveal_type((x, y)) for x in IntIterable()] for y in IntIterable()]
```

## Comprehension referencing outer comprehension

```py
class IntIterator:
    def __next__(self) -> int:
        return 42

class IntIterable:
    def __iter__(self) -> IntIterator:
        return IntIterator()

class IteratorOfIterables:
    def __next__(self) -> IntIterable:
        return IntIterable()

class IterableOfIterables:
    def __iter__(self) -> IteratorOfIterables:
        return IteratorOfIterables()

# revealed: tuple[int, IntIterable]
[[reveal_type((x, y)) for x in y] for y in IterableOfIterables()]
```

## Comprehension with unbound iterable

Iterating over an unbound iterable yields `Unknown`:

```py
# error: [unresolved-reference] "Name `x` used when not defined"
# revealed: Unknown
[reveal_type(z) for z in x]

class IntIterator:
    def __next__(self) -> int:
        return 42

class IntIterable:
    def __iter__(self) -> IntIterator:
        return IntIterator()

# error: [not-iterable] "Object of type `int` is not iterable"
# revealed: tuple[int, Unknown]
[reveal_type((x, z)) for x in IntIterable() for z in x]
```

## Starred expressions

Starred expressions must be iterable:

```py
class NotIterable: ...

class Iterator:
    def __next__(self) -> int:
        return 42

class Iterable:
    def __iter__(self) -> Iterator: ...

# This is fine:
x = [*Iterable()]

# error: [not-iterable] "Object of type `NotIterable` is not iterable"
y = [*NotIterable()]
```

## Async comprehensions

### Basic

```py
class AsyncIterator:
    async def __anext__(self) -> int:
        return 42

class AsyncIterable:
    def __aiter__(self) -> AsyncIterator:
        return AsyncIterator()

# revealed: @Todo(async iterables/iterators)
[reveal_type(x) async for x in AsyncIterable()]
```

### Invalid async comprehension

This tests that we understand that `async` comprehensions do *not* work according to the synchronous
iteration protocol:

```py
class Iterator:
    def __next__(self) -> int:
        return 42

class Iterable:
    def __iter__(self) -> Iterator:
        return Iterator()

# revealed: @Todo(async iterables/iterators)
[reveal_type(x) async for x in Iterable()]
```
@@ -1,43 +0,0 @@
# Comprehensions with invalid syntax

```py
class IntIterator:
    def __next__(self) -> int:
        return 42

class IntIterable:
    def __iter__(self) -> IntIterator:
        return IntIterator()

# Missing 'in' keyword.

# It's reasonably clear here what they *meant* to write,
# so we'll still infer the correct type:

# error: [invalid-syntax] "Expected 'in', found name"
# revealed: int
[reveal_type(a) for a IntIterable()]

# Missing iteration variable

# error: [invalid-syntax] "Expected an identifier, but found a keyword 'in' that cannot be used here"
# error: [invalid-syntax] "Expected 'in', found name"
# error: [unresolved-reference]
# revealed: Unknown
[reveal_type(b) for in IntIterable()]

# Missing iterable

# error: [invalid-syntax] "Expected an expression"
# revealed: Unknown
[reveal_type(c) for c in]

# Missing 'in' keyword and missing iterable

# error: [invalid-syntax] "Expected 'in', found ']'"
# revealed: Unknown
[reveal_type(d) for d]
```

@@ -1,70 +1,8 @@
# Builtins

## Importing builtin module

Builtin symbols can be explicitly imported:
# Importing builtin module

```py
import builtins

reveal_type(builtins.chr) # revealed: Literal[chr]
```

## Implicit use of builtin

Or used implicitly:

```py
reveal_type(chr) # revealed: Literal[chr]
reveal_type(str) # revealed: Literal[str]
```

## Builtin symbol from custom typeshed

If we specify a custom typeshed, we can use the builtin symbol from it, and no longer access the
builtins from the "actual" vendored typeshed:

```toml
[environment]
typeshed = "/typeshed"
```

```pyi path=/typeshed/stdlib/builtins.pyi
class Custom: ...

custom_builtin: Custom
```

```pyi path=/typeshed/stdlib/typing_extensions.pyi
def reveal_type(obj, /): ...
```

```py
reveal_type(custom_builtin) # revealed: Custom

# error: [unresolved-reference]
reveal_type(str) # revealed: Unknown
```

## Unknown builtin (later defined)

`foo` has a type of `Unknown` in this example, as it relies on `bar` which has not been defined at
that point:

```toml
[environment]
typeshed = "/typeshed"
```

```pyi path=/typeshed/stdlib/builtins.pyi
foo = bar
bar = 1
```

```pyi path=/typeshed/stdlib/typing_extensions.pyi
def reveal_type(obj, /): ...
```

```py
reveal_type(foo) # revealed: Unknown
x = builtins.chr
reveal_type(x) # revealed: Literal[chr]
```

@@ -9,7 +9,7 @@ reveal_type(a.b) # revealed: <module 'a.b'>
```

```py path=a/__init__.py
b: int = 42
b = 42
```

```py path=a/b.py
@@ -20,11 +20,11 @@ b: int = 42
```py
from a import b

reveal_type(b) # revealed: int
reveal_type(b) # revealed: Literal[42]
```

```py path=a/__init__.py
b: int = 42
b = 42
```

```py path=a/b.py
@@ -41,7 +41,7 @@ reveal_type(a.b) # revealed: <module 'a.b'>
```

```py path=a/__init__.py
b: int = 42
b = 42
```

```py path=a/b.py
@@ -60,13 +60,13 @@ sees the submodule as the value of `b` instead of the integer.
from a import b
import a.b

# Python would say `int` for `b`
# Python would say `Literal[42]` for `b`
reveal_type(b) # revealed: <module 'a.b'>
reveal_type(a.b) # revealed: <module 'a.b'>
```

```py path=a/__init__.py
b: int = 42
b = 42
```

```py path=a/b.py

@@ -20,12 +20,12 @@ from a import b.c

# TODO: Should these be inferred as Unknown?
reveal_type(b) # revealed: <module 'a.b'>
reveal_type(b.c) # revealed: int
reveal_type(b.c) # revealed: Literal[1]
```

```py path=a/__init__.py
```

```py path=a/b.py
c: int = 1
c = 1
```

@@ -17,13 +17,13 @@ reveal_type(X) # revealed: Unknown
```

```py path=package/foo.py
X: int = 42
X = 42
```

```py path=package/bar.py
from .foo import X

reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```

## Dotted
@@ -32,25 +32,25 @@ reveal_type(X) # revealed: int
```

```py path=package/foo/bar/baz.py
X: int = 42
X = 42
```

```py path=package/bar.py
from .foo.bar.baz import X

reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```

## Bare to package

```py path=package/__init__.py
X: int = 42
X = 42
```

```py path=package/bar.py
from . import X

reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```

## Non-existent + bare to package
@@ -66,11 +66,11 @@ reveal_type(X) # revealed: Unknown
```py path=package/__init__.py
from .foo import X

reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```

```py path=package/foo.py
X: int = 42
X = 42
```

## Non-existent + dunder init
@@ -87,13 +87,13 @@ reveal_type(X) # revealed: Unknown
```

```py path=package/foo.py
X: int = 42
X = 42
```

```py path=package/subpackage/subsubpackage/bar.py
from ...foo import X

reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```

## Unbound symbol
@@ -117,13 +117,13 @@ reveal_type(x) # revealed: Unknown
```

```py path=package/foo.py
X: int = 42
X = 42
```

```py path=package/bar.py
from . import foo

reveal_type(foo.X) # revealed: int
reveal_type(foo.X) # revealed: Literal[42]
```

## Non-existent + bare to module
@@ -152,7 +152,7 @@ submodule via the attribute on its parent package.
```

```py path=package/foo.py
X: int = 42
X = 42
```

```py path=package/bar.py

@@ -1,95 +0,0 @@
# Custom typeshed

The `environment.typeshed` configuration option can be used to specify a custom typeshed directory
for Markdown-based tests. Custom typeshed stubs can then be placed in the specified directory using
fenced code blocks with language `pyi`, and will be used instead of the vendored copy of typeshed.

A fenced code block with language `text` can be used to provide a `stdlib/VERSIONS` file in the
custom typeshed root. If no such file is created explicitly, it will be automatically created with
entries enabling all specified `<typeshed-root>/stdlib` files for all supported Python versions.

## Basic example (auto-generated `VERSIONS` file)

First, we specify `/typeshed` as the custom typeshed directory:

```toml
[environment]
typeshed = "/typeshed"
```

We can then place custom stub files in `/typeshed/stdlib`, for example:

```pyi path=/typeshed/stdlib/builtins.pyi
class BuiltinClass: ...

builtin_symbol: BuiltinClass
```

```pyi path=/typeshed/stdlib/sys/__init__.pyi
version = "my custom Python"
```

And finally write a normal Python code block that makes use of the custom stubs:

```py
b: BuiltinClass = builtin_symbol

class OtherClass: ...

o: OtherClass = builtin_symbol # error: [invalid-assignment]

# Make sure that 'sys' has a proper entry in the auto-generated 'VERSIONS' file
import sys
```

## Custom `VERSIONS` file

If we want to specify a custom `VERSIONS` file, we can do so by creating a fenced code block with
language `text`. In the following test, we set the Python version to `3.10` and then make sure that
we can *not* import `new_module` with a version requirement of `3.11-`:

```toml
[environment]
python-version = "3.10"
typeshed = "/typeshed"
```

```pyi path=/typeshed/stdlib/old_module.pyi
class OldClass: ...
```

```pyi path=/typeshed/stdlib/new_module.pyi
class NewClass: ...
```

```text path=/typeshed/stdlib/VERSIONS
old_module: 3.0-
new_module: 3.11-
```

```py
from old_module import OldClass

# error: [unresolved-import] "Cannot resolve import `new_module`"
from new_module import NewClass
```

## Using `reveal_type` with a custom typeshed

When providing a custom typeshed directory, basic things like `reveal_type` will stop working
because we rely on being able to import it from `typing_extensions`. The actual definition of
`reveal_type` in typeshed is slightly involved (depends on generics, `TypeVar`, etc.), but a very
simple untyped definition is enough to make `reveal_type` work in tests:

```toml
[environment]
typeshed = "/typeshed"
```

```pyi path=/typeshed/stdlib/typing_extensions.pyi
def reveal_type(obj, /): ...
```

```py
reveal_type(()) # revealed: tuple[()]
```

@@ -10,10 +10,10 @@ def returns_bool() -> bool:
    return True

if returns_bool():
    chr: int = 1
    chr = 1

def f():
    reveal_type(chr) # revealed: Literal[chr] | int
    reveal_type(chr) # revealed: Literal[chr] | Literal[1]
```

## Conditionally global or builtin, with annotation

@@ -43,14 +43,3 @@ def f():
def h():
    reveal_type(x) # revealed: Literal[1]
```

## Implicit global in function

A name reference to a never-defined symbol in a function is implicitly a global lookup.

```py
x = 1

def f():
    reveal_type(x) # revealed: Literal[1]
```

@@ -11,7 +11,7 @@ version:
import sys

if sys.version_info >= (3, 9):
    SomeFeature: str = "available"
    SomeFeature = "available"
```

If we can statically determine that the condition is always true, then we can also understand that
@@ -21,7 +21,7 @@ If we can statically determine that the condition is always true, then we can al
from module1 import SomeFeature

# SomeFeature is unconditionally available here, because we are on Python 3.9 or newer:
reveal_type(SomeFeature) # revealed: str
reveal_type(SomeFeature) # revealed: Literal["available"]
```

Another scenario where this is useful is for `typing.TYPE_CHECKING` branches, which are often used

@@ -31,7 +31,7 @@ def _(n: int):
## Slices

```py
b: bytes = b"\x00abc\xff"
b = b"\x00abc\xff"

reveal_type(b[0:2]) # revealed: Literal[b"\x00a"]
reveal_type(b[-3:]) # revealed: Literal[b"bc\xff"]

@@ -28,52 +28,52 @@ def _(n: int):
## Slices

```py
s = "abcde"

reveal_type(s[0:0]) # revealed: Literal[""]
reveal_type(s[0:1]) # revealed: Literal["a"]
reveal_type(s[0:2]) # revealed: Literal["ab"]
reveal_type(s[0:5]) # revealed: Literal["abcde"]
reveal_type(s[0:6]) # revealed: Literal["abcde"]
reveal_type(s[1:3]) # revealed: Literal["bc"]

reveal_type(s[-3:5]) # revealed: Literal["cde"]
reveal_type(s[-4:-2]) # revealed: Literal["bc"]
reveal_type(s[-10:10]) # revealed: Literal["abcde"]

reveal_type(s[0:]) # revealed: Literal["abcde"]
reveal_type(s[2:]) # revealed: Literal["cde"]
reveal_type(s[5:]) # revealed: Literal[""]
reveal_type(s[:2]) # revealed: Literal["ab"]
reveal_type(s[:0]) # revealed: Literal[""]
reveal_type(s[:2]) # revealed: Literal["ab"]
reveal_type(s[:10]) # revealed: Literal["abcde"]
reveal_type(s[:]) # revealed: Literal["abcde"]

reveal_type(s[::-1]) # revealed: Literal["edcba"]
reveal_type(s[::2]) # revealed: Literal["ace"]
reveal_type(s[-2:-5:-1]) # revealed: Literal["dcb"]
reveal_type(s[::-2]) # revealed: Literal["eca"]
reveal_type(s[-1::-3]) # revealed: Literal["eb"]

reveal_type(s[None:2:None]) # revealed: Literal["ab"]
reveal_type(s[1:None:1]) # revealed: Literal["bcde"]
reveal_type(s[None:None:None]) # revealed: Literal["abcde"]

start = 1
stop = None
step = 2
reveal_type(s[start:stop:step]) # revealed: Literal["bd"]

reveal_type(s[False:True]) # revealed: Literal["a"]
reveal_type(s[True:3]) # revealed: Literal["bc"]

s[0:4:0] # error: [zero-stepsize-in-slice]
s[:4:0] # error: [zero-stepsize-in-slice]
s[0::0] # error: [zero-stepsize-in-slice]
s[::0] # error: [zero-stepsize-in-slice]

def _(m: int, n: int, s2: str):
    s = "abcde"

    reveal_type(s[0:0]) # revealed: Literal[""]
    reveal_type(s[0:1]) # revealed: Literal["a"]
    reveal_type(s[0:2]) # revealed: Literal["ab"]
    reveal_type(s[0:5]) # revealed: Literal["abcde"]
    reveal_type(s[0:6]) # revealed: Literal["abcde"]
    reveal_type(s[1:3]) # revealed: Literal["bc"]

    reveal_type(s[-3:5]) # revealed: Literal["cde"]
    reveal_type(s[-4:-2]) # revealed: Literal["bc"]
    reveal_type(s[-10:10]) # revealed: Literal["abcde"]

    reveal_type(s[0:]) # revealed: Literal["abcde"]
    reveal_type(s[2:]) # revealed: Literal["cde"]
    reveal_type(s[5:]) # revealed: Literal[""]
    reveal_type(s[:2]) # revealed: Literal["ab"]
    reveal_type(s[:0]) # revealed: Literal[""]
    reveal_type(s[:2]) # revealed: Literal["ab"]
    reveal_type(s[:10]) # revealed: Literal["abcde"]
    reveal_type(s[:]) # revealed: Literal["abcde"]

    reveal_type(s[::-1]) # revealed: Literal["edcba"]
    reveal_type(s[::2]) # revealed: Literal["ace"]
    reveal_type(s[-2:-5:-1]) # revealed: Literal["dcb"]
    reveal_type(s[::-2]) # revealed: Literal["eca"]
    reveal_type(s[-1::-3]) # revealed: Literal["eb"]

    reveal_type(s[None:2:None]) # revealed: Literal["ab"]
    reveal_type(s[1:None:1]) # revealed: Literal["bcde"]
    reveal_type(s[None:None:None]) # revealed: Literal["abcde"]

    start = 1
    stop = None
    step = 2
    reveal_type(s[start:stop:step]) # revealed: Literal["bd"]

    reveal_type(s[False:True]) # revealed: Literal["a"]
    reveal_type(s[True:3]) # revealed: Literal["bc"]

    s[0:4:0] # error: [zero-stepsize-in-slice]
    s[:4:0] # error: [zero-stepsize-in-slice]
    s[0::0] # error: [zero-stepsize-in-slice]
    s[::0] # error: [zero-stepsize-in-slice]

    substring1 = s[m:n]
    # TODO: Support overloads... Should be `LiteralString`
    reveal_type(substring1) # revealed: @Todo(return type)

@@ -23,51 +23,51 @@ reveal_type(b) # revealed: Unknown
## Slices

```py
t = (1, "a", None, b"b")

reveal_type(t[0:0]) # revealed: tuple[()]
reveal_type(t[0:1]) # revealed: tuple[Literal[1]]
reveal_type(t[0:2]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[0:4]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[0:5]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[1:3]) # revealed: tuple[Literal["a"], None]

reveal_type(t[-2:4]) # revealed: tuple[None, Literal[b"b"]]
reveal_type(t[-3:-1]) # revealed: tuple[Literal["a"], None]
reveal_type(t[-10:10]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]

reveal_type(t[0:]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[2:]) # revealed: tuple[None, Literal[b"b"]]
reveal_type(t[4:]) # revealed: tuple[()]
reveal_type(t[:0]) # revealed: tuple[()]
reveal_type(t[:2]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[:10]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[:]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]

reveal_type(t[::-1]) # revealed: tuple[Literal[b"b"], None, Literal["a"], Literal[1]]
reveal_type(t[::2]) # revealed: tuple[Literal[1], None]
reveal_type(t[-2:-5:-1]) # revealed: tuple[None, Literal["a"], Literal[1]]
reveal_type(t[::-2]) # revealed: tuple[Literal[b"b"], Literal["a"]]
reveal_type(t[-1::-3]) # revealed: tuple[Literal[b"b"], Literal[1]]

reveal_type(t[None:2:None]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[1:None:1]) # revealed: tuple[Literal["a"], None, Literal[b"b"]]
reveal_type(t[None:None:None]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]

start = 1
stop = None
step = 2
reveal_type(t[start:stop:step]) # revealed: tuple[Literal["a"], Literal[b"b"]]

reveal_type(t[False:True]) # revealed: tuple[Literal[1]]
reveal_type(t[True:3]) # revealed: tuple[Literal["a"], None]

t[0:4:0] # error: [zero-stepsize-in-slice]
t[:4:0] # error: [zero-stepsize-in-slice]
t[0::0] # error: [zero-stepsize-in-slice]
t[::0] # error: [zero-stepsize-in-slice]

def _(m: int, n: int):
    t = (1, "a", None, b"b")

    reveal_type(t[0:0]) # revealed: tuple[()]
    reveal_type(t[0:1]) # revealed: tuple[Literal[1]]
    reveal_type(t[0:2]) # revealed: tuple[Literal[1], Literal["a"]]
    reveal_type(t[0:4]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
    reveal_type(t[0:5]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
    reveal_type(t[1:3]) # revealed: tuple[Literal["a"], None]

    reveal_type(t[-2:4]) # revealed: tuple[None, Literal[b"b"]]
    reveal_type(t[-3:-1]) # revealed: tuple[Literal["a"], None]
    reveal_type(t[-10:10]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]

    reveal_type(t[0:]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
    reveal_type(t[2:]) # revealed: tuple[None, Literal[b"b"]]
    reveal_type(t[4:]) # revealed: tuple[()]
    reveal_type(t[:0]) # revealed: tuple[()]
    reveal_type(t[:2]) # revealed: tuple[Literal[1], Literal["a"]]
    reveal_type(t[:10]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
    reveal_type(t[:]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]

    reveal_type(t[::-1]) # revealed: tuple[Literal[b"b"], None, Literal["a"], Literal[1]]
    reveal_type(t[::2]) # revealed: tuple[Literal[1], None]
    reveal_type(t[-2:-5:-1]) # revealed: tuple[None, Literal["a"], Literal[1]]
    reveal_type(t[::-2]) # revealed: tuple[Literal[b"b"], Literal["a"]]
    reveal_type(t[-1::-3]) # revealed: tuple[Literal[b"b"], Literal[1]]

    reveal_type(t[None:2:None]) # revealed: tuple[Literal[1], Literal["a"]]
    reveal_type(t[1:None:1]) # revealed: tuple[Literal["a"], None, Literal[b"b"]]
    reveal_type(t[None:None:None]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]

    start = 1
    stop = None
    step = 2
    reveal_type(t[start:stop:step]) # revealed: tuple[Literal["a"], Literal[b"b"]]

    reveal_type(t[False:True]) # revealed: tuple[Literal[1]]
    reveal_type(t[True:3]) # revealed: tuple[Literal["a"], None]

    t[0:4:0] # error: [zero-stepsize-in-slice]
    t[:4:0] # error: [zero-stepsize-in-slice]
    t[0::0] # error: [zero-stepsize-in-slice]
    t[::0] # error: [zero-stepsize-in-slice]

    tuple_slice = t[m:n]
    # TODO: Support overloads... Should be `tuple[Literal[1, 'a', b"b"] | None, ...]`
    reveal_type(tuple_slice) # revealed: @Todo(return type)

@@ -263,12 +263,15 @@ static_assert(not is_assignable_to(int, Not[int]))
static_assert(not is_assignable_to(int, Not[Literal[1]]))

static_assert(not is_assignable_to(Intersection[Any, Parent], Unrelated))
static_assert(is_assignable_to(Intersection[Unrelated, Any], Intersection[Unrelated, Any]))
static_assert(is_assignable_to(Intersection[Unrelated, Any], Intersection[Unrelated, Not[Any]]))

# TODO: The following assertions should not fail (see https://github.com/astral-sh/ruff/issues/14899)
# error: [static-assert-error]
static_assert(is_assignable_to(Intersection[Any, int], int))

# error: [static-assert-error]
static_assert(is_assignable_to(Intersection[Unrelated, Any], Intersection[Unrelated, Any]))
# error: [static-assert-error]
static_assert(is_assignable_to(Intersection[Unrelated, Any], Intersection[Unrelated, Not[Any]]))
# error: [static-assert-error]
static_assert(is_assignable_to(Intersection[Unrelated, Any], Not[tuple[Unrelated, Any]]))
```

@@ -136,7 +136,7 @@ pub(crate) mod tests {
    /// Target Python platform
    python_platform: PythonPlatform,
    /// Path to a custom typeshed directory
    typeshed: Option<SystemPathBuf>,
    custom_typeshed: Option<SystemPathBuf>,
    /// Path and content pairs for files that should be present
    files: Vec<(&'a str, &'a str)>,
}
@@ -146,7 +146,7 @@ pub(crate) mod tests {
        Self {
            python_version: PythonVersion::default(),
            python_platform: PythonPlatform::default(),
            typeshed: None,
            custom_typeshed: None,
            files: vec![],
        }
    }
@@ -156,6 +156,11 @@ pub(crate) mod tests {
        self
    }

    pub(crate) fn with_custom_typeshed(mut self, path: &str) -> Self {
        self.custom_typeshed = Some(SystemPathBuf::from(path));
        self
    }

    pub(crate) fn with_file(mut self, path: &'a str, content: &'a str) -> Self {
        self.files.push((path, content));
        self
@@ -171,7 +176,7 @@ pub(crate) mod tests {
        .context("Failed to write test files")?;

        let mut search_paths = SearchPathSettings::new(vec![src_root]);
        search_paths.custom_typeshed = self.typeshed;
        search_paths.typeshed = self.custom_typeshed;

        Program::from_settings(
            &db,

@@ -10,7 +10,7 @@ pub use module_resolver::{resolve_module, system_module_search_paths, KnownModul
|
||||
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
|
||||
pub use python_platform::PythonPlatform;
|
||||
pub use python_version::PythonVersion;
|
||||
pub use semantic_model::{HasType, SemanticModel};
|
||||
pub use semantic_model::{HasTy, SemanticModel};
|
||||
|
||||
pub mod ast_node_ref;
|
||||
mod db;
|
||||
|
||||
@@ -31,11 +31,6 @@ pub struct LintMetadata {
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
|
||||
#[cfg_attr(
|
||||
feature = "serde",
|
||||
derive(serde::Serialize, serde::Deserialize),
|
||||
serde(rename_all = "kebab-case")
|
||||
)]
|
||||
pub enum Level {
|
||||
/// The lint is disabled and should not run.
|
||||
Ignore,
|
||||
@@ -409,12 +404,12 @@ impl From<&'static LintMetadata> for LintEntry {
    }
}

#[derive(Debug, Clone, Default, PartialEq, Eq)]
#[derive(Debug, Clone, Default)]
pub struct RuleSelection {
    /// Map with the severity for each enabled lint rule.
    ///
    /// If a rule isn't present in this map, then it should be considered disabled.
    lints: FxHashMap<LintId, (Severity, LintSource)>,
    lints: FxHashMap<LintId, Severity>,
}

impl RuleSelection {

@@ -427,7 +422,7 @@ impl RuleSelection {
    .filter_map(|lint| {
        Severity::try_from(lint.default_level())
            .ok()
            .map(|severity| (*lint, (severity, LintSource::Default)))
            .map(|severity| (*lint, severity))
    })
    .collect();

@@ -441,14 +436,12 @@ impl RuleSelection {

    /// Returns an iterator over all enabled lints and their severity.
    pub fn iter(&self) -> impl ExactSizeIterator<Item = (LintId, Severity)> + '_ {
        self.lints
            .iter()
            .map(|(&lint, &(severity, _))| (lint, severity))
        self.lints.iter().map(|(&lint, &severity)| (lint, severity))
    }

    /// Returns the configured severity for the lint with the given id or `None` if the lint is disabled.
    pub fn severity(&self, lint: LintId) -> Option<Severity> {
        self.lints.get(&lint).map(|(severity, _)| *severity)
        self.lints.get(&lint).copied()
    }

    /// Returns `true` if the `lint` is enabled.

@@ -459,25 +452,19 @@ impl RuleSelection {
    /// Enables `lint` and configures with the given `severity`.
    ///
    /// Overrides any previous configuration for the lint.
    pub fn enable(&mut self, lint: LintId, severity: Severity, source: LintSource) {
        self.lints.insert(lint, (severity, source));
    pub fn enable(&mut self, lint: LintId, severity: Severity) {
        self.lints.insert(lint, severity);
    }

    /// Disables `lint` if it was previously enabled.
    pub fn disable(&mut self, lint: LintId) {
        self.lints.remove(&lint);
    }
}

#[derive(Default, Copy, Clone, Debug, PartialEq, Eq)]
pub enum LintSource {
    /// The user didn't enable the rule explicitly, instead it's enabled by default.
    #[default]
    Default,

    /// The rule was enabled by using a CLI argument
    Cli,

    /// The rule was enabled in a configuration file.
    File,

    /// Merges the enabled lints from `other` into this selection.
    ///
    /// Lints from `other` will override any existing configuration.
    pub fn merge(&mut self, other: &RuleSelection) {
        self.lints.extend(other.iter());
    }
}
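To see how the map-based design above fits together, here is a self-contained sketch of the enable/query/disable flow of the `LintSource`-tracking variant. The types below are simplified stand-ins for the crate's `LintId`, `Severity`, `LintSource`, and `FxHashMap`, kept only so the sketch compiles on its own:

```rust
use std::collections::HashMap;

// Simplified stand-ins for the crate-internal types shown in the hunk above.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct LintId(u32);

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Severity {
    Warning,
    Error,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum LintSource {
    Default,
    Cli,
    File,
}

#[derive(Default)]
struct RuleSelection {
    lints: HashMap<LintId, (Severity, LintSource)>,
}

impl RuleSelection {
    // Enabling overrides any previous configuration for the lint.
    fn enable(&mut self, lint: LintId, severity: Severity, source: LintSource) {
        self.lints.insert(lint, (severity, source));
    }

    // A lint absent from the map is considered disabled.
    fn severity(&self, lint: LintId) -> Option<Severity> {
        self.lints.get(&lint).map(|(severity, _)| *severity)
    }

    fn disable(&mut self, lint: LintId) {
        self.lints.remove(&lint);
    }
}

fn main() {
    let mut selection = RuleSelection::default();
    let some_lint = LintId(0);

    selection.enable(some_lint, Severity::Error, LintSource::Cli);
    assert_eq!(selection.severity(some_lint), Some(Severity::Error));

    selection.disable(some_lint);
    assert_eq!(selection.severity(some_lint), None);
}
```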
@@ -169,7 +169,7 @@ impl SearchPaths {
    let SearchPathSettings {
        extra_paths,
        src_roots,
        custom_typeshed: typeshed,
        typeshed,
        site_packages: site_packages_paths,
    } = settings;
@@ -1308,7 +1308,7 @@ mod tests {
    search_paths: SearchPathSettings {
        extra_paths: vec![],
        src_roots: vec![src.clone()],
        custom_typeshed: Some(custom_typeshed),
        typeshed: Some(custom_typeshed),
        site_packages: SitePackages::Known(vec![site_packages]),
    },
},

@@ -1814,7 +1814,7 @@ not_a_directory
    search_paths: SearchPathSettings {
        extra_paths: vec![],
        src_roots: vec![SystemPathBuf::from("/src")],
        custom_typeshed: None,
        typeshed: None,
        site_packages: SitePackages::Known(vec![
            venv_site_packages,
            system_site_packages,
@@ -73,7 +73,7 @@ pub(crate) struct UnspecifiedTypeshed;
///
/// For tests checking that standard-library module resolution is working
/// correctly, you should usually create a [`MockedTypeshed`] instance
/// and pass it to the [`TestCaseBuilder::with_mocked_typeshed`] method.
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
/// If you need to check something that involves the vendored typeshed stubs
/// we include as part of the binary, you can instead use the
/// [`TestCaseBuilder::with_vendored_typeshed`] method.

@@ -238,7 +238,7 @@ impl TestCaseBuilder<MockedTypeshed> {
    search_paths: SearchPathSettings {
        extra_paths: vec![],
        src_roots: vec![src.clone()],
        custom_typeshed: Some(typeshed.clone()),
        typeshed: Some(typeshed.clone()),
        site_packages: SitePackages::Known(vec![site_packages.clone()]),
    },
},
@@ -108,7 +108,7 @@ pub struct SearchPathSettings {
    /// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
    /// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
    /// bundled as a zip file in the binary.
    pub custom_typeshed: Option<SystemPathBuf>,
    pub typeshed: Option<SystemPathBuf>,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: SitePackages,

@@ -119,7 +119,7 @@ impl SearchPathSettings {
    Self {
        src_roots,
        extra_paths: vec![],
        custom_typeshed: None,
        typeshed: None,
        site_packages: SitePackages::Known(vec![]),
    }
}
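Putting the renamed field together with the constructor above: configuring a custom typeshed reduces to overriding one optional field after `SearchPathSettings::new`, mirroring the builder code at the top of this diff. A minimal sketch, using self-contained stand-ins for `SystemPathBuf` and `SitePackages` (which live elsewhere in the crate):

```rust
use std::path::PathBuf;

// Stand-ins for the crate's `SystemPathBuf` and `SitePackages` types,
// kept only so this sketch compiles on its own.
type SystemPathBuf = PathBuf;

#[derive(Debug)]
enum SitePackages {
    Known(Vec<SystemPathBuf>),
}

#[derive(Debug)]
struct SearchPathSettings {
    extra_paths: Vec<SystemPathBuf>,
    src_roots: Vec<SystemPathBuf>,
    typeshed: Option<SystemPathBuf>,
    site_packages: SitePackages,
}

impl SearchPathSettings {
    fn new(src_roots: Vec<SystemPathBuf>) -> Self {
        Self {
            src_roots,
            extra_paths: vec![],
            typeshed: None,
            site_packages: SitePackages::Known(vec![]),
        }
    }
}

fn main() {
    // Start from the defaults, then point `typeshed` at a directory on disk;
    // when left as `None`, the vendored typeshed stubs are used instead.
    let mut search_paths = SearchPathSettings::new(vec![SystemPathBuf::from("/src")]);
    search_paths.typeshed = Some(SystemPathBuf::from("/custom-typeshed"));
    println!("{search_paths:?}");
}
```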
@@ -603,8 +603,8 @@ impl<'db> SemanticIndexBuilder<'db> {

    let definition = self.add_definition(symbol, parameter);

    // Insert a mapping from the inner Parameter node to the same definition. This
    // ensures that calling `HasType::inferred_type` on the inner parameter returns
    // Insert a mapping from the inner Parameter node to the same definition.
    // This ensures that calling `HasTy::ty` on the inner parameter returns
    // a valid type (and doesn't panic)
    let existing_definition = self
        .definitions_by_node
@@ -8,7 +8,7 @@ use crate::module_name::ModuleName;
use crate::module_resolver::{resolve_module, Module};
use crate::semantic_index::ast_ids::HasScopedExpressionId;
use crate::semantic_index::semantic_index;
use crate::types::{binding_type, infer_scope_types, Type};
use crate::types::{binding_ty, infer_scope_types, Type};
use crate::Db;

pub struct SemanticModel<'db> {

@@ -40,117 +40,117 @@ impl<'db> SemanticModel<'db> {
    }
}

pub trait HasType {
pub trait HasTy {
    /// Returns the inferred type of `self`.
    ///
    /// ## Panics
    /// May panic if `self` is from another file than `model`.
    fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db>;
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db>;
}

impl HasType for ast::ExprRef<'_> {
    fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
impl HasTy for ast::ExprRef<'_> {
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        let index = semantic_index(model.db, model.file);
        let file_scope = index.expression_scope_id(*self);
        let scope = file_scope.to_scope_id(model.db, model.file);

        let expression_id = self.scoped_expression_id(model.db, scope);
        infer_scope_types(model.db, scope).expression_type(expression_id)
        infer_scope_types(model.db, scope).expression_ty(expression_id)
    }
}
macro_rules! impl_expression_has_type {
macro_rules! impl_expression_has_ty {
    ($ty: ty) => {
        impl HasType for $ty {
        impl HasTy for $ty {
            #[inline]
            fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
            fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
                let expression_ref = ExprRef::from(self);
                expression_ref.inferred_type(model)
                expression_ref.ty(model)
            }
        }
    };
}

impl_expression_has_type!(ast::ExprBoolOp);
impl_expression_has_type!(ast::ExprNamed);
impl_expression_has_type!(ast::ExprBinOp);
impl_expression_has_type!(ast::ExprUnaryOp);
impl_expression_has_type!(ast::ExprLambda);
impl_expression_has_type!(ast::ExprIf);
impl_expression_has_type!(ast::ExprDict);
impl_expression_has_type!(ast::ExprSet);
impl_expression_has_type!(ast::ExprListComp);
impl_expression_has_type!(ast::ExprSetComp);
impl_expression_has_type!(ast::ExprDictComp);
impl_expression_has_type!(ast::ExprGenerator);
impl_expression_has_type!(ast::ExprAwait);
impl_expression_has_type!(ast::ExprYield);
impl_expression_has_type!(ast::ExprYieldFrom);
impl_expression_has_type!(ast::ExprCompare);
impl_expression_has_type!(ast::ExprCall);
impl_expression_has_type!(ast::ExprFString);
impl_expression_has_type!(ast::ExprStringLiteral);
impl_expression_has_type!(ast::ExprBytesLiteral);
impl_expression_has_type!(ast::ExprNumberLiteral);
impl_expression_has_type!(ast::ExprBooleanLiteral);
impl_expression_has_type!(ast::ExprNoneLiteral);
impl_expression_has_type!(ast::ExprEllipsisLiteral);
impl_expression_has_type!(ast::ExprAttribute);
impl_expression_has_type!(ast::ExprSubscript);
impl_expression_has_type!(ast::ExprStarred);
impl_expression_has_type!(ast::ExprName);
impl_expression_has_type!(ast::ExprList);
impl_expression_has_type!(ast::ExprTuple);
impl_expression_has_type!(ast::ExprSlice);
impl_expression_has_type!(ast::ExprIpyEscapeCommand);
impl_expression_has_ty!(ast::ExprBoolOp);
impl_expression_has_ty!(ast::ExprNamed);
impl_expression_has_ty!(ast::ExprBinOp);
impl_expression_has_ty!(ast::ExprUnaryOp);
impl_expression_has_ty!(ast::ExprLambda);
impl_expression_has_ty!(ast::ExprIf);
impl_expression_has_ty!(ast::ExprDict);
impl_expression_has_ty!(ast::ExprSet);
impl_expression_has_ty!(ast::ExprListComp);
impl_expression_has_ty!(ast::ExprSetComp);
impl_expression_has_ty!(ast::ExprDictComp);
impl_expression_has_ty!(ast::ExprGenerator);
impl_expression_has_ty!(ast::ExprAwait);
impl_expression_has_ty!(ast::ExprYield);
impl_expression_has_ty!(ast::ExprYieldFrom);
impl_expression_has_ty!(ast::ExprCompare);
impl_expression_has_ty!(ast::ExprCall);
impl_expression_has_ty!(ast::ExprFString);
impl_expression_has_ty!(ast::ExprStringLiteral);
impl_expression_has_ty!(ast::ExprBytesLiteral);
impl_expression_has_ty!(ast::ExprNumberLiteral);
impl_expression_has_ty!(ast::ExprBooleanLiteral);
impl_expression_has_ty!(ast::ExprNoneLiteral);
impl_expression_has_ty!(ast::ExprEllipsisLiteral);
impl_expression_has_ty!(ast::ExprAttribute);
impl_expression_has_ty!(ast::ExprSubscript);
impl_expression_has_ty!(ast::ExprStarred);
impl_expression_has_ty!(ast::ExprName);
impl_expression_has_ty!(ast::ExprList);
impl_expression_has_ty!(ast::ExprTuple);
impl_expression_has_ty!(ast::ExprSlice);
impl_expression_has_ty!(ast::ExprIpyEscapeCommand);
impl HasType for ast::Expr {
    fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
impl HasTy for ast::Expr {
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        match self {
            Expr::BoolOp(inner) => inner.inferred_type(model),
            Expr::Named(inner) => inner.inferred_type(model),
            Expr::BinOp(inner) => inner.inferred_type(model),
            Expr::UnaryOp(inner) => inner.inferred_type(model),
            Expr::Lambda(inner) => inner.inferred_type(model),
            Expr::If(inner) => inner.inferred_type(model),
            Expr::Dict(inner) => inner.inferred_type(model),
            Expr::Set(inner) => inner.inferred_type(model),
            Expr::ListComp(inner) => inner.inferred_type(model),
            Expr::SetComp(inner) => inner.inferred_type(model),
            Expr::DictComp(inner) => inner.inferred_type(model),
            Expr::Generator(inner) => inner.inferred_type(model),
            Expr::Await(inner) => inner.inferred_type(model),
            Expr::Yield(inner) => inner.inferred_type(model),
            Expr::YieldFrom(inner) => inner.inferred_type(model),
            Expr::Compare(inner) => inner.inferred_type(model),
            Expr::Call(inner) => inner.inferred_type(model),
            Expr::FString(inner) => inner.inferred_type(model),
            Expr::StringLiteral(inner) => inner.inferred_type(model),
            Expr::BytesLiteral(inner) => inner.inferred_type(model),
            Expr::NumberLiteral(inner) => inner.inferred_type(model),
            Expr::BooleanLiteral(inner) => inner.inferred_type(model),
            Expr::NoneLiteral(inner) => inner.inferred_type(model),
            Expr::EllipsisLiteral(inner) => inner.inferred_type(model),
            Expr::Attribute(inner) => inner.inferred_type(model),
            Expr::Subscript(inner) => inner.inferred_type(model),
            Expr::Starred(inner) => inner.inferred_type(model),
            Expr::Name(inner) => inner.inferred_type(model),
            Expr::List(inner) => inner.inferred_type(model),
            Expr::Tuple(inner) => inner.inferred_type(model),
            Expr::Slice(inner) => inner.inferred_type(model),
            Expr::IpyEscapeCommand(inner) => inner.inferred_type(model),
            Expr::BoolOp(inner) => inner.ty(model),
            Expr::Named(inner) => inner.ty(model),
            Expr::BinOp(inner) => inner.ty(model),
            Expr::UnaryOp(inner) => inner.ty(model),
            Expr::Lambda(inner) => inner.ty(model),
            Expr::If(inner) => inner.ty(model),
            Expr::Dict(inner) => inner.ty(model),
            Expr::Set(inner) => inner.ty(model),
            Expr::ListComp(inner) => inner.ty(model),
            Expr::SetComp(inner) => inner.ty(model),
            Expr::DictComp(inner) => inner.ty(model),
            Expr::Generator(inner) => inner.ty(model),
            Expr::Await(inner) => inner.ty(model),
            Expr::Yield(inner) => inner.ty(model),
            Expr::YieldFrom(inner) => inner.ty(model),
            Expr::Compare(inner) => inner.ty(model),
            Expr::Call(inner) => inner.ty(model),
            Expr::FString(inner) => inner.ty(model),
            Expr::StringLiteral(inner) => inner.ty(model),
            Expr::BytesLiteral(inner) => inner.ty(model),
            Expr::NumberLiteral(inner) => inner.ty(model),
            Expr::BooleanLiteral(inner) => inner.ty(model),
            Expr::NoneLiteral(inner) => inner.ty(model),
            Expr::EllipsisLiteral(inner) => inner.ty(model),
            Expr::Attribute(inner) => inner.ty(model),
            Expr::Subscript(inner) => inner.ty(model),
            Expr::Starred(inner) => inner.ty(model),
            Expr::Name(inner) => inner.ty(model),
            Expr::List(inner) => inner.ty(model),
            Expr::Tuple(inner) => inner.ty(model),
            Expr::Slice(inner) => inner.ty(model),
            Expr::IpyEscapeCommand(inner) => inner.ty(model),
        }
    }
}

macro_rules! impl_binding_has_ty {
    ($ty: ty) => {
        impl HasType for $ty {
        impl HasTy for $ty {
            #[inline]
            fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
            fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
                let index = semantic_index(model.db, model.file);
                let binding = index.definition(self);
                binding_type(model.db, binding)
                binding_ty(model.db, binding)
            }
        }
    };
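The `impl_expression_has_ty!` and `impl_binding_has_ty!` macros above follow a common Rust pattern: one macro stamps out an identical forwarding trait impl for many AST node types. A self-contained miniature of the same pattern, with toy types standing in for the crate's `SemanticModel` and AST nodes:

```rust
// Toy stand-ins for the crate's model and AST node types.
struct SemanticModel;

trait HasTy {
    fn ty(&self, model: &SemanticModel) -> &'static str;
}

struct ExprName;
struct ExprCall;

// One macro generates one forwarding impl per node type, mirroring
// `impl_expression_has_ty!` in the diff above.
macro_rules! impl_has_ty {
    ($node: ty, $label: expr) => {
        impl HasTy for $node {
            #[inline]
            fn ty(&self, _model: &SemanticModel) -> &'static str {
                $label
            }
        }
    };
}

impl_has_ty!(ExprName, "name");
impl_has_ty!(ExprCall, "call");

fn main() {
    let model = SemanticModel;
    assert_eq!(ExprName.ty(&model), "name");
    assert_eq!(ExprCall.ty(&model), "call");
}
```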
@@ -168,10 +168,10 @@ mod tests {
    use ruff_db::parsed::parsed_module;

    use crate::db::tests::TestDbBuilder;
    use crate::{HasType, SemanticModel};
    use crate::{HasTy, SemanticModel};

    #[test]
    fn function_type() -> anyhow::Result<()> {
    fn function_ty() -> anyhow::Result<()> {
        let db = TestDbBuilder::new()
            .with_file("/src/foo.py", "def test(): pass")
            .build()?;

@@ -182,7 +182,7 @@ mod tests {

        let function = ast.suite()[0].as_function_def_stmt().unwrap();
        let model = SemanticModel::new(&db, foo);
        let ty = function.inferred_type(&model);
        let ty = function.ty(&model);

        assert!(ty.is_function_literal());

@@ -190,7 +190,7 @@ mod tests {
    }

    #[test]
    fn class_type() -> anyhow::Result<()> {
    fn class_ty() -> anyhow::Result<()> {
        let db = TestDbBuilder::new()
            .with_file("/src/foo.py", "class Test: pass")
            .build()?;

@@ -201,7 +201,7 @@ mod tests {

        let class = ast.suite()[0].as_class_def_stmt().unwrap();
        let model = SemanticModel::new(&db, foo);
        let ty = class.inferred_type(&model);
        let ty = class.ty(&model);

        assert!(ty.is_class_literal());

@@ -209,7 +209,7 @@ mod tests {
    }

    #[test]
    fn alias_type() -> anyhow::Result<()> {
    fn alias_ty() -> anyhow::Result<()> {
        let db = TestDbBuilder::new()
            .with_file("/src/foo.py", "class Test: pass")
            .with_file("/src/bar.py", "from foo import Test")

@@ -222,7 +222,7 @@ mod tests {
        let import = ast.suite()[0].as_import_from_stmt().unwrap();
        let alias = &import.names[0];
        let model = SemanticModel::new(&db, bar);
        let ty = alias.inferred_type(&model);
        let ty = alias.ty(&model);

        assert!(ty.is_class_literal());
@@ -129,7 +129,7 @@ fn symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str) -> Symbol<'db>
    Err((declared_ty, _)) => {
        // Intentionally ignore conflicting declared types; that's not our problem,
        // it's the problem of the module we are importing from.
        declared_ty.inner_type().into()
        declared_ty.inner_ty().into()
    }
}
@@ -243,15 +243,15 @@ pub(crate) fn global_symbol<'db>(db: &'db dyn Db, file: File, name: &str) -> Sym
}

/// Infer the type of a binding.
pub(crate) fn binding_type<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
pub(crate) fn binding_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
    let inference = infer_definition_types(db, definition);
    inference.binding_type(definition)
    inference.binding_ty(definition)
}

/// Infer the type of a declaration.
fn declaration_type<'db>(db: &'db dyn Db, definition: Definition<'db>) -> TypeAndQualifiers<'db> {
fn declaration_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> TypeAndQualifiers<'db> {
    let inference = infer_definition_types(db, definition);
    inference.declaration_type(definition)
    inference.declaration_ty(definition)
}

/// Infer the type of a (possibly deferred) sub-expression of a [`Definition`].

@@ -260,7 +260,7 @@ fn declaration_type<'db>(db: &'db dyn Db, definition: Definition<'db>) -> TypeAn
///
/// ## Panics
/// If the given expression is not a sub-expression of the given [`Definition`].
fn definition_expression_type<'db>(
fn definition_expression_ty<'db>(
    db: &'db dyn Db,
    definition: Definition<'db>,
    expression: &ast::Expr,

@@ -273,14 +273,14 @@ fn definition_expression_type<'db>(
    if scope == definition.scope(db) {
        // expression is in the definition scope
        let inference = infer_definition_types(db, definition);
        if let Some(ty) = inference.try_expression_type(expr_id) {
        if let Some(ty) = inference.try_expression_ty(expr_id) {
            ty
        } else {
            infer_deferred_types(db, definition).expression_type(expr_id)
            infer_deferred_types(db, definition).expression_ty(expr_id)
        }
    } else {
        // expression is in a type-params sub-scope
        infer_scope_types(db, scope).expression_type(expr_id)
        infer_scope_types(db, scope).expression_ty(expr_id)
    }
}
@@ -323,7 +323,7 @@ fn symbol_from_bindings<'db>(
    .filter_map(|constraint| narrowing_constraint(db, constraint, binding))
    .peekable();

let binding_ty = binding_type(db, binding);
let binding_ty = binding_ty(db, binding);
if constraint_tys.peek().is_some() {
    let intersection_ty = constraint_tys
        .fold(

@@ -432,7 +432,7 @@ fn symbol_from_declarations<'db>(
        if static_visibility.is_always_false() {
            None
        } else {
            Some(declaration_type(db, declaration))
            Some(declaration_ty(db, declaration))
        }
    },
);
@@ -440,12 +440,12 @@ fn symbol_from_declarations<'db>(
if let Some(first) = types.next() {
    let mut conflicting: Vec<Type<'db>> = vec![];
    let declared_ty = if let Some(second) = types.next() {
        let ty_first = first.inner_type();
        let ty_first = first.inner_ty();
        let mut qualifiers = first.qualifiers();

        let mut builder = UnionBuilder::new(db).add(ty_first);
        for other in std::iter::once(second).chain(types) {
            let other_ty = other.inner_type();
            let other_ty = other.inner_ty();
            if !ty_first.is_equivalent_to(db, other_ty) {
                conflicting.push(other_ty);
            }

@@ -466,13 +466,13 @@ fn symbol_from_declarations<'db>(
    };

    Ok(SymbolAndQualifiers(
        Symbol::Type(declared_ty.inner_type(), boundness),
        Symbol::Type(declared_ty.inner_ty(), boundness),
        declared_ty.qualifiers(),
    ))
} else {
    Err((
        declared_ty,
        std::iter::once(first.inner_type())
        std::iter::once(first.inner_ty())
            .chain(conflicting)
            .collect(),
    ))
@@ -1008,7 +1008,7 @@ impl<'db> Type<'db> {
///
/// [assignable to]: https://typing.readthedocs.io/en/latest/spec/concepts.html#the-assignable-to-or-consistent-subtyping-relation
pub(crate) fn is_assignable_to(self, db: &'db dyn Db, target: Type<'db>) -> bool {
    if self.is_gradual_equivalent_to(db, target) {
    if self.is_equivalent_to(db, target) {
        return true;
    }
    match (self, target) {
@@ -1787,7 +1787,7 @@ impl<'db> Type<'db> {

if let Some(Type::BooleanLiteral(bool_val)) = bool_method
    .call(db, &CallArguments::positional([*instance_ty]))
    .return_type(db)
    .return_ty(db)
{
    bool_val.into()
} else {

@@ -1864,7 +1864,7 @@ impl<'db> Type<'db> {
    CallDunderResult::MethodNotAvailable => return None,

    CallDunderResult::CallOutcome(outcome) | CallDunderResult::PossiblyUnbound(outcome) => {
        outcome.return_type(db)?
        outcome.return_ty(db)?
    }
};
@@ -1879,11 +1879,11 @@ impl<'db> Type<'db> {
let mut binding = bind_call(db, arguments, function_type.signature(db), Some(self));
match function_type.known(db) {
    Some(KnownFunction::RevealType) => {
        let revealed_ty = binding.one_parameter_type().unwrap_or(Type::unknown());
        let revealed_ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
        CallOutcome::revealed(binding, revealed_ty)
    }
    Some(KnownFunction::StaticAssert) => {
        if let Some((parameter_ty, message)) = binding.two_parameter_types() {
        if let Some((parameter_ty, message)) = binding.two_parameter_tys() {
            let truthiness = parameter_ty.bool(db);

            if truthiness.is_always_true() {

@@ -1914,64 +1914,64 @@ impl<'db> Type<'db> {
    }
    Some(KnownFunction::IsEquivalentTo) => {
        let (ty_a, ty_b) = binding
            .two_parameter_types()
            .two_parameter_tys()
            .unwrap_or((Type::unknown(), Type::unknown()));
        binding
            .set_return_type(Type::BooleanLiteral(ty_a.is_equivalent_to(db, ty_b)));
            .set_return_ty(Type::BooleanLiteral(ty_a.is_equivalent_to(db, ty_b)));
        CallOutcome::callable(binding)
    }
    Some(KnownFunction::IsSubtypeOf) => {
        let (ty_a, ty_b) = binding
            .two_parameter_types()
            .two_parameter_tys()
            .unwrap_or((Type::unknown(), Type::unknown()));
        binding.set_return_type(Type::BooleanLiteral(ty_a.is_subtype_of(db, ty_b)));
        binding.set_return_ty(Type::BooleanLiteral(ty_a.is_subtype_of(db, ty_b)));
        CallOutcome::callable(binding)
    }
    Some(KnownFunction::IsAssignableTo) => {
        let (ty_a, ty_b) = binding
            .two_parameter_types()
            .two_parameter_tys()
            .unwrap_or((Type::unknown(), Type::unknown()));
        binding
            .set_return_type(Type::BooleanLiteral(ty_a.is_assignable_to(db, ty_b)));
            .set_return_ty(Type::BooleanLiteral(ty_a.is_assignable_to(db, ty_b)));
        CallOutcome::callable(binding)
    }
    Some(KnownFunction::IsDisjointFrom) => {
        let (ty_a, ty_b) = binding
            .two_parameter_types()
            .two_parameter_tys()
            .unwrap_or((Type::unknown(), Type::unknown()));
        binding
            .set_return_type(Type::BooleanLiteral(ty_a.is_disjoint_from(db, ty_b)));
            .set_return_ty(Type::BooleanLiteral(ty_a.is_disjoint_from(db, ty_b)));
        CallOutcome::callable(binding)
    }
    Some(KnownFunction::IsGradualEquivalentTo) => {
        let (ty_a, ty_b) = binding
            .two_parameter_types()
            .two_parameter_tys()
            .unwrap_or((Type::unknown(), Type::unknown()));
        binding.set_return_type(Type::BooleanLiteral(
        binding.set_return_ty(Type::BooleanLiteral(
            ty_a.is_gradual_equivalent_to(db, ty_b),
        ));
        CallOutcome::callable(binding)
    }
    Some(KnownFunction::IsFullyStatic) => {
        let ty = binding.one_parameter_type().unwrap_or(Type::unknown());
        binding.set_return_type(Type::BooleanLiteral(ty.is_fully_static(db)));
        let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
        binding.set_return_ty(Type::BooleanLiteral(ty.is_fully_static(db)));
        CallOutcome::callable(binding)
    }
    Some(KnownFunction::IsSingleton) => {
        let ty = binding.one_parameter_type().unwrap_or(Type::unknown());
        binding.set_return_type(Type::BooleanLiteral(ty.is_singleton(db)));
        let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
        binding.set_return_ty(Type::BooleanLiteral(ty.is_singleton(db)));
        CallOutcome::callable(binding)
    }
    Some(KnownFunction::IsSingleValued) => {
        let ty = binding.one_parameter_type().unwrap_or(Type::unknown());
        binding.set_return_type(Type::BooleanLiteral(ty.is_single_valued(db)));
        let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
        binding.set_return_ty(Type::BooleanLiteral(ty.is_single_valued(db)));
        CallOutcome::callable(binding)
    }

    Some(KnownFunction::Len) => {
        if let Some(first_arg) = binding.one_parameter_type() {
        if let Some(first_arg) = binding.one_parameter_ty() {
            if let Some(len_ty) = first_arg.len(db) {
                binding.set_return_type(len_ty);
                binding.set_return_ty(len_ty);
            }
        };
@@ -1979,15 +1979,15 @@ impl<'db> Type<'db> {
    }

    Some(KnownFunction::Repr) => {
        if let Some(first_arg) = binding.one_parameter_type() {
            binding.set_return_type(first_arg.repr(db));
        if let Some(first_arg) = binding.one_parameter_ty() {
            binding.set_return_ty(first_arg.repr(db));
        };

        CallOutcome::callable(binding)
    }

    Some(KnownFunction::AssertType) => {
        let Some((_, asserted_ty)) = binding.two_parameter_types() else {
        let Some((_, asserted_ty)) = binding.two_parameter_tys() else {
            return CallOutcome::callable(binding);
        };

@@ -1997,12 +1997,12 @@ impl<'db> Type<'db> {
    Some(KnownFunction::Cast) => {
        // TODO: Use `.two_parameter_tys()` exclusively
        // when overloads are supported.
        if binding.two_parameter_types().is_none() {
        if binding.two_parameter_tys().is_none() {
            return CallOutcome::callable(binding);
        };

        if let Some(casted_ty) = arguments.first_argument() {
            binding.set_return_type(casted_ty);
            binding.set_return_ty(casted_ty);
        };

        CallOutcome::callable(binding)
@@ -2015,7 +2015,7 @@ impl<'db> Type<'db> {
    // TODO annotated return type on `__new__` or metaclass `__call__`
    // TODO check call vs signatures of `__new__` and/or `__init__`
    Type::ClassLiteral(ClassLiteralType { class }) => {
        CallOutcome::callable(CallBinding::from_return_type(match class.known(db) {
        CallOutcome::callable(CallBinding::from_return_ty(match class.known(db) {
            // If the class is the builtin-bool class (for example `bool(1)`), we try to
            // return the specific truthiness value of the input arg, `Literal[True]` for
            // the example above.

@@ -2061,7 +2061,7 @@ impl<'db> Type<'db> {
    }

    // Dynamic types are callable, and the return type is the same dynamic type
    Type::Dynamic(_) => CallOutcome::callable(CallBinding::from_return_type(self)),
    Type::Dynamic(_) => CallOutcome::callable(CallBinding::from_return_ty(self)),

    Type::Union(union) => CallOutcome::union(
        self,

@@ -2071,7 +2071,7 @@ impl<'db> Type<'db> {
        .map(|elem| elem.call(db, arguments)),
    ),

    Type::Intersection(_) => CallOutcome::callable(CallBinding::from_return_type(
    Type::Intersection(_) => CallOutcome::callable(CallBinding::from_return_ty(
        todo_type!("Type::Intersection.call()"),
    )),
@@ -2117,7 +2117,7 @@ impl<'db> Type<'db> {
match dunder_iter_result {
    CallDunderResult::CallOutcome(ref call_outcome)
    | CallDunderResult::PossiblyUnbound(ref call_outcome) => {
        let Some(iterator_ty) = call_outcome.return_type(db) else {
        let Some(iterator_ty) = call_outcome.return_ty(db) else {
            return IterationOutcome::NotIterable {
                not_iterable_ty: self,
            };

@@ -2125,7 +2125,7 @@ impl<'db> Type<'db> {

return if let Some(element_ty) = iterator_ty
    .call_dunder(db, "__next__", &CallArguments::positional([iterator_ty]))
    .return_type(db)
    .return_ty(db)
{
    if matches!(dunder_iter_result, CallDunderResult::PossiblyUnbound(..)) {
        IterationOutcome::PossiblyUnboundDunderIter {

@@ -2156,7 +2156,7 @@ impl<'db> Type<'db> {
    "__getitem__",
    &CallArguments::positional([self, KnownClass::Int.to_instance(db)]),
)
.return_type(db)
.return_ty(db)
{
    IterationOutcome::Iterable { element_ty }
} else {
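The hunks above implement Python's iteration-protocol lookup: call `__iter__` and then `__next__` on the resulting iterator, and fall back to `__getitem__` with an `int` index for old-style iterables. A self-contained toy model of that decision chain (the real code operates on `Type` values and dunder call outcomes, not strings):

```rust
// Toy model of the iteration lookup order shown above.
#[derive(Debug, PartialEq)]
enum IterationOutcome {
    Iterable { element: &'static str },
    NotIterable,
}

fn iterate(
    dunder_iter_next: Option<&'static str>,
    dunder_getitem: Option<&'static str>,
) -> IterationOutcome {
    // Preferred protocol: `__iter__` returned an iterator whose `__next__`
    // yields the element type.
    if let Some(element) = dunder_iter_next {
        return IterationOutcome::Iterable { element };
    }
    // Old-style protocol: `__getitem__(int)` yields the element type.
    match dunder_getitem {
        Some(element) => IterationOutcome::Iterable { element },
        None => IterationOutcome::NotIterable,
    }
}

fn main() {
    assert_eq!(iterate(Some("int"), None), IterationOutcome::Iterable { element: "int" });
    assert_eq!(iterate(None, Some("str")), IterationOutcome::Iterable { element: "str" });
    assert_eq!(iterate(None, None), IterationOutcome::NotIterable);
}
```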
@@ -2262,9 +2262,7 @@ impl<'db> Type<'db> {
Type::Dynamic(_) => Ok(*self),
// TODO map this to a new `Type::TypeVar` variant
Type::KnownInstance(KnownInstanceType::TypeVar(_)) => Ok(*self),
Type::KnownInstance(KnownInstanceType::TypeAliasType(alias)) => {
    Ok(alias.value_type(db))
}
Type::KnownInstance(KnownInstanceType::TypeAliasType(alias)) => Ok(alias.value_ty(db)),
Type::KnownInstance(KnownInstanceType::Never | KnownInstanceType::NoReturn) => {
    Ok(Type::Never)
}

@@ -2360,8 +2358,7 @@ impl<'db> Type<'db> {
ClassBase::Dynamic(_) => *self,
ClassBase::Class(class) => SubclassOfType::from(
    db,
    ClassBase::try_from_type(db, class.metaclass(db))
        .unwrap_or(ClassBase::unknown()),
    ClassBase::try_from_ty(db, class.metaclass(db)).unwrap_or(ClassBase::unknown()),
),
},

@@ -2370,7 +2367,7 @@ impl<'db> Type<'db> {
// TODO intersections
Type::Intersection(_) => SubclassOfType::from(
    db,
    ClassBase::try_from_type(db, todo_type!("Intersection meta-type"))
    ClassBase::try_from_ty(db, todo_type!("Intersection meta-type"))
        .expect("Type::Todo should be a valid ClassBase"),
),
Type::AlwaysTruthy | Type::AlwaysFalsy => KnownClass::Type.to_instance(db),
@@ -2494,7 +2491,7 @@ impl<'db> TypeAndQualifiers<'db> {
}

/// Forget about type qualifiers and only return the inner type.
pub(crate) fn inner_type(&self) -> Type<'db> {
pub(crate) fn inner_ty(&self) -> Type<'db> {
    self.inner
}

@@ -3313,14 +3310,6 @@ impl<'db> IterationOutcome<'db> {
        }
    }
}

fn unwrap_without_diagnostic(self) -> Type<'db> {
    match self {
        Self::Iterable { element_ty } => element_ty,
        Self::NotIterable { .. } => Type::unknown(),
        Self::PossiblyUnboundDunderIter { element_ty, .. } => element_ty,
    }
}
}

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@@ -3789,7 +3778,7 @@ impl<'db> Class<'db> {
class_stmt
    .bases()
    .iter()
    .map(|base_node| definition_expression_type(db, class_definition, base_node))
    .map(|base_node| definition_expression_ty(db, class_definition, base_node))
    .collect()
}

@@ -3818,7 +3807,7 @@ impl<'db> Class<'db> {
.decorator_list
.iter()
.map(|decorator_node| {
    definition_expression_type(db, class_definition, &decorator_node.expression)
    definition_expression_ty(db, class_definition, &decorator_node.expression)
})
.collect()
}

@@ -3877,7 +3866,7 @@ impl<'db> Class<'db> {
    .find_keyword("metaclass")?
    .value;
let class_definition = semantic_index(db, self.file(db)).definition(class_stmt);
let metaclass_ty = definition_expression_type(db, class_definition, metaclass_node);
let metaclass_ty = definition_expression_ty(db, class_definition, metaclass_node);
Some(metaclass_ty)
}

@@ -3937,7 +3926,7 @@ impl<'db> Class<'db> {
let return_ty = outcomes
    .iter()
    .fold(None, |acc, outcome| {
        let ty = outcome.return_type(db);
        let ty = outcome.return_ty(db);

        match (acc, ty) {
            (acc, None) => {

@@ -3968,7 +3957,7 @@ impl<'db> Class<'db> {
    CallOutcome::Callable { binding }
    | CallOutcome::RevealType { binding, .. }
    | CallOutcome::StaticAssertionError { binding, .. }
    | CallOutcome::AssertType { binding, .. } => Ok(binding.return_type()),
    | CallOutcome::AssertType { binding, .. } => Ok(binding.return_ty()),
};

return return_ty_result.map(|ty| ty.to_meta_type(db));

@@ -4121,7 +4110,7 @@ impl<'db> Class<'db> {
    }
    Err((declared_ty, _conflicting_declarations)) => {
        // Ignore conflicting declarations
        SymbolAndQualifiers(declared_ty.inner_type().into(), declared_ty.qualifiers())
        SymbolAndQualifiers(declared_ty.inner_ty().into(), declared_ty.qualifiers())
    }
}
} else {

@@ -4187,13 +4176,13 @@ pub struct TypeAliasType<'db> {
#[salsa::tracked]
impl<'db> TypeAliasType<'db> {
    #[salsa::tracked]
    pub fn value_type(self, db: &'db dyn Db) -> Type<'db> {
    pub fn value_ty(self, db: &'db dyn Db) -> Type<'db> {
        let scope = self.rhs_scope(db);

        let type_alias_stmt_node = scope.node(db).expect_type_alias();
        let definition = semantic_index(db, scope.file(db)).definition(type_alias_stmt_node);

        definition_expression_type(db, definition, &type_alias_stmt_node.value)
        definition_expression_ty(db, definition, &type_alias_stmt_node.value)
    }
}
@@ -89,13 +89,13 @@ impl<'db> CallOutcome<'db> {
}

/// Get the return type of the call, or `None` if not callable.
pub(super) fn return_type(&self, db: &'db dyn Db) -> Option<Type<'db>> {
pub(super) fn return_ty(&self, db: &'db dyn Db) -> Option<Type<'db>> {
    match self {
        Self::Callable { binding } => Some(binding.return_type()),
        Self::Callable { binding } => Some(binding.return_ty()),
        Self::RevealType {
            binding,
            revealed_ty: _,
        } => Some(binding.return_type()),
        } => Some(binding.return_ty()),
        Self::NotCallable { not_callable_ty: _ } => None,
        Self::Union {
            outcomes,

@@ -105,7 +105,7 @@ impl<'db> CallOutcome<'db> {
// If all outcomes are NotCallable, we return None; if some outcomes are callable
// and some are not, we return a union including Unknown.
.fold(None, |acc, outcome| {
    let ty = outcome.return_type(db);
    let ty = outcome.return_ty(db);
    match (acc, ty) {
        (None, None) => None,
        (None, Some(ty)) => Some(UnionBuilder::new(db).add(ty)),

@@ -113,12 +113,12 @@ impl<'db> CallOutcome<'db> {
        }
    })
    .map(UnionBuilder::build),
    Self::PossiblyUnboundDunderCall { call_outcome, .. } => call_outcome.return_type(db),
    Self::PossiblyUnboundDunderCall { call_outcome, .. } => call_outcome.return_ty(db),
    Self::StaticAssertionError { .. } => Some(Type::none(db)),
    Self::AssertType {
        binding,
        asserted_ty: _,
    } => Some(binding.return_type()),
    } => Some(binding.return_ty()),
}
}
@@ -128,7 +128,7 @@ impl<'db> CallOutcome<'db> {
    context: &InferContext<'db>,
    node: ast::AnyNodeRef,
) -> Type<'db> {
    match self.return_type_result(context, node) {
    match self.return_ty_result(context, node) {
        Ok(return_ty) => return_ty,
        Err(NotCallableError::Type {
            not_callable_ty,

@@ -194,7 +194,7 @@ impl<'db> CallOutcome<'db> {
}

/// Get the return type of the call as a result.
pub(super) fn return_type_result(
pub(super) fn return_ty_result(
    &self,
    context: &InferContext<'db>,
    node: ast::AnyNodeRef,

@@ -205,7 +205,7 @@ impl<'db> CallOutcome<'db> {
    match self {
        Self::Callable { binding } => {
            binding.report_diagnostics(context, node);
            Ok(binding.return_type())
            Ok(binding.return_ty())
        }
        Self::RevealType {
            binding,

@@ -218,7 +218,7 @@ impl<'db> CallOutcome<'db> {
        Severity::Info,
        format_args!("Revealed type is `{}`", revealed_ty.display(context.db())),
    );
    Ok(binding.return_type())
    Ok(binding.return_ty())
}
Self::NotCallable { not_callable_ty } => Err(NotCallableError::Type {
    not_callable_ty: *not_callable_ty,

@@ -230,7 +230,7 @@ impl<'db> CallOutcome<'db> {
} => Err(NotCallableError::PossiblyUnboundDunderCall {
    callable_ty: *called_ty,
    return_ty: call_outcome
        .return_type(context.db())
        .return_ty(context.db())
        .unwrap_or(Type::unknown()),
}),
Self::Union {

@@ -251,7 +251,7 @@ impl<'db> CallOutcome<'db> {
    revealed_ty: _,
} => {
    if revealed {
        binding.return_type()
        binding.return_ty()
    } else {
        revealed = true;
        outcome.unwrap_with_diagnostic(context, node)

@@ -329,8 +329,8 @@ impl<'db> CallOutcome<'db> {
    binding,
    asserted_ty,
} => {
    let [actual_ty, _asserted] = binding.parameter_types() else {
        return Ok(binding.return_type());
    let [actual_ty, _asserted] = binding.parameter_tys() else {
        return Ok(binding.return_ty());
    };

    if !actual_ty.is_gradual_equivalent_to(context.db(), *asserted_ty) {

@@ -345,7 +345,7 @@ impl<'db> CallOutcome<'db> {
        );
    }

    Ok(binding.return_type())
    Ok(binding.return_ty())
}
}
}
@@ -358,9 +358,9 @@ pub(super) enum CallDunderResult<'db> {
}

impl<'db> CallDunderResult<'db> {
    pub(super) fn return_type(&self, db: &'db dyn Db) -> Option<Type<'db>> {
    pub(super) fn return_ty(&self, db: &'db dyn Db) -> Option<Type<'db>> {
        match self {
            Self::CallOutcome(outcome) => outcome.return_type(db),
            Self::CallOutcome(outcome) => outcome.return_ty(db),
            Self::PossiblyUnbound { .. } => None,
            Self::MethodNotAvailable => None,
        }

@@ -394,7 +394,7 @@ pub(super) enum NotCallableError<'db> {

impl<'db> NotCallableError<'db> {
    /// The return type that should be used when a call is not callable.
    pub(super) fn return_type(&self) -> Type<'db> {
    pub(super) fn return_ty(&self) -> Type<'db> {
        match self {
            Self::Type { return_ty, .. } => *return_ty,
            Self::UnionElement { return_ty, .. } => *return_ty,

@@ -407,7 +407,7 @@ impl<'db> NotCallableError<'db> {
    ///
    /// For unions, returns the union type itself, which may contain a mix of callable and
    /// non-callable types.
    pub(super) fn called_type(&self) -> Type<'db> {
    pub(super) fn called_ty(&self) -> Type<'db> {
        match self {
            Self::Type {
                not_callable_ty, ..
@@ -73,7 +73,7 @@ pub(crate) fn bind_call<'db>(
        continue;
    }
};
if let Some(expected_ty) = parameter.annotated_type() {
if let Some(expected_ty) = parameter.annotated_ty() {
    if !argument_ty.is_assignable_to(db, expected_ty) {
        errors.push(CallBindingError::InvalidArgumentType {
            parameter: ParameterContext::new(parameter, index, positional),

@@ -109,8 +109,7 @@ pub(crate) fn bind_call<'db>(
for (index, bound_ty) in parameter_tys.iter().enumerate() {
    if bound_ty.is_none() {
        let param = &parameters[index];
        if param.is_variadic() || param.is_keyword_variadic() || param.default_type().is_some()
        {
        if param.is_variadic() || param.is_keyword_variadic() || param.default_ty().is_some() {
            // variadic/keywords and defaulted arguments are not required
            continue;
        }

@@ -152,7 +151,7 @@ pub(crate) struct CallBinding<'db> {

impl<'db> CallBinding<'db> {
    // TODO remove this constructor and construct always from `bind_call`
    pub(crate) fn from_return_type(return_ty: Type<'db>) -> Self {
    pub(crate) fn from_return_ty(return_ty: Type<'db>) -> Self {
        Self {
            callable_ty: None,
            return_ty,

@@ -161,27 +160,27 @@ impl<'db> CallBinding<'db> {
    }
}

pub(crate) fn set_return_type(&mut self, return_ty: Type<'db>) {
pub(crate) fn set_return_ty(&mut self, return_ty: Type<'db>) {
    self.return_ty = return_ty;
}

pub(crate) fn return_type(&self) -> Type<'db> {
pub(crate) fn return_ty(&self) -> Type<'db> {
    self.return_ty
}

pub(crate) fn parameter_types(&self) -> &[Type<'db>] {
pub(crate) fn parameter_tys(&self) -> &[Type<'db>] {
    &self.parameter_tys
}

pub(crate) fn one_parameter_type(&self) -> Option<Type<'db>> {
    match self.parameter_types() {
pub(crate) fn one_parameter_ty(&self) -> Option<Type<'db>> {
    match self.parameter_tys() {
        [ty] => Some(*ty),
        _ => None,
    }
}

pub(crate) fn two_parameter_types(&self) -> Option<(Type<'db>, Type<'db>)> {
    match self.parameter_types() {
pub(crate) fn two_parameter_tys(&self) -> Option<(Type<'db>, Type<'db>)> {
    match self.parameter_tys() {
        [first, second] => Some((*first, *second)),
        _ => None,
    }
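The `one_parameter_ty`/`two_parameter_tys` helpers above are plain slice patterns over the bound argument types. A self-contained illustration of the same pattern, with `i32` standing in for the crate's `Type` values:

```rust
// A slice pattern with exactly one element; any other arity yields `None`.
fn one_parameter(tys: &[i32]) -> Option<i32> {
    match tys {
        [ty] => Some(*ty),
        _ => None,
    }
}

// A slice pattern with exactly two elements.
fn two_parameters(tys: &[i32]) -> Option<(i32, i32)> {
    match tys {
        [first, second] => Some((*first, *second)),
        _ => None,
    }
}

fn main() {
    assert_eq!(one_parameter(&[1]), Some(1));
    assert_eq!(one_parameter(&[1, 2]), None);
    assert_eq!(two_parameters(&[1, 2]), Some((1, 2)));
    assert_eq!(two_parameters(&[1]), None);
}
```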
@@ -62,7 +62,7 @@ impl<'db> ClassBase<'db> {
/// Attempt to resolve `ty` into a `ClassBase`.
///
/// Return `None` if `ty` is not an acceptable type for a class base.
pub(super) fn try_from_type(db: &'db dyn Db, ty: Type<'db>) -> Option<Self> {
pub(super) fn try_from_ty(db: &'db dyn Db, ty: Type<'db>) -> Option<Self> {
    match ty {
        Type::Dynamic(dynamic) => Some(Self::Dynamic(dynamic)),
        Type::ClassLiteral(ClassLiteralType { class }) => Some(Self::Class(class)),

@@ -112,40 +112,40 @@ impl<'db> ClassBase<'db> {
KnownInstanceType::Any => Some(Self::any()),
// TODO: Classes inheriting from `typing.Type` et al. also have `Generic` in their MRO
KnownInstanceType::Dict => {
    Self::try_from_type(db, KnownClass::Dict.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::Dict.to_class_literal(db))
}
KnownInstanceType::List => {
    Self::try_from_type(db, KnownClass::List.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::List.to_class_literal(db))
}
KnownInstanceType::Type => {
    Self::try_from_type(db, KnownClass::Type.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::Type.to_class_literal(db))
}
KnownInstanceType::Tuple => {
    Self::try_from_type(db, KnownClass::Tuple.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::Tuple.to_class_literal(db))
}
KnownInstanceType::Set => {
    Self::try_from_type(db, KnownClass::Set.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::Set.to_class_literal(db))
}
KnownInstanceType::FrozenSet => {
    Self::try_from_type(db, KnownClass::FrozenSet.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::FrozenSet.to_class_literal(db))
}
KnownInstanceType::ChainMap => {
    Self::try_from_type(db, KnownClass::ChainMap.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::ChainMap.to_class_literal(db))
}
KnownInstanceType::Counter => {
    Self::try_from_type(db, KnownClass::Counter.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::Counter.to_class_literal(db))
}
KnownInstanceType::DefaultDict => {
    Self::try_from_type(db, KnownClass::DefaultDict.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::DefaultDict.to_class_literal(db))
}
KnownInstanceType::Deque => {
    Self::try_from_type(db, KnownClass::Deque.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::Deque.to_class_literal(db))
}
KnownInstanceType::OrderedDict => {
    Self::try_from_type(db, KnownClass::OrderedDict.to_class_literal(db))
    Self::try_from_ty(db, KnownClass::OrderedDict.to_class_literal(db))
}
KnownInstanceType::Callable => {
    Self::try_from_type(db, todo_type!("Support for Callable as a base class"))
    Self::try_from_ty(db, todo_type!("Support for Callable as a base class"))
}
},
}
@@ -8,7 +8,7 @@ use ruff_db::{
use ruff_python_ast::AnyNodeRef;
use ruff_text_size::Ranged;

use super::{binding_type, KnownFunction, TypeCheckDiagnostic, TypeCheckDiagnostics};
use super::{binding_ty, KnownFunction, TypeCheckDiagnostic, TypeCheckDiagnostics};

use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::ScopeId;

@@ -144,7 +144,7 @@ impl<'db> InferContext<'db> {
    .ancestor_scopes(scope_id)
    .filter_map(|(_, scope)| scope.node().as_function())
    .filter_map(|function| {
        binding_type(self.db, index.definition(function)).into_function_literal()
        binding_ty(self.db, index.definition(function)).into_function_literal()
    });

// Iterate over all functions and test if any is decorated with `@no_type_check`.
@@ -802,8 +802,8 @@ impl Diagnostic for TypeCheckDiagnostic {
    TypeCheckDiagnostic::message(self).into()
}

fn file(&self) -> Option<File> {
    Some(TypeCheckDiagnostic::file(self))
fn file(&self) -> File {
    TypeCheckDiagnostic::file(self)
}

fn range(&self) -> Option<TextRange> {
@@ -984,13 +984,13 @@ pub(super) fn report_slice_step_size_zero(context: &InferContext, node: AnyNodeR
    );
}

fn report_invalid_assignment_with_message(
pub(super) fn report_invalid_assignment(
    context: &InferContext,
    node: AnyNodeRef,
    target_ty: Type,
    message: std::fmt::Arguments,
    declared_ty: Type,
    assigned_ty: Type,
) {
    match target_ty {
    match declared_ty {
        Type::ClassLiteral(ClassLiteralType { class }) => {
            context.report_lint(&INVALID_ASSIGNMENT, node, format_args!(
                "Implicit shadowing of class `{}`; annotate to make it explicit if this is intentional",

@@ -1002,48 +1002,19 @@ fn report_invalid_assignment_with_message(
            function.name(context.db())));
        }
        _ => {
            context.report_lint(&INVALID_ASSIGNMENT, node, message);
            context.report_lint(
                &INVALID_ASSIGNMENT,
                node,
                format_args!(
                    "Object of type `{}` is not assignable to `{}`",
                    assigned_ty.display(context.db()),
                    declared_ty.display(context.db()),
                ),
            );
        }
    }
}

pub(super) fn report_invalid_assignment(
    context: &InferContext,
    node: AnyNodeRef,
    target_ty: Type,
    source_ty: Type,
) {
    report_invalid_assignment_with_message(
        context,
        node,
        target_ty,
        format_args!(
            "Object of type `{}` is not assignable to `{}`",
            source_ty.display(context.db()),
            target_ty.display(context.db()),
        ),
    );
}

pub(super) fn report_invalid_attribute_assignment(
    context: &InferContext,
    node: AnyNodeRef,
    target_ty: Type,
    source_ty: Type,
    attribute_name: &'_ str,
) {
    report_invalid_assignment_with_message(
        context,
        node,
        target_ty,
        format_args!(
            "Object of type `{}` is not assignable to attribute `{attribute_name}` of type `{}`",
            source_ty.display(context.db()),
            target_ty.display(context.db()),
        ),
    );
}

pub(super) fn report_possibly_unresolved_reference(
    context: &InferContext,
    expr_name_node: &ast::ExprName,
(File diff suppressed because it is too large.)
@@ -76,7 +76,7 @@ impl<'db> Mro<'db> {
// This *could* theoretically be handled by the final branch below,
// but it's a common case (i.e., worth optimizing for),
// and the `c3_merge` function requires lots of allocations.
[single_base] => ClassBase::try_from_type(db, *single_base).map_or_else(
[single_base] => ClassBase::try_from_ty(db, *single_base).map_or_else(
    || Err(MroErrorKind::InvalidBases(Box::from([(0, *single_base)]))),
    |single_base| {
        Ok(std::iter::once(ClassBase::Class(class))

@@ -95,7 +95,7 @@ impl<'db> Mro<'db> {
let mut invalid_bases = vec![];

for (i, base) in multiple_bases.iter().enumerate() {
    match ClassBase::try_from_type(db, *base) {
    match ClassBase::try_from_ty(db, *base) {
        Some(valid_base) => valid_bases.push(valid_base),
        None => invalid_bases.push((i, *base)),
    }
@@ -322,9 +322,9 @@ impl<'db> NarrowingConstraintsBuilder<'db> {

for (op, (left, right)) in std::iter::zip(&**ops, comparator_tuples) {
    let lhs_ty = last_rhs_ty.unwrap_or_else(|| {
        inference.expression_type(left.scoped_expression_id(self.db, scope))
        inference.expression_ty(left.scoped_expression_id(self.db, scope))
    });
    let rhs_ty = inference.expression_type(right.scoped_expression_id(self.db, scope));
    let rhs_ty = inference.expression_ty(right.scoped_expression_id(self.db, scope));
    last_rhs_ty = Some(rhs_ty);

    match left {

@@ -393,7 +393,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
}

let callable_ty =
    inference.expression_type(callable.scoped_expression_id(self.db, scope));
    inference.expression_ty(callable.scoped_expression_id(self.db, scope));

if callable_ty
    .into_class_literal()

@@ -422,7 +422,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
let inference = infer_expression_types(self.db, expression);

let callable_ty =
    inference.expression_type(expr_call.func.scoped_expression_id(self.db, scope));
    inference.expression_ty(expr_call.func.scoped_expression_id(self.db, scope));

// TODO: add support for PEP 604 union types on the right hand side of `isinstance`
// and `issubclass`, for example `isinstance(x, str | (int | float))`.

@@ -441,7 +441,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
let symbol = self.symbols().symbol_id_by_name(id).unwrap();

let class_info_ty =
    inference.expression_type(class_info.scoped_expression_id(self.db, scope));
    inference.expression_ty(class_info.scoped_expression_id(self.db, scope));

function
    .generate_constraint(self.db, class_info_ty)

@@ -500,7 +500,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
let scope = self.scope();
let inference = infer_expression_types(self.db, cls);
let ty = inference
    .expression_type(cls.node_ref(self.db).scoped_expression_id(self.db, scope))
    .expression_ty(cls.node_ref(self.db).scoped_expression_id(self.db, scope))
    .to_instance(self.db);
let mut constraints = NarrowingConstraints::default();
constraints.insert(symbol, ty);

@@ -524,7 +524,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
// filter our arms with statically known truthiness
.filter(|expr| {
    inference
        .expression_type(expr.scoped_expression_id(self.db, scope))
        .expression_ty(expr.scoped_expression_id(self.db, scope))
        .bool(self.db)
        != match expr_bool_op.op {
            BoolOp::And => Truthiness::AlwaysTrue,
@@ -461,12 +461,6 @@ mod stable {
    forall types s, t.
        s.is_fully_static(db) && s.is_gradual_equivalent_to(db, t) => s.is_equivalent_to(db, t)
);

// `T` can be assigned to itself.
type_property_test!(
    assignable_to_is_reflexive, db,
    forall types t. t.is_assignable_to(db, t)
);
}

/// This module contains property tests that currently lead to many false positives.

@@ -481,6 +475,13 @@ mod flaky {

use super::{intersection, union};

// Currently fails due to https://github.com/astral-sh/ruff/issues/14899
// `T` can be assigned to itself.
type_property_test!(
    assignable_to_is_reflexive, db,
    forall types t. t.is_assignable_to(db, t)
);

// Negating `T` twice is equivalent to `T`.
type_property_test!(
    double_negation_is_identity, db,
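For readers unfamiliar with the `type_property_test!` macro used above: it generates property tests of the form "for all types, some relation holds". A rough self-contained analogue using the third-party `proptest` crate (an assumption for illustration; red_knot's macro is its own machinery), with `<=` on integers standing in for `is_assignable_to`:

```rust
use proptest::prelude::*;

proptest! {
    // Analogue of `assignable_to_is_reflexive`: a relation R is reflexive
    // if R(t, t) holds for every t. Here `<=` plays the role of
    // `is_assignable_to`, and proptest generates the "forall" inputs.
    #[test]
    fn assignable_to_is_reflexive(t: i64) {
        prop_assert!(t <= t);
    }
}
```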
@@ -1,4 +1,4 @@
use super::{definition_expression_type, Type};
use super::{definition_expression_ty, Type};
use crate::Db;
use crate::{semantic_index::definition::Definition, types::todo_type};
use ruff_python_ast::{self as ast, name::Name};

@@ -39,7 +39,7 @@ impl<'db> Signature<'db> {
if function_node.is_async {
    todo_type!("generic types.CoroutineType")
} else {
    definition_expression_type(db, definition, returns.as_ref())
    definition_expression_ty(db, definition, returns.as_ref())
}
});

@@ -97,7 +97,7 @@ impl<'db> Parameters<'db> {
parameter_with_default
    .default
    .as_deref()
    .map(|default| definition_expression_type(db, definition, default))
    .map(|default| definition_expression_ty(db, definition, default))
};
let positional_only = posonlyargs.iter().map(|arg| {
    Parameter::from_node_and_kind(

@@ -245,7 +245,7 @@ impl<'db> Parameter<'db> {
annotated_ty: parameter
    .annotation
    .as_deref()
    .map(|annotation| definition_expression_type(db, definition, annotation)),
    .map(|annotation| definition_expression_ty(db, definition, annotation)),
kind,
}
}

@@ -276,7 +276,7 @@ impl<'db> Parameter<'db> {
}

/// Annotated type of the parameter, if annotated.
pub(crate) fn annotated_type(&self) -> Option<Type<'db>> {
pub(crate) fn annotated_ty(&self) -> Option<Type<'db>> {
    self.annotated_ty
}

@@ -295,7 +295,7 @@ impl<'db> Parameter<'db> {
}

/// Default-value type of the parameter, if any.
pub(crate) fn default_type(&self) -> Option<Type<'db>> {
pub(crate) fn default_ty(&self) -> Option<Type<'db>> {
    match self.kind {
        ParameterKind::PositionalOnly { default_ty } => default_ty,
        ParameterKind::PositionalOrKeyword { default_ty } => default_ty,
@@ -43,7 +43,7 @@ impl<'db> Unpacker<'db> {
    );

    let mut value_ty = infer_expression_types(self.db(), value.expression())
        .expression_type(value.scoped_expression_id(self.db(), self.scope));
        .expression_ty(value.scoped_expression_id(self.db(), self.scope));

    if value.is_assign()
        && self.context.in_stub()
@@ -294,8 +294,8 @@ impl<'db> VisibilityConstraints<'db> {
ConstraintNode::Expression(test_expr) => {
    let inference = infer_expression_types(db, test_expr);
    let scope = test_expr.scope(db);
    let ty = inference
        .expression_type(test_expr.node_ref(db).scoped_expression_id(db, scope));
    let ty =
        inference.expression_ty(test_expr.node_ref(db).scoped_expression_id(db, scope));

    ty.bool(db).negate_if(!constraint.is_positive)
}

@@ -304,7 +304,7 @@ impl<'db> VisibilityConstraints<'db> {
let subject_expression = inner.subject(db);
let inference = infer_expression_types(db, *subject_expression);
let scope = subject_expression.scope(db);
let subject_ty = inference.expression_type(
let subject_ty = inference.expression_ty(
    subject_expression
        .node_ref(db)
        .scoped_expression_id(db, scope),

@@ -312,8 +312,8 @@ impl<'db> VisibilityConstraints<'db> {

let inference = infer_expression_types(db, *value);
let scope = value.scope(db);
let value_ty = inference
    .expression_type(value.node_ref(db).scoped_expression_id(db, scope));
let value_ty =
    inference.expression_ty(value.node_ref(db).scoped_expression_id(db, scope));

if subject_ty.is_single_valued(db) {
    let truthiness =
@@ -75,9 +75,9 @@ fn to_lsp_diagnostic(
    diagnostic: &dyn ruff_db::diagnostic::Diagnostic,
    encoding: crate::PositionEncoding,
) -> Diagnostic {
    let range = if let (Some(file), Some(range)) = (diagnostic.file(), diagnostic.range()) {
        let index = line_index(db.upcast(), file);
        let source = source_text(db.upcast(), file);
    let range = if let Some(range) = diagnostic.range() {
        let index = line_index(db.upcast(), diagnostic.file());
        let source = source_text(db.upcast(), diagnostic.file());

        range.to_range(&source, &index, encoding)
    } else {
@@ -8,8 +8,8 @@ under a certain directory as test suites.

A Markdown test suite can contain any number of tests. A test consists of one or more embedded
"files", each defined by a triple-backticks fenced code block. The code block must have a tag string
specifying its language. We currently support `py` (Python files) and `pyi` (type stub files), as
well as [typeshed `VERSIONS`] files and `toml` for configuration.
specifying its language; currently only `py` (Python files) and `pyi` (type stub files) are
supported.

The simplest possible test suite consists of just a single test, with a single embedded file:
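
For instance, a minimal suite could be a single `py` code block containing one inline assertion (an illustrative sketch, using the `# revealed:` assertion syntax that appears later in this document):

````markdown
```py
reveal_type(1)  # revealed: Literal[1]
```
````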

@@ -243,20 +243,6 @@ section. Nested sections can override configurations from their parent sections.

See [`MarkdownTestConfig`](https://github.com/astral-sh/ruff/blob/main/crates/red_knot_test/src/config.rs) for the full list of supported configuration options.

### Specifying a custom typeshed

Some tests will need to override the default typeshed with custom files. The `[environment]`
configuration option `typeshed` can be used to do this:

````markdown
```toml
[environment]
typeshed = "/typeshed"
```
````

For more details, take a look at the [custom-typeshed Markdown test].

## Documentation of tests

Arbitrary Markdown syntax (including of course normal prose paragraphs) is permitted (and ignored by
@@ -308,6 +294,36 @@ The column assertion `6` on the ending line should be optional.
In cases of overlapping such assertions, resolve ambiguity using more angle brackets: `<<<<` begins
an assertion ended by `>>>>`, etc.

### Non-Python files

Some tests may need to specify non-Python embedded files: typeshed `stdlib/VERSIONS`, `pth` files,
`py.typed` files, `pyvenv.cfg` files...

We will allow specifying any of these using the `text` language in the code block tag string:

````markdown
```text path=/third-party/foo/py.typed
partial
```
````

We may want to also support testing Jupyter notebooks as embedded files; exact syntax for this is
yet to be determined.

Of course, red-knot is only run directly on `py` and `pyi` files, and assertion comments are only
possible in these files.

A fenced code block with no language will always be an error.

### Running just a single test from a suite

Having each test in a suite always run as a distinct Rust test would require writing our own test
runner or code-generating tests in a build script; neither of these is planned.

We could still allow running just a single test from a suite, for debugging purposes, either via
some "focus" syntax that could be easily temporarily added to a test, or via an environment
variable.

### Configuring search paths and kinds

The red-knot TOML configuration format hasn't been finalized, and we may want to implement
@@ -330,6 +346,38 @@ Paths for `workspace-root` and `third-party-root` must be absolute.
Relative embedded-file paths are relative to the workspace root, even if it is explicitly set to a
non-default value using the `workspace-root` config.

### Specifying a custom typeshed

Some tests will need to override the default typeshed with custom files. The `[environment]`
configuration option `typeshed-path` can be used to do this:

````markdown
```toml
[environment]
typeshed-path = "/typeshed"
```

This file is importable as part of our custom typeshed, because it is within `/typeshed`, which we
configured above as our custom typeshed root:

```py path=/typeshed/stdlib/builtins.pyi
I_AM_THE_ONLY_BUILTIN = 1
```

This file is written to `/src/test.py`, because the default workspace root is `/src/` and the default
file path is `test.py`:

```py
reveal_type(I_AM_THE_ONLY_BUILTIN) # revealed: Literal[1]
```

````

A fenced code block with language `text` can be used to provide a `stdlib/VERSIONS` file in the
custom typeshed root. If no such file is created explicitly, one should be created implicitly
including entries enabling all specified `<typeshed-root>/stdlib` files for all supported Python
versions.
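
For example, given only `/typeshed/stdlib/builtins.pyi` and `/typeshed/stdlib/foo/__init__.pyi`, the implicitly generated `VERSIONS` file would look roughly like this (a sketch; the `3.8-` lower bound matches the test-harness code shown further below):

```text
builtins: 3.8-
foo: 3.8-
```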

### I/O errors

We could use an `error=` configuration option in the tag string to make an embedded file cause an
@@ -432,6 +480,3 @@ cold, to validate equivalence of cold and incremental check results.

[^extensions]: `typing-extensions` is a third-party module, but typeshed, and thus type checkers
    also, treat it as part of the standard library.

[custom-typeshed markdown test]: ../red_knot_python_semantic/resources/mdtest/mdtest_custom_typeshed.md
[typeshed `versions`]: https://github.com/python/typeshed/blob/c546278aae47de0b2b664973da4edb613400f6ce/stdlib/VERSIONS#L1-L18

@@ -34,12 +34,6 @@ impl MarkdownTestConfig {
            .as_ref()
            .and_then(|env| env.python_platform.clone())
    }

    pub(crate) fn typeshed(&self) -> Option<&str> {
        self.environment
            .as_ref()
            .and_then(|env| env.typeshed.as_deref())
    }
}

#[derive(Deserialize, Debug, Default, Clone)]
@@ -50,9 +44,6 @@ pub(crate) struct Environment {

    /// Target platform to assume when resolving types.
    pub(crate) python_platform: Option<PythonPlatform>,

    /// Path to a custom typeshed directory.
    pub(crate) typeshed: Option<String>,
}

#[derive(Deserialize, Debug, Clone)]

@@ -198,8 +198,8 @@ mod tests {
            "dummy".into()
        }

        fn file(&self) -> Option<File> {
            Some(self.file)
        fn file(&self) -> File {
            self.file
        }

        fn range(&self) -> Option<TextRange> {

@@ -3,7 +3,7 @@ use camino::Utf8Path;
use colored::Colorize;
use parser as test_parser;
use red_knot_python_semantic::types::check_types;
use red_knot_python_semantic::{Program, ProgramSettings, SearchPathSettings, SitePackages};
use red_knot_python_semantic::Program;
use ruff_db::diagnostic::{Diagnostic, ParseDiagnostic};
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::panic::catch_unwind;
@@ -12,7 +12,7 @@ use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use ruff_db::testing::{setup_logging, setup_logging_with_filter};
use ruff_source_file::{LineIndex, OneIndexed};
use ruff_text_size::TextSize;
use std::fmt::Write;
use salsa::Setter;

mod assertion;
mod config;
@@ -50,6 +50,13 @@ pub fn run(path: &Utf8Path, long_title: &str, short_title: &str, test_name: &str
        Log::Filter(filter) => setup_logging_with_filter(filter),
    });

    Program::get(&db)
        .set_python_version(&mut db)
        .to(test.configuration().python_version().unwrap_or_default());
    Program::get(&db)
        .set_python_platform(&mut db)
        .to(test.configuration().python_platform().unwrap_or_default());

    // Remove all files so that the db is in a "fresh" state.
    db.memory_file_system().remove_all();
    Files::sync_all(&mut db);
@@ -91,10 +98,6 @@ pub fn run(path: &Utf8Path, long_title: &str, short_title: &str, test_name: &str

fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures> {
    let project_root = db.project_root().to_path_buf();
    let src_path = SystemPathBuf::from("/src");
    let custom_typeshed_path = test.configuration().typeshed().map(SystemPathBuf::from);
    let mut typeshed_files = vec![];
    let mut has_custom_versions_file = false;

    let test_files: Vec<_> = test
        .files()
@@ -104,33 +107,11 @@ fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures
            }

            assert!(
                matches!(embedded.lang, "py" | "pyi" | "text"),
                "Supported file types are: py, pyi, text"
                matches!(embedded.lang, "py" | "pyi"),
                "Non-Python files not supported yet."
            );

            let full_path = if embedded.path.starts_with('/') {
                SystemPathBuf::from(embedded.path)
            } else {
                project_root.join(embedded.path)
            };

            if let Some(ref typeshed_path) = custom_typeshed_path {
                if let Ok(relative_path) = full_path.strip_prefix(typeshed_path.join("stdlib")) {
                    if relative_path.as_str() == "VERSIONS" {
                        has_custom_versions_file = true;
                    } else if relative_path.extension().is_some_and(|ext| ext == "pyi") {
                        typeshed_files.push(relative_path.to_path_buf());
                    }
                }
            }

            let full_path = project_root.join(embedded.path);
            db.write_file(&full_path, embedded.code).unwrap();

            if !full_path.starts_with(&src_path) || embedded.lang == "text" {
                // These files need to be written to the file system (above), but we don't run any checks on them.
                return None;
            }

            let file = system_path_to_file(db, full_path).unwrap();

            Some(TestFile {
@@ -140,42 +121,6 @@ fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures
        })
        .collect();

    // Create a custom typeshed `VERSIONS` file if none was provided.
    if let Some(ref typeshed_path) = custom_typeshed_path {
        if !has_custom_versions_file {
            let versions_file = typeshed_path.join("stdlib/VERSIONS");
            let contents = typeshed_files
                .iter()
                .fold(String::new(), |mut content, path| {
                    // This is intentionally kept simple:
                    let module_name = path
                        .as_str()
                        .trim_end_matches(".pyi")
                        .trim_end_matches("/__init__")
                        .replace('/', ".");
                    let _ = writeln!(content, "{module_name}: 3.8-");
                    content
                });
            db.write_file(&versions_file, contents).unwrap();
        }
    }
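
To illustrate the path-to-module mapping implemented in the fold above, here is a rough Python equivalent of the same logic (an illustrative sketch, not part of the diff):

```py
def module_name(stub_path: str) -> str:
    # Mirrors the Rust above: strip ".pyi", strip a trailing "/__init__",
    # then turn path separators into dots.
    return stub_path.removesuffix(".pyi").removesuffix("/__init__").replace("/", ".")

assert module_name("builtins.pyi") == "builtins"
assert module_name("foo/bar/__init__.pyi") == "foo.bar"
```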

    Program::get(db)
        .update_from_settings(
            db,
            ProgramSettings {
                python_version: test.configuration().python_version().unwrap_or_default(),
                python_platform: test.configuration().python_platform().unwrap_or_default(),
                search_paths: SearchPathSettings {
                    src_roots: vec![src_path],
                    extra_paths: vec![],
                    custom_typeshed: custom_typeshed_path,
                    site_packages: SitePackages::Known(vec![]),
                },
            },
        )
        .expect("Failed to update Program settings in TestDb");

    let failures: Failures = test_files
        .into_iter()
        .filter_map(|test_file| {

@@ -385,8 +385,8 @@ mod tests {
            self.message.into()
        }

        fn file(&self) -> Option<File> {
            Some(self.file)
        fn file(&self) -> File {
            self.file
        }

        fn range(&self) -> Option<TextRange> {

@@ -133,17 +133,12 @@ struct EmbeddedFileId;

/// A single file embedded in a [`Section`] as a fenced code block.
///
/// Currently must be a Python file (`py` language), a type stub (`pyi`) or a [typeshed `VERSIONS`]
/// file.
///
/// TOML configuration blocks are also supported, but are not stored as `EmbeddedFile`s. In the
/// future we plan to support `pth` files as well.
/// Currently must be a Python file (`py` language) or type stub (`pyi`). In the future we plan to
/// support other kinds of files as well (TOML configuration, typeshed VERSIONS, `pth` files...).
///
/// A Python embedded file makes its containing [`Section`] into a [`MarkdownTest`], and will be
/// type-checked and searched for inline-comment assertions to match against the diagnostics from
/// type checking.
///
/// [typeshed `VERSIONS`]: https://github.com/python/typeshed/blob/c546278aae47de0b2b664973da4edb613400f6ce/stdlib/VERSIONS#L1-L18
#[derive(Debug)]
pub(crate) struct EmbeddedFile<'s> {
    section: SectionId,

@@ -4,7 +4,6 @@ use js_sys::Error;
use wasm_bindgen::prelude::*;

use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::metadata::value::RangedValue;
use red_knot_project::ProjectMetadata;
use red_knot_project::{Db, ProjectDatabase};
use ruff_db::diagnostic::Diagnostic;
@@ -49,7 +48,7 @@ impl Workspace {

        workspace.apply_cli_options(Options {
            environment: Some(EnvironmentOptions {
                python_version: Some(RangedValue::cli(settings.python_version.into())),
                python_version: Some(settings.python_version.into()),
                ..EnvironmentOptions::default()
            }),
            ..Options::default()

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.9.3"
version = "0.9.2"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -2,7 +2,6 @@

use rayon::ThreadPoolBuilder;
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::metadata::value::RangedValue;
use red_knot_project::watch::{ChangeEvent, ChangedKind};
use red_knot_project::{Db, ProjectDatabase, ProjectMetadata};
use red_knot_python_semantic::PythonVersion;
@@ -77,7 +76,7 @@ fn setup_case() -> Case {
    let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
    metadata.apply_cli_options(Options {
        environment: Some(EnvironmentOptions {
            python_version: Some(RangedValue::cli(PythonVersion::PY312)),
            python_version: Some(PythonVersion::PY312),
            ..EnvironmentOptions::default()
        }),
        ..Options::default()

@@ -73,9 +73,6 @@ pub enum DiagnosticId {

    /// A revealed type: Created by `reveal_type(expression)`.
    RevealedType,

    /// No rule with the given name exists.
    UnknownRule,
}

impl DiagnosticId {
@@ -115,18 +112,15 @@ impl DiagnosticId {
    }

    pub fn as_str(&self) -> Result<&str, DiagnosticAsStrError> {
        Ok(match self {
            DiagnosticId::Io => "io",
            DiagnosticId::InvalidSyntax => "invalid-syntax",
            DiagnosticId::Lint(name) => {
                return Err(DiagnosticAsStrError::Category {
                    category: "lint",
                    name: name.as_str(),
                })
            }
            DiagnosticId::RevealedType => "revealed-type",
            DiagnosticId::UnknownRule => "unknown-rule",
        })
        match self {
            DiagnosticId::Io => Ok("io"),
            DiagnosticId::InvalidSyntax => Ok("invalid-syntax"),
            DiagnosticId::Lint(name) => Err(DiagnosticAsStrError::Category {
                category: "lint",
                name: name.as_str(),
            }),
            DiagnosticId::RevealedType => Ok("revealed-type"),
        }
    }
}

@@ -158,18 +152,8 @@ pub trait Diagnostic: Send + Sync + std::fmt::Debug {

    fn message(&self) -> Cow<str>;

    /// The file this diagnostic is associated with.
    ///
    /// File can be `None` for diagnostics that don't originate from a file.
    /// For example:
    /// * A diagnostic indicating that a directory couldn't be read.
    /// * A diagnostic related to a CLI argument
    fn file(&self) -> Option<File>;
    fn file(&self) -> File;

    /// The primary range of the diagnostic in `file`.
    ///
    /// The range can be `None` if the diagnostic doesn't have a file
    /// or it applies to the entire file (e.g. the file should be executable but isn't).
    fn range(&self) -> Option<TextRange>;

    fn severity(&self) -> Severity;
@@ -213,15 +197,16 @@ impl std::fmt::Display for DisplayDiagnostic<'_> {
            Severity::Fatal => f.write_str("fatal")?,
        }

        write!(f, "[{rule}]", rule = self.diagnostic.id())?;
        write!(
            f,
            "[{rule}] {path}",
            rule = self.diagnostic.id(),
            path = self.diagnostic.file().path(self.db)
        )?;

        if let Some(file) = self.diagnostic.file() {
            write!(f, " {path}", path = file.path(self.db))?;
        }

        if let (Some(file), Some(range)) = (self.diagnostic.file(), self.diagnostic.range()) {
            let index = line_index(self.db, file);
            let source = source_text(self.db, file);
        if let Some(range) = self.diagnostic.range() {
            let index = line_index(self.db, self.diagnostic.file());
            let source = source_text(self.db, self.diagnostic.file());

            let start = index.source_location(range.start(), &source);

@@ -244,7 +229,7 @@ where
        (**self).message()
    }

    fn file(&self) -> Option<File> {
    fn file(&self) -> File {
        (**self).file()
    }

@@ -269,7 +254,7 @@ where
        (**self).message()
    }

    fn file(&self) -> Option<File> {
    fn file(&self) -> File {
        (**self).file()
    }

@@ -291,7 +276,7 @@ impl Diagnostic for Box<dyn Diagnostic> {
        (**self).message()
    }

    fn file(&self) -> Option<File> {
    fn file(&self) -> File {
        (**self).file()
    }

@@ -325,8 +310,8 @@ impl Diagnostic for ParseDiagnostic {
        self.error.error.to_string().into()
    }

    fn file(&self) -> Option<File> {
        Some(self.file)
    fn file(&self) -> File {
        self.file
    }

    fn range(&self) -> Option<TextRange> {

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.9.3"
version = "0.9.2"
publish = false
authors = { workspace = true }
edition = { workspace = true }

@@ -26,8 +26,6 @@ def sla_callback(*arg, **kwargs):

DAG(dag_id="class_sla_callback", sla_miss_callback=sla_callback)

DAG(dag_id="class_sla_callback", fail_stop=True)


@dag(schedule="0 * * * *")
def decorator_schedule():

119
crates/ruff_linter/resources/test/fixtures/airflow/AIR302_context.py
vendored
Normal file
@@ -0,0 +1,119 @@
import pendulum
from airflow.models import DAG
from airflow.operators.dummy import DummyOperator
from datetime import datetime
from airflow.plugins_manager import AirflowPlugin
from airflow.models.baseoperator import BaseOperator
from airflow.decorators import dag, task
from airflow.providers.standard.operators.python import PythonOperator
from airflow.utils.context import get_current_context

def access_invalid_key_in_context(**context):
    print("access invalid key", context["conf"])

@task
def access_invalid_key_task_out_of_dag(**context):
    print("access invalid key", context.get("conf"))

@dag(
    schedule=None,
    start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
    catchup=False,
    tags=[""],
)
def invalid_dag():
    @task()
    def access_invalid_key_task(**context):
        print("access invalid key", context.get("conf"))

    task1 = PythonOperator(
        task_id="task1",
        python_callable=access_invalid_key_in_context,
    )
    access_invalid_key_task() >> task1
    access_invalid_key_task_out_of_dag()

invalid_dag()

@task
def print_config(**context):
    # This should not throw an error as logical_date is part of airflow context.
    logical_date = context["logical_date"]

    # Removed usage - should trigger violations
    execution_date = context["execution_date"]
    next_ds = context["next_ds"]
    next_ds_nodash = context["next_ds_nodash"]
    next_execution_date = context["next_execution_date"]
    prev_ds = context["prev_ds"]
    prev_ds_nodash = context["prev_ds_nodash"]
    prev_execution_date = context["prev_execution_date"]
    prev_execution_date_success = context["prev_execution_date_success"]
    tomorrow_ds = context["tomorrow_ds"]
    yesterday_ds = context["yesterday_ds"]
    yesterday_ds_nodash = context["yesterday_ds_nodash"]

with DAG(
    dag_id="example_dag",
    schedule_interval="@daily",
    start_date=datetime(2023, 1, 1),
    template_searchpath=["/templates"],
) as dag:
    task1 = DummyOperator(
        task_id="task1",
        params={
            # Removed variables in template
            "execution_date": "{{ execution_date }}",
            "next_ds": "{{ next_ds }}",
            "prev_ds": "{{ prev_ds }}"
        },
    )

class CustomMacrosPlugin(AirflowPlugin):
    name = "custom_macros"
    macros = {
        "execution_date_macro": lambda context: context["execution_date"],
        "next_ds_macro": lambda context: context["next_ds"]
    }

@task
def print_config():
    context = get_current_context()
    execution_date = context["execution_date"]
    next_ds = context["next_ds"]
    next_ds_nodash = context["next_ds_nodash"]
    next_execution_date = context["next_execution_date"]
    prev_ds = context["prev_ds"]
    prev_ds_nodash = context["prev_ds_nodash"]
    prev_execution_date = context["prev_execution_date"]
    prev_execution_date_success = context["prev_execution_date_success"]
    tomorrow_ds = context["tomorrow_ds"]
    yesterday_ds = context["yesterday_ds"]
    yesterday_ds_nodash = context["yesterday_ds_nodash"]

class CustomOperator(BaseOperator):
    def execute(self, context):
        execution_date = context["execution_date"]
        next_ds = context["next_ds"]
        next_ds_nodash = context["next_ds_nodash"]
        next_execution_date = context["next_execution_date"]
        prev_ds = context["prev_ds"]
        prev_ds_nodash = context["prev_ds_nodash"]
        prev_execution_date = context["prev_execution_date"]
        prev_execution_date_success = context["prev_execution_date_success"]
        tomorrow_ds = context["tomorrow_ds"]
        yesterday_ds = context["yesterday_ds"]
        yesterday_ds_nodash = context["yesterday_ds_nodash"]

@task
def access_invalid_argument_task_out_of_dag(execution_date, tomorrow_ds, logical_date, **context):
    print("execution date", execution_date)
    print("access invalid key", context.get("conf"))

@task(task_id="print_the_context")
def print_context(ds=None, **kwargs):
    """Print the Airflow context and ds variable from the context."""
    print(ds)
    print(kwargs.get("tomorrow_ds"))
    c = get_current_context()
    c.get("execution_date")
@@ -154,15 +154,3 @@ FROM {var}.table
# to be handled separately
# query58 = f"SELECT\
#     * FROM {var}.table"


# https://github.com/astral-sh/ruff/issues/15653
query59 = f"""
SELECT *, foo
FROM ({user_input}) raw
"""
query60 = f"""
SELECT *,
foo
FROM ({user_input}) raw
"""

@@ -76,9 +76,3 @@ def func():

    if msg.startswith(y) or msg.endswith(x) or msg.startswith("h"):  # OK
        print("yes")


def func():
    "Regression test for https://github.com/astral-sh/ruff/issues/9663"
    if x.startswith("a") or x.startswith("b") or re.match(r"a\.b", x):
        print("yes")

@@ -10,32 +10,3 @@ class Test(unittest.TestCase):
    def test_errors(self):
        with self.assertRaises(ValueError):
            raise ValueError

    def test_rewrite_references(self):
        with self.assertRaises(ValueError) as e:
            raise ValueError

        print(e.foo)
        print(e.exception)

    def test_rewrite_references_multiple_items(self):
        with self.assertRaises(ValueError) as e1, \
            self.assertRaises(ValueError) as e2:
            raise ValueError

        print(e1.foo)
        print(e1.exception)

        print(e2.foo)
        print(e2.exception)

    def test_rewrite_references_multiple_items_nested(self):
        with self.assertRaises(ValueError) as e1, \
            foo(self.assertRaises(ValueError)) as e2:
            raise ValueError

        print(e1.foo)
        print(e1.exception)

        print(e2.foo)
        print(e2.exception)

@@ -1,6 +0,0 @@
from typing import TypeVar

# Forward references are okay for both `bound` and `default` in a stub file
_P = TypeVar("_P", bound=X, default=X)

class X: ...
@@ -1,5 +1,5 @@
import typing
from typing import Any, TypeAlias
from typing import TypeAlias

# UP040
x: typing.TypeAlias = int
@@ -43,10 +43,6 @@ class Foo:
T = typing.TypeVar(*args)
x: typing.TypeAlias = list[T]

# `default` should be skipped for now, added in Python 3.13
T = typing.TypeVar("T", default=Any)
x: typing.TypeAlias = list[T]

# OK
x: TypeAlias
x: int = 1
@@ -89,7 +85,3 @@ T = TypeVar("T", bound=SupportGt)
PositiveList = TypeAliasType(
    "PositiveList2", list[Annotated[T, Gt(0)]], type_params=(T,)
)

# `default` should be skipped for now, added in Python 3.13
T = typing.TypeVar("T", default=Any)
AnyList = TypeAliasType("AnyList", list[T], typep_params=(T,))

@@ -1,136 +0,0 @@
from typing import Any, AnyStr, Generic, ParamSpec, TypeVar, TypeVarTuple

from somewhere import SupportsRichComparisonT

S = TypeVar("S", str, bytes)  # constrained type variable
T = TypeVar("T", bound=float)
Ts = TypeVarTuple("Ts")
P = ParamSpec("P")


class A(Generic[T]):
    # Comments in a class body are preserved
    var: T


class B(Generic[*Ts]):
    var: tuple[*Ts]


class C(Generic[P]):
    var: P


class Constrained(Generic[S]):
    var: S


# This case gets a diagnostic but not a fix because we can't look up the bounds
# or constraints on the TypeVar imported from another module
class ExternalType(Generic[T, SupportsRichComparisonT]):
    var: T
    compare: SupportsRichComparisonT


# typing.AnyStr is a common external type variable, so treat it specially as a
# known TypeVar
class MyStr(Generic[AnyStr]):
    s: AnyStr


class MultipleGenerics(Generic[S, T, *Ts, P]):
    var: S
    typ: T
    tup: tuple[*Ts]
    pep: P


class MultipleBaseClasses(list, Generic[T]):
    var: T


# these are just for the MoreBaseClasses and MultipleBaseAndGenerics cases
class Base1: ...


class Base2: ...


class Base3: ...


class MoreBaseClasses(Base1, Base2, Base3, Generic[T]):
    var: T


class MultipleBaseAndGenerics(Base1, Base2, Base3, Generic[S, T, *Ts, P]):
    var: S
    typ: T
    tup: tuple[*Ts]
    pep: P


class A(Generic[T]): ...


class B(A[S], Generic[S]):
    var: S


class C(A[S], Generic[S, T]):
    var: tuple[S, T]


class D(A[int], Generic[T]):
    var: T


class NotLast(Generic[T], Base1):
    var: T


class Sandwich(Base1, Generic[T], Base2):
    var: T


# runtime `TypeError` to inherit from `Generic` multiple times, but we still
# emit a diagnostic
class TooManyGenerics(Generic[T], Generic[S]):
    var: T
    var: S


# These cases are not handled
class D(Generic[T, T]):  # duplicate generic variable, runtime error
    pass


# TODO(brent) we should also apply the fix to methods, but it will need a
# little more work. these should be left alone for now but be fixed eventually.
class NotGeneric:
    # -> generic_method[T: float](t: T)
    def generic_method(t: T) -> T:
        return t


# This one is strange in particular because of the mix of old- and new-style
# generics, but according to the PEP, this is okay "if the class, function, or
# type alias does not use the new syntax." `more_generic` doesn't use the new
# syntax, so it can use T from the module and U from the class scope.
class MixedGenerics[U]:
    def more_generic(u: U, t: T) -> tuple[U, T]:
        return (u, t)


# TODO(brent) default requires 3.13
V = TypeVar("V", default=Any, bound=str)


class DefaultTypeVar(Generic[V]):  # -> [V: str = Any]
    var: V


# nested classes and functions are skipped
class Outer:
    class Inner(Generic[T]):
        var: T
@@ -1,12 +0,0 @@
"""Replacing AnyStr requires specifying the constraints `bytes` and `str`, so
it can't be replaced if these have been shadowed. This test is in a separate
fixture because it doesn't seem possible to restore `str` to its builtin state
"""

from typing import AnyStr, Generic

str = "string"


class BadStr(Generic[AnyStr]):
    var: AnyStr
@@ -1,59 +0,0 @@
from collections.abc import Callable
from typing import Any, AnyStr, ParamSpec, TypeVar, TypeVarTuple

from somewhere import Something

S = TypeVar("S", str, bytes)  # constrained type variable
T = TypeVar("T", bound=float)
Ts = TypeVarTuple("Ts")
P = ParamSpec("P")


def f(t: T) -> T:
    return t


def g(ts: tuple[*Ts]) -> tuple[*Ts]:
    return ts


def h(
    p: Callable[P, T],
    # Comment in the middle of a parameter list should be preserved
    another_param,
    and_another,
) -> Callable[P, T]:
    return p


def i(s: S) -> S:
    return s


# NOTE this case is the reason the fix is marked unsafe. If we can't confirm
# that one of the type parameters (`Something` in this case) is a TypeVar,
# which we can't do across module boundaries, we will not convert it to a
# generic type parameter. This leads to code that mixes old-style standalone
# TypeVars with the new-style generic syntax and will be rejected by type
# checkers
def broken_fix(okay: T, bad: Something) -> tuple[T, Something]:
    return (okay, bad)


def any_str_param(s: AnyStr) -> AnyStr:
    return s


# these cases are not handled

# TODO(brent) default requires 3.13
V = TypeVar("V", default=Any, bound=str)


def default_var(v: V) -> V:
    return v


def outer():
    def inner(t: T) -> T:
        return t
@@ -4,8 +4,7 @@ use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{
    flake8_import_conventions, flake8_pyi, flake8_pytest_style, flake8_type_checking, pyflakes,
    pylint, ruff,
    flake8_import_conventions, flake8_pyi, flake8_type_checking, pyflakes, pylint, ruff,
};

/// Run lint rules over the [`Binding`]s.
@@ -21,7 +20,6 @@ pub(crate) fn bindings(checker: &mut Checker) {
        Rule::UnusedVariable,
        Rule::UnquotedTypeAlias,
        Rule::UsedDummyVariable,
        Rule::PytestUnittestRaisesAssertion,
    ]) {
        return;
    }
@@ -102,12 +100,5 @@ pub(crate) fn bindings(checker: &mut Checker) {
                checker.diagnostics.push(diagnostic);
            }
        }
        if checker.enabled(Rule::PytestUnittestRaisesAssertion) {
            if let Some(diagnostic) =
                flake8_pytest_style::rules::unittest_raises_assertion_binding(checker, binding)
            {
                checker.diagnostics.push(diagnostic);
            }
        }
    }
}

@@ -175,7 +175,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
            if checker.enabled(Rule::NonPEP646Unpack) {
                pyupgrade::rules::use_pep646_unpack(checker, subscript);
            }

            if checker.enabled(Rule::Airflow3Removal) {
                airflow::rules::removed_in_3(checker, expr);
            }
            pandas_vet::rules::subscript(checker, value, expr);
        }
        Expr::Tuple(ast::ExprTuple {
@@ -940,10 +942,18 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
                flake8_pytest_style::rules::parametrize(checker, call);
            }
            if checker.enabled(Rule::PytestUnittestAssertion) {
                flake8_pytest_style::rules::unittest_assertion(checker, expr, func, args, keywords);
                if let Some(diagnostic) = flake8_pytest_style::rules::unittest_assertion(
                    checker, expr, func, args, keywords,
                ) {
                    checker.diagnostics.push(diagnostic);
                }
            }
            if checker.enabled(Rule::PytestUnittestRaisesAssertion) {
                flake8_pytest_style::rules::unittest_raises_assertion_call(checker, call);
                if let Some(diagnostic) =
                    flake8_pytest_style::rules::unittest_raises_assertion(checker, call)
                {
                    checker.diagnostics.push(diagnostic);
                }
            }
            if checker.enabled(Rule::SubprocessPopenPreexecFn) {
                pylint::rules::subprocess_popen_preexec_fn(checker, call);

@@ -376,8 +376,8 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
            if checker.enabled(Rule::PytestParameterWithDefaultArgument) {
                flake8_pytest_style::rules::parameter_with_default_argument(checker, function_def);
            }
            if checker.enabled(Rule::NonPEP695GenericFunction) {
                pyupgrade::rules::non_pep695_generic_function(checker, function_def);
            if checker.enabled(Rule::Airflow3Removal) {
                airflow::rules::removed_in_3_function_def(checker, function_def);
            }
        }
        Stmt::Return(_) => {
@@ -557,9 +557,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
            if checker.enabled(Rule::DataclassEnum) {
                ruff::rules::dataclass_enum(checker, class_def);
            }
            if checker.enabled(Rule::NonPEP695GenericClass) {
                pyupgrade::rules::non_pep695_generic_class(checker, class_def);
            }
        }
        Stmt::Import(ast::StmtImport { names, range: _ }) => {
            if checker.enabled(Rule::MultipleImportsOnOneLine) {

@@ -1326,7 +1326,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
                    range: _,
                } = keyword;
                if let Some(id) = arg {
                    if matches!(&**id, "bound" | "default") {
                    if id.as_str() == "bound" {
                        self.visit_type_definition(value);
                    } else {
                        self.visit_non_type_definition(value);

@@ -540,8 +540,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pyupgrade, "043") => (RuleGroup::Stable, rules::pyupgrade::rules::UnnecessaryDefaultTypeArgs),
        (Pyupgrade, "044") => (RuleGroup::Preview, rules::pyupgrade::rules::NonPEP646Unpack),
        (Pyupgrade, "045") => (RuleGroup::Preview, rules::pyupgrade::rules::NonPEP604AnnotationOptional),
        (Pyupgrade, "046") => (RuleGroup::Preview, rules::pyupgrade::rules::NonPEP695GenericClass),
        (Pyupgrade, "047") => (RuleGroup::Preview, rules::pyupgrade::rules::NonPEP695GenericFunction),

        // pydocstyle
        (Pydocstyle, "100") => (RuleGroup::Stable, rules::pydocstyle::rules::UndocumentedPublicModule),

@@ -18,6 +18,7 @@ mod tests {
    #[test_case(Rule::Airflow3Removal, Path::new("AIR302_names.py"))]
    #[test_case(Rule::Airflow3Removal, Path::new("AIR302_class_attribute.py"))]
    #[test_case(Rule::Airflow3Removal, Path::new("AIR302_airflow_plugin.py"))]
    #[test_case(Rule::Airflow3Removal, Path::new("AIR302_context.py"))]
    #[test_case(Rule::Airflow3MovedToProvider, Path::new("AIR303.py"))]
    fn rules(rule_code: Rule, path: &Path) -> Result<()> {
        let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());

@@ -1,17 +1,19 @@
use crate::checkers::ast::Checker;
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, ViolationMetadata};
use ruff_python_ast::helpers::map_callable;
use ruff_python_ast::AnyParameterRef;
use ruff_python_ast::{
    name::QualifiedName, Arguments, Expr, ExprAttribute, ExprCall, ExprContext, ExprName,
    StmtClassDef,
    ExprStringLiteral, ExprSubscript, Stmt, StmtClassDef, StmtFunctionDef,
};
use ruff_python_semantic::analyze::typing;
use ruff_python_semantic::Modules;
use ruff_python_semantic::ScopeKind;
use ruff_python_semantic::SemanticModel;
use ruff_text_size::Ranged;
use ruff_text_size::TextRange;

use crate::checkers::ast::Checker;

/// ## What it does
/// Checks for uses of deprecated Airflow functions and values.
///
@@ -71,6 +73,64 @@ impl Violation for Airflow3Removal {
    }
}

const REMOVED_CONTEXT_KEYS: [&str; 12] = [
    "conf",
    "execution_date",
    "next_ds",
    "next_ds_nodash",
    "next_execution_date",
    "prev_ds",
    "prev_ds_nodash",
    "prev_execution_date",
    "prev_execution_date_success",
    "tomorrow_ds",
    "yesterday_ds",
    "yesterday_ds_nodash",
];

fn extract_name_from_slice(slice: &Expr) -> Option<String> {
    match slice {
        Expr::StringLiteral(ExprStringLiteral { value, .. }) => Some(value.to_string()),
        _ => None,
    }
}

/// Check if a subscript expression accesses a removed Airflow context variable.
/// If a removed key is found, push a corresponding diagnostic.
fn removed_context_variable(checker: &mut Checker, subscript: &ExprSubscript) {
    let ExprSubscript { value, slice, .. } = subscript;

    let is_context_arg = if let Expr::Name(ExprName { id, .. }) = &**value {
        id.as_str() == "context" || id.as_str().starts_with("**")
    } else {
        false
    };

    let is_current_context =
        if let Some(qualname) = typing::resolve_assignment(value, checker.semantic()) {
            matches!(
                qualname.segments(),
                ["airflow", "utils", "context", "get_current_context"]
            )
        } else {
            false
        };

    if is_context_arg || is_current_context {
        if let Some(key) = extract_name_from_slice(slice) {
            if REMOVED_CONTEXT_KEYS.contains(&key.as_str()) {
                checker.diagnostics.push(Diagnostic::new(
                    Airflow3Removal {
                        deprecated: key,
                        replacement: Replacement::None,
                    },
                    slice.range(),
                ));
            }
        }
    }
}

/// AIR302
pub(crate) fn removed_in_3(checker: &mut Checker, expr: &Expr) {
    if !checker.semantic().seen_module(Modules::AIRFLOW) {
@@ -87,6 +147,7 @@ pub(crate) fn removed_in_3(checker: &mut Checker, expr: &Expr) {
                check_call_arguments(checker, &qualname, arguments);
            };
            check_method(checker, call_expr);
            check_context_get(checker, call_expr);
        }
        Expr::Attribute(attribute_expr @ ExprAttribute { attr, .. }) => {
            check_name(checker, expr, attr.range());
@@ -100,10 +161,43 @@ pub(crate) fn removed_in_3(checker: &mut Checker, expr: &Expr) {
                }
            }
        }
        Expr::Subscript(subscript_expr) => {
            removed_context_variable(checker, subscript_expr);
        }
        _ => {}
    }
}

/// AIR302
pub(crate) fn removed_in_3_function_def(checker: &mut Checker, function_def: &StmtFunctionDef) {
    if !checker.semantic().seen_module(Modules::AIRFLOW) {
        return;
    }

    if !is_airflow_task(function_def, checker.semantic()) {
        return;
    }

    for param in function_def
        .parameters
        .posonlyargs
        .iter()
        .chain(function_def.parameters.args.iter())
        .chain(function_def.parameters.kwonlyargs.iter())
    {
        let param_name = param.parameter.name.as_str();
        if REMOVED_CONTEXT_KEYS.contains(&param_name) {
            checker.diagnostics.push(Diagnostic::new(
                Airflow3Removal {
                    deprecated: param_name.to_string(),
                    replacement: Replacement::None,
                },
                param.parameter.name.range(),
            ));
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
enum Replacement {
    None,
@@ -138,11 +232,6 @@ fn check_call_arguments(checker: &mut Checker, qualname: &QualifiedName, argumen
                "sla_miss_callback",
                None,
            ));
            checker.diagnostics.extend(diagnostic_for_argument(
                arguments,
                "fail_stop",
                Some("fail_fast"),
            ));
        }
        _ => {
            if is_airflow_auth_manager(qualname.segments()) {
@@ -252,6 +341,143 @@ fn check_class_attribute(checker: &mut Checker, attribute_expr: &ExprAttribute)
    }
}

/// Finds the parameter definition for a given name expression in a function.
fn find_parameter<'a>(
    semantic: &'a SemanticModel,
    name: &'a ExprName,
) -> Option<AnyParameterRef<'a>> {
    let binding_id = semantic.only_binding(name)?;
    let binding = semantic.binding(binding_id);
    let StmtFunctionDef { parameters, .. } = binding.statement(semantic)?.as_function_def_stmt()?;
    parameters
        .iter()
        .find(|parameter| parameter.name().range() == binding.range())
}

/// Checks whether an Airflow 3.0–removed context key is used in a function decorated with `@task`.
///
/// Specifically, it flags two scenarios in `@task`-decorated functions:
/// 1. A removed context variable passed in as a function parameter name (e.g., `execution_date`).
/// 2. A removed context key accessed via `context.get("...")`.
///
/// # Examples
///
/// **Removed key used in `context.get(...)`:**
/// ```python
/// from airflow.decorators import task
///
/// @task
/// def my_task(**context):
///     # 'conf' is removed in Airflow 3.0
///     print(context.get("conf"))
/// ```
///
/// **Removed context variable as a parameter:**
/// ```python
/// from airflow.decorators import task
///
/// @task
/// def another_task(execution_date, **kwargs):
///     # 'execution_date' is removed in Airflow 3.0
///     pass
/// ```
///
/// **Accessing multiple keys:**
/// ```python
/// from airflow.decorators import task
///
/// @task
/// def more_keys(**context):
///     # 'prev_ds' is also removed in Airflow 3.0
///     print(context.get("prev_ds"))
/// ```
fn check_context_get(checker: &mut Checker, call_expr: &ExprCall) {
    if !is_taskflow(checker) {
        return;
    }

    let Expr::Attribute(ExprAttribute { value, attr, .. }) = &*call_expr.func else {
        return;
    };

    let is_named_context = if let Expr::Name(name) = &**value {
        if let Some(parameter) = find_parameter(checker.semantic(), name) {
            matches!(parameter.name().as_str(), "context" | "kwargs")
                || parameter.name().as_str().starts_with("**")
        } else {
            false
        }
    } else {
        false
    };

    let is_assigned_from_gcc =
        if let Some(qualname) = typing::resolve_assignment(value, checker.semantic()) {
            matches!(
                qualname.segments(),
                ["airflow", "utils", "context", "get_current_context"]
            )
        } else {
            false
        };

    if !(is_named_context || is_assigned_from_gcc) {
        return;
    }

    if attr.as_str() != "get" {
        return;
    }

    for removed_key in REMOVED_CONTEXT_KEYS {
        if let Some(argument) = call_expr.arguments.find_argument_value(removed_key, 0) {
            if let Expr::StringLiteral(ExprStringLiteral { value, .. }) = argument {
                if value == removed_key {
                    checker.diagnostics.push(Diagnostic::new(
                        Airflow3Removal {
                            deprecated: removed_key.to_string(),
                            replacement: Replacement::None,
                        },
                        argument.range(),
                    ));
                }
            }
        }
    }
}

/// Check whether the function is decorated with `@task`.
///
/// Example:
/// ```python
/// from airflow.decorators import task
///
/// @task
/// def access_invalid_key_task_out_of_dag(**context):
///     print("access invalid key", context.get("conf"))
/// ```
fn is_taskflow(checker: &mut Checker) -> bool {
    let mut parents = checker.semantic().current_statements();
    if let Some(Stmt::FunctionDef(StmtFunctionDef { decorator_list, .. })) =
        parents.find(|stmt| stmt.is_function_def_stmt())
    {
        for decorator in decorator_list {
            if checker
                .semantic()
                .resolve_qualified_name(map_callable(&decorator.expression))
                .is_some_and(|qualified_name| {
                    matches!(qualified_name.segments(), ["airflow", "decorators", "task"])
                })
            {
                return true;
            }
        }
    }
    false
}

/// Check whether a removed Airflow class method is called.
///
/// For example:
@@ -860,3 +1086,14 @@ fn is_airflow_builtin_or_provider(segments: &[&str], module: &str, symbol_suffix
        _ => false,
    }
}

/// Returns `true` if the given function is decorated with `@airflow.decorators.task`.
fn is_airflow_task(function_def: &StmtFunctionDef, semantic: &SemanticModel) -> bool {
    function_def.decorator_list.iter().any(|decorator| {
        semantic
            .resolve_qualified_name(map_callable(&decorator.expression))
            .is_some_and(|qualified_name| {
                matches!(qualified_name.segments(), ["airflow", "decorators", "task"])
            })
    })
}

@@ -1,6 +1,5 @@
---
source: crates/ruff_linter/src/rules/airflow/mod.rs
snapshot_kind: text
---
AIR302_args.py:18:39: AIR302 [*] `schedule_interval` is removed in Airflow 3.0
|
@@ -46,228 +45,207 @@ AIR302_args.py:27:34: AIR302 `sla_miss_callback` is removed in Airflow 3.0
|
27 | DAG(dag_id="class_sla_callback", sla_miss_callback=sla_callback)
| ^^^^^^^^^^^^^^^^^ AIR302
28 |
29 | DAG(dag_id="class_sla_callback", fail_stop=True)
|

AIR302_args.py:29:34: AIR302 [*] `fail_stop` is removed in Airflow 3.0
AIR302_args.py:35:6: AIR302 [*] `schedule_interval` is removed in Airflow 3.0
|
27 | DAG(dag_id="class_sla_callback", sla_miss_callback=sla_callback)
28 |
29 | DAG(dag_id="class_sla_callback", fail_stop=True)
| ^^^^^^^^^ AIR302
|
= help: Use `fail_fast` instead

ℹ Safe fix
26 26 |
27 27 | DAG(dag_id="class_sla_callback", sla_miss_callback=sla_callback)
28 28 |
29 |-DAG(dag_id="class_sla_callback", fail_stop=True)
29 |+DAG(dag_id="class_sla_callback", fail_fast=True)
30 30 |
31 31 |
32 32 | @dag(schedule="0 * * * *")

AIR302_args.py:37:6: AIR302 [*] `schedule_interval` is removed in Airflow 3.0
|
37 | @dag(schedule_interval="0 * * * *")
35 | @dag(schedule_interval="0 * * * *")
| ^^^^^^^^^^^^^^^^^ AIR302
38 | def decorator_schedule_interval():
39 | pass
36 | def decorator_schedule_interval():
37 | pass
|
= help: Use `schedule` instead

ℹ Safe fix
34 34 | pass
35 35 |
36 36 |
37 |-@dag(schedule_interval="0 * * * *")
37 |+@dag(schedule="0 * * * *")
38 38 | def decorator_schedule_interval():
39 39 | pass
40 40 |
32 32 | pass
33 33 |
34 34 |
35 |-@dag(schedule_interval="0 * * * *")
35 |+@dag(schedule="0 * * * *")
36 36 | def decorator_schedule_interval():
37 37 | pass
38 38 |

AIR302_args.py:42:6: AIR302 [*] `timetable` is removed in Airflow 3.0
AIR302_args.py:40:6: AIR302 [*] `timetable` is removed in Airflow 3.0
|
42 | @dag(timetable=NullTimetable())
40 | @dag(timetable=NullTimetable())
| ^^^^^^^^^ AIR302
43 | def decorator_timetable():
44 | pass
41 | def decorator_timetable():
42 | pass
|
= help: Use `schedule` instead

ℹ Safe fix
39 39 | pass
40 40 |
41 41 |
42 |-@dag(timetable=NullTimetable())
42 |+@dag(schedule=NullTimetable())
43 43 | def decorator_timetable():
44 44 | pass
45 45 |
37 37 | pass
38 38 |
39 39 |
40 |-@dag(timetable=NullTimetable())
40 |+@dag(schedule=NullTimetable())
41 41 | def decorator_timetable():
42 42 | pass
43 43 |

AIR302_args.py:47:6: AIR302 `sla_miss_callback` is removed in Airflow 3.0
AIR302_args.py:45:6: AIR302 `sla_miss_callback` is removed in Airflow 3.0
|
47 | @dag(sla_miss_callback=sla_callback)
45 | @dag(sla_miss_callback=sla_callback)
| ^^^^^^^^^^^^^^^^^ AIR302
48 | def decorator_sla_callback():
49 | pass
46 | def decorator_sla_callback():
47 | pass
|

AIR302_args.py:55:39: AIR302 [*] `execution_date` is removed in Airflow 3.0
AIR302_args.py:53:39: AIR302 [*] `execution_date` is removed in Airflow 3.0
|
53 | def decorator_deprecated_operator_args():
54 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
55 | task_id="trigger_dagrun_op1", execution_date="2024-12-04"
51 | def decorator_deprecated_operator_args():
52 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
53 | task_id="trigger_dagrun_op1", execution_date="2024-12-04"
| ^^^^^^^^^^^^^^ AIR302
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
54 | )
55 | trigger_dagrun_op2 = TriggerDagRunOperator(
|
= help: Use `logical_date` instead

ℹ Safe fix
52 52 | @dag()
53 53 | def decorator_deprecated_operator_args():
54 54 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
55 |- task_id="trigger_dagrun_op1", execution_date="2024-12-04"
55 |+ task_id="trigger_dagrun_op1", logical_date="2024-12-04"
56 56 | )
57 57 | trigger_dagrun_op2 = TriggerDagRunOperator(
58 58 | task_id="trigger_dagrun_op2", execution_date="2024-12-04"
50 50 | @dag()
51 51 | def decorator_deprecated_operator_args():
52 52 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
53 |- task_id="trigger_dagrun_op1", execution_date="2024-12-04"
53 |+ task_id="trigger_dagrun_op1", logical_date="2024-12-04"
54 54 | )
55 55 | trigger_dagrun_op2 = TriggerDagRunOperator(
56 56 | task_id="trigger_dagrun_op2", execution_date="2024-12-04"

AIR302_args.py:58:39: AIR302 [*] `execution_date` is removed in Airflow 3.0
AIR302_args.py:56:39: AIR302 [*] `execution_date` is removed in Airflow 3.0
|
56 | )
57 | trigger_dagrun_op2 = TriggerDagRunOperator(
58 | task_id="trigger_dagrun_op2", execution_date="2024-12-04"
54 | )
55 | trigger_dagrun_op2 = TriggerDagRunOperator(
56 | task_id="trigger_dagrun_op2", execution_date="2024-12-04"
| ^^^^^^^^^^^^^^ AIR302
59 | )
57 | )
|
= help: Use `logical_date` instead

ℹ Safe fix
55 55 | task_id="trigger_dagrun_op1", execution_date="2024-12-04"
56 56 | )
57 57 | trigger_dagrun_op2 = TriggerDagRunOperator(
58 |- task_id="trigger_dagrun_op2", execution_date="2024-12-04"
58 |+ task_id="trigger_dagrun_op2", logical_date="2024-12-04"
59 59 | )
60 60 |
61 61 | branch_dt_op = datetime.BranchDateTimeOperator(
53 53 | task_id="trigger_dagrun_op1", execution_date="2024-12-04"
54 54 | )
55 55 | trigger_dagrun_op2 = TriggerDagRunOperator(
56 |- task_id="trigger_dagrun_op2", execution_date="2024-12-04"
56 |+ task_id="trigger_dagrun_op2", logical_date="2024-12-04"
57 57 | )
58 58 |
59 59 | branch_dt_op = datetime.BranchDateTimeOperator(

AIR302_args.py:62:33: AIR302 [*] `use_task_execution_day` is removed in Airflow 3.0
AIR302_args.py:60:33: AIR302 [*] `use_task_execution_day` is removed in Airflow 3.0
|
61 | branch_dt_op = datetime.BranchDateTimeOperator(
62 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
59 | branch_dt_op = datetime.BranchDateTimeOperator(
60 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
| ^^^^^^^^^^^^^^^^^^^^^^ AIR302
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
61 | )
62 | branch_dt_op2 = BranchDateTimeOperator(
|
= help: Use `use_task_logical_date` instead

ℹ Safe fix
59 59 | )
60 60 |
61 61 | branch_dt_op = datetime.BranchDateTimeOperator(
62 |- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
62 |+ task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
63 63 | )
64 64 | branch_dt_op2 = BranchDateTimeOperator(
65 65 | task_id="branch_dt_op2",
57 57 | )
58 58 |
59 59 | branch_dt_op = datetime.BranchDateTimeOperator(
60 |- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
60 |+ task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
61 61 | )
62 62 | branch_dt_op2 = BranchDateTimeOperator(
63 63 | task_id="branch_dt_op2",

AIR302_args.py:62:62: AIR302 [*] `task_concurrency` is removed in Airflow 3.0
AIR302_args.py:60:62: AIR302 [*] `task_concurrency` is removed in Airflow 3.0
|
61 | branch_dt_op = datetime.BranchDateTimeOperator(
62 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
59 | branch_dt_op = datetime.BranchDateTimeOperator(
60 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
| ^^^^^^^^^^^^^^^^ AIR302
63 | )
64 | branch_dt_op2 = BranchDateTimeOperator(
61 | )
62 | branch_dt_op2 = BranchDateTimeOperator(
|
= help: Use `max_active_tis_per_dag` instead

ℹ Safe fix
59 59 | )
60 60 |
61 61 | branch_dt_op = datetime.BranchDateTimeOperator(
62 |- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
62 |+ task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
63 63 | )
64 64 | branch_dt_op2 = BranchDateTimeOperator(
65 65 | task_id="branch_dt_op2",
57 57 | )
58 58 |
59 59 | branch_dt_op = datetime.BranchDateTimeOperator(
60 |- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
60 |+ task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
61 61 | )
62 62 | branch_dt_op2 = BranchDateTimeOperator(
63 63 | task_id="branch_dt_op2",

AIR302_args.py:66:9: AIR302 [*] `use_task_execution_day` is removed in Airflow 3.0
AIR302_args.py:64:9: AIR302 [*] `use_task_execution_day` is removed in Airflow 3.0
|
64 | branch_dt_op2 = BranchDateTimeOperator(
65 | task_id="branch_dt_op2",
66 | use_task_execution_day=True,
62 | branch_dt_op2 = BranchDateTimeOperator(
63 | task_id="branch_dt_op2",
64 | use_task_execution_day=True,
| ^^^^^^^^^^^^^^^^^^^^^^ AIR302
67 | sla=timedelta(seconds=10),
68 | )
65 | sla=timedelta(seconds=10),
66 | )
|
= help: Use `use_task_logical_date` instead

ℹ Safe fix
63 63 | )
64 64 | branch_dt_op2 = BranchDateTimeOperator(
65 65 | task_id="branch_dt_op2",
66 |- use_task_execution_day=True,
66 |+ use_task_logical_date=True,
67 67 | sla=timedelta(seconds=10),
68 68 | )
69 69 |
61 61 | )
62 62 | branch_dt_op2 = BranchDateTimeOperator(
63 63 | task_id="branch_dt_op2",
64 |- use_task_execution_day=True,
64 |+ use_task_logical_date=True,
65 65 | sla=timedelta(seconds=10),
66 66 | )
67 67 |

AIR302_args.py:67:9: AIR302 `sla` is removed in Airflow 3.0
AIR302_args.py:65:9: AIR302 `sla` is removed in Airflow 3.0
|
65 | task_id="branch_dt_op2",
66 | use_task_execution_day=True,
67 | sla=timedelta(seconds=10),
63 | task_id="branch_dt_op2",
64 | use_task_execution_day=True,
65 | sla=timedelta(seconds=10),
| ^^^ AIR302
68 | )
66 | )
|

AIR302_args.py:89:15: AIR302 `filename_template` is removed in Airflow 3.0
AIR302_args.py:87:15: AIR302 `filename_template` is removed in Airflow 3.0
|
88 | # deprecated filename_template arugment in FileTaskHandler
89 | S3TaskHandler(filename_template="/tmp/test")
86 | # deprecated filename_template arugment in FileTaskHandler
87 | S3TaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^ AIR302
90 | HdfsTaskHandler(filename_template="/tmp/test")
91 | ElasticsearchTaskHandler(filename_template="/tmp/test")
88 | HdfsTaskHandler(filename_template="/tmp/test")
89 | ElasticsearchTaskHandler(filename_template="/tmp/test")
|

AIR302_args.py:90:17: AIR302 `filename_template` is removed in Airflow 3.0
AIR302_args.py:88:17: AIR302 `filename_template` is removed in Airflow 3.0
|
88 | # deprecated filename_template arugment in FileTaskHandler
89 | S3TaskHandler(filename_template="/tmp/test")
90 | HdfsTaskHandler(filename_template="/tmp/test")
86 | # deprecated filename_template arugment in FileTaskHandler
87 | S3TaskHandler(filename_template="/tmp/test")
88 | HdfsTaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^ AIR302
91 | ElasticsearchTaskHandler(filename_template="/tmp/test")
92 | GCSTaskHandler(filename_template="/tmp/test")
89 | ElasticsearchTaskHandler(filename_template="/tmp/test")
90 | GCSTaskHandler(filename_template="/tmp/test")
|

AIR302_args.py:91:26: AIR302 `filename_template` is removed in Airflow 3.0
AIR302_args.py:89:26: AIR302 `filename_template` is removed in Airflow 3.0
|
89 | S3TaskHandler(filename_template="/tmp/test")
90 | HdfsTaskHandler(filename_template="/tmp/test")
91 | ElasticsearchTaskHandler(filename_template="/tmp/test")
87 | S3TaskHandler(filename_template="/tmp/test")
88 | HdfsTaskHandler(filename_template="/tmp/test")
89 | ElasticsearchTaskHandler(filename_template="/tmp/test")
| ^^^^^^^^^^^^^^^^^ AIR302
|
||||
92 | GCSTaskHandler(filename_template="/tmp/test")
|
||||
90 | GCSTaskHandler(filename_template="/tmp/test")
|
||||
|
|
||||
|
||||
AIR302_args.py:92:16: AIR302 `filename_template` is removed in Airflow 3.0
|
||||
AIR302_args.py:90:16: AIR302 `filename_template` is removed in Airflow 3.0
|
||||
|
|
||||
90 | HdfsTaskHandler(filename_template="/tmp/test")
|
||||
91 | ElasticsearchTaskHandler(filename_template="/tmp/test")
|
||||
92 | GCSTaskHandler(filename_template="/tmp/test")
|
||||
88 | HdfsTaskHandler(filename_template="/tmp/test")
|
||||
89 | ElasticsearchTaskHandler(filename_template="/tmp/test")
|
||||
90 | GCSTaskHandler(filename_template="/tmp/test")
|
||||
| ^^^^^^^^^^^^^^^^^ AIR302
|
||||
93 |
|
||||
94 | FabAuthManager(None)
|
||||
91 |
|
||||
92 | FabAuthManager(None)
|
||||
|
|
||||
|
||||
AIR302_args.py:94:15: AIR302 `appbuilder` is removed in Airflow 3.0; The constructor takes no parameter now
|
||||
AIR302_args.py:92:15: AIR302 `appbuilder` is removed in Airflow 3.0; The constructor takes no parameter now
|
||||
|
|
||||
92 | GCSTaskHandler(filename_template="/tmp/test")
|
||||
93 |
|
||||
94 | FabAuthManager(None)
|
||||
90 | GCSTaskHandler(filename_template="/tmp/test")
|
||||
91 |
|
||||
92 | FabAuthManager(None)
|
||||
| ^^^^^^ AIR302
|
||||
|
|
||||
|
||||
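Read together, the `help:` lines in this snapshot describe pure keyword renames plus a few outright removals. A minimal sketch of that mapping follows; the helper below is hypothetical and not part of Ruff or Airflow, and only the rename pairs themselves come from the diagnostics above:

```python
# Hypothetical helper, not part of Ruff or Airflow: applies the Airflow 3.0
# keyword-argument renames that the AIR302 diagnostics above suggest.
AIRFLOW3_KWARG_RENAMES = {
    "execution_date": "logical_date",                   # TriggerDagRunOperator
    "use_task_execution_day": "use_task_logical_date",  # BranchDateTimeOperator
    "task_concurrency": "max_active_tis_per_dag",
    "schedule_interval": "schedule",
}

# Flagged as removed with no replacement offered by the rule.
AIRFLOW3_REMOVED_KWARGS = {"sla", "filename_template", "appbuilder"}


def migrate_operator_kwargs(kwargs: dict) -> dict:
    """Rename deprecated kwargs and drop the ones removed in Airflow 3.0."""
    return {
        AIRFLOW3_KWARG_RENAMES.get(key, key): value
        for key, value in kwargs.items()
        if key not in AIRFLOW3_REMOVED_KWARGS
    }


# The call flagged at AIR302_args.py:60 would become:
print(migrate_operator_kwargs(
    {"task_id": "branch_dt_op", "use_task_execution_day": True, "task_concurrency": 5}
))
# {'task_id': 'branch_dt_op', 'use_task_logical_date': True, 'max_active_tis_per_dag': 5}
```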
@@ -0,0 +1,455 @@
---
source: crates/ruff_linter/src/rules/airflow/mod.rs
snapshot_kind: text
---
AIR302_context.py:12:41: AIR302 `conf` is removed in Airflow 3.0
|
11 | def access_invalid_key_in_context(**context):
12 | print("access invalid key", context["conf"])
| ^^^^^^ AIR302
13 |
14 | @task
|

AIR302_context.py:16:45: AIR302 `conf` is removed in Airflow 3.0
|
14 | @task
15 | def access_invalid_key_task_out_of_dag(**context):
16 | print("access invalid key", context.get("conf"))
| ^^^^^^ AIR302
17 |
18 | @dag(
|

AIR302_context.py:27:49: AIR302 `conf` is removed in Airflow 3.0
|
25 | @task()
26 | def access_invalid_key_task(**context):
27 | print("access invalid key", context.get("conf"))
| ^^^^^^ AIR302
28 |
29 | task1 = PythonOperator(
|

AIR302_context.py:44:30: AIR302 `execution_date` is removed in Airflow 3.0
|
43 | # Removed usage - should trigger violations
44 | execution_date = context["execution_date"]
| ^^^^^^^^^^^^^^^^ AIR302
45 | next_ds = context["next_ds"]
46 | next_ds_nodash = context["next_ds_nodash"]
|

AIR302_context.py:45:23: AIR302 `next_ds` is removed in Airflow 3.0
|
43 | # Removed usage - should trigger violations
44 | execution_date = context["execution_date"]
45 | next_ds = context["next_ds"]
| ^^^^^^^^^ AIR302
46 | next_ds_nodash = context["next_ds_nodash"]
47 | next_execution_date = context["next_execution_date"]
|

AIR302_context.py:46:30: AIR302 `next_ds_nodash` is removed in Airflow 3.0
|
44 | execution_date = context["execution_date"]
45 | next_ds = context["next_ds"]
46 | next_ds_nodash = context["next_ds_nodash"]
| ^^^^^^^^^^^^^^^^ AIR302
47 | next_execution_date = context["next_execution_date"]
48 | prev_ds = context["prev_ds"]
|

AIR302_context.py:47:35: AIR302 `next_execution_date` is removed in Airflow 3.0
|
45 | next_ds = context["next_ds"]
46 | next_ds_nodash = context["next_ds_nodash"]
47 | next_execution_date = context["next_execution_date"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
48 | prev_ds = context["prev_ds"]
49 | prev_ds_nodash = context["prev_ds_nodash"]
|

AIR302_context.py:48:23: AIR302 `prev_ds` is removed in Airflow 3.0
|
46 | next_ds_nodash = context["next_ds_nodash"]
47 | next_execution_date = context["next_execution_date"]
48 | prev_ds = context["prev_ds"]
| ^^^^^^^^^ AIR302
49 | prev_ds_nodash = context["prev_ds_nodash"]
50 | prev_execution_date = context["prev_execution_date"]
|

AIR302_context.py:49:30: AIR302 `prev_ds_nodash` is removed in Airflow 3.0
|
47 | next_execution_date = context["next_execution_date"]
48 | prev_ds = context["prev_ds"]
49 | prev_ds_nodash = context["prev_ds_nodash"]
| ^^^^^^^^^^^^^^^^ AIR302
50 | prev_execution_date = context["prev_execution_date"]
51 | prev_execution_date_success = context["prev_execution_date_success"]
|

AIR302_context.py:50:35: AIR302 `prev_execution_date` is removed in Airflow 3.0
|
48 | prev_ds = context["prev_ds"]
49 | prev_ds_nodash = context["prev_ds_nodash"]
50 | prev_execution_date = context["prev_execution_date"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
51 | prev_execution_date_success = context["prev_execution_date_success"]
52 | tomorrow_ds = context["tomorrow_ds"]
|

AIR302_context.py:51:43: AIR302 `prev_execution_date_success` is removed in Airflow 3.0
|
49 | prev_ds_nodash = context["prev_ds_nodash"]
50 | prev_execution_date = context["prev_execution_date"]
51 | prev_execution_date_success = context["prev_execution_date_success"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
52 | tomorrow_ds = context["tomorrow_ds"]
53 | yesterday_ds = context["yesterday_ds"]
|

AIR302_context.py:52:27: AIR302 `tomorrow_ds` is removed in Airflow 3.0
|
50 | prev_execution_date = context["prev_execution_date"]
51 | prev_execution_date_success = context["prev_execution_date_success"]
52 | tomorrow_ds = context["tomorrow_ds"]
| ^^^^^^^^^^^^^ AIR302
53 | yesterday_ds = context["yesterday_ds"]
54 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
|

AIR302_context.py:53:28: AIR302 `yesterday_ds` is removed in Airflow 3.0
|
51 | prev_execution_date_success = context["prev_execution_date_success"]
52 | tomorrow_ds = context["tomorrow_ds"]
53 | yesterday_ds = context["yesterday_ds"]
| ^^^^^^^^^^^^^^ AIR302
54 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
|

AIR302_context.py:54:35: AIR302 `yesterday_ds_nodash` is removed in Airflow 3.0
|
52 | tomorrow_ds = context["tomorrow_ds"]
53 | yesterday_ds = context["yesterday_ds"]
54 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
55 |
56 | with DAG(
|

AIR302_context.py:58:5: AIR302 [*] `schedule_interval` is removed in Airflow 3.0
|
56 | with DAG(
57 | dag_id="example_dag",
58 | schedule_interval="@daily",
| ^^^^^^^^^^^^^^^^^ AIR302
59 | start_date=datetime(2023, 1, 1),
60 | template_searchpath=["/templates"],
|
= help: Use `schedule` instead

ℹ Safe fix
55 55 |
56 56 | with DAG(
57 57 | dag_id="example_dag",
58 |- schedule_interval="@daily",
58 |+ schedule="@daily",
59 59 | start_date=datetime(2023, 1, 1),
60 60 | template_searchpath=["/templates"],
61 61 | ) as dag:

AIR302_context.py:62:13: AIR302 `airflow.operators.dummy.DummyOperator` is removed in Airflow 3.0
|
60 | template_searchpath=["/templates"],
61 | ) as dag:
62 | task1 = DummyOperator(
| ^^^^^^^^^^^^^ AIR302
63 | task_id="task1",
64 | params={
|
= help: Use `airflow.operators.empty.EmptyOperator` instead

AIR302_context.py:75:57: AIR302 `execution_date` is removed in Airflow 3.0
|
73 | name = "custom_macros"
74 | macros = {
75 | "execution_date_macro": lambda context: context["execution_date"],
| ^^^^^^^^^^^^^^^^ AIR302
76 | "next_ds_macro": lambda context: context["next_ds"]
77 | }
|

AIR302_context.py:76:50: AIR302 `next_ds` is removed in Airflow 3.0
|
74 | macros = {
75 | "execution_date_macro": lambda context: context["execution_date"],
76 | "next_ds_macro": lambda context: context["next_ds"]
| ^^^^^^^^^ AIR302
77 | }
|

AIR302_context.py:82:30: AIR302 `execution_date` is removed in Airflow 3.0
|
80 | def print_config():
81 | context = get_current_context()
82 | execution_date = context["execution_date"]
| ^^^^^^^^^^^^^^^^ AIR302
83 | next_ds = context["next_ds"]
84 | next_ds_nodash = context["next_ds_nodash"]
|

AIR302_context.py:83:23: AIR302 `next_ds` is removed in Airflow 3.0
|
81 | context = get_current_context()
82 | execution_date = context["execution_date"]
83 | next_ds = context["next_ds"]
| ^^^^^^^^^ AIR302
84 | next_ds_nodash = context["next_ds_nodash"]
85 | next_execution_date = context["next_execution_date"]
|

AIR302_context.py:84:30: AIR302 `next_ds_nodash` is removed in Airflow 3.0
|
82 | execution_date = context["execution_date"]
83 | next_ds = context["next_ds"]
84 | next_ds_nodash = context["next_ds_nodash"]
| ^^^^^^^^^^^^^^^^ AIR302
85 | next_execution_date = context["next_execution_date"]
86 | prev_ds = context["prev_ds"]
|

AIR302_context.py:85:35: AIR302 `next_execution_date` is removed in Airflow 3.0
|
83 | next_ds = context["next_ds"]
84 | next_ds_nodash = context["next_ds_nodash"]
85 | next_execution_date = context["next_execution_date"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
86 | prev_ds = context["prev_ds"]
87 | prev_ds_nodash = context["prev_ds_nodash"]
|

AIR302_context.py:86:23: AIR302 `prev_ds` is removed in Airflow 3.0
|
84 | next_ds_nodash = context["next_ds_nodash"]
85 | next_execution_date = context["next_execution_date"]
86 | prev_ds = context["prev_ds"]
| ^^^^^^^^^ AIR302
87 | prev_ds_nodash = context["prev_ds_nodash"]
88 | prev_execution_date = context["prev_execution_date"]
|

AIR302_context.py:87:30: AIR302 `prev_ds_nodash` is removed in Airflow 3.0
|
85 | next_execution_date = context["next_execution_date"]
86 | prev_ds = context["prev_ds"]
87 | prev_ds_nodash = context["prev_ds_nodash"]
| ^^^^^^^^^^^^^^^^ AIR302
88 | prev_execution_date = context["prev_execution_date"]
89 | prev_execution_date_success = context["prev_execution_date_success"]
|

AIR302_context.py:88:35: AIR302 `prev_execution_date` is removed in Airflow 3.0
|
86 | prev_ds = context["prev_ds"]
87 | prev_ds_nodash = context["prev_ds_nodash"]
88 | prev_execution_date = context["prev_execution_date"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
89 | prev_execution_date_success = context["prev_execution_date_success"]
90 | tomorrow_ds = context["tomorrow_ds"]
|

AIR302_context.py:89:43: AIR302 `prev_execution_date_success` is removed in Airflow 3.0
|
87 | prev_ds_nodash = context["prev_ds_nodash"]
88 | prev_execution_date = context["prev_execution_date"]
89 | prev_execution_date_success = context["prev_execution_date_success"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
90 | tomorrow_ds = context["tomorrow_ds"]
91 | yesterday_ds = context["yesterday_ds"]
|

AIR302_context.py:90:27: AIR302 `tomorrow_ds` is removed in Airflow 3.0
|
88 | prev_execution_date = context["prev_execution_date"]
89 | prev_execution_date_success = context["prev_execution_date_success"]
90 | tomorrow_ds = context["tomorrow_ds"]
| ^^^^^^^^^^^^^ AIR302
91 | yesterday_ds = context["yesterday_ds"]
92 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
|

AIR302_context.py:91:28: AIR302 `yesterday_ds` is removed in Airflow 3.0
|
89 | prev_execution_date_success = context["prev_execution_date_success"]
90 | tomorrow_ds = context["tomorrow_ds"]
91 | yesterday_ds = context["yesterday_ds"]
| ^^^^^^^^^^^^^^ AIR302
92 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
|

AIR302_context.py:92:35: AIR302 `yesterday_ds_nodash` is removed in Airflow 3.0
|
90 | tomorrow_ds = context["tomorrow_ds"]
91 | yesterday_ds = context["yesterday_ds"]
92 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
93 |
94 | class CustomOperator(BaseOperator):
|

AIR302_context.py:96:34: AIR302 `execution_date` is removed in Airflow 3.0
|
94 | class CustomOperator(BaseOperator):
95 | def execute(self, context):
96 | execution_date = context["execution_date"]
| ^^^^^^^^^^^^^^^^ AIR302
97 | next_ds = context["next_ds"]
98 | next_ds_nodash = context["next_ds_nodash"]
|

AIR302_context.py:97:27: AIR302 `next_ds` is removed in Airflow 3.0
|
95 | def execute(self, context):
96 | execution_date = context["execution_date"]
97 | next_ds = context["next_ds"]
| ^^^^^^^^^ AIR302
98 | next_ds_nodash = context["next_ds_nodash"]
99 | next_execution_date = context["next_execution_date"]
|

AIR302_context.py:98:34: AIR302 `next_ds_nodash` is removed in Airflow 3.0
|
96 | execution_date = context["execution_date"]
97 | next_ds = context["next_ds"]
98 | next_ds_nodash = context["next_ds_nodash"]
| ^^^^^^^^^^^^^^^^ AIR302
99 | next_execution_date = context["next_execution_date"]
100 | prev_ds = context["prev_ds"]
|

AIR302_context.py:99:39: AIR302 `next_execution_date` is removed in Airflow 3.0
|
97 | next_ds = context["next_ds"]
98 | next_ds_nodash = context["next_ds_nodash"]
99 | next_execution_date = context["next_execution_date"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
100 | prev_ds = context["prev_ds"]
101 | prev_ds_nodash = context["prev_ds_nodash"]
|

AIR302_context.py:100:27: AIR302 `prev_ds` is removed in Airflow 3.0
|
98 | next_ds_nodash = context["next_ds_nodash"]
99 | next_execution_date = context["next_execution_date"]
100 | prev_ds = context["prev_ds"]
| ^^^^^^^^^ AIR302
101 | prev_ds_nodash = context["prev_ds_nodash"]
102 | prev_execution_date = context["prev_execution_date"]
|

AIR302_context.py:101:34: AIR302 `prev_ds_nodash` is removed in Airflow 3.0
|
99 | next_execution_date = context["next_execution_date"]
100 | prev_ds = context["prev_ds"]
101 | prev_ds_nodash = context["prev_ds_nodash"]
| ^^^^^^^^^^^^^^^^ AIR302
102 | prev_execution_date = context["prev_execution_date"]
103 | prev_execution_date_success = context["prev_execution_date_success"]
|

AIR302_context.py:102:39: AIR302 `prev_execution_date` is removed in Airflow 3.0
|
100 | prev_ds = context["prev_ds"]
101 | prev_ds_nodash = context["prev_ds_nodash"]
102 | prev_execution_date = context["prev_execution_date"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
103 | prev_execution_date_success = context["prev_execution_date_success"]
104 | tomorrow_ds = context["tomorrow_ds"]
|

AIR302_context.py:103:47: AIR302 `prev_execution_date_success` is removed in Airflow 3.0
|
101 | prev_ds_nodash = context["prev_ds_nodash"]
102 | prev_execution_date = context["prev_execution_date"]
103 | prev_execution_date_success = context["prev_execution_date_success"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
104 | tomorrow_ds = context["tomorrow_ds"]
105 | yesterday_ds = context["yesterday_ds"]
|

AIR302_context.py:104:31: AIR302 `tomorrow_ds` is removed in Airflow 3.0
|
102 | prev_execution_date = context["prev_execution_date"]
103 | prev_execution_date_success = context["prev_execution_date_success"]
104 | tomorrow_ds = context["tomorrow_ds"]
| ^^^^^^^^^^^^^ AIR302
105 | yesterday_ds = context["yesterday_ds"]
106 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
|

AIR302_context.py:105:32: AIR302 `yesterday_ds` is removed in Airflow 3.0
|
103 | prev_execution_date_success = context["prev_execution_date_success"]
104 | tomorrow_ds = context["tomorrow_ds"]
105 | yesterday_ds = context["yesterday_ds"]
| ^^^^^^^^^^^^^^ AIR302
106 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
|

AIR302_context.py:106:39: AIR302 `yesterday_ds_nodash` is removed in Airflow 3.0
|
104 | tomorrow_ds = context["tomorrow_ds"]
105 | yesterday_ds = context["yesterday_ds"]
106 | yesterday_ds_nodash = context["yesterday_ds_nodash"]
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
107 |
108 | @task
|

AIR302_context.py:109:45: AIR302 `execution_date` is removed in Airflow 3.0
|
108 | @task
109 | def access_invalid_argument_task_out_of_dag(execution_date, tomorrow_ds, logical_date, **context):
| ^^^^^^^^^^^^^^ AIR302
110 | print("execution date", execution_date)
111 | print("access invalid key", context.get("conf"))
|

AIR302_context.py:109:61: AIR302 `tomorrow_ds` is removed in Airflow 3.0
|
108 | @task
109 | def access_invalid_argument_task_out_of_dag(execution_date, tomorrow_ds, logical_date, **context):
| ^^^^^^^^^^^ AIR302
110 | print("execution date", execution_date)
111 | print("access invalid key", context.get("conf"))
|

AIR302_context.py:111:45: AIR302 `conf` is removed in Airflow 3.0
|
109 | def access_invalid_argument_task_out_of_dag(execution_date, tomorrow_ds, logical_date, **context):
110 | print("execution date", execution_date)
111 | print("access invalid key", context.get("conf"))
| ^^^^^^ AIR302
112 |
113 | @task(task_id="print_the_context")
|

AIR302_context.py:117:22: AIR302 `tomorrow_ds` is removed in Airflow 3.0
|
115 | """Print the Airflow context and ds variable from the context."""
116 | print(ds)
117 | print(kwargs.get("tomorrow_ds"))
| ^^^^^^^^^^^^^ AIR302
118 | c = get_current_context()
119 | c.get("execution_date")
|

AIR302_context.py:119:11: AIR302 `execution_date` is removed in Airflow 3.0
|
117 | print(kwargs.get("tomorrow_ds"))
118 | c = get_current_context()
119 | c.get("execution_date")
| ^^^^^^^^^^^^^^^^ AIR302
|
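The context-key diagnostics all follow one theme: `execution_date` and the derived `*_ds` convenience keys are gone from the task context in Airflow 3.0, while `logical_date` is left unflagged in the fixture's argument list above. A hedged sketch of what a migrated task might look like (hypothetical example, not taken from the fixture):

```python
from datetime import timedelta

from airflow.decorators import task


@task
def print_dates(**context):
    # Airflow 2.x, flagged by AIR302:
    #   execution_date = context["execution_date"]
    #   tomorrow_ds = context["tomorrow_ds"]
    # Airflow 3.0: keep `logical_date` and derive the convenience values.
    logical_date = context["logical_date"]
    tomorrow_ds = (logical_date + timedelta(days=1)).strftime("%Y-%m-%d")
    print(logical_date, tomorrow_ds)
```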
Some files were not shown because too many files have changed in this diff.