Compare commits


12 Commits

Author | SHA1 | Message | Date
Dhruv Manilawala | c76e15a45d | Check context parameters directly from function definition | 2025-01-23 14:13:29 +05:30
Ankit Chaurasia | d0aff2bbff | Add find_parameter | 2025-01-23 12:19:31 +05:45
Ankit Chaurasia | 65db31f0e1 | highlights the parameter itself | 2025-01-22 12:59:39 +05:45
Wei Lee | c2c37b8052 | test: update test fixture | 2025-01-22 10:06:15 +08:00
Ankit Chaurasia | 51613d9107 | Add lint error for removed context variables for get_current_context | 2025-01-22 09:57:26 +08:00
Ankit Chaurasia | f20e70cd62 | remove use of vectors | 2025-01-22 09:57:25 +08:00
Ankit Chaurasia | 4737824345 | fix the logic for lint error message | 2025-01-22 09:57:25 +08:00
Ankit Chaurasia | 1961b76d03 | Refactor functions to use ExprSubscript | 2025-01-22 09:57:25 +08:00
Ankit Chaurasia | 62a1e55705 | refactor rename functions | 2025-01-22 09:57:25 +08:00
Ankit Chaurasia | 8a7ec4c0a3 | Fix PR comments | 2025-01-22 09:57:25 +08:00
Ankit Chaurasia | 9b9540c3cd | Check arguments and function decorated with @task | 2025-01-22 09:57:25 +08:00
Ankit Chaurasia | ccafaf8e30 | Add more checks for removed context variables (add lint rule to show error for removed context variables in airflow) | 2025-01-22 09:57:25 +08:00
164 changed files with 3223 additions and 6142 deletions

View File

@@ -6,10 +6,3 @@ failure-output = "immediate-final"
fail-fast = false
status-level = "skip"
# Mark tests that take longer than 1s as slow.
# Terminate after 60s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "1s", terminate-after = 60 }
# Show slow jobs in the final summary
final-status-level = "slow"

View File

@@ -1,2 +0,0 @@
# This file cannot use the extension `.yaml`.
blank_issues_enabled: false

View File

@@ -1,22 +0,0 @@
name: New issue
description: A generic issue
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to report an issue! We're glad to have you involved with Ruff.
If you're filing a bug report, please consider including the following information:
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
* The current Ruff version (`ruff --version`).
- type: textarea
attributes:
label: Description
description: A description of the issue

View File

@@ -23,8 +23,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
env:
PACKAGE_NAME: ruff
MODULE_NAME: ruff

View File

@@ -51,7 +51,7 @@ jobs:
env:
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }}
run: |
version=$(grep -m 1 "^version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
if [ "${TAG}" != "${version}" ]; then
echo "The input tag does not match the version from pyproject.toml:" >&2
echo "${TAG}" >&2

View File

@@ -1,7 +1,5 @@
name: CI
permissions: {}
on:
push:
branches: [main]
@@ -61,7 +59,6 @@ jobs:
- Cargo.toml
- Cargo.lock
- crates/**
- "!crates/red_knot*/**"
- "!crates/ruff_python_formatter/**"
- "!crates/ruff_formatter/**"
- "!crates/ruff_dev/**"
@@ -119,11 +116,11 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: |
rustup component add clippy
rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
- name: "Clippy (wasm)"
@@ -133,13 +130,12 @@ jobs:
name: "cargo test (linux)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -152,6 +148,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -179,13 +176,12 @@ jobs:
name: "cargo test (linux, release)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -198,6 +194,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -208,23 +205,22 @@ jobs:
name: "cargo test (windows)"
runs-on: github-windows-2025-x86_64-16
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
run: |
@@ -235,13 +231,12 @@ jobs:
name: "cargo test (wasm)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup target add wasm32-unknown-unknown
- uses: actions/setup-node@v4
@@ -252,6 +247,7 @@ jobs:
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- uses: Swatinem/rust-cache@v2
- name: "Test ruff_wasm"
run: |
cd crates/ruff_wasm
@@ -270,11 +266,11 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@v2
- name: "Build"
run: cargo build --release --locked
@@ -282,7 +278,7 @@ jobs:
name: "cargo build (msrv)"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
@@ -293,7 +289,6 @@ jobs:
with:
file: "Cargo.toml"
field: "workspace.package.rust-version"
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
env:
MSRV: ${{ steps.msrv.outputs.value }}
@@ -308,6 +303,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
@@ -325,11 +321,11 @@ jobs:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
with:
workspaces: "fuzz -> target"
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@main
with:
@@ -345,7 +341,7 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && needs.determine_changes.outputs.parser == 'true' }}
if: ${{ needs.determine_changes.outputs.parser == 'true' }}
timeout-minutes: 20
env:
FORCE_COLOR: 1
@@ -381,15 +377,15 @@ jobs:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- uses: Swatinem/rust-cache@v2
# Run all code generation scripts, and verify that the current output is
# already checked into git.
- run: python crates/ruff_python_ast/generate.py
@@ -413,7 +409,7 @@ jobs:
- determine_changes
# Only runs on pull requests, since that is the only way we can find the base version for comparison.
# Ecosystem check needs linter and/or formatter changes.
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
if: ${{ github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
@@ -547,7 +543,6 @@ jobs:
name: "python package"
runs-on: ubuntu-latest
timeout-minutes: 20
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') }}
steps:
- uses: actions/checkout@v4
with:
@@ -582,9 +577,9 @@ jobs:
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v2
- name: "Install pre-commit"
run: pip install pre-commit
- name: "Cache pre-commit"
@@ -616,7 +611,6 @@ jobs:
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- uses: Swatinem/rust-cache@v2
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.9.0
@@ -626,6 +620,7 @@ jobs:
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@v5
- uses: Swatinem/rust-cache@v2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@@ -649,15 +644,16 @@ jobs:
name: "formatter instabilities and black similarity"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main') }}
if: needs.determine_changes.outputs.formatter == 'true' || github.ref == 'refs/heads/main'
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
- name: "Cache rust"
uses: Swatinem/rust-cache@v2
- name: "Run checks"
run: scripts/formatter_ecosystem_checks.sh
- name: "Github step summary"
@@ -672,7 +668,7 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@v2
env:
@@ -714,7 +710,7 @@ jobs:
benchmarks:
runs-on: ubuntu-22.04
needs: determine_changes
if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
steps:
- name: "Checkout Branch"
@@ -722,8 +718,6 @@ jobs:
with:
persist-credentials: false
- uses: Swatinem/rust-cache@v2
- name: "Install Rust toolchain"
run: rustup show
@@ -732,6 +726,8 @@ jobs:
with:
tool: cargo-codspeed
- uses: Swatinem/rust-cache@v2
- name: "Build benchmarks"
run: cargo codspeed build --features codspeed -p ruff_benchmark

.github/zizmor.yml
View File

@@ -10,10 +10,3 @@ rules:
ignore:
- build-docker.yml
- publish-playground.yml
excessive-permissions:
# it's hard to test what the impact of removing these ignores would be
# without actually running the release workflow...
ignore:
- build-docker.yml
- publish-playground.yml
- publish-docs.yml

.gitignore
View File

@@ -29,10 +29,6 @@ tracing.folded
tracing-flamechart.svg
tracing-flamegraph.svg
# insta
*.rs.pending-snap
###
# Rust.gitignore
###

View File

@@ -91,7 +91,7 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.2.2
rev: v1.1.1
hooks:
- id: zizmor

View File

@@ -1,65 +1,5 @@
# Changelog
## 0.9.3
### Preview features
- \[`airflow`\] Argument `fail_stop` in DAG has been renamed as `fail_fast` (`AIR302`) ([#15633](https://github.com/astral-sh/ruff/pull/15633))
- \[`airflow`\] Extend `AIR303` with more symbols ([#15611](https://github.com/astral-sh/ruff/pull/15611))
- \[`flake8-bandit`\] Report all references to suspicious functions (`S3`) ([#15541](https://github.com/astral-sh/ruff/pull/15541))
- \[`flake8-pytest-style`\] Do not emit diagnostics for empty `for` loops (`PT012`, `PT031`) ([#15542](https://github.com/astral-sh/ruff/pull/15542))
- \[`flake8-simplify`\] Avoid double negations (`SIM103`) ([#15562](https://github.com/astral-sh/ruff/pull/15562))
- \[`pyflakes`\] Fix infinite loop with unused local import in `__init__.py` (`F401`) ([#15517](https://github.com/astral-sh/ruff/pull/15517))
- \[`pylint`\] Do not report methods with only one `EM101`-compatible `raise` (`PLR6301`) ([#15507](https://github.com/astral-sh/ruff/pull/15507))
- \[`pylint`\] Implement `redefined-slots-in-subclass` (`W0244`) ([#9640](https://github.com/astral-sh/ruff/pull/9640))
- \[`pyupgrade`\] Add rules to use PEP 695 generics in classes and functions (`UP046`, `UP047`) ([#15565](https://github.com/astral-sh/ruff/pull/15565), [#15659](https://github.com/astral-sh/ruff/pull/15659))
- \[`refurb`\] Implement `for-loop-writes` (`FURB122`) ([#10630](https://github.com/astral-sh/ruff/pull/10630))
- \[`ruff`\] Implement `needless-else` clause (`RUF047`) ([#15051](https://github.com/astral-sh/ruff/pull/15051))
- \[`ruff`\] Implement `starmap-zip` (`RUF058`) ([#15483](https://github.com/astral-sh/ruff/pull/15483))
### Rule changes
- \[`flake8-bugbear`\] Do not raise error if keyword argument is present and target-python version is less than or equal to 3.9 (`B903`) ([#15549](https://github.com/astral-sh/ruff/pull/15549))
- \[`flake8-comprehensions`\] strip parentheses around generators in `unnecessary-generator-set` (`C401`) ([#15553](https://github.com/astral-sh/ruff/pull/15553))
- \[`flake8-pytest-style`\] Rewrite references to `.exception` (`PT027`) ([#15680](https://github.com/astral-sh/ruff/pull/15680))
- \[`flake8-simplify`\] Mark fixes as unsafe (`SIM201`, `SIM202`) ([#15626](https://github.com/astral-sh/ruff/pull/15626))
- \[`flake8-type-checking`\] Fix some safe fixes being labeled unsafe (`TC006`,`TC008`) ([#15638](https://github.com/astral-sh/ruff/pull/15638))
- \[`isort`\] Omit trailing whitespace in `unsorted-imports` (`I001`) ([#15518](https://github.com/astral-sh/ruff/pull/15518))
- \[`pydoclint`\] Allow ignoring one line docstrings for `DOC` rules ([#13302](https://github.com/astral-sh/ruff/pull/13302))
- \[`pyflakes`\] Apply redefinition fixes by source code order (`F811`) ([#15575](https://github.com/astral-sh/ruff/pull/15575))
- \[`pyflakes`\] Avoid removing too many imports in `redefined-while-unused` (`F811`) ([#15585](https://github.com/astral-sh/ruff/pull/15585))
- \[`pyflakes`\] Group redefinition fixes by source statement (`F811`) ([#15574](https://github.com/astral-sh/ruff/pull/15574))
- \[`pylint`\] Include name of base class in message for `redefined-slots-in-subclass` (`W0244`) ([#15559](https://github.com/astral-sh/ruff/pull/15559))
- \[`ruff`\] Update fix for `RUF055` to use `var == value` ([#15605](https://github.com/astral-sh/ruff/pull/15605))
### Formatter
- Fix bracket spacing for single-element tuples in f-string expressions ([#15537](https://github.com/astral-sh/ruff/pull/15537))
- Fix unstable f-string formatting for expressions containing a trailing comma ([#15545](https://github.com/astral-sh/ruff/pull/15545))
### Performance
- Avoid quadratic membership check in import fixes ([#15576](https://github.com/astral-sh/ruff/pull/15576))
### Server
- Allow `unsafe-fixes` settings for code actions ([#15666](https://github.com/astral-sh/ruff/pull/15666))
### Bug fixes
- \[`flake8-bandit`\] Add missing single-line/dotall regex flag (`S608`) ([#15654](https://github.com/astral-sh/ruff/pull/15654))
- \[`flake8-import-conventions`\] Fix infinite loop between `ICN001` and `I002` (`ICN001`) ([#15480](https://github.com/astral-sh/ruff/pull/15480))
- \[`flake8-simplify`\] Do not emit diagnostics for expressions inside string type annotations (`SIM222`, `SIM223`) ([#15405](https://github.com/astral-sh/ruff/pull/15405))
- \[`pyflakes`\] Treat arguments passed to the `default=` parameter of `TypeVar` as type expressions (`F821`) ([#15679](https://github.com/astral-sh/ruff/pull/15679))
- \[`pyupgrade`\] Avoid syntax error when the iterable is a non-parenthesized tuple (`UP028`) ([#15543](https://github.com/astral-sh/ruff/pull/15543))
- \[`ruff`\] Exempt `NewType` calls where the original type is immutable (`RUF009`) ([#15588](https://github.com/astral-sh/ruff/pull/15588))
- Preserve raw string prefix and escapes in all codegen fixes ([#15694](https://github.com/astral-sh/ruff/pull/15694))
### Documentation
- Generate documentation redirects for lowercase rule codes ([#15564](https://github.com/astral-sh/ruff/pull/15564))
- `TRY300`: Add some extra notes on not catching exceptions you didn't expect ([#15036](https://github.com/astral-sh/ruff/pull/15036))
## 0.9.2
### Preview features

Cargo.lock
View File

@@ -2332,7 +2332,6 @@ dependencies = [
"red_knot_server",
"regex",
"ruff_db",
"ruff_python_trivia",
"salsa",
"tempfile",
"toml",
@@ -2584,7 +2583,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.9.3"
version = "0.9.2"
dependencies = [
"anyhow",
"argfile",
@@ -2818,7 +2817,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.9.3"
version = "0.9.2"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3135,7 +3134,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.9.3"
version = "0.9.2"
dependencies = [
"console_error_panic_hook",
"console_log",

View File

@@ -149,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.9.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.9.3/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.9.2/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.9.2/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -183,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.9.3
rev: v0.9.2
hooks:
# Run the linter.
- id: ruff

View File

@@ -33,7 +33,6 @@ tracing-tree = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_trivia = { workspace = true }
insta = { workspace = true, features = ["filters"] }
insta-cmd = { workspace = true }

View File

@@ -1,190 +0,0 @@
use crate::logging::Verbosity;
use crate::python_version::PythonVersion;
use clap::{ArgAction, ArgMatches, Error, Parser};
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
use red_knot_python_semantic::lint;
use ruff_db::system::SystemPathBuf;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
)]
#[command(version)]
pub(crate) struct Args {
#[command(subcommand)]
pub(crate) command: Command,
}
#[derive(Debug, clap::Subcommand)]
pub(crate) enum Command {
/// Check a project for type errors.
Check(CheckCommand),
/// Start the language server
Server,
}
#[derive(Debug, Parser)]
pub(crate) struct CheckCommand {
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
///
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
#[arg(long, value_name = "PROJECT")]
pub(crate) project: Option<SystemPathBuf>,
/// Path to the virtual environment the project uses.
///
/// If provided, red-knot will use the `site-packages` directory of this virtual environment
/// to resolve type information for the project's third-party dependencies.
#[arg(long, value_name = "PATH")]
pub(crate) venv_path: Option<SystemPathBuf>,
/// Custom directory to use for stdlib typeshed stubs.
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
pub(crate) typeshed: Option<SystemPathBuf>,
/// Additional path to use as a module-resolution source (can be passed multiple times).
#[arg(long, value_name = "PATH")]
pub(crate) extra_search_path: Option<Vec<SystemPathBuf>>,
/// Python version to assume when resolving types.
#[arg(long, value_name = "VERSION", alias = "target-version")]
pub(crate) python_version: Option<PythonVersion>,
#[clap(flatten)]
pub(crate) verbosity: Verbosity,
#[clap(flatten)]
pub(crate) rules: RulesArg,
/// Run in watch mode by re-running whenever files change.
#[arg(long, short = 'W')]
pub(crate) watch: bool,
}
impl CheckCommand {
pub(crate) fn into_options(self) -> Options {
let rules = if self.rules.is_empty() {
None
} else {
Some(
self.rules
.into_iter()
.map(|(rule, level)| (RangedValue::cli(rule), RangedValue::cli(level)))
.collect(),
)
};
Options {
environment: Some(EnvironmentOptions {
python_version: self
.python_version
.map(|version| RangedValue::cli(version.into())),
venv_path: self.venv_path.map(RelativePathBuf::cli),
typeshed: self.typeshed.map(RelativePathBuf::cli),
extra_paths: self.extra_search_path.map(|extra_search_paths| {
extra_search_paths
.into_iter()
.map(RelativePathBuf::cli)
.collect()
}),
..EnvironmentOptions::default()
}),
rules,
..Default::default()
}
}
}
/// A list of rules to enable or disable with a given severity.
///
/// This type is used to parse the `--error`, `--warn`, and `--ignore` arguments
/// while preserving the order in which they were specified (arguments last override previous severities).
#[derive(Debug)]
pub(crate) struct RulesArg(Vec<(String, lint::Level)>);
impl RulesArg {
fn is_empty(&self) -> bool {
self.0.is_empty()
}
fn into_iter(self) -> impl Iterator<Item = (String, lint::Level)> {
self.0.into_iter()
}
}
impl clap::FromArgMatches for RulesArg {
fn from_arg_matches(matches: &ArgMatches) -> Result<Self, Error> {
let mut rules = Vec::new();
for (level, arg_id) in [
(lint::Level::Ignore, "ignore"),
(lint::Level::Warn, "warn"),
(lint::Level::Error, "error"),
] {
let indices = matches.indices_of(arg_id).into_iter().flatten();
let levels = matches.get_many::<String>(arg_id).into_iter().flatten();
rules.extend(
indices
.zip(levels)
.map(|(index, rule)| (index, rule, level)),
);
}
// Sort by their index so that values specified later override earlier ones.
rules.sort_by_key(|(index, _, _)| *index);
Ok(Self(
rules
.into_iter()
.map(|(_, rule, level)| (rule.to_owned(), level))
.collect(),
))
}
fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), Error> {
self.0 = Self::from_arg_matches(matches)?.0;
Ok(())
}
}
impl clap::Args for RulesArg {
fn augment_args(cmd: clap::Command) -> clap::Command {
const HELP_HEADING: &str = "Enabling / disabling rules";
cmd.arg(
clap::Arg::new("error")
.long("error")
.action(ArgAction::Append)
.help("Treat the given rule as having severity 'error'. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
.arg(
clap::Arg::new("warn")
.long("warn")
.action(ArgAction::Append)
.help("Treat the given rule as having severity 'warn'. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
.arg(
clap::Arg::new("ignore")
.long("ignore")
.action(ArgAction::Append)
.help("Disables the rule. Can be specified multiple times.")
.value_name("RULE")
.help_heading(HELP_HEADING),
)
}
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
Self::augment_args(cmd)
}
}
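
Note: the `FromArgMatches` implementation above is the core of the removed `args.rs`. It recovers the positional index of every `--error`/`--warn`/`--ignore` occurrence so that, after a stable sort, the flag specified last wins for any given rule. A self-contained sketch of that index-merge technique, assuming clap 4.x:

```rust
use clap::{Arg, ArgAction, Command};

#[derive(Debug, Clone, Copy, PartialEq)]
enum Level {
    Ignore,
    Warn,
    Error,
}

fn main() {
    let matches = Command::new("demo")
        .arg(Arg::new("error").long("error").action(ArgAction::Append))
        .arg(Arg::new("warn").long("warn").action(ArgAction::Append))
        .arg(Arg::new("ignore").long("ignore").action(ArgAction::Append))
        .get_matches_from(["demo", "--error", "division-by-zero", "--ignore", "division-by-zero"]);

    // Collect (index, rule, level) triples for every occurrence of each flag.
    let mut rules = Vec::new();
    for (level, id) in [
        (Level::Ignore, "ignore"),
        (Level::Warn, "warn"),
        (Level::Error, "error"),
    ] {
        let indices = matches.indices_of(id).into_iter().flatten();
        let values = matches.get_many::<String>(id).into_iter().flatten();
        rules.extend(indices.zip(values).map(|(i, rule)| (i, rule.clone(), level)));
    }

    // Later occurrences have larger indices, so after sorting they override
    // earlier ones when the list is folded into a rule -> level map.
    rules.sort_by_key(|(i, _, _)| *i);
    assert_eq!(rules.last().map(|(_, _, level)| *level), Some(Level::Ignore));
}
```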

View File

@@ -1,13 +1,12 @@
use std::process::{ExitCode, Termination};
use std::sync::Mutex;
use crate::args::{Args, CheckCommand, Command};
use crate::logging::setup_tracing;
use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use red_knot_project::metadata::options::Options;
use python_version::PythonVersion;
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::watch;
use red_knot_project::watch::ProjectWatcher;
use red_knot_project::{ProjectDatabase, ProjectMetadata};
@@ -16,11 +15,91 @@ use ruff_db::diagnostic::Diagnostic;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;
mod args;
use crate::logging::{setup_tracing, Verbosity};
mod logging;
mod python_version;
mod verbosity;
#[derive(Debug, Parser)]
#[command(
author,
name = "red-knot",
about = "An extremely fast Python type checker."
)]
#[command(version)]
struct Args {
#[command(subcommand)]
pub(crate) command: Option<Command>,
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
/// as will the project's virtual environment (`.venv`) unless the `venv-path` option is set.
///
/// Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
#[arg(long, value_name = "PROJECT")]
project: Option<SystemPathBuf>,
/// Path to the virtual environment the project uses.
///
/// If provided, red-knot will use the `site-packages` directory of this virtual environment
/// to resolve type information for the project's third-party dependencies.
#[arg(long, value_name = "PATH")]
venv_path: Option<SystemPathBuf>,
/// Custom directory to use for stdlib typeshed stubs.
#[arg(long, value_name = "PATH", alias = "custom-typeshed-dir")]
typeshed: Option<SystemPathBuf>,
/// Additional path to use as a module-resolution source (can be passed multiple times).
#[arg(long, value_name = "PATH")]
extra_search_path: Option<Vec<SystemPathBuf>>,
/// Python version to assume when resolving types.
#[arg(long, value_name = "VERSION", alias = "target-version")]
python_version: Option<PythonVersion>,
#[clap(flatten)]
verbosity: Verbosity,
/// Run in watch mode by re-running whenever files change.
#[arg(long, short = 'W')]
watch: bool,
}
impl Args {
fn to_options(&self, cli_cwd: &SystemPath) -> Options {
Options {
environment: Some(EnvironmentOptions {
python_version: self.python_version.map(Into::into),
venv_path: self
.venv_path
.as_ref()
.map(|venv_path| SystemPath::absolute(venv_path, cli_cwd)),
typeshed: self
.typeshed
.as_ref()
.map(|typeshed| SystemPath::absolute(typeshed, cli_cwd)),
extra_paths: self.extra_search_path.as_ref().map(|extra_search_paths| {
extra_search_paths
.iter()
.map(|path| SystemPath::absolute(path, cli_cwd))
.collect()
}),
..EnvironmentOptions::default()
}),
..Default::default()
}
}
}
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Start the language server
Server,
}
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
run().unwrap_or_else(|error| {
@@ -46,13 +125,10 @@ pub fn main() -> ExitStatus {
fn run() -> anyhow::Result<ExitStatus> {
let args = Args::parse_from(std::env::args());
match args.command {
Command::Server => run_server().map(|()| ExitStatus::Success),
Command::Check(check_args) => run_check(check_args),
if matches!(args.command, Some(Command::Server)) {
return run_server().map(|()| ExitStatus::Success);
}
}
fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
let verbosity = args.verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
@@ -82,9 +158,8 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
.transpose()?
.unwrap_or_else(|| cli_base_path.clone());
let system = OsSystem::new(cwd);
let watch = args.watch;
let cli_options = args.into_options();
let system = OsSystem::new(cwd.clone());
let cli_options = args.to_options(&cwd);
let mut workspace_metadata = ProjectMetadata::discover(system.current_directory(), &system)?;
workspace_metadata.apply_cli_options(cli_options.clone());
@@ -102,7 +177,7 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
}
})?;
let exit_status = if watch {
let exit_status = if args.watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)
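
Note: both versions of `main.rs` in this hunk resolve relative CLI paths against the invocation directory, not the project root (`SystemPath::absolute(path, cli_cwd)` on one side, `RelativePathBuf::cli` with deferred resolution on the other). A minimal sketch of that anchoring rule using only `std::path`; `absolutize` is a hypothetical stand-in for `SystemPath::absolute`:

```rust
use std::path::{Path, PathBuf};

// Hypothetical equivalent of `SystemPath::absolute`: anchor a relative
// path at the given base, leave absolute paths untouched.
fn absolutize(path: &Path, base: &Path) -> PathBuf {
    if path.is_absolute() {
        path.to_path_buf()
    } else {
        base.join(path)
    }
}

fn main() {
    let cli_cwd = Path::new("/repo/child"); // where the command was invoked
    let project_root = Path::new("/repo"); // where pyproject.toml lives

    // `--extra-search-path ../libs` typed in `/repo/child` resolves there...
    assert_eq!(absolutize(Path::new("../libs"), cli_cwd), Path::new("/repo/child/../libs"));
    // ...while `extra-paths = ["libs"]` from the config file would be
    // anchored at the project root instead.
    assert_eq!(absolutize(Path::new("libs"), project_root), Path::new("/repo/libs"));
}
```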

View File

@@ -1,441 +1,60 @@
use anyhow::Context;
use insta::internals::SettingsBindDropGuard;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use std::path::{Path, PathBuf};
use std::process::Command;
use tempfile::TempDir;
/// Specifying an option on the CLI should take precedence over the same setting in the
/// project's configuration.
#[test]
fn config_override() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
),
(
"test.py",
r#"
import sys
fn test_config_override() -> anyhow::Result<()> {
let tempdir = TempDir::new()?;
# Access `sys.last_exc` that was only added in Python 3.12
print(sys.last_exc)
"#,
),
])?;
std::fs::write(
tempdir.path().join("pyproject.toml"),
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
)
.context("Failed to write settings")?;
assert_cmd_snapshot!(case.command(), @r"
std::fs::write(
tempdir.path().join("test.py"),
r#"
import sys
# Access `sys.last_exc` that was only added in Python 3.12
print(sys.last_exc)
"#,
)
.context("Failed to write test.py")?;
insta::with_settings!({filters => vec![(&*tempdir_filter(&tempdir), "<temp_dir>/")]}, {
assert_cmd_snapshot!(knot().arg("--project").arg(tempdir.path()), @r"
success: false
exit_code: 1
----- stdout -----
error[lint:unresolved-attribute] <temp_dir>/test.py:5:7 Type `<module 'sys'>` has no attribute `last_exc`
----- stderr -----
");
");
});
assert_cmd_snapshot!(case.command().arg("--python-version").arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
}
/// Paths specified on the CLI are relative to the current working directory and not the project root.
///
/// We test this by adding an extra search path from the CLI to the libs directory when
/// running the CLI from the child directory (using relative paths).
///
/// Project layout:
/// ```
/// - libs
/// |- utils.py
/// - child
/// | - test.py
/// - pyproject.toml
/// ```
///
/// And the command is run in the `child` directory.
#[test]
fn cli_arguments_are_relative_to_the_current_directory() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
),
(
"libs/utils.py",
r#"
def add(a: int, b: int) -> int:
a + b
"#,
),
(
"child/test.py",
r#"
from utils import add
stat = add(10, 15)
"#,
),
])?;
// Make sure that the CLI fails when the `libs` directory is not in the search path.
assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")), @r#"
success: false
exit_code: 1
----- stdout -----
error[lint:unresolved-import] <temp_dir>/child/test.py:2:1 Cannot resolve import `utils`
----- stderr -----
"#);
assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")).arg("--extra-search-path").arg("../libs"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
}
/// Paths specified in a configuration file are relative to the project root.
///
/// We test this by adding `libs` (as a relative path) to the extra search path in the configuration and run
/// the CLI from a subdirectory.
///
/// Project layout:
/// ```
/// - libs
/// |- utils.py
/// - child
/// | - test.py
/// - pyproject.toml
/// ```
#[test]
fn paths_in_configuration_files_are_relative_to_the_project_root() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.environment]
python-version = "3.11"
extra-paths = ["libs"]
"#,
),
(
"libs/utils.py",
r#"
def add(a: int, b: int) -> int:
a + b
"#,
),
(
"child/test.py",
r#"
from utils import add
stat = add(10, 15)
"#,
),
])?;
assert_cmd_snapshot!(case.command().current_dir(case.project_dir().join("child")), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
Ok(())
}
/// The rule severity can be changed in the configuration file
#[test]
fn configuration_rule_severity() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r"
success: false
exit_code: 1
----- stdout -----
error[lint:division-by-zero] <temp_dir>/test.py:2:5 Cannot divide object of type `Literal[4]` by zero
warning[lint:possibly-unresolved-reference] <temp_dir>/test.py:7:7 Name `x` used when possibly not defined
----- stderr -----
");
case.write_file(
"pyproject.toml",
r#"
[tool.knot.rules]
division-by-zero = "warn" # demote to warn
possibly-unresolved-reference = "ignore"
"#,
)?;
assert_cmd_snapshot!(case.command(), @r"
success: false
exit_code: 1
----- stdout -----
warning[lint:division-by-zero] <temp_dir>/test.py:2:5 Cannot divide object of type `Literal[4]` by zero
----- stderr -----
");
Ok(())
}
/// The rule severity can be changed using `--ignore`, `--warn`, and `--error`
#[test]
fn cli_rule_severity() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
import does_not_exit
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r"
success: false
exit_code: 1
----- stdout -----
error[lint:unresolved-import] <temp_dir>/test.py:2:8 Cannot resolve import `does_not_exit`
error[lint:division-by-zero] <temp_dir>/test.py:4:5 Cannot divide object of type `Literal[4]` by zero
warning[lint:possibly-unresolved-reference] <temp_dir>/test.py:9:7 Name `x` used when possibly not defined
----- stderr -----
");
assert_cmd_snapshot!(
case
.command()
.arg("--ignore")
.arg("possibly-unresolved-reference")
.arg("--warn")
.arg("division-by-zero")
.arg("--warn")
.arg("unresolved-import"),
@r"
success: false
exit_code: 1
assert_cmd_snapshot!(knot().arg("--project").arg(tempdir.path()).arg("--python-version").arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----
warning[lint:unresolved-import] <temp_dir>/test.py:2:8 Cannot resolve import `does_not_exit`
warning[lint:division-by-zero] <temp_dir>/test.py:4:5 Cannot divide object of type `Literal[4]` by zero
----- stderr -----
"
);
Ok(())
}
/// The rule severity can be changed using `--ignore`, `--warn`, and `--error` and
/// values specified last override previous severities.
#[test]
fn cli_rule_severity_precedence() -> anyhow::Result<()> {
let case = TestCase::with_file(
"test.py",
r#"
y = 4 / 0
for a in range(0, y):
x = a
print(x) # possibly-unresolved-reference
"#,
)?;
// Assert that there's a possibly unresolved reference diagnostic
// and that division-by-zero has a severity of error by default.
assert_cmd_snapshot!(case.command(), @r"
success: false
exit_code: 1
----- stdout -----
error[lint:division-by-zero] <temp_dir>/test.py:2:5 Cannot divide object of type `Literal[4]` by zero
warning[lint:possibly-unresolved-reference] <temp_dir>/test.py:7:7 Name `x` used when possibly not defined
----- stderr -----
");
assert_cmd_snapshot!(
case
.command()
.arg("--error")
.arg("possibly-unresolved-reference")
.arg("--warn")
.arg("division-by-zero")
// Override the error severity with warning
.arg("--ignore")
.arg("possibly-unresolved-reference"),
@r"
success: false
exit_code: 1
----- stdout -----
warning[lint:division-by-zero] <temp_dir>/test.py:2:5 Cannot divide object of type `Literal[4]` by zero
----- stderr -----
"
);
Ok(())
}
/// Red Knot warns about unknown rules specified in a configuration file
#[test]
fn configuration_unknown_rules() -> anyhow::Result<()> {
let case = TestCase::with_files([
(
"pyproject.toml",
r#"
[tool.knot.rules]
division-by-zer = "warn" # incorrect rule name
"#,
),
("test.py", "print(10)"),
])?;
assert_cmd_snapshot!(case.command(), @r"
success: false
exit_code: 1
----- stdout -----
warning[unknown-rule] <temp_dir>/pyproject.toml:3:1 Unknown lint rule `division-by-zer`
----- stderr -----
");
Ok(())
}
/// Red Knot warns about unknown rules specified in a CLI argument
#[test]
fn cli_unknown_rules() -> anyhow::Result<()> {
let case = TestCase::with_file("test.py", "print(10)")?;
assert_cmd_snapshot!(case.command().arg("--ignore").arg("division-by-zer"), @r"
success: false
exit_code: 1
----- stdout -----
warning[unknown-rule] Unknown lint rule `division-by-zer`
----- stderr -----
");
Ok(())
fn knot() -> Command {
Command::new(get_cargo_bin("red_knot"))
}
struct TestCase {
_temp_dir: TempDir,
_settings_scope: SettingsBindDropGuard,
project_dir: PathBuf,
}
impl TestCase {
fn new() -> anyhow::Result<Self> {
let temp_dir = TempDir::new()?;
// Canonicalize the tempdir path because macos uses symlinks for tempdirs
// and that doesn't play well with our snapshot filtering.
let project_dir = temp_dir
.path()
.canonicalize()
.context("Failed to canonicalize project path")?;
let mut settings = insta::Settings::clone_current();
settings.add_filter(&tempdir_filter(&project_dir), "<temp_dir>/");
settings.add_filter(r#"\\(\w\w|\s|\.|")"#, "/$1");
let settings_scope = settings.bind_to_scope();
Ok(Self {
project_dir,
_temp_dir: temp_dir,
_settings_scope: settings_scope,
})
}
fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
let case = Self::new()?;
case.write_files(files)?;
Ok(case)
}
fn with_file(path: impl AsRef<Path>, content: &str) -> anyhow::Result<Self> {
let case = Self::new()?;
case.write_file(path, content)?;
Ok(case)
}
fn write_files<'a>(
&self,
files: impl IntoIterator<Item = (&'a str, &'a str)>,
) -> anyhow::Result<()> {
for (path, content) in files {
self.write_file(path, content)?;
}
Ok(())
}
fn write_file(&self, path: impl AsRef<Path>, content: &str) -> anyhow::Result<()> {
let path = path.as_ref();
let path = self.project_dir.join(path);
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)
.with_context(|| format!("Failed to create directory `{}`", parent.display()))?;
}
std::fs::write(&path, &*ruff_python_trivia::textwrap::dedent(content))
.with_context(|| format!("Failed to write file `{path}`", path = path.display()))?;
Ok(())
}
fn project_dir(&self) -> &Path {
&self.project_dir
}
fn command(&self) -> Command {
let mut command = Command::new(get_cargo_bin("red_knot"));
command.current_dir(&self.project_dir).arg("check");
command
}
}
fn tempdir_filter(path: &Path) -> String {
format!(r"{}\\?/?", regex::escape(path.to_str().unwrap()))
fn tempdir_filter(tempdir: &TempDir) -> String {
format!(r"{}\\?/?", regex::escape(tempdir.path().to_str().unwrap()))
}
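
Note: both the removed `TestCase::new` and the surviving `tempdir_filter` rely on the same snapshot trick: regex-escape the temporary project directory and rewrite it to the stable `<temp_dir>/` token, so inline snapshots never depend on a machine-specific temp path. A minimal sketch, assuming `insta` (with the `filters` feature, as in the `Cargo.toml` hunk above) and `regex`:

```rust
fn tempdir_filter(path: &std::path::Path) -> String {
    // The trailing `\\?/?` also swallows a path separator on either platform.
    format!(r"{}\\?/?", regex::escape(path.to_str().unwrap()))
}

fn demo() {
    let project_dir = std::path::Path::new("/tmp/.tmpAbC123");
    let mut settings = insta::Settings::clone_current();
    settings.add_filter(&tempdir_filter(project_dir), "<temp_dir>/");
    settings.bind(|| {
        // The machine-specific prefix is rewritten before the comparison.
        insta::assert_snapshot!(
            "error at /tmp/.tmpAbC123/test.py:5:7",
            @"error at <temp_dir>/test.py:5:7"
        );
    });
}
```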

View File

@@ -6,7 +6,6 @@ use std::time::{Duration, Instant};
use anyhow::{anyhow, Context};
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::metadata::pyproject::{PyProject, Tool};
use red_knot_project::metadata::value::{RangedValue, RelativePathBuf};
use red_knot_project::watch::{directory_watcher, ChangeEvent, ProjectWatcher};
use red_knot_project::{Db, ProjectDatabase, ProjectMetadata};
use red_knot_python_semantic::{resolve_module, ModuleName, PythonPlatform, PythonVersion};
@@ -47,7 +46,7 @@ impl TestCase {
#[track_caller]
fn panic_with_formatted_events(events: Vec<ChangeEvent>) -> Vec<ChangeEvent> {
panic!(
"Didn't observe the expected event. The following events occurred:\n{}",
"Didn't observe expected change:\n{}",
events
.into_iter()
.map(|event| format!(" - {event:?}"))
@@ -322,7 +321,7 @@ where
.search_paths
.extra_paths
.iter()
.chain(program_settings.search_paths.custom_typeshed.as_ref())
.chain(program_settings.search_paths.typeshed.as_ref())
{
std::fs::create_dir_all(path.as_std_path())
.with_context(|| format!("Failed to create search path `{path}`"))?;
@@ -792,7 +791,7 @@ fn search_path() -> anyhow::Result<()> {
let mut case = setup_with_options([("bar.py", "import sub.a")], |root_path, _project_path| {
Some(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli(root_path.join("site_packages"))]),
extra_paths: Some(vec![root_path.join("site_packages")]),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -833,7 +832,7 @@ fn add_search_path() -> anyhow::Result<()> {
// Register site-packages as a search path.
case.update_options(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli("site_packages")]),
extra_paths: Some(vec![site_packages.clone()]),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -856,7 +855,7 @@ fn remove_search_path() -> anyhow::Result<()> {
let mut case = setup_with_options([("bar.py", "import sub.a")], |root_path, _project_path| {
Some(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli(root_path.join("site_packages"))]),
extra_paths: Some(vec![root_path.join("site_packages")]),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -897,10 +896,8 @@ print(sys.last_exc, os.getegid())
|_root_path, _project_path| {
Some(Options {
environment: Some(EnvironmentOptions {
python_version: Some(RangedValue::cli(PythonVersion::PY311)),
python_platform: Some(RangedValue::cli(PythonPlatform::Identifier(
"win32".to_string(),
))),
python_version: Some(PythonVersion::PY311),
python_platform: Some(PythonPlatform::Identifier("win32".to_string())),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -923,10 +920,8 @@ print(sys.last_exc, os.getegid())
// Change the python version
case.update_options(Options {
environment: Some(EnvironmentOptions {
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
python_platform: Some(RangedValue::cli(PythonPlatform::Identifier(
"linux".to_string(),
))),
python_version: Some(PythonVersion::PY312),
python_platform: Some(PythonPlatform::Identifier("linux".to_string())),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -956,7 +951,7 @@ fn changed_versions_file() -> anyhow::Result<()> {
|root_path, _project_path| {
Some(Options {
environment: Some(EnvironmentOptions {
typeshed: Some(RelativePathBuf::cli(root_path.join("typeshed"))),
typeshed: Some(root_path.join("typeshed")),
..EnvironmentOptions::default()
}),
..Options::default()
@@ -1380,13 +1375,11 @@ mod unix {
Ok(())
},
|_root, _project| {
|_root, project| {
Some(Options {
environment: Some(EnvironmentOptions {
extra_paths: Some(vec![RelativePathBuf::cli(
".venv/lib/python3.12/site-packages",
)]),
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
extra_paths: Some(vec![project.join(".venv/lib/python3.12/site-packages")]),
python_version: Some(PythonVersion::PY312),
..EnvironmentOptions::default()
}),
..Options::default()
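
Note: most of the churn in this test file is a single wrapper type. One side passes plain values (`PythonVersion::PY311`), the other tags every setting with its origin via `RangedValue::cli(...)` / `RelativePathBuf::cli(...)` so later diagnostics can point at the CLI argument or config file that set it. A hypothetical sketch of that provenance-wrapping pattern (names are illustrative, not the actual red_knot API):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum Source {
    Cli,
    File,
}

// Illustrative stand-in for `RangedValue`: a value plus where it came from.
#[derive(Debug)]
struct Tagged<T> {
    value: T,
    source: Source,
}

impl<T> Tagged<T> {
    fn cli(value: T) -> Self {
        Self { value, source: Source::Cli }
    }

    fn file(value: T) -> Self {
        Self { value, source: Source::File }
    }
}

fn main() {
    let from_cli = Tagged::cli("3.12");
    let from_file = Tagged::file("3.11");
    // A CLI-supplied value can now be reported differently from a file value.
    assert_eq!(from_cli.source, Source::Cli);
    assert_eq!(from_file.source, Source::File);
}
```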

View File

@@ -2,7 +2,7 @@ use std::panic::RefUnwindSafe;
use std::sync::Arc;
use crate::DEFAULT_LINT_REGISTRY;
use crate::{Project, ProjectMetadata};
use crate::{check_file, Project, ProjectMetadata};
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
use red_knot_python_semantic::{Db as SemanticDb, Program};
use ruff_db::diagnostic::Diagnostic;
@@ -27,6 +27,7 @@ pub struct ProjectDatabase {
storage: salsa::Storage<ProjectDatabase>,
files: Files,
system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
rule_selection: Arc<RuleSelection>,
}
impl ProjectDatabase {
@@ -34,11 +35,14 @@ impl ProjectDatabase {
where
S: System + 'static + Send + Sync + RefUnwindSafe,
{
let rule_selection = RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY);
let mut db = Self {
project: None,
storage: salsa::Storage::default(),
files: Files::default(),
system: Arc::new(system),
rule_selection: Arc::new(rule_selection),
};
// TODO: Use the `program_settings` to compute the key for the database's persistent
@@ -62,7 +66,7 @@ impl ProjectDatabase {
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();
self.with_db(|db| self.project().check_file(db, file))
self.with_db(|db| check_file(db, file))
}
/// Returns a mutable reference to the system.
@@ -115,7 +119,7 @@ impl SemanticDb for ProjectDatabase {
}
fn rule_selection(&self) -> &RuleSelection {
self.project().rule_selection(self)
&self.rule_selection
}
fn lint_registry(&self) -> &LintRegistry {

View File

@@ -1,10 +1,6 @@
#![allow(clippy::ref_option)]
use crate::metadata::options::OptionDiagnostic;
pub use db::{Db, ProjectDatabase};
use files::{Index, Indexed, IndexedFiles};
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder, RuleSelection};
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder};
use red_knot_python_semantic::register_lints;
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
@@ -21,6 +17,10 @@ use salsa::Setter;
use std::borrow::Cow;
use std::sync::Arc;
pub use db::{Db, ProjectDatabase};
use files::{Index, Indexed, IndexedFiles};
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
pub mod combine;
mod db;
@@ -68,7 +68,6 @@ pub struct Project {
pub metadata: ProjectMetadata,
}
#[salsa::tracked]
impl Project {
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
Project::builder(metadata)
@@ -97,34 +96,13 @@ impl Project {
self.reload_files(db);
}
pub fn rule_selection(self, db: &dyn Db) -> &RuleSelection {
let (selection, _) = self.rule_selection_with_diagnostics(db);
selection
}
#[salsa::tracked(return_ref)]
fn rule_selection_with_diagnostics(
self,
db: &dyn Db,
) -> (RuleSelection, Vec<OptionDiagnostic>) {
self.metadata(db).options().to_rule_selection(db)
}
/// Checks all open files in the project and its dependencies.
pub(crate) fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
pub fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
let project_span = tracing::debug_span!("Project::check");
let _span = project_span.enter();
tracing::debug!("Checking project '{name}'", name = self.name(db));
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
let (_, options_diagnostics) = self.rule_selection_with_diagnostics(db);
diagnostics.extend(options_diagnostics.iter().map(|diagnostic| {
let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
diagnostic
}));
let result = Arc::new(std::sync::Mutex::new(diagnostics));
let result = Arc::new(std::sync::Mutex::new(Vec::new()));
let inner_result = Arc::clone(&result);
let db = db.clone();
@@ -141,7 +119,7 @@ impl Project {
let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
let _entered = check_file_span.entered();
let file_diagnostics = check_file_impl(&db, file);
let file_diagnostics = check_file(&db, file);
result.lock().unwrap().extend(file_diagnostics);
});
}
@@ -150,23 +128,6 @@ impl Project {
Arc::into_inner(result).unwrap().into_inner().unwrap()
}
pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
let (_, options_diagnostics) = self.rule_selection_with_diagnostics(db);
let mut file_diagnostics: Vec<_> = options_diagnostics
.iter()
.map(|diagnostic| {
let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
diagnostic
})
.collect();
let check_diagnostics = check_file_impl(db, file);
file_diagnostics.extend(check_diagnostics);
file_diagnostics
}
/// Opens a file in the project.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
@@ -304,9 +265,8 @@ impl Project {
}
}
fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
pub(crate) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
// Abort checking if there are IO errors.
let source = source_text(db.upcast(), file);
@@ -442,8 +402,8 @@ impl Diagnostic for IOErrorDiagnostic {
self.error.to_string().into()
}
fn file(&self) -> Option<File> {
Some(self.file)
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {
@@ -458,7 +418,7 @@ impl Diagnostic for IOErrorDiagnostic {
#[cfg(test)]
mod tests {
use crate::db::tests::TestDb;
use crate::{check_file_impl, ProjectMetadata};
use crate::{check_file, ProjectMetadata};
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::system_path_to_file;
@@ -482,7 +442,7 @@ mod tests {
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(
check_file_impl(&db, file)
check_file(&db, file)
.into_iter()
.map(|diagnostic| diagnostic.message().into_owned())
.collect::<Vec<_>>(),
@@ -498,7 +458,7 @@ mod tests {
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(
check_file_impl(&db, file)
check_file(&db, file)
.into_iter()
.map(|diagnostic| diagnostic.message().into_owned())
.collect::<Vec<_>>(),
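
Note: independent of the `check_file_impl`/`check_file` rename, both versions of `Project::check` gather diagnostics the same way: every file is checked on its own worker, results are pushed into a shared `Arc<Mutex<Vec<_>>>`, and the vector is unwrapped with `Arc::into_inner` once all workers finish. A minimal sketch of that pattern, with `std::thread::scope` standing in for the actual worker pool:

```rust
use std::sync::{Arc, Mutex};

fn main() {
    let files = vec!["a.py", "b.py", "c.py"];
    let result = Arc::new(Mutex::new(Vec::new()));

    std::thread::scope(|s| {
        for file in &files {
            let result = Arc::clone(&result);
            s.spawn(move || {
                // Stand-in for `check_file(&db, file)`.
                let diagnostic = format!("checked {file}");
                result.lock().unwrap().push(diagnostic);
            });
        }
    });

    // The scope joined every worker, so the clones are gone and
    // `Arc::into_inner` yields the Mutex (and then the Vec) directly.
    let diagnostics = Arc::into_inner(result).unwrap().into_inner().unwrap();
    assert_eq!(diagnostics.len(), 3);
}
```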

View File

@@ -1,18 +1,15 @@
use red_knot_python_semantic::ProgramSettings;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
use std::sync::Arc;
use thiserror::Error;
use crate::combine::Combine;
use crate::metadata::pyproject::{Project, PyProject, PyProjectError};
use crate::metadata::value::ValueSource;
use options::KnotTomlError;
use options::Options;
pub mod options;
pub mod pyproject;
pub mod value;
#[derive(Debug, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
@@ -55,7 +52,7 @@ impl ProjectMetadata {
) -> Self {
let name = project
.and_then(|project| project.name.as_ref())
.map(|name| Name::new(&***name))
.map(|name| Name::new(&**name))
.unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));
// TODO(https://github.com/astral-sh/ruff/issues/15491): Respect requires-python
@@ -90,10 +87,7 @@ impl ProjectMetadata {
let pyproject_path = project_root.join("pyproject.toml");
let pyproject = if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
match PyProject::from_toml_str(
&pyproject_str,
ValueSource::File(Arc::new(pyproject_path.clone())),
) {
match PyProject::from_toml_str(&pyproject_str) {
Ok(pyproject) => Some(pyproject),
Err(error) => {
return Err(ProjectDiscoveryError::InvalidPyProject {
@@ -109,10 +103,7 @@ impl ProjectMetadata {
// A `knot.toml` takes precedence over a `pyproject.toml`.
let knot_toml_path = project_root.join("knot.toml");
if let Ok(knot_str) = system.read_to_string(&knot_toml_path) {
let options = match Options::from_toml_str(
&knot_str,
ValueSource::File(Arc::new(knot_toml_path.clone())),
) {
let options = match Options::from_toml_str(&knot_str) {
Ok(options) => options,
Err(error) => {
return Err(ProjectDiscoveryError::InvalidKnotToml {

View File

@@ -1,37 +1,22 @@
use crate::metadata::value::{RangedValue, RelativePathBuf, ValueSource, ValueSourceGuard};
use crate::Db;
use red_knot_python_semantic::lint::{GetLintError, Level, LintSource, RuleSelection};
use red_knot_python_semantic::{
ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
};
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::{System, SystemPath};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_macros::Combine;
use ruff_text_size::TextRange;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::fmt::Debug;
use thiserror::Error;
/// The options for the project.
#[derive(Debug, Default, Clone, PartialEq, Eq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct Options {
#[serde(skip_serializing_if = "Option::is_none")]
pub environment: Option<EnvironmentOptions>,
#[serde(skip_serializing_if = "Option::is_none")]
pub src: Option<SrcOptions>,
#[serde(skip_serializing_if = "Option::is_none")]
pub rules: Option<Rules>,
}
impl Options {
pub(crate) fn from_toml_str(content: &str, source: ValueSource) -> Result<Self, KnotTomlError> {
let _guard = ValueSourceGuard::new(source);
pub(crate) fn from_toml_str(content: &str) -> Result<Self, KnotTomlError> {
let options = toml::from_str(content)?;
Ok(options)
}
@@ -44,12 +29,7 @@ impl Options {
let (python_version, python_platform) = self
.environment
.as_ref()
.map(|env| {
(
env.python_version.as_deref().copied(),
env.python_platform.as_deref(),
)
})
.map(|env| (env.python_version, env.python_platform.as_ref()))
.unwrap_or_default();
ProgramSettings {
@@ -64,19 +44,19 @@ impl Options {
project_root: &SystemPath,
system: &dyn System,
) -> SearchPathSettings {
let src_roots = if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_ref())
{
vec![src_root.absolute(project_root, system)]
} else {
let src = project_root.join("src");
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
if system.is_directory(&src) {
vec![project_root.to_path_buf(), src]
let src_roots =
if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_deref()) {
vec![src_root.to_path_buf()]
} else {
vec![project_root.to_path_buf()]
}
};
let src = project_root.join("src");
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
if system.is_directory(&src) {
vec![project_root.to_path_buf(), src]
} else {
vec![project_root.to_path_buf()]
}
};
let (extra_paths, python, typeshed) = self
.environment
@@ -91,139 +71,43 @@ impl Options {
.unwrap_or_default();
SearchPathSettings {
extra_paths: extra_paths
.unwrap_or_default()
.into_iter()
.map(|path| path.absolute(project_root, system))
.collect(),
extra_paths: extra_paths.unwrap_or_default(),
src_roots,
custom_typeshed: typeshed.map(|path| path.absolute(project_root, system)),
typeshed,
site_packages: python
.map(|venv_path| SitePackages::Derived {
venv_path: venv_path.absolute(project_root, system),
})
.map(|venv_path| SitePackages::Derived { venv_path })
.unwrap_or(SitePackages::Known(vec![])),
}
}
#[must_use]
pub(crate) fn to_rule_selection(&self, db: &dyn Db) -> (RuleSelection, Vec<OptionDiagnostic>) {
let registry = db.lint_registry();
let mut diagnostics = Vec::new();
// Initialize the selection with the defaults
let mut selection = RuleSelection::from_registry(registry);
let rules = self
.rules
.as_ref()
.into_iter()
.flat_map(|rules| rules.inner.iter());
for (rule_name, level) in rules {
let source = rule_name.source();
match registry.get(rule_name) {
Ok(lint) => {
let lint_source = match source {
ValueSource::File(_) => LintSource::File,
ValueSource::Cli => LintSource::Cli,
};
if let Ok(severity) = Severity::try_from(**level) {
selection.enable(lint, severity, lint_source);
} else {
selection.disable(lint);
}
}
Err(error) => {
// `system_path_to_file` can return `Err` if the file was deleted since the configuration
// was read. This should be rare and it should be okay to default to not showing a configuration
// file in that case.
let file = source
.file()
.and_then(|path| system_path_to_file(db.upcast(), path).ok());
// TODO: Add a note if the value was configured on the CLI
let diagnostic = match error {
GetLintError::Unknown(_) => OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!("Unknown lint rule `{rule_name}`"),
Severity::Warning,
),
GetLintError::PrefixedWithCategory { suggestion, .. } => {
OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!(
"Unknown lint rule `{rule_name}`. Did you mean `{suggestion}`?"
),
Severity::Warning,
)
}
GetLintError::Removed(_) => OptionDiagnostic::new(
DiagnosticId::UnknownRule,
format!("Unknown lint rule `{rule_name}`"),
Severity::Warning,
),
};
diagnostics.push(diagnostic.with_file(file).with_range(rule_name.range()));
}
}
}
(selection, diagnostics)
}
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct EnvironmentOptions {
#[serde(skip_serializing_if = "Option::is_none")]
pub python_version: Option<RangedValue<PythonVersion>>,
pub python_version: Option<PythonVersion>,
#[serde(skip_serializing_if = "Option::is_none")]
pub python_platform: Option<RangedValue<PythonPlatform>>,
pub python_platform: Option<PythonPlatform>,
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
/// or pyright's stubPath configuration setting.
#[serde(skip_serializing_if = "Option::is_none")]
pub extra_paths: Option<Vec<RelativePathBuf>>,
pub extra_paths: Option<Vec<SystemPathBuf>>,
/// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
/// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
/// bundled as a zip file in the binary.
#[serde(skip_serializing_if = "Option::is_none")]
pub typeshed: Option<RelativePathBuf>,
pub typeshed: Option<SystemPathBuf>,
// TODO: Rename to python, see https://github.com/astral-sh/ruff/issues/15530
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
#[serde(skip_serializing_if = "Option::is_none")]
pub venv_path: Option<RelativePathBuf>,
pub venv_path: Option<SystemPathBuf>,
}
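// Note: with `rename_all = "kebab-case"`, these fields appear in configuration
// files as `python-version`, `python-platform`, `extra-paths`, `typeshed`, and `venv-path`.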
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct SrcOptions {
/// The root of the project, used for finding first-party modules.
#[serde(skip_serializing_if = "Option::is_none")]
pub root: Option<RelativePathBuf>,
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", transparent)]
pub struct Rules {
inner: FxHashMap<RangedValue<String>, RangedValue<Level>>,
}
impl FromIterator<(RangedValue<String>, RangedValue<Level>)> for Rules {
fn from_iter<T: IntoIterator<Item = (RangedValue<String>, RangedValue<Level>)>>(
iter: T,
) -> Self {
Self {
inner: iter.into_iter().collect(),
}
}
pub root: Option<SystemPathBuf>,
}
#[derive(Error, Debug)]
@@ -231,58 +115,3 @@ pub enum KnotTomlError {
#[error(transparent)]
TomlSyntax(#[from] toml::de::Error),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct OptionDiagnostic {
id: DiagnosticId,
message: String,
severity: Severity,
file: Option<File>,
range: Option<TextRange>,
}
impl OptionDiagnostic {
pub fn new(id: DiagnosticId, message: String, severity: Severity) -> Self {
Self {
id,
message,
severity,
file: None,
range: None,
}
}
#[must_use]
fn with_file(mut self, file: Option<File>) -> Self {
self.file = file;
self
}
#[must_use]
fn with_range(mut self, range: Option<TextRange>) -> Self {
self.range = range;
self
}
}
impl Diagnostic for OptionDiagnostic {
fn id(&self) -> DiagnosticId {
self.id
}
fn message(&self) -> Cow<str> {
Cow::Borrowed(&self.message)
}
fn file(&self) -> Option<File> {
self.file
}
fn range(&self) -> Option<TextRange> {
self.range
}
fn severity(&self) -> Severity {
self.severity
}
}
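// Illustrative: callers chain the builder-style helpers, e.g.
// `OptionDiagnostic::new(id, message, Severity::Warning).with_file(file).with_range(range)`.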

View File

@@ -4,7 +4,6 @@ use std::ops::Deref;
use thiserror::Error;
use crate::metadata::options::Options;
use crate::metadata::value::{RangedValue, ValueSource, ValueSourceGuard};
/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
@@ -29,11 +28,7 @@ pub enum PyProjectError {
}
impl PyProject {
pub(crate) fn from_toml_str(
content: &str,
source: ValueSource,
) -> Result<Self, PyProjectError> {
let _guard = ValueSourceGuard::new(source);
pub(crate) fn from_toml_str(content: &str) -> Result<Self, PyProjectError> {
toml::from_str(content).map_err(PyProjectError::TomlSyntax)
}
}
@@ -48,11 +43,11 @@ pub struct Project {
///
/// Note: Intentionally optional to be more permissive during deserialization.
/// `PackageMetadata::from_pyproject` reports missing names.
pub name: Option<RangedValue<PackageName>>,
pub name: Option<PackageName>,
/// The version of the project
pub version: Option<RangedValue<Version>>,
pub version: Option<Version>,
/// The Python versions this project is compatible with.
pub requires_python: Option<RangedValue<VersionSpecifiers>>,
pub requires_python: Option<VersionSpecifiers>,
}
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]

View File

@@ -1,337 +0,0 @@
use crate::combine::Combine;
use crate::Db;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_text_size::{TextRange, TextSize};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use toml::Spanned;
#[derive(Clone, Debug)]
pub enum ValueSource {
/// Value loaded from a project's configuration file.
///
/// Ideally, we'd use [`ruff_db::files::File`] but we can't because the database hasn't been
/// created when loading the configuration.
File(Arc<SystemPathBuf>),
/// The value comes from a CLI argument; whether it was passed as a short argument, a long
/// argument (`--extra-paths`), or via `--config key=value` is left open.
Cli,
}
impl ValueSource {
pub fn file(&self) -> Option<&SystemPath> {
match self {
ValueSource::File(path) => Some(&**path),
ValueSource::Cli => None,
}
}
}
thread_local! {
/// Serde doesn't provide any easy means to pass a value to a [`Deserialize`] implementation,
/// but we want to associate each deserialized [`RelativePath`] with the source from
/// which it originated. We use a thread-local variable to work around this limitation.
///
/// Use the [`ValueSourceGuard`] to initialize the thread local before calling into any
/// deserialization code. It ensures that the thread-local variable gets cleaned up
/// once deserialization is done (once the guard gets dropped).
static VALUE_SOURCE: RefCell<Option<ValueSource>> = const { RefCell::new(None) };
}
/// Guard to safely change the [`VALUE_SOURCE`] for the current thread.
#[must_use]
pub(super) struct ValueSourceGuard {
prev_value: Option<ValueSource>,
}
impl ValueSourceGuard {
pub(super) fn new(source: ValueSource) -> Self {
let prev = VALUE_SOURCE.replace(Some(source));
Self { prev_value: prev }
}
}
impl Drop for ValueSourceGuard {
fn drop(&mut self) {
VALUE_SOURCE.set(self.prev_value.take());
}
}
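// Minimal usage sketch (illustrative): install a source for the duration of a
// deserialization call. The guard restores the previous value when dropped,
// even on an early return.
#[allow(dead_code)]
fn parse_with_cli_source(content: &str) -> Result<toml::Value, toml::de::Error> {
    let _guard = ValueSourceGuard::new(ValueSource::Cli);
    // Any `RangedValue` deserialized inside this scope records `ValueSource::Cli`.
    toml::from_str(content)
}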
/// A value that "remembers" where it comes from (source) and its range within that source.
///
/// ## Equality, Hash, and Ordering
/// The equality, hash, and ordering are solely based on the value. They disregard the value's range
/// or source.
///
/// This ensures that two resolved configurations are identical even if the position of a value has changed
/// or if the values were loaded from different sources.
#[derive(Clone)]
pub struct RangedValue<T> {
value: T,
source: ValueSource,
/// The byte range of `value` in `source`.
///
/// Can be `None` because not all sources support a range.
/// For example, arguments provided on the CLI won't have a range attached.
range: Option<TextRange>,
}
impl<T> RangedValue<T> {
pub fn new(value: T, source: ValueSource) -> Self {
Self::with_range(value, source, TextRange::default())
}
pub fn cli(value: T) -> Self {
Self::with_range(value, ValueSource::Cli, TextRange::default())
}
pub fn with_range(value: T, source: ValueSource, range: TextRange) -> Self {
Self {
value,
range: Some(range),
source,
}
}
pub fn range(&self) -> Option<TextRange> {
self.range
}
pub fn source(&self) -> &ValueSource {
&self.source
}
#[must_use]
pub fn with_source(mut self, source: ValueSource) -> Self {
self.source = source;
self
}
pub fn into_inner(self) -> T {
self.value
}
}
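// Combining configuration layers is first-wins for ranged values: `combine`
// returns `self` unchanged and `combine_with` leaves `self` untouched.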
impl<T> Combine for RangedValue<T> {
fn combine(self, _other: Self) -> Self
where
Self: Sized,
{
self
}
fn combine_with(&mut self, _other: Self) {}
}
impl<T> IntoIterator for RangedValue<T>
where
T: IntoIterator,
{
type Item = T::Item;
type IntoIter = T::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
// The type already has an `iter` method thanks to `Deref`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a RangedValue<T>
where
&'a T: IntoIterator,
{
type Item = <&'a T as IntoIterator>::Item;
type IntoIter = <&'a T as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
// The type already has an `iter_mut` method thanks to `DerefMut`.
#[allow(clippy::into_iter_without_iter)]
impl<'a, T> IntoIterator for &'a mut RangedValue<T>
where
&'a mut T: IntoIterator,
{
type Item = <&'a mut T as IntoIterator>::Item;
type IntoIter = <&'a mut T as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.value.into_iter()
}
}
impl<T> fmt::Debug for RangedValue<T>
where
T: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.value.fmt(f)
}
}
impl<T> fmt::Display for RangedValue<T>
where
T: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.value.fmt(f)
}
}
impl<T> Deref for RangedValue<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}
impl<T> DerefMut for RangedValue<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.value
}
}
impl<T, U: ?Sized> AsRef<U> for RangedValue<T>
where
T: AsRef<U>,
{
fn as_ref(&self) -> &U {
self.value.as_ref()
}
}
impl<T: PartialEq> PartialEq for RangedValue<T> {
fn eq(&self, other: &Self) -> bool {
self.value.eq(&other.value)
}
}
impl<T: PartialEq<T>> PartialEq<T> for RangedValue<T> {
fn eq(&self, other: &T) -> bool {
self.value.eq(other)
}
}
impl<T: Eq> Eq for RangedValue<T> {}
impl<T: Hash> Hash for RangedValue<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.value.hash(state);
}
}
impl<T: PartialOrd> PartialOrd for RangedValue<T> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.value.partial_cmp(&other.value)
}
}
impl<T: PartialOrd<T>> PartialOrd<T> for RangedValue<T> {
fn partial_cmp(&self, other: &T) -> Option<Ordering> {
self.value.partial_cmp(other)
}
}
impl<T: Ord> Ord for RangedValue<T> {
fn cmp(&self, other: &Self) -> Ordering {
self.value.cmp(&other.value)
}
}
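// Illustrative sketch: equality disregards range and source, so the same inner
// value loaded from different places compares (and hashes) equal.
#[cfg(test)]
mod ranged_value_equality_sketch {
    use super::*;

    #[test]
    fn equality_ignores_range_and_source() {
        let from_cli = RangedValue::cli(42);
        let with_range = RangedValue::with_range(
            42,
            ValueSource::Cli,
            TextRange::new(TextSize::from(10u32), TextSize::from(12u32)),
        );
        assert_eq!(from_cli, with_range);
    }
}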
impl<'de, T> Deserialize<'de> for RangedValue<T>
where
T: Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let spanned: Spanned<T> = Spanned::deserialize(deserializer)?;
let span = spanned.span();
let range = TextRange::new(
TextSize::try_from(span.start).expect("Configuration file to be smaller than 4GB"),
TextSize::try_from(span.end).expect("Configuration file to be smaller than 4GB"),
);
Ok(VALUE_SOURCE.with_borrow(|source| {
let source = source.clone().unwrap();
Self::with_range(spanned.into_inner(), source, range)
}))
}
}
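// Note: deserialization must run under a `ValueSourceGuard`; the `unwrap` above
// panics if `VALUE_SOURCE` was never initialized for this thread.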
impl<T> Serialize for RangedValue<T>
where
T: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.value.serialize(serializer)
}
}
/// A possibly relative path in a configuration file.
///
/// Relative paths in configuration files or from CLI options
/// require different anchoring:
///
/// * CLI: The path is relative to the current working directory.
/// * Configuration file: The path is relative to the project's root.
#[derive(
Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash,
)]
#[serde(transparent)]
pub struct RelativePathBuf(RangedValue<SystemPathBuf>);
impl RelativePathBuf {
pub fn new(path: impl AsRef<SystemPath>, source: ValueSource) -> Self {
Self(RangedValue::new(path.as_ref().to_path_buf(), source))
}
pub fn cli(path: impl AsRef<SystemPath>) -> Self {
Self::new(path, ValueSource::Cli)
}
/// Returns the relative path as specified by the user.
pub fn path(&self) -> &SystemPath {
&self.0
}
/// Returns the owned relative path.
pub fn into_path_buf(self) -> SystemPathBuf {
self.0.into_inner()
}
/// Resolves the absolute path for `self` based on its origin.
pub fn absolute_with_db(&self, db: &dyn Db) -> SystemPathBuf {
self.absolute(db.project().root(db), db.system())
}
/// Resolves the absolute path for `self` based on its origin.
pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> SystemPathBuf {
let relative_to = match &self.0.source {
ValueSource::File(_) => project_root,
ValueSource::Cli => system.current_directory(),
};
SystemPath::absolute(&self.0, relative_to)
}
}
impl Combine for RelativePathBuf {
fn combine(self, other: Self) -> Self {
Self(self.0.combine(other.0))
}
fn combine_with(&mut self, other: Self) {
self.0.combine_with(other.0);
}
}
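// Minimal sketch (illustrative) of the anchoring rules above, assuming a
// `System` whose current directory is `/home/user`:
#[allow(dead_code)]
fn anchoring_sketch(system: &dyn System) -> (SystemPathBuf, SystemPathBuf) {
    let project_root = SystemPath::new("/project");
    // Declared in a configuration file: anchored at the project root -> `/project/src`.
    let from_file = RelativePathBuf::new(
        "src",
        ValueSource::File(Arc::new(SystemPathBuf::from("/project/knot.toml"))),
    )
    .absolute(project_root, system);
    // Passed on the CLI: anchored at the current working directory -> `/home/user/src`.
    let from_cli = RelativePathBuf::cli("src").absolute(project_root, system);
    (from_file, from_cli)
}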

View File

@@ -6,6 +6,7 @@ ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),

View File

@@ -6,6 +6,7 @@ ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),

View File

@@ -8,6 +8,11 @@ ProjectMetadata(
options: Options(
environment: Some(EnvironmentOptions(
r#python-version: Some("3.10"),
r#python-platform: None,
r#extra-paths: None,
typeshed: None,
r#venv-path: None,
)),
src: None,
),
)

View File

@@ -5,5 +5,8 @@ expression: sub_project
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
options: Options(),
options: Options(
environment: None,
src: None,
),
)

View File

@@ -6,6 +6,7 @@ ProjectMetadata(
name: Name("super-app"),
root: "/app",
options: Options(
environment: None,
src: Some(SrcOptions(
root: Some("src"),
)),

View File

@@ -5,5 +5,8 @@ expression: project
ProjectMetadata(
name: Name("backend"),
root: "/app",
options: Options(),
options: Options(
environment: None,
src: None,
),
)

View File

@@ -5,5 +5,8 @@ expression: project
ProjectMetadata(
name: Name("app"),
root: "/app",
options: Options(),
options: Options(
environment: None,
src: None,
),
)

View File

@@ -1,6 +1,6 @@
use anyhow::{anyhow, Context};
use red_knot_project::{ProjectDatabase, ProjectMetadata};
use red_knot_python_semantic::{HasType, SemanticModel};
use red_knot_python_semantic::{HasTy, SemanticModel};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
@@ -197,10 +197,10 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
fn visit_stmt(&mut self, stmt: &Stmt) {
match stmt {
Stmt::FunctionDef(function) => {
let _ty = function.inferred_type(&self.model);
let _ty = function.ty(&self.model);
}
Stmt::ClassDef(class) => {
let _ty = class.inferred_type(&self.model);
let _ty = class.ty(&self.model);
}
Stmt::Assign(assign) => {
for target in &assign.targets {
@@ -243,25 +243,25 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
}
fn visit_expr(&mut self, expr: &Expr) {
let _ty = expr.inferred_type(&self.model);
let _ty = expr.ty(&self.model);
source_order::walk_expr(self, expr);
}
fn visit_parameter(&mut self, parameter: &Parameter) {
let _ty = parameter.inferred_type(&self.model);
let _ty = parameter.ty(&self.model);
source_order::walk_parameter(self, parameter);
}
fn visit_parameter_with_default(&mut self, parameter_with_default: &ParameterWithDefault) {
let _ty = parameter_with_default.inferred_type(&self.model);
let _ty = parameter_with_default.ty(&self.model);
source_order::walk_parameter_with_default(self, parameter_with_default);
}
fn visit_alias(&mut self, alias: &Alias) {
let _ty = alias.inferred_type(&self.model);
let _ty = alias.ty(&self.model);
source_order::walk_alias(self, alias);
}

View File

@@ -61,13 +61,7 @@ class MDTestRunner:
return False
# Run it again with 'json' format to find the mdtest executable:
try:
json_output = self._run_cargo_test(message_format="json")
except subprocess.CalledProcessError as _:
# `cargo test` can still fail if something changed in between the two runs.
# Here we don't have a human-readable output, so just show a generic message:
self.console.print("[red]Error[/red]: Failed to compile tests")
return False
json_output = self._run_cargo_test(message_format="json")
if json_output:
self._get_executable_path_from_json(json_output)

View File

@@ -1,44 +0,0 @@
# Deferred annotations
## Deferred annotations in stubs always resolve
```pyi path=mod.pyi
def get_foo() -> Foo: ...
class Foo: ...
```
```py
from mod import get_foo
reveal_type(get_foo()) # revealed: Foo
```
## Deferred annotations in regular code fail
In (regular) source files, annotations are *not* deferred. This also tests that imports from
`__future__` that are not `annotations` are ignored.
```py
from __future__ import with_statement as annotations
# error: [unresolved-reference]
def get_foo() -> Foo: ...
class Foo: ...
reveal_type(get_foo()) # revealed: Unknown
```
## Deferred annotations in regular code with `__future__.annotations`
If `__future__.annotations` is imported, annotations *are* deferred.
```py
from __future__ import annotations
def get_foo() -> Foo: ...
class Foo: ...
reveal_type(get_foo()) # revealed: Foo
```

View File

@@ -36,7 +36,7 @@ def f():
reveal_type(a7) # revealed: None
reveal_type(a8) # revealed: Literal[1]
# TODO: This should be Color.RED
reveal_type(b1) # revealed: Unknown | Literal[0]
reveal_type(b1) # revealed: Literal[0]
# error: [invalid-type-form]
invalid1: Literal[3 + 4]

View File

@@ -102,7 +102,7 @@ reveal_type(C.pure_instance_variable) # revealed: str
# and pyright allow this.
C.pure_instance_variable = "overwritten on class"
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `pure_instance_variable` of type `str`"
# TODO: this should be an error (incompatible types in assignment)
c_instance.pure_instance_variable = 1
```
@@ -175,7 +175,7 @@ class C:
reveal_type(C.pure_class_variable1) # revealed: str
# TODO: Should be `Unknown | Literal[1]`.
# TODO: this should be `Literal[1]`, or `Unknown | Literal[1]`.
reveal_type(C.pure_class_variable2) # revealed: Unknown
c_instance = C()
@@ -191,7 +191,7 @@ c_instance.pure_class_variable1 = "value set on instance"
C.pure_class_variable1 = "overwritten on class"
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `pure_class_variable1` of type `str`"
# TODO: should raise an error (incompatible types in assignment)
C.pure_class_variable1 = 1
class Subclass(C):
@@ -252,7 +252,8 @@ class C:
reveal_type(C.variable_with_class_default1) # revealed: str
reveal_type(C.variable_with_class_default2) # revealed: Unknown | Literal[1]
# TODO: this should be `Unknown | Literal[1]`.
reveal_type(C.variable_with_class_default2) # revealed: Literal[1]
c_instance = C()
@@ -295,8 +296,8 @@ def _(flag: bool):
else:
x = 4
reveal_type(C1.x) # revealed: Unknown | Literal[1, 2]
reveal_type(C2.x) # revealed: Unknown | Literal[3, 4]
reveal_type(C1.x) # revealed: Literal[1, 2]
reveal_type(C2.x) # revealed: Literal[3, 4]
```
## Inherited class attributes
@@ -310,7 +311,7 @@ class A:
class B(A): ...
class C(B): ...
reveal_type(C.X) # revealed: Unknown | Literal["foo"]
reveal_type(C.X) # revealed: Literal["foo"]
```
### Multiple inheritance
@@ -333,7 +334,7 @@ class A(B, C): ...
reveal_type(A.__mro__)
# `E` is earlier in the MRO than `F`, so we should use the type of `E.X`
reveal_type(A.X) # revealed: Unknown | Literal[42]
reveal_type(A.X) # revealed: Literal[42]
```
## Unions with possibly unbound paths
@@ -355,7 +356,7 @@ def _(flag1: bool, flag2: bool):
C = C1 if flag1 else C2 if flag2 else C3
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C1, C2, C3]` is possibly unbound"
reveal_type(C.x) # revealed: Unknown | Literal[1, 3]
reveal_type(C.x) # revealed: Literal[1, 3]
```
### Possibly-unbound within a class
@@ -378,7 +379,7 @@ def _(flag: bool, flag1: bool, flag2: bool):
C = C1 if flag1 else C2 if flag2 else C3
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C1, C2, C3]` is possibly unbound"
reveal_type(C.x) # revealed: Unknown | Literal[1, 2, 3]
reveal_type(C.x) # revealed: Literal[1, 2, 3]
```
### Unions with all paths unbound
@@ -435,64 +436,6 @@ class Foo: ...
reveal_type(Foo.__class__) # revealed: Literal[type]
```
## Module attributes
```py path=mod.py
global_symbol: str = "a"
```
```py
import mod
reveal_type(mod.global_symbol) # revealed: str
mod.global_symbol = "b"
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `global_symbol` of type `str`"
mod.global_symbol = 1
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `global_symbol` of type `str`"
(_, mod.global_symbol) = (..., 1)
# TODO: this should be an error, but we do not understand list unpackings yet.
[_, mod.global_symbol] = [1, 2]
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# error: [invalid-assignment] "Object of type `int` is not assignable to attribute `global_symbol` of type `str`"
for mod.global_symbol in IntIterable():
pass
```
## Nested attributes
```py path=outer/__init__.py
```
```py path=outer/nested/__init__.py
```
```py path=outer/nested/inner.py
class Outer:
class Nested:
class Inner:
attr: int = 1
```
```py
import outer.nested.inner
reveal_type(outer.nested.inner.Outer.Nested.Inner.attr) # revealed: int
# error: [invalid-assignment]
outer.nested.inner.Outer.Nested.Inner.attr = "a"
```
## Literal types
### Function-literal attributes

View File

@@ -50,44 +50,46 @@ reveal_type(b | b) # revealed: Literal[False]
## Arithmetic with a variable
```py
def _(a: bool):
def lhs_is_int(x: int):
reveal_type(x + a) # revealed: int
reveal_type(x - a) # revealed: int
reveal_type(x * a) # revealed: int
reveal_type(x // a) # revealed: int
reveal_type(x / a) # revealed: float
reveal_type(x % a) # revealed: int
a = True
b = False
def rhs_is_int(x: int):
reveal_type(a + x) # revealed: int
reveal_type(a - x) # revealed: int
reveal_type(a * x) # revealed: int
reveal_type(a // x) # revealed: int
reveal_type(a / x) # revealed: float
reveal_type(a % x) # revealed: int
def lhs_is_int(x: int):
reveal_type(x + a) # revealed: int
reveal_type(x - a) # revealed: int
reveal_type(x * a) # revealed: int
reveal_type(x // a) # revealed: int
reveal_type(x / a) # revealed: float
reveal_type(x % a) # revealed: int
def lhs_is_bool(x: bool):
reveal_type(x + a) # revealed: int
reveal_type(x - a) # revealed: int
reveal_type(x * a) # revealed: int
reveal_type(x // a) # revealed: int
reveal_type(x / a) # revealed: float
reveal_type(x % a) # revealed: int
def rhs_is_int(x: int):
reveal_type(a + x) # revealed: int
reveal_type(a - x) # revealed: int
reveal_type(a * x) # revealed: int
reveal_type(a // x) # revealed: int
reveal_type(a / x) # revealed: float
reveal_type(a % x) # revealed: int
def rhs_is_bool(x: bool):
reveal_type(a + x) # revealed: int
reveal_type(a - x) # revealed: int
reveal_type(a * x) # revealed: int
reveal_type(a // x) # revealed: int
reveal_type(a / x) # revealed: float
reveal_type(a % x) # revealed: int
def lhs_is_bool(x: bool):
reveal_type(x + a) # revealed: int
reveal_type(x - a) # revealed: int
reveal_type(x * a) # revealed: int
reveal_type(x // a) # revealed: int
reveal_type(x / a) # revealed: float
reveal_type(x % a) # revealed: int
def both_are_bool(x: bool, y: bool):
reveal_type(x + y) # revealed: int
reveal_type(x - y) # revealed: int
reveal_type(x * y) # revealed: int
reveal_type(x // y) # revealed: int
reveal_type(x / y) # revealed: float
reveal_type(x % y) # revealed: int
def rhs_is_bool(x: bool):
reveal_type(a + x) # revealed: int
reveal_type(a - x) # revealed: int
reveal_type(a * x) # revealed: int
reveal_type(a // x) # revealed: int
reveal_type(a / x) # revealed: float
reveal_type(a % x) # revealed: int
def both_are_bool(x: bool, y: bool):
reveal_type(x + y) # revealed: int
reveal_type(x - y) # revealed: int
reveal_type(x * y) # revealed: int
reveal_type(x // y) # revealed: int
reveal_type(x / y) # revealed: float
reveal_type(x % y) # revealed: int
```

View File

@@ -262,8 +262,7 @@ class A:
class B:
__add__ = A()
# TODO: this could be `int` if we declare `B.__add__` using a `Callable` type
reveal_type(B() + B()) # revealed: Unknown | int
reveal_type(B() + B()) # revealed: int
```
## Integration test: numbers from typeshed

View File

@@ -5,11 +5,6 @@ that is, a use of a symbol from another scope. If a symbol has a declared type i
(e.g. `int`), we use that as the symbol's "public type" (the type of the symbol from the perspective
of other scopes) even if there is a more precise local inferred type for the symbol (`Literal[1]`).
If a symbol has no declared type, we use the union of `Unknown` with the inferred type as the public
type. If there is no declaration, then the symbol can be reassigned to any type from another scope;
the union with `Unknown` reflects that its type must at least be as large as the type of the
assigned value, but could be arbitrarily larger.
We test the whole matrix of possible boundness and declaredness states. The current behavior is
summarized in the following table, while the tests below demonstrate each case. Note that some of
this behavior is questionable and might change in the future. See the TODOs in `symbol_by_id`
@@ -17,11 +12,11 @@ this behavior is questionable and might change in the future. See the TODOs in `
In particular, we should raise errors in the "possibly-undeclared-and-unbound" as well as the
"undeclared-and-possibly-unbound" cases (marked with a "?").
| **Public type** | declared | possibly-undeclared | undeclared |
| ---------------- | ------------ | -------------------------- | ----------------------- |
| bound | `T_declared` | `T_declared \| T_inferred` | `Unknown \| T_inferred` |
| possibly-unbound | `T_declared` | `T_declared \| T_inferred` | `Unknown \| T_inferred` |
| unbound | `T_declared` | `T_declared` | `Unknown` |
| **Public type** | declared | possibly-undeclared | undeclared |
| ---------------- | ------------ | -------------------------- | ------------ |
| bound | `T_declared` | `T_declared \| T_inferred` | `T_inferred` |
| possibly-unbound | `T_declared` | `T_declared \| T_inferred` | `T_inferred` |
| unbound | `T_declared` | `T_declared` | `Unknown` |
| **Diagnostic** | declared | possibly-undeclared | undeclared |
| ---------------- | -------- | ------------------------- | ------------------- |
@@ -102,24 +97,17 @@ def flag() -> bool: ...
x = 1
y = 2
z = 3
if flag():
x: int
y: Any
x: Any
# error: [invalid-declaration]
z: str
y: str
```
```py
from mod import x, y, z
from mod import x, y
reveal_type(x) # revealed: int
reveal_type(y) # revealed: Literal[2] | Any
reveal_type(z) # revealed: Literal[3] | Unknown
# External modifications of `x` that violate the declared type are not allowed:
# error: [invalid-assignment]
x = None
reveal_type(x) # revealed: Literal[1] | Any
reveal_type(y) # revealed: Literal[2] | Unknown
```
### Possibly undeclared and possibly unbound
@@ -146,10 +134,6 @@ from mod import x, y
reveal_type(x) # revealed: Literal[1] | Any
reveal_type(y) # revealed: Literal[2] | str
# External modifications of `y` that violate the declared type are not allowed:
# error: [invalid-assignment]
y = None
```
### Possibly undeclared and unbound
@@ -170,16 +154,14 @@ if flag():
from mod import x
reveal_type(x) # revealed: int
# External modifications to `x` that violate the declared type are not allowed:
# error: [invalid-assignment]
x = None
```
## Undeclared
### Undeclared but bound
We use the inferred type as the public type, if a symbol has no declared type.
```py path=mod.py
x = 1
```
@@ -187,10 +169,7 @@ x = 1
```py
from mod import x
reveal_type(x) # revealed: Unknown | Literal[1]
# All external modifications of `x` are allowed:
x = None
reveal_type(x) # revealed: Literal[1]
```
### Undeclared and possibly unbound
@@ -210,10 +189,7 @@ if flag:
# on top of this document.
from mod import x
reveal_type(x) # revealed: Unknown | Literal[1]
# All external modifications of `x` are allowed:
x = None
reveal_type(x) # revealed: Literal[1]
```
### Undeclared and unbound
@@ -230,7 +206,4 @@ if False:
from mod import x
reveal_type(x) # revealed: Unknown
# Modifications allowed in this case:
x = None
```

View File

@@ -52,7 +52,7 @@ class NonCallable:
__call__ = 1
a = NonCallable()
# error: "Object of type `Unknown | Literal[1]` is not callable (due to union element `Literal[1]`)"
# error: "Object of type `NonCallable` is not callable"
reveal_type(a()) # revealed: Unknown
```

View File

@@ -1,151 +0,0 @@
# Comprehensions
## Basic comprehensions
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# revealed: int
[reveal_type(x) for x in IntIterable()]
class IteratorOfIterables:
def __next__(self) -> IntIterable:
return IntIterable()
class IterableOfIterables:
def __iter__(self) -> IteratorOfIterables:
return IteratorOfIterables()
# revealed: tuple[int, IntIterable]
[reveal_type((x, y)) for y in IterableOfIterables() for x in y]
# revealed: int
{reveal_type(x): 0 for x in IntIterable()}
# revealed: int
{0: reveal_type(x) for x in IntIterable()}
```
## Nested comprehension
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# TODO: This could be a `tuple[int, int]` if we model that `y` can not be modified in the outer comprehension scope
# revealed: tuple[int, Unknown | int]
[[reveal_type((x, y)) for x in IntIterable()] for y in IntIterable()]
```
## Comprehension referencing outer comprehension
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
class IteratorOfIterables:
def __next__(self) -> IntIterable:
return IntIterable()
class IterableOfIterables:
def __iter__(self) -> IteratorOfIterables:
return IteratorOfIterables()
# TODO: This could be a `tuple[int, int]` (see above)
# revealed: tuple[int, Unknown | IntIterable]
[[reveal_type((x, y)) for x in y] for y in IterableOfIterables()]
```
## Comprehension with unbound iterable
Iterating over an unbound iterable yields `Unknown`:
```py
# error: [unresolved-reference] "Name `x` used when not defined"
# revealed: Unknown
[reveal_type(z) for z in x]
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# error: [not-iterable] "Object of type `int` is not iterable"
# revealed: tuple[int, Unknown]
[reveal_type((x, z)) for x in IntIterable() for z in x]
```
## Starred expressions
Starred expressions must be iterable:
```py
class NotIterable: ...
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator: ...
# This is fine:
x = [*Iterable()]
# error: [not-iterable] "Object of type `NotIterable` is not iterable"
y = [*NotIterable()]
```
## Async comprehensions
### Basic
```py
class AsyncIterator:
async def __anext__(self) -> int:
return 42
class AsyncIterable:
def __aiter__(self) -> AsyncIterator:
return AsyncIterator()
# revealed: @Todo(async iterables/iterators)
[reveal_type(x) async for x in AsyncIterable()]
```
### Invalid async comprehension
This tests that we understand that `async` comprehensions do *not* work according to the synchronous
iteration protocol.
```py
class Iterator:
def __next__(self) -> int:
return 42
class Iterable:
def __iter__(self) -> Iterator:
return Iterator()
# revealed: @Todo(async iterables/iterators)
[reveal_type(x) async for x in Iterable()]
```

View File

@@ -1,43 +0,0 @@
# Comprehensions with invalid syntax
```py
class IntIterator:
def __next__(self) -> int:
return 42
class IntIterable:
def __iter__(self) -> IntIterator:
return IntIterator()
# Missing 'in' keyword.
# It's reasonably clear here what they *meant* to write,
# so we'll still infer the correct type:
# error: [invalid-syntax] "Expected 'in', found name"
# revealed: int
[reveal_type(a) for a IntIterable()]
# Missing iteration variable
# error: [invalid-syntax] "Expected an identifier, but found a keyword 'in' that cannot be used here"
# error: [invalid-syntax] "Expected 'in', found name"
# error: [unresolved-reference]
# revealed: Unknown
[reveal_type(b) for in IntIterable()]
# Missing iterable
# error: [invalid-syntax] "Expected an expression"
# revealed: Unknown
[reveal_type(c) for c in]
# Missing 'in' keyword and missing iterable
# error: [invalid-syntax] "Expected 'in', found ']'"
# revealed: Unknown
[reveal_type(d) for d]
```

View File

@@ -5,7 +5,7 @@
```py
def _(flag: bool):
class A:
always_bound: int = 1
always_bound = 1
if flag:
union = 1
@@ -13,21 +13,14 @@ def _(flag: bool):
union = "abc"
if flag:
union_declared: int = 1
else:
union_declared: str = "abc"
possibly_unbound = "abc"
if flag:
possibly_unbound: str = "abc"
reveal_type(A.always_bound) # revealed: Literal[1]
reveal_type(A.always_bound) # revealed: int
reveal_type(A.union) # revealed: Unknown | Literal[1, "abc"]
reveal_type(A.union_declared) # revealed: int | str
reveal_type(A.union) # revealed: Literal[1, "abc"]
# error: [possibly-unbound-attribute] "Attribute `possibly_unbound` on type `Literal[A]` is possibly unbound"
reveal_type(A.possibly_unbound) # revealed: str
reveal_type(A.possibly_unbound) # revealed: Literal["abc"]
# error: [unresolved-attribute] "Type `Literal[A]` has no attribute `non_existent`"
reveal_type(A.non_existent) # revealed: Unknown

View File

@@ -55,7 +55,7 @@ reveal_type("x" or "y" and "") # revealed: Literal["x"]
## Evaluates to builtin
```py path=a.py
redefined_builtin_bool: type[bool] = bool
redefined_builtin_bool = bool
def my_bool(x) -> bool:
return True

View File

@@ -172,10 +172,10 @@ class IntUnion:
def __len__(self) -> Literal[SomeEnum.INT, SomeEnum.INT_2]: ...
reveal_type(len(Auto())) # revealed: int
reveal_type(len(Int())) # revealed: int
reveal_type(len(Int())) # revealed: Literal[2]
reveal_type(len(Str())) # revealed: int
reveal_type(len(Tuple())) # revealed: int
reveal_type(len(IntUnion())) # revealed: int
reveal_type(len(IntUnion())) # revealed: Literal[2, 32]
```
### Negative integers

View File

@@ -20,7 +20,7 @@ wrong_innards: MyBox[int] = MyBox("five")
# TODO reveal int, do not leak the typevar
reveal_type(box.data) # revealed: T
reveal_type(MyBox.box_model_number) # revealed: Unknown | Literal[695]
reveal_type(MyBox.box_model_number) # revealed: Literal[695]
```
## Subclassing

View File

@@ -1,70 +1,8 @@
# Builtins
## Importing builtin module
Builtin symbols can be explicitly imported:
# Importing builtin module
```py
import builtins
reveal_type(builtins.chr) # revealed: Literal[chr]
```
## Implicit use of builtin
Or used implicitly:
```py
reveal_type(chr) # revealed: Literal[chr]
reveal_type(str) # revealed: Literal[str]
```
## Builtin symbol from custom typeshed
If we specify a custom typeshed, we can use the builtin symbol from it, and no longer access the
builtins from the "actual" vendored typeshed:
```toml
[environment]
typeshed = "/typeshed"
```
```pyi path=/typeshed/stdlib/builtins.pyi
class Custom: ...
custom_builtin: Custom
```
```pyi path=/typeshed/stdlib/typing_extensions.pyi
def reveal_type(obj, /): ...
```
```py
reveal_type(custom_builtin) # revealed: Custom
# error: [unresolved-reference]
reveal_type(str) # revealed: Unknown
```
## Unknown builtin (later defined)
`foo` has a type of `Unknown` in this example, as it relies on `bar`, which has not been defined at
that point:
```toml
[environment]
typeshed = "/typeshed"
```
```pyi path=/typeshed/stdlib/builtins.pyi
foo = bar
bar = 1
```
```pyi path=/typeshed/stdlib/typing_extensions.pyi
def reveal_type(obj, /): ...
```
```py
reveal_type(foo) # revealed: Unknown
x = builtins.chr
reveal_type(x) # revealed: Literal[chr]
```

View File

@@ -23,8 +23,8 @@ reveal_type(y)
# error: [possibly-unbound-import] "Member `y` of module `maybe_unbound` is possibly unbound"
from maybe_unbound import x, y
reveal_type(x) # revealed: Unknown | Literal[3]
reveal_type(y) # revealed: Unknown | Literal[3]
reveal_type(x) # revealed: Literal[3]
reveal_type(y) # revealed: Literal[3]
```
## Maybe unbound annotated
@@ -52,7 +52,7 @@ Importing an annotated name prefers the declared type over the inferred type:
# error: [possibly-unbound-import] "Member `y` of module `maybe_unbound_annotated` is possibly unbound"
from maybe_unbound_annotated import x, y
reveal_type(x) # revealed: Unknown | Literal[3]
reveal_type(x) # revealed: Literal[3]
reveal_type(y) # revealed: int
```

View File

@@ -9,7 +9,7 @@ reveal_type(a.b) # revealed: <module 'a.b'>
```
```py path=a/__init__.py
b: int = 42
b = 42
```
```py path=a/b.py
@@ -20,11 +20,11 @@ b: int = 42
```py
from a import b
reveal_type(b) # revealed: int
reveal_type(b) # revealed: Literal[42]
```
```py path=a/__init__.py
b: int = 42
b = 42
```
```py path=a/b.py
@@ -41,7 +41,7 @@ reveal_type(a.b) # revealed: <module 'a.b'>
```
```py path=a/__init__.py
b: int = 42
b = 42
```
```py path=a/b.py
@@ -60,13 +60,13 @@ sees the submodule as the value of `b` instead of the integer.
from a import b
import a.b
# Python would say `int` for `b`
# Python would say `Literal[42]` for `b`
reveal_type(b) # revealed: <module 'a.b'>
reveal_type(a.b) # revealed: <module 'a.b'>
```
```py path=a/__init__.py
b: int = 42
b = 42
```
```py path=a/b.py

View File

@@ -20,12 +20,12 @@ from a import b.c
# TODO: Should these be inferred as Unknown?
reveal_type(b) # revealed: <module 'a.b'>
reveal_type(b.c) # revealed: int
reveal_type(b.c) # revealed: Literal[1]
```
```py path=a/__init__.py
```
```py path=a/b.py
c: int = 1
c = 1
```

View File

@@ -17,13 +17,13 @@ reveal_type(X) # revealed: Unknown
```
```py path=package/foo.py
X: int = 42
X = 42
```
```py path=package/bar.py
from .foo import X
reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```
## Dotted
@@ -32,25 +32,25 @@ reveal_type(X) # revealed: int
```
```py path=package/foo/bar/baz.py
X: int = 42
X = 42
```
```py path=package/bar.py
from .foo.bar.baz import X
reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```
## Bare to package
```py path=package/__init__.py
X: int = 42
X = 42
```
```py path=package/bar.py
from . import X
reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```
## Non-existent + bare to package
@@ -66,11 +66,11 @@ reveal_type(X) # revealed: Unknown
```py path=package/__init__.py
from .foo import X
reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```
```py path=package/foo.py
X: int = 42
X = 42
```
## Non-existent + dunder init
@@ -87,13 +87,13 @@ reveal_type(X) # revealed: Unknown
```
```py path=package/foo.py
X: int = 42
X = 42
```
```py path=package/subpackage/subsubpackage/bar.py
from ...foo import X
reveal_type(X) # revealed: int
reveal_type(X) # revealed: Literal[42]
```
## Unbound symbol
@@ -117,13 +117,13 @@ reveal_type(x) # revealed: Unknown
```
```py path=package/foo.py
X: int = 42
X = 42
```
```py path=package/bar.py
from . import foo
reveal_type(foo.X) # revealed: int
reveal_type(foo.X) # revealed: Literal[42]
```
## Non-existent + bare to module
@@ -152,7 +152,7 @@ submodule via the attribute on its parent package.
```
```py path=package/foo.py
X: int = 42
X = 42
```
```py path=package/bar.py

View File

@@ -109,9 +109,9 @@ reveal_type(x)
def _(flag: bool):
class NotIterable:
if flag:
__iter__: int = 1
__iter__ = 1
else:
__iter__: None = None
__iter__ = None
for x in NotIterable(): # error: "Object of type `NotIterable` is not iterable"
pass
@@ -135,7 +135,7 @@ for x in nonsense: # error: "Object of type `Literal[123]` is not iterable"
class NotIterable:
def __getitem__(self, key: int) -> int:
return 42
__iter__: None = None
__iter__ = None
for x in NotIterable(): # error: "Object of type `NotIterable` is not iterable"
pass

View File

@@ -1,95 +0,0 @@
# Custom typeshed
The `environment.typeshed` configuration option can be used to specify a custom typeshed directory
for Markdown-based tests. Custom typeshed stubs can then be placed in the specified directory using
fenced code blocks with language `pyi`, and will be used instead of the vendored copy of typeshed.
A fenced code block with language `text` can be used to provide a `stdlib/VERSIONS` file in the
custom typeshed root. If no such file is created explicitly, it will be automatically created with
entries enabling all specified `<typeshed-root>/stdlib` files for all supported Python versions.
## Basic example (auto-generated `VERSIONS` file)
First, we specify `/typeshed` as the custom typeshed directory:
```toml
[environment]
typeshed = "/typeshed"
```
We can then place custom stub files in `/typeshed/stdlib`, for example:
```pyi path=/typeshed/stdlib/builtins.pyi
class BuiltinClass: ...
builtin_symbol: BuiltinClass
```
```pyi path=/typeshed/stdlib/sys/__init__.pyi
version = "my custom Python"
```
And finally write a normal Python code block that makes use of the custom stubs:
```py
b: BuiltinClass = builtin_symbol
class OtherClass: ...
o: OtherClass = builtin_symbol # error: [invalid-assignment]
# Make sure that 'sys' has a proper entry in the auto-generated 'VERSIONS' file
import sys
```
## Custom `VERSIONS` file
If we want to specify a custom `VERSIONS` file, we can do so by creating a fenced code block with
language `text`. In the following test, we set the Python version to `3.10` and then make sure that
we can *not* import `new_module` with a version requirement of `3.11-`:
```toml
[environment]
python-version = "3.10"
typeshed = "/typeshed"
```
```pyi path=/typeshed/stdlib/old_module.pyi
class OldClass: ...
```
```pyi path=/typeshed/stdlib/new_module.pyi
class NewClass: ...
```
```text path=/typeshed/stdlib/VERSIONS
old_module: 3.0-
new_module: 3.11-
```
```py
from old_module import OldClass
# error: [unresolved-import] "Cannot resolve import `new_module`"
from new_module import NewClass
```
## Using `reveal_type` with a custom typeshed
When providing a custom typeshed directory, basic things like `reveal_type` will stop working
because we rely on being able to import it from `typing_extensions`. The actual definition of
`reveal_type` in typeshed is slightly involved (depends on generics, `TypeVar`, etc.), but a very
simple untyped definition is enough to make `reveal_type` work in tests:
```toml
[environment]
typeshed = "/typeshed"
```
```pyi path=/typeshed/stdlib/typing_extensions.pyi
def reveal_type(obj, /): ...
```
```py
reveal_type(()) # revealed: tuple[()]
```

View File

@@ -99,9 +99,9 @@ def _(x: str | int):
class A: ...
class B: ...
def _(x: A | B):
alias_for_type = type
alias_for_type = type
def _(x: A | B):
if alias_for_type(x) is A:
reveal_type(x) # revealed: A
```

View File

@@ -10,10 +10,10 @@ def returns_bool() -> bool:
return True
if returns_bool():
chr: int = 1
chr = 1
def f():
reveal_type(chr) # revealed: Literal[chr] | int
reveal_type(chr) # revealed: Literal[chr] | Literal[1]
```
## Conditionally global or builtin, with annotation

View File

@@ -6,7 +6,7 @@
def f():
x = 1
def g():
reveal_type(x) # revealed: Unknown | Literal[1]
reveal_type(x) # revealed: Literal[1]
```
## Two levels up
@@ -16,7 +16,7 @@ def f():
x = 1
def g():
def h():
reveal_type(x) # revealed: Unknown | Literal[1]
reveal_type(x) # revealed: Literal[1]
```
## Skips class scope
@@ -28,7 +28,7 @@ def f():
class C:
x = 2
def g():
reveal_type(x) # revealed: Unknown | Literal[1]
reveal_type(x) # revealed: Literal[1]
```
## Skips annotation-only assignment
@@ -41,16 +41,5 @@ def f():
# name is otherwise not defined; maybe should be an error?
x: int
def h():
reveal_type(x) # revealed: Unknown | Literal[1]
```
## Implicit global in function
A name reference to a never-defined symbol in a function is implicitly a global lookup.
```py
x = 1
def f():
reveal_type(x) # revealed: Unknown | Literal[1]
reveal_type(x) # revealed: Literal[1]
```

View File

@@ -17,8 +17,8 @@ class C:
x = 2
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C]` is possibly unbound"
reveal_type(C.x) # revealed: Unknown | Literal[2]
reveal_type(C.y) # revealed: Unknown | Literal[1]
reveal_type(C.x) # revealed: Literal[2]
reveal_type(C.y) # revealed: Literal[1]
```
## Possibly unbound in class and global scope
@@ -37,7 +37,7 @@ class C:
# error: [possibly-unresolved-reference]
y = x
reveal_type(C.y) # revealed: Unknown | Literal[1, "abc"]
reveal_type(C.y) # revealed: Literal[1, "abc"]
```
## Unbound function local

View File

@@ -182,34 +182,3 @@ class C(A, B): ...
# False negative: [incompatible-slots]
class A(int, str): ...
```
### Diagnostic if `__slots__` is externally modified
We special-case type inference for `__slots__` and return the pure inferred type, even if the symbol
is not declared (a case in which we union with `Unknown` for other public symbols). The reason for
this is that `__slots__` is handled specially at runtime: modifying it externally is actually
allowed, but those changes do not take effect. If you have a class `C` with `__slots__ = ("foo",)`
and externally set `C.__slots__ = ("bar",)`, you still can't access `C.bar`, and you can still
access `C.foo`. We therefore issue a diagnostic for such assignments:
```py
class A:
__slots__ = ("a",)
# Modifying `__slots__` from within the class body is fine:
__slots__ = ("a", "b")
# No `Unknown` here:
reveal_type(A.__slots__) # revealed: tuple[Literal["a"], Literal["b"]]
# But modifying it externally is not:
# error: [invalid-assignment]
A.__slots__ = ("a",)
# error: [invalid-assignment]
A.__slots__ = ("a", "b_new")
# error: [invalid-assignment]
A.__slots__ = ("a", "b", "c")
```

View File

@@ -11,7 +11,7 @@ version:
import sys
if sys.version_info >= (3, 9):
SomeFeature: str = "available"
SomeFeature = "available"
```
If we can statically determine that the condition is always true, then we can also understand that
@@ -21,7 +21,7 @@ If we can statically determine that the condition is always true, then we can al
from module1 import SomeFeature
# SomeFeature is unconditionally available here, because we are on Python 3.9 or newer:
reveal_type(SomeFeature) # revealed: str
reveal_type(SomeFeature) # revealed: Literal["available"]
```
Another scenario where this is useful is for `typing.TYPE_CHECKING` branches, which are often used

View File

@@ -31,7 +31,7 @@ def _(n: int):
## Slices
```py
b: bytes = b"\x00abc\xff"
b = b"\x00abc\xff"
reveal_type(b[0:2]) # revealed: Literal[b"\x00a"]
reveal_type(b[-3:]) # revealed: Literal[b"bc\xff"]

View File

@@ -14,8 +14,7 @@ a = NotSubscriptable()[0] # error: "Cannot subscript object of type `NotSubscri
class NotSubscriptable:
__getitem__ = None
# error: "Method `__getitem__` of type `Unknown | None` is not callable on object of type `NotSubscriptable`"
a = NotSubscriptable()[0]
a = NotSubscriptable()[0] # error: "Method `__getitem__` of type `None` is not callable on object of type `NotSubscriptable`"
```
## Valid getitem

View File

@@ -28,52 +28,52 @@ def _(n: int):
## Slices
```py
s = "abcde"
reveal_type(s[0:0]) # revealed: Literal[""]
reveal_type(s[0:1]) # revealed: Literal["a"]
reveal_type(s[0:2]) # revealed: Literal["ab"]
reveal_type(s[0:5]) # revealed: Literal["abcde"]
reveal_type(s[0:6]) # revealed: Literal["abcde"]
reveal_type(s[1:3]) # revealed: Literal["bc"]
reveal_type(s[-3:5]) # revealed: Literal["cde"]
reveal_type(s[-4:-2]) # revealed: Literal["bc"]
reveal_type(s[-10:10]) # revealed: Literal["abcde"]
reveal_type(s[0:]) # revealed: Literal["abcde"]
reveal_type(s[2:]) # revealed: Literal["cde"]
reveal_type(s[5:]) # revealed: Literal[""]
reveal_type(s[:2]) # revealed: Literal["ab"]
reveal_type(s[:0]) # revealed: Literal[""]
reveal_type(s[:2]) # revealed: Literal["ab"]
reveal_type(s[:10]) # revealed: Literal["abcde"]
reveal_type(s[:]) # revealed: Literal["abcde"]
reveal_type(s[::-1]) # revealed: Literal["edcba"]
reveal_type(s[::2]) # revealed: Literal["ace"]
reveal_type(s[-2:-5:-1]) # revealed: Literal["dcb"]
reveal_type(s[::-2]) # revealed: Literal["eca"]
reveal_type(s[-1::-3]) # revealed: Literal["eb"]
reveal_type(s[None:2:None]) # revealed: Literal["ab"]
reveal_type(s[1:None:1]) # revealed: Literal["bcde"]
reveal_type(s[None:None:None]) # revealed: Literal["abcde"]
start = 1
stop = None
step = 2
reveal_type(s[start:stop:step]) # revealed: Literal["bd"]
reveal_type(s[False:True]) # revealed: Literal["a"]
reveal_type(s[True:3]) # revealed: Literal["bc"]
s[0:4:0] # error: [zero-stepsize-in-slice]
s[:4:0] # error: [zero-stepsize-in-slice]
s[0::0] # error: [zero-stepsize-in-slice]
s[::0] # error: [zero-stepsize-in-slice]
def _(m: int, n: int, s2: str):
s = "abcde"
reveal_type(s[0:0]) # revealed: Literal[""]
reveal_type(s[0:1]) # revealed: Literal["a"]
reveal_type(s[0:2]) # revealed: Literal["ab"]
reveal_type(s[0:5]) # revealed: Literal["abcde"]
reveal_type(s[0:6]) # revealed: Literal["abcde"]
reveal_type(s[1:3]) # revealed: Literal["bc"]
reveal_type(s[-3:5]) # revealed: Literal["cde"]
reveal_type(s[-4:-2]) # revealed: Literal["bc"]
reveal_type(s[-10:10]) # revealed: Literal["abcde"]
reveal_type(s[0:]) # revealed: Literal["abcde"]
reveal_type(s[2:]) # revealed: Literal["cde"]
reveal_type(s[5:]) # revealed: Literal[""]
reveal_type(s[:2]) # revealed: Literal["ab"]
reveal_type(s[:0]) # revealed: Literal[""]
reveal_type(s[:2]) # revealed: Literal["ab"]
reveal_type(s[:10]) # revealed: Literal["abcde"]
reveal_type(s[:]) # revealed: Literal["abcde"]
reveal_type(s[::-1]) # revealed: Literal["edcba"]
reveal_type(s[::2]) # revealed: Literal["ace"]
reveal_type(s[-2:-5:-1]) # revealed: Literal["dcb"]
reveal_type(s[::-2]) # revealed: Literal["eca"]
reveal_type(s[-1::-3]) # revealed: Literal["eb"]
reveal_type(s[None:2:None]) # revealed: Literal["ab"]
reveal_type(s[1:None:1]) # revealed: Literal["bcde"]
reveal_type(s[None:None:None]) # revealed: Literal["abcde"]
start = 1
stop = None
step = 2
reveal_type(s[start:stop:step]) # revealed: Literal["bd"]
reveal_type(s[False:True]) # revealed: Literal["a"]
reveal_type(s[True:3]) # revealed: Literal["bc"]
s[0:4:0] # error: [zero-stepsize-in-slice]
s[:4:0] # error: [zero-stepsize-in-slice]
s[0::0] # error: [zero-stepsize-in-slice]
s[::0] # error: [zero-stepsize-in-slice]
substring1 = s[m:n]
# TODO: Support overloads... Should be `LiteralString`
reveal_type(substring1) # revealed: @Todo(return type)

View File

@@ -23,51 +23,51 @@ reveal_type(b) # revealed: Unknown
## Slices
```py
t = (1, "a", None, b"b")
reveal_type(t[0:0]) # revealed: tuple[()]
reveal_type(t[0:1]) # revealed: tuple[Literal[1]]
reveal_type(t[0:2]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[0:4]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[0:5]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[1:3]) # revealed: tuple[Literal["a"], None]
reveal_type(t[-2:4]) # revealed: tuple[None, Literal[b"b"]]
reveal_type(t[-3:-1]) # revealed: tuple[Literal["a"], None]
reveal_type(t[-10:10]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[0:]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[2:]) # revealed: tuple[None, Literal[b"b"]]
reveal_type(t[4:]) # revealed: tuple[()]
reveal_type(t[:0]) # revealed: tuple[()]
reveal_type(t[:2]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[:10]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[:]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[::-1]) # revealed: tuple[Literal[b"b"], None, Literal["a"], Literal[1]]
reveal_type(t[::2]) # revealed: tuple[Literal[1], None]
reveal_type(t[-2:-5:-1]) # revealed: tuple[None, Literal["a"], Literal[1]]
reveal_type(t[::-2]) # revealed: tuple[Literal[b"b"], Literal["a"]]
reveal_type(t[-1::-3]) # revealed: tuple[Literal[b"b"], Literal[1]]
reveal_type(t[None:2:None]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[1:None:1]) # revealed: tuple[Literal["a"], None, Literal[b"b"]]
reveal_type(t[None:None:None]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
start = 1
stop = None
step = 2
reveal_type(t[start:stop:step]) # revealed: tuple[Literal["a"], Literal[b"b"]]
reveal_type(t[False:True]) # revealed: tuple[Literal[1]]
reveal_type(t[True:3]) # revealed: tuple[Literal["a"], None]
t[0:4:0] # error: [zero-stepsize-in-slice]
t[:4:0] # error: [zero-stepsize-in-slice]
t[0::0] # error: [zero-stepsize-in-slice]
t[::0] # error: [zero-stepsize-in-slice]
def _(m: int, n: int):
t = (1, "a", None, b"b")
reveal_type(t[0:0]) # revealed: tuple[()]
reveal_type(t[0:1]) # revealed: tuple[Literal[1]]
reveal_type(t[0:2]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[0:4]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[0:5]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[1:3]) # revealed: tuple[Literal["a"], None]
reveal_type(t[-2:4]) # revealed: tuple[None, Literal[b"b"]]
reveal_type(t[-3:-1]) # revealed: tuple[Literal["a"], None]
reveal_type(t[-10:10]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[0:]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[2:]) # revealed: tuple[None, Literal[b"b"]]
reveal_type(t[4:]) # revealed: tuple[()]
reveal_type(t[:0]) # revealed: tuple[()]
reveal_type(t[:2]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[:10]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[:]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
reveal_type(t[::-1]) # revealed: tuple[Literal[b"b"], None, Literal["a"], Literal[1]]
reveal_type(t[::2]) # revealed: tuple[Literal[1], None]
reveal_type(t[-2:-5:-1]) # revealed: tuple[None, Literal["a"], Literal[1]]
reveal_type(t[::-2]) # revealed: tuple[Literal[b"b"], Literal["a"]]
reveal_type(t[-1::-3]) # revealed: tuple[Literal[b"b"], Literal[1]]
reveal_type(t[None:2:None]) # revealed: tuple[Literal[1], Literal["a"]]
reveal_type(t[1:None:1]) # revealed: tuple[Literal["a"], None, Literal[b"b"]]
reveal_type(t[None:None:None]) # revealed: tuple[Literal[1], Literal["a"], None, Literal[b"b"]]
start = 1
stop = None
step = 2
reveal_type(t[start:stop:step]) # revealed: tuple[Literal["a"], Literal[b"b"]]
reveal_type(t[False:True]) # revealed: tuple[Literal[1]]
reveal_type(t[True:3]) # revealed: tuple[Literal["a"], None]
t[0:4:0] # error: [zero-stepsize-in-slice]
t[:4:0] # error: [zero-stepsize-in-slice]
t[0::0] # error: [zero-stepsize-in-slice]
t[::0] # error: [zero-stepsize-in-slice]
tuple_slice = t[m:n]
# TODO: Support overloads... Should be `tuple[Literal[1, 'a', b"b"] | None, ...]`
reveal_type(tuple_slice) # revealed: @Todo(return type)

View File

@@ -180,11 +180,3 @@ a = 4 / 0 # error: [division-by-zero]
# error: [unknown-rule] "Unknown rule `is-equal-14`"
a = 10 + 4 # knot: ignore[is-equal-14]
```
## Code with `lint:` prefix
```py
# error:[unknown-rule] "Unknown rule `lint:division-by-zero`. Did you mean `division-by-zero`?"
# error: [division-by-zero]
a = 10 / 0 # knot: ignore[lint:division-by-zero]
```

View File

@@ -263,12 +263,15 @@ static_assert(not is_assignable_to(int, Not[int]))
static_assert(not is_assignable_to(int, Not[Literal[1]]))
static_assert(not is_assignable_to(Intersection[Any, Parent], Unrelated))
static_assert(is_assignable_to(Intersection[Unrelated, Any], Intersection[Unrelated, Any]))
static_assert(is_assignable_to(Intersection[Unrelated, Any], Intersection[Unrelated, Not[Any]]))
# TODO: The following assertions should not fail (see https://github.com/astral-sh/ruff/issues/14899)
# error: [static-assert-error]
static_assert(is_assignable_to(Intersection[Any, int], int))
# error: [static-assert-error]
static_assert(is_assignable_to(Intersection[Unrelated, Any], Intersection[Unrelated, Any]))
# error: [static-assert-error]
static_assert(is_assignable_to(Intersection[Unrelated, Any], Intersection[Unrelated, Not[Any]]))
# error: [static-assert-error]
static_assert(is_assignable_to(Intersection[Unrelated, Any], Not[tuple[Unrelated, Any]]))
```

View File

@@ -139,9 +139,7 @@ reveal_type(not AlwaysFalse())
# We don't get into a cycle if someone sets their `__bool__` method to the `bool` builtin:
class BoolIsBool:
# TODO: The `type[bool]` declaration here is a workaround to avoid running into
# https://github.com/astral-sh/ruff/issues/15672
__bool__: type[bool] = bool
__bool__ = bool
# revealed: bool
reveal_type(not BoolIsBool())

View File

@@ -76,11 +76,11 @@ with Manager():
```py
class Manager:
__enter__: int = 42
__enter__ = 42
def __exit__(self, exc_tpe, exc_value, traceback): ...
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because the method `__enter__` of type `int` is not callable"
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because the method `__enter__` of type `Literal[42]` is not callable"
with Manager():
...
```
@@ -91,9 +91,9 @@ with Manager():
class Manager:
def __enter__(self) -> Self: ...
__exit__: int = 32
__exit__ = 32
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because the method `__exit__` of type `int` is not callable"
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because the method `__exit__` of type `Literal[32]` is not callable"
with Manager():
...
```

View File

@@ -156,6 +156,11 @@ pub(crate) mod tests {
self
}
pub(crate) fn with_custom_typeshed(mut self, path: &str) -> Self {
self.custom_typeshed = Some(SystemPathBuf::from(path));
self
}
pub(crate) fn with_file(mut self, path: &'a str, content: &'a str) -> Self {
self.files.push((path, content));
self
@@ -171,7 +176,7 @@ pub(crate) mod tests {
.context("Failed to write test files")?;
let mut search_paths = SearchPathSettings::new(vec![src_root]);
search_paths.custom_typeshed = self.custom_typeshed;
search_paths.typeshed = self.custom_typeshed;
Program::from_settings(
&db,

View File

@@ -10,7 +10,7 @@ pub use module_resolver::{resolve_module, system_module_search_paths, KnownModul
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
pub use python_platform::PythonPlatform;
pub use python_version::PythonVersion;
pub use semantic_model::{HasType, SemanticModel};
pub use semantic_model::{HasTy, SemanticModel};
pub mod ast_node_ref;
mod db;

View File

@@ -1,5 +1,5 @@
use itertools::Itertools;
use ruff_db::diagnostic::{DiagnosticId, LintName, Severity};
use ruff_db::diagnostic::{LintName, Severity};
use rustc_hash::FxHashMap;
use std::hash::Hasher;
use thiserror::Error;
@@ -31,11 +31,6 @@ pub struct LintMetadata {
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(
feature = "serde",
derive(serde::Serialize, serde::Deserialize),
serde(rename_all = "kebab-case")
)]
pub enum Level {
/// The lint is disabled and should not run.
Ignore,
@@ -345,18 +340,7 @@ impl LintRegistry {
}
}
Some(LintEntry::Removed(lint)) => Err(GetLintError::Removed(lint.name())),
None => {
if let Some(without_prefix) = DiagnosticId::strip_category(code) {
if let Some(entry) = self.by_name.get(without_prefix) {
return Err(GetLintError::PrefixedWithCategory {
prefixed: code.to_string(),
suggestion: entry.id().name.to_string(),
});
}
}
Err(GetLintError::Unknown(code.to_string()))
}
None => Err(GetLintError::Unknown(code.to_string())),
}
}
@@ -393,20 +377,12 @@ impl LintRegistry {
#[derive(Error, Debug, Clone, PartialEq, Eq)]
pub enum GetLintError {
/// The name maps to this removed lint.
#[error("lint `{0}` has been removed")]
#[error("lint {0} has been removed")]
Removed(LintName),
/// No lint with the given name is known.
#[error("unknown lint `{0}`")]
#[error("unknown lint {0}")]
Unknown(String),
/// The name uses the full qualified diagnostic id `lint:<rule>` instead of just `rule`.
/// The String is the name without the `lint:` category prefix.
#[error("unknown lint `{prefixed}`. Did you mean `{suggestion}`?")]
PrefixedWithCategory {
prefixed: String,
suggestion: String,
},
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
@@ -418,16 +394,6 @@ pub enum LintEntry {
Alias(LintId),
}
impl LintEntry {
fn id(self) -> LintId {
match self {
LintEntry::Lint(id) => id,
LintEntry::Removed(id) => id,
LintEntry::Alias(id) => id,
}
}
}
impl From<&'static LintMetadata> for LintEntry {
fn from(metadata: &'static LintMetadata) -> Self {
if metadata.status.is_removed() {
@@ -438,12 +404,12 @@ impl From<&'static LintMetadata> for LintEntry {
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
#[derive(Debug, Clone, Default)]
pub struct RuleSelection {
/// Map with the severity for each enabled lint rule.
///
/// If a rule isn't present in this map, then it should be considered disabled.
lints: FxHashMap<LintId, (Severity, LintSource)>,
lints: FxHashMap<LintId, Severity>,
}
impl RuleSelection {
@@ -456,7 +422,7 @@ impl RuleSelection {
.filter_map(|lint| {
Severity::try_from(lint.default_level())
.ok()
.map(|severity| (*lint, (severity, LintSource::Default)))
.map(|severity| (*lint, severity))
})
.collect();
@@ -470,14 +436,12 @@ impl RuleSelection {
/// Returns an iterator over all enabled lints and their severity.
pub fn iter(&self) -> impl ExactSizeIterator<Item = (LintId, Severity)> + '_ {
self.lints
.iter()
.map(|(&lint, &(severity, _))| (lint, severity))
self.lints.iter().map(|(&lint, &severity)| (lint, severity))
}
/// Returns the configured severity for the lint with the given id or `None` if the lint is disabled.
pub fn severity(&self, lint: LintId) -> Option<Severity> {
self.lints.get(&lint).map(|(severity, _)| *severity)
self.lints.get(&lint).copied()
}
/// Returns `true` if the `lint` is enabled.
@@ -488,25 +452,19 @@ impl RuleSelection {
/// Enables `lint` and configures with the given `severity`.
///
/// Overrides any previous configuration for the lint.
pub fn enable(&mut self, lint: LintId, severity: Severity, source: LintSource) {
self.lints.insert(lint, (severity, source));
pub fn enable(&mut self, lint: LintId, severity: Severity) {
self.lints.insert(lint, severity);
}
/// Disables `lint` if it was previously enabled.
pub fn disable(&mut self, lint: LintId) {
self.lints.remove(&lint);
}
}
#[derive(Default, Copy, Clone, Debug, PartialEq, Eq)]
pub enum LintSource {
/// The user didn't enable the rule explicitly, instead it's enabled by default.
#[default]
Default,
/// The rule was enabled by using a CLI argument
Cli,
/// The rule was enabled in a configuration file.
File,
/// Merges the enabled lints from `other` into this selection.
///
/// Lints from `other` will override any existing configuration.
pub fn merge(&mut self, other: &RuleSelection) {
self.lints.extend(other.iter());
}
}

View File

@@ -169,7 +169,7 @@ impl SearchPaths {
let SearchPathSettings {
extra_paths,
src_roots,
custom_typeshed: typeshed,
typeshed,
site_packages: site_packages_paths,
} = settings;
@@ -1308,7 +1308,7 @@ mod tests {
search_paths: SearchPathSettings {
extra_paths: vec![],
src_roots: vec![src.clone()],
custom_typeshed: Some(custom_typeshed),
typeshed: Some(custom_typeshed),
site_packages: SitePackages::Known(vec![site_packages]),
},
},
@@ -1814,7 +1814,7 @@ not_a_directory
search_paths: SearchPathSettings {
extra_paths: vec![],
src_roots: vec![SystemPathBuf::from("/src")],
custom_typeshed: None,
typeshed: None,
site_packages: SitePackages::Known(vec![
venv_site_packages,
system_site_packages,

View File

@@ -73,7 +73,7 @@ pub(crate) struct UnspecifiedTypeshed;
///
/// For tests checking that standard-library module resolution is working
/// correctly, you should usually create a [`MockedTypeshed`] instance
/// and pass it to the [`TestCaseBuilder::with_mocked_typeshed`] method.
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
/// If you need to check something that involves the vendored typeshed stubs
/// we include as part of the binary, you can instead use the
/// [`TestCaseBuilder::with_vendored_typeshed`] method.
@@ -238,7 +238,7 @@ impl TestCaseBuilder<MockedTypeshed> {
search_paths: SearchPathSettings {
extra_paths: vec![],
src_roots: vec![src.clone()],
custom_typeshed: Some(typeshed.clone()),
typeshed: Some(typeshed.clone()),
site_packages: SitePackages::Known(vec![site_packages.clone()]),
},
},

View File

@@ -108,7 +108,7 @@ pub struct SearchPathSettings {
/// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
/// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
/// bundled as a zip file in the binary
pub custom_typeshed: Option<SystemPathBuf>,
pub typeshed: Option<SystemPathBuf>,
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub site_packages: SitePackages,
@@ -119,7 +119,7 @@ impl SearchPathSettings {
Self {
src_roots,
extra_paths: vec![],
custom_typeshed: None,
typeshed: None,
site_packages: SitePackages::Known(vec![]),
}
}

View File

@@ -1,3 +1,4 @@
use std::iter::FusedIterator;
use std::sync::Arc;
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
@@ -5,7 +6,7 @@ use salsa::plumbing::AsId;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_index::IndexVec;
use ruff_index::{IndexSlice, IndexVec};
use crate::module_name::ModuleName;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
@@ -16,6 +17,7 @@ use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolTable,
};
use crate::semantic_index::use_def::UseDefMap;
use crate::Db;
pub mod ast_ids;
@@ -27,7 +29,8 @@ pub mod symbol;
mod use_def;
pub(crate) use self::use_def::{
BindingWithConstraints, DeclarationWithConstraint, ScopedVisibilityConstraintId, UseDefMap,
BindingWithConstraints, BindingWithConstraintsIterator, DeclarationWithConstraint,
DeclarationsIterator, ScopedVisibilityConstraintId,
};
type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), FxBuildHasher>;
@@ -203,43 +206,20 @@ impl<'db> SemanticIndex<'db> {
/// Returns an iterator over the descendent scopes of `scope`.
#[allow(unused)]
pub(crate) fn descendent_scopes(
&self,
scope_id: FileScopeId,
) -> impl Iterator<Item = (FileScopeId, &Scope)> + '_ {
let scope = &self.scopes[scope_id];
let scopes = &self.scopes[scope.descendents.clone()];
let mut next_id = scope_id + 1;
scopes.iter().map(move |descendent| {
let result = (next_id, descendent);
next_id = next_id + 1;
result
})
pub(crate) fn descendent_scopes(&self, scope: FileScopeId) -> DescendentsIter {
DescendentsIter::new(self, scope)
}
/// Returns an iterator over the direct child scopes of `scope`.
#[allow(unused)]
pub(crate) fn child_scopes(
&self,
scope_id: FileScopeId,
) -> impl Iterator<Item = (FileScopeId, &Scope)> + '_ {
self.descendent_scopes(scope_id)
.filter(move |(_, scope)| scope.parent == Some(scope_id))
pub(crate) fn child_scopes(&self, scope: FileScopeId) -> ChildrenIter {
ChildrenIter::new(self, scope)
}
/// Returns an iterator over all ancestors of `scope`, starting with `scope` itself.
#[allow(unused)]
pub(crate) fn ancestor_scopes(
&self,
scope_id: FileScopeId,
) -> impl Iterator<Item = (FileScopeId, &Scope)> + '_ {
let mut next_id = Some(scope_id);
std::iter::from_fn(move || {
let current_id = next_id?;
let current = &self.scopes[current_id];
next_id = current.parent;
Some((current_id, current))
})
pub(crate) fn ancestor_scopes(&self, scope: FileScopeId) -> AncestorsIter {
AncestorsIter::new(self, scope)
}
/// Returns the [`Definition`] salsa ingredient for `definition_key`.
@@ -286,6 +266,98 @@ impl<'db> SemanticIndex<'db> {
}
}
pub struct AncestorsIter<'a> {
scopes: &'a IndexSlice<FileScopeId, Scope>,
next_id: Option<FileScopeId>,
}
impl<'a> AncestorsIter<'a> {
fn new(module_symbol_table: &'a SemanticIndex, start: FileScopeId) -> Self {
Self {
scopes: &module_symbol_table.scopes,
next_id: Some(start),
}
}
}
impl<'a> Iterator for AncestorsIter<'a> {
type Item = (FileScopeId, &'a Scope);
fn next(&mut self) -> Option<Self::Item> {
let current_id = self.next_id?;
let current = &self.scopes[current_id];
self.next_id = current.parent;
Some((current_id, current))
}
}
impl FusedIterator for AncestorsIter<'_> {}
pub struct DescendentsIter<'a> {
next_id: FileScopeId,
descendents: std::slice::Iter<'a, Scope>,
}
impl<'a> DescendentsIter<'a> {
fn new(symbol_table: &'a SemanticIndex, scope_id: FileScopeId) -> Self {
let scope = &symbol_table.scopes[scope_id];
let scopes = &symbol_table.scopes[scope.descendents.clone()];
Self {
next_id: scope_id + 1,
descendents: scopes.iter(),
}
}
}
impl<'a> Iterator for DescendentsIter<'a> {
type Item = (FileScopeId, &'a Scope);
fn next(&mut self) -> Option<Self::Item> {
let descendent = self.descendents.next()?;
let id = self.next_id;
self.next_id = self.next_id + 1;
Some((id, descendent))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.descendents.size_hint()
}
}
impl FusedIterator for DescendentsIter<'_> {}
impl ExactSizeIterator for DescendentsIter<'_> {}
pub struct ChildrenIter<'a> {
parent: FileScopeId,
descendents: DescendentsIter<'a>,
}
impl<'a> ChildrenIter<'a> {
fn new(module_symbol_table: &'a SemanticIndex, parent: FileScopeId) -> Self {
let descendents = DescendentsIter::new(module_symbol_table, parent);
Self {
parent,
descendents,
}
}
}
impl<'a> Iterator for ChildrenIter<'a> {
type Item = (FileScopeId, &'a Scope);
fn next(&mut self) -> Option<Self::Item> {
self.descendents
.find(|(_, scope)| scope.parent == Some(self.parent))
}
}
impl FusedIterator for ChildrenIter<'_> {}
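The refactor in this hunk swaps `impl Iterator` return types for named iterator structs (`AncestorsIter`, `DescendentsIter`, `ChildrenIter`). A standalone sketch of the same pattern, with hypothetical names: a nameable type can be stored in fields, referenced in other signatures, and marked `FusedIterator`, none of which is possible with an anonymous `impl Iterator`:

```rust
use std::iter::FusedIterator;

// Walks parent links upward through a scope tree, starting at `next`.
pub struct Ancestors<'a> {
    parents: &'a [Option<usize>],
    next: Option<usize>,
}

impl<'a> Iterator for Ancestors<'a> {
    type Item = usize;
    fn next(&mut self) -> Option<usize> {
        let current = self.next?;
        self.next = self.parents[current];
        Some(current)
    }
}

// Once `next` is `None` it stays `None`, so the iterator is fused.
impl FusedIterator for Ancestors<'_> {}

fn ancestors(parents: &[Option<usize>], start: usize) -> Ancestors<'_> {
    Ancestors { parents, next: Some(start) }
}

fn main() {
    // Scope tree: 0 is the root; 1's parent is 0; 2's parent is 1.
    let parents = [None, Some(0), Some(1)];
    let chain: Vec<usize> = ancestors(&parents, 2).collect();
    assert_eq!(chain, vec![2, 1, 0]);
}
```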
#[cfg(test)]
mod tests {
use ruff_db::files::{system_path_to_file, File};

View File

@@ -603,8 +603,8 @@ impl<'db> SemanticIndexBuilder<'db> {
let definition = self.add_definition(symbol, parameter);
// Insert a mapping from the inner Parameter node to the same definition. This
// ensures that calling `HasType::inferred_type` on the inner parameter returns
// Insert a mapping from the inner Parameter node to the same definition.
// This ensures that calling `HasTy::ty` on the inner parameter returns
// a valid type (and doesn't panic)
let existing_definition = self
.definitions_by_node

View File

@@ -255,8 +255,11 @@
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
//! visits a `StmtIf` node.
use self::symbol_state::{
BindingIdWithConstraintsIterator, ConstraintIdIterator, DeclarationIdIterator,
ScopedDefinitionId, SymbolBindings, SymbolDeclarations, SymbolState,
};
pub(crate) use self::symbol_state::{ScopedConstraintId, ScopedVisibilityConstraintId};
use self::symbol_state::{ScopedDefinitionId, SymbolBindings, SymbolDeclarations, SymbolState};
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::ScopedSymbolId;
@@ -283,7 +286,7 @@ pub(crate) struct UseDefMap<'db> {
all_constraints: AllConstraints<'db>,
/// Array of [`VisibilityConstraint`]s in this scope.
pub(crate) visibility_constraints: VisibilityConstraints<'db>,
visibility_constraints: VisibilityConstraints<'db>,
/// [`SymbolBindings`] reaching a [`ScopedUseId`].
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,
@@ -310,24 +313,21 @@ impl<'db> UseDefMap<'db> {
pub(crate) fn bindings_at_use(
&self,
use_id: ScopedUseId,
) -> impl Iterator<Item = BindingWithConstraints<'db, impl Iterator<Item = Constraint<'db>> + '_>> + '_
{
) -> BindingWithConstraintsIterator<'_, 'db> {
self.bindings_iterator(&self.bindings_by_use[use_id])
}
pub(crate) fn public_bindings(
&self,
symbol: ScopedSymbolId,
) -> impl Iterator<Item = BindingWithConstraints<'db, impl Iterator<Item = Constraint<'db>> + '_>> + '_
{
) -> BindingWithConstraintsIterator<'_, 'db> {
self.bindings_iterator(self.public_symbols[symbol].bindings())
}
pub(crate) fn bindings_at_declaration(
&self,
declaration: Definition<'db>,
) -> impl Iterator<Item = BindingWithConstraints<'db, impl Iterator<Item = Constraint<'db>> + '_>> + '_
{
) -> BindingWithConstraintsIterator<'_, 'db> {
if let SymbolDefinitions::Bindings(bindings) = &self.definitions_by_definition[&declaration]
{
self.bindings_iterator(bindings)
@@ -336,10 +336,10 @@ impl<'db> UseDefMap<'db> {
}
}
pub(crate) fn declarations_at_binding(
&self,
pub(crate) fn declarations_at_binding<'map>(
&'map self,
binding: Definition<'db>,
) -> impl Iterator<Item = DeclarationWithConstraint<'db>> + '_ {
) -> DeclarationsIterator<'map, 'db> {
if let SymbolDefinitions::Declarations(declarations) =
&self.definitions_by_definition[&binding]
{
@@ -349,10 +349,10 @@ impl<'db> UseDefMap<'db> {
}
}
pub(crate) fn public_declarations(
&self,
pub(crate) fn public_declarations<'map>(
&'map self,
symbol: ScopedSymbolId,
) -> impl Iterator<Item = DeclarationWithConstraint<'db>> + '_ {
) -> DeclarationsIterator<'map, 'db> {
let declarations = self.public_symbols[symbol].declarations();
self.declarations_iterator(declarations)
}
@@ -360,35 +360,24 @@ impl<'db> UseDefMap<'db> {
fn bindings_iterator<'map>(
&'map self,
bindings: &'map SymbolBindings,
) -> impl Iterator<
Item = BindingWithConstraints<'db, impl Iterator<Item = Constraint<'db>> + 'map>,
> + 'map {
bindings
.iter()
.map(|binding_id_with_constraints| BindingWithConstraints {
binding: self.all_definitions[binding_id_with_constraints.definition],
constraints: binding_id_with_constraints
.constraint_ids
.map(|constraint_id| self.all_constraints[constraint_id]),
visibility_constraint: binding_id_with_constraints.visibility_constraint,
})
) -> BindingWithConstraintsIterator<'map, 'db> {
BindingWithConstraintsIterator {
all_definitions: &self.all_definitions,
all_constraints: &self.all_constraints,
visibility_constraints: &self.visibility_constraints,
inner: bindings.iter(),
}
}
fn declarations_iterator<'map>(
&'map self,
declarations: &'map SymbolDeclarations,
) -> impl Iterator<Item = DeclarationWithConstraint<'db>> + 'map {
declarations.iter().map(
move |DeclarationIdWithConstraint {
definition,
visibility_constraint,
}| {
DeclarationWithConstraint {
declaration: self.all_definitions[definition],
visibility_constraint,
}
},
)
) -> DeclarationsIterator<'map, 'db> {
DeclarationsIterator {
all_definitions: &self.all_definitions,
visibility_constraints: &self.visibility_constraints,
inner: declarations.iter(),
}
}
}
@@ -399,17 +388,89 @@ enum SymbolDefinitions {
Declarations(SymbolDeclarations),
}
pub(crate) struct BindingWithConstraints<'db, I> {
#[derive(Debug)]
pub(crate) struct BindingWithConstraintsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Option<Definition<'db>>>,
all_constraints: &'map AllConstraints<'db>,
pub(crate) visibility_constraints: &'map VisibilityConstraints<'db>,
inner: BindingIdWithConstraintsIterator<'map>,
}
impl<'map, 'db> Iterator for BindingWithConstraintsIterator<'map, 'db> {
type Item = BindingWithConstraints<'map, 'db>;
fn next(&mut self) -> Option<Self::Item> {
let all_constraints = self.all_constraints;
self.inner
.next()
.map(|binding_id_with_constraints| BindingWithConstraints {
binding: self.all_definitions[binding_id_with_constraints.definition],
constraints: ConstraintsIterator {
all_constraints,
constraint_ids: binding_id_with_constraints.constraint_ids,
},
visibility_constraint: binding_id_with_constraints.visibility_constraint,
})
}
}
impl std::iter::FusedIterator for BindingWithConstraintsIterator<'_, '_> {}
pub(crate) struct BindingWithConstraints<'map, 'db> {
pub(crate) binding: Option<Definition<'db>>,
pub(crate) constraints: I,
pub(crate) constraints: ConstraintsIterator<'map, 'db>,
pub(crate) visibility_constraint: ScopedVisibilityConstraintId,
}
pub(crate) struct ConstraintsIterator<'map, 'db> {
all_constraints: &'map AllConstraints<'db>,
constraint_ids: ConstraintIdIterator<'map>,
}
impl<'db> Iterator for ConstraintsIterator<'_, 'db> {
type Item = Constraint<'db>;
fn next(&mut self) -> Option<Self::Item> {
self.constraint_ids
.next()
.map(|constraint_id| self.all_constraints[constraint_id])
}
}
impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}
pub(crate) struct DeclarationsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Option<Definition<'db>>>,
pub(crate) visibility_constraints: &'map VisibilityConstraints<'db>,
inner: DeclarationIdIterator<'map>,
}
pub(crate) struct DeclarationWithConstraint<'db> {
pub(crate) declaration: Option<Definition<'db>>,
pub(crate) visibility_constraint: ScopedVisibilityConstraintId,
}
impl<'db> Iterator for DeclarationsIterator<'_, 'db> {
type Item = DeclarationWithConstraint<'db>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(
|DeclarationIdWithConstraint {
definition,
visibility_constraint,
}| {
DeclarationWithConstraint {
declaration: self.all_definitions[definition],
visibility_constraint,
}
},
)
}
}
impl std::iter::FusedIterator for DeclarationsIterator<'_, '_> {}
/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {

View File

@@ -43,14 +43,12 @@
//!
//! Tracking live declarations is simpler, since constraints are not involved, but otherwise very
//! similar to tracking live bindings.
use crate::semantic_index::use_def::VisibilityConstraints;
use itertools::{EitherOrBoth, Itertools};
use super::bitset::{BitSet, BitSetIterator};
use ruff_index::newtype_index;
use smallvec::SmallVec;
use crate::semantic_index::use_def::bitset::BitSet;
use crate::semantic_index::use_def::VisibilityConstraints;
/// A newtype-index for a definition in a particular scope.
#[newtype_index]
pub(super) struct ScopedDefinitionId;
@@ -73,12 +71,14 @@ const INLINE_BINDING_BLOCKS: usize = 3;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live bindings of a symbol in a scope.
type Bindings = BitSet<INLINE_BINDING_BLOCKS>;
type BindingsIterator<'a> = BitSetIterator<'a, INLINE_BINDING_BLOCKS>;
/// Can reference this * 64 total declarations inline; more will fall back to the heap.
const INLINE_DECLARATION_BLOCKS: usize = 3;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live declarations of a symbol in a scope.
type Declarations = BitSet<INLINE_DECLARATION_BLOCKS>;
type DeclarationsIterator<'a> = BitSetIterator<'a, INLINE_DECLARATION_BLOCKS>;
/// Can reference this * 64 total constraints inline; more will fall back to the heap.
const INLINE_CONSTRAINT_BLOCKS: usize = 2;
@@ -94,6 +94,10 @@ type InlineConstraintArray = [Constraints; INLINE_BINDINGS_PER_SYMBOL];
/// One [`BitSet`] of applicable [`ScopedConstraintId`]s per live binding.
type ConstraintsPerBinding = SmallVec<InlineConstraintArray>;
/// Iterate over all constraints for a single binding.
type ConstraintsIterator<'a> = std::slice::Iter<'a, Constraints>;
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;
/// A newtype-index for a visibility constraint in a particular scope.
#[newtype_index]
pub(crate) struct ScopedVisibilityConstraintId;
@@ -116,18 +120,16 @@ type VisibilityConstraintPerDeclaration = SmallVec<InlineVisibilityConstraintsAr
/// One [`ScopedVisibilityConstraintId`] per live binding.
type VisibilityConstraintPerBinding = SmallVec<InlineVisibilityConstraintsArray>;
/// Iterator over the visibility constraints for all live bindings/declarations.
type VisibilityConstraintsIterator<'a> = std::slice::Iter<'a, ScopedVisibilityConstraintId>;
type VisibilityConstraintsIntoIterator = smallvec::IntoIter<InlineVisibilityConstraintsArray>;
/// Live declarations for a single symbol at some point in control flow, with their
/// corresponding visibility constraints.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolDeclarations {
/// [`BitSet`]: which declarations (as [`ScopedDefinitionId`]) can reach the current location?
///
/// Invariant: Because this is a `BitSet`, it can be viewed as a _sorted_ set of definition
/// IDs. The `visibility_constraints` field stores constraints for each definition. Therefore
/// that field must always have the same `len()` as `live_declarations`, and the elements
/// must appear in the same order. Effectively, this means that elements must always be added
/// in sorted order, or via a binary search that determines the correct place to insert new
/// constraints.
pub(crate) live_declarations: Declarations,
/// For each live declaration, which visibility constraint applies to it?
@@ -165,61 +167,19 @@ impl SymbolDeclarations {
}
/// Return an iterator over live declarations for this symbol.
pub(super) fn iter(&self) -> impl Iterator<Item = DeclarationIdWithConstraint> + '_ {
(self.live_declarations.iter())
.zip(self.visibility_constraints.iter())
.map(
|(declaration, &visibility_constraint)| DeclarationIdWithConstraint {
definition: ScopedDefinitionId::from_u32(declaration),
visibility_constraint,
},
)
}
fn merge(&mut self, b: Self, visibility_constraints: &mut VisibilityConstraints) {
let a = std::mem::take(self);
self.live_declarations = a.live_declarations.clone();
self.live_declarations.union(&b.live_declarations);
// Invariant: These zips are well-formed since we maintain an invariant that all of our
// fields are sets/vecs with the same length.
let a = (a.live_declarations.iter()).zip(a.visibility_constraints);
let b = (b.live_declarations.iter()).zip(b.visibility_constraints);
// Invariant: merge_join_by consumes the two iterators in sorted order, which ensures that
// the definition IDs and constraints line up correctly in the merged result. If a
// definition is found in both `a` and `b`, we compose the constraints from the two paths
// in an appropriate way (intersection for narrowing constraints; ternary OR for visibility
// constraints). If a definition is found in only one path, it is used as-is.
for zipped in a.merge_join_by(b, |(a_decl, _), (b_decl, _)| a_decl.cmp(b_decl)) {
match zipped {
EitherOrBoth::Both((_, a_vis_constraint), (_, b_vis_constraint)) => {
let vis_constraint = visibility_constraints
.add_or_constraint(a_vis_constraint, b_vis_constraint);
self.visibility_constraints.push(vis_constraint);
}
EitherOrBoth::Left((_, vis_constraint))
| EitherOrBoth::Right((_, vis_constraint)) => {
self.visibility_constraints.push(vis_constraint);
}
}
pub(super) fn iter(&self) -> DeclarationIdIterator {
DeclarationIdIterator {
declarations: self.live_declarations.iter(),
visibility_constraints: self.visibility_constraints.iter(),
}
}
}
/// Live bindings for a single symbol at some point in control flow. Each live binding comes
/// with a set of narrowing constraints and a visibility constraint.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolBindings {
/// [`BitSet`]: which bindings (as [`ScopedDefinitionId`]) can reach the current location?
///
/// Invariant: Because this is a `BitSet`, it can be viewed as a _sorted_ set of definition
/// IDs. The `constraints` and `visibility_constraints` field stores constraints for each
/// definition. Therefore those fields must always have the same `len()` as
/// `live_bindings`, and the elements must appear in the same order. Effectively, this means
/// that elements must always be added in sorted order, or via a binary search that determines
/// the correct place to insert new constraints.
live_bindings: Bindings,
/// For each live binding, which [`ScopedConstraintId`] apply?
@@ -275,75 +235,11 @@ impl SymbolBindings {
}
/// Iterate over currently live bindings for this symbol
pub(super) fn iter(
&self,
) -> impl Iterator<Item = BindingIdWithConstraints<impl Iterator<Item = ScopedConstraintId> + '_>> + '_
{
let i = (self.live_bindings.iter())
.zip(self.constraints.iter())
.zip(self.visibility_constraints.iter());
i.map(
|((def, constraints), visibility_constraint_id)| BindingIdWithConstraints {
definition: ScopedDefinitionId::from_u32(def),
constraint_ids: constraints.iter().map(ScopedConstraintId::from_u32),
visibility_constraint: *visibility_constraint_id,
},
)
}
fn merge(&mut self, mut b: Self, visibility_constraints: &mut VisibilityConstraints) {
let mut a = std::mem::take(self);
self.live_bindings = a.live_bindings.clone();
self.live_bindings.union(&b.live_bindings);
// Invariant: These zips are well-formed since we maintain an invariant that all of our
// fields are sets/vecs with the same length.
//
// Performance: We iterate over the `constraints` smallvecs via mut reference, because the
// individual elements are `BitSet`s (currently 24 bytes in size), and we don't want to
// move them by value multiple times during iteration. By iterating by reference, we only
// have to copy single pointers around. In the loop below, the `std::mem::take` calls
// specify precisely where we want to move them into the merged `constraints` smallvec.
//
// We don't need a similar optimization for `visibility_constraints`, since those elements
// are 32-bit IndexVec IDs, and so are already cheap to move/copy.
let a = (a.live_bindings.iter())
.zip(a.constraints.iter_mut())
.zip(a.visibility_constraints);
let b = (b.live_bindings.iter())
.zip(b.constraints.iter_mut())
.zip(b.visibility_constraints);
// Invariant: merge_join_by consumes the two iterators in sorted order, which ensures that
// the definition IDs and constraints line up correctly in the merged result. If a
// definition is found in both `a` and `b`, we compose the constraints from the two paths
// in an appropriate way (intersection for narrowing constraints; ternary OR for visibility
// constraints). If a definition is found in only one path, it is used as-is.
for zipped in a.merge_join_by(b, |((a_def, _), _), ((b_def, _), _)| a_def.cmp(b_def)) {
match zipped {
EitherOrBoth::Both(
((_, a_constraints), a_vis_constraint),
((_, b_constraints), b_vis_constraint),
) => {
// If the same definition is visible through both paths, any constraint
// that applies on only one path is irrelevant to the resulting type from
// unioning the two paths, so we intersect the constraints.
let constraints = a_constraints;
constraints.intersect(b_constraints);
self.constraints.push(std::mem::take(constraints));
// For visibility constraints, we merge them using a ternary OR operation:
let vis_constraint = visibility_constraints
.add_or_constraint(a_vis_constraint, b_vis_constraint);
self.visibility_constraints.push(vis_constraint);
}
EitherOrBoth::Left(((_, constraints), vis_constraint))
| EitherOrBoth::Right(((_, constraints), vis_constraint)) => {
self.constraints.push(std::mem::take(constraints));
self.visibility_constraints.push(vis_constraint);
}
}
pub(super) fn iter(&self) -> BindingIdWithConstraintsIterator {
BindingIdWithConstraintsIterator {
definitions: self.live_bindings.iter(),
constraints: self.constraints.iter(),
visibility_constraints: self.visibility_constraints.iter(),
}
}
}
@@ -407,9 +303,202 @@ impl SymbolState {
b: SymbolState,
visibility_constraints: &mut VisibilityConstraints,
) {
self.bindings.merge(b.bindings, visibility_constraints);
let mut a = Self {
bindings: SymbolBindings {
live_bindings: Bindings::default(),
constraints: ConstraintsPerBinding::default(),
visibility_constraints: VisibilityConstraintPerBinding::default(),
},
declarations: SymbolDeclarations {
live_declarations: self.declarations.live_declarations.clone(),
visibility_constraints: VisibilityConstraintPerDeclaration::default(),
},
};
std::mem::swap(&mut a, self);
self.declarations
.merge(b.declarations, visibility_constraints);
.live_declarations
.union(&b.declarations.live_declarations);
let mut a_defs_iter = a.bindings.live_bindings.iter();
let mut b_defs_iter = b.bindings.live_bindings.iter();
let mut a_constraints_iter = a.bindings.constraints.into_iter();
let mut b_constraints_iter = b.bindings.constraints.into_iter();
let mut a_vis_constraints_iter = a.bindings.visibility_constraints.into_iter();
let mut b_vis_constraints_iter = b.bindings.visibility_constraints.into_iter();
let mut opt_a_def: Option<u32> = a_defs_iter.next();
let mut opt_b_def: Option<u32> = b_defs_iter.next();
// Iterate through the definitions from `a` and `b`, always processing the lower definition
// ID first, and pushing each definition onto the merged `SymbolState` with its
// constraints. If a definition is found in both `a` and `b`, push it with the intersection
// of the constraints from the two paths; a constraint that applies from only one possible
// path is irrelevant.
// Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
let push = |def,
constraints_iter: &mut ConstraintsIntoIterator,
visibility_constraints_iter: &mut VisibilityConstraintsIntoIterator,
merged: &mut Self| {
merged.bindings.live_bindings.insert(def);
// SAFETY: we only ever create SymbolState using [`SymbolState::undefined`], which adds
// one "unbound" definition with corresponding narrowing and visibility constraints, or
// using [`SymbolState::record_binding`] or [`SymbolState::record_declaration`], which
// similarly add one definition with corresponding constraints. [`SymbolState::merge`]
// always pushes one definition and one constraint bitset and one visibility constraint
// together (just below), so the number of definitions and the number of constraints can
// never get out of sync.
// get out of sync.
let constraints = constraints_iter
.next()
.expect("definitions and constraints length mismatch");
let visibility_constraints = visibility_constraints_iter
.next()
.expect("definitions and visibility_constraints length mismatch");
merged.bindings.constraints.push(constraints);
merged
.bindings
.visibility_constraints
.push(visibility_constraints);
};
loop {
match (opt_a_def, opt_b_def) {
(Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
std::cmp::Ordering::Less => {
// Next definition ID is only in `a`, push it to `self` and advance `a`.
push(
a_def,
&mut a_constraints_iter,
&mut a_vis_constraints_iter,
self,
);
opt_a_def = a_defs_iter.next();
}
std::cmp::Ordering::Greater => {
// Next definition ID is only in `b`, push it to `self` and advance `b`.
push(
b_def,
&mut b_constraints_iter,
&mut b_vis_constraints_iter,
self,
);
opt_b_def = b_defs_iter.next();
}
std::cmp::Ordering::Equal => {
// Next definition is in both; push to `self` and intersect constraints.
push(
a_def,
&mut b_constraints_iter,
&mut b_vis_constraints_iter,
self,
);
// SAFETY: see comment in `push` above.
let a_constraints = a_constraints_iter
.next()
.expect("definitions and constraints length mismatch");
let current_constraints = self.bindings.constraints.last_mut().unwrap();
// If the same definition is visible through both paths, any constraint
// that applies on only one path is irrelevant to the resulting type from
// unioning the two paths, so we intersect the constraints.
current_constraints.intersect(&a_constraints);
// For visibility constraints, we merge them using a ternary OR operation:
let a_vis_constraint = a_vis_constraints_iter
.next()
.expect("visibility_constraints length mismatch");
let current_vis_constraint =
self.bindings.visibility_constraints.last_mut().unwrap();
*current_vis_constraint = visibility_constraints
.add_or_constraint(*current_vis_constraint, a_vis_constraint);
opt_a_def = a_defs_iter.next();
opt_b_def = b_defs_iter.next();
}
},
(Some(a_def), None) => {
// We've exhausted `b`, just push the def from `a` and move on to the next.
push(
a_def,
&mut a_constraints_iter,
&mut a_vis_constraints_iter,
self,
);
opt_a_def = a_defs_iter.next();
}
(None, Some(b_def)) => {
// We've exhausted `a`, just push the def from `b` and move on to the next.
push(
b_def,
&mut b_constraints_iter,
&mut b_vis_constraints_iter,
self,
);
opt_b_def = b_defs_iter.next();
}
(None, None) => break,
}
}
// Same as above, but for declarations.
let mut a_decls_iter = a.declarations.live_declarations.iter();
let mut b_decls_iter = b.declarations.live_declarations.iter();
let mut a_vis_constraints_iter = a.declarations.visibility_constraints.into_iter();
let mut b_vis_constraints_iter = b.declarations.visibility_constraints.into_iter();
let mut opt_a_decl: Option<u32> = a_decls_iter.next();
let mut opt_b_decl: Option<u32> = b_decls_iter.next();
let push = |vis_constraints_iter: &mut VisibilityConstraintsIntoIterator,
merged: &mut Self| {
let vis_constraints = vis_constraints_iter
.next()
.expect("declarations and visibility_constraints length mismatch");
merged
.declarations
.visibility_constraints
.push(vis_constraints);
};
loop {
match (opt_a_decl, opt_b_decl) {
(Some(a_decl), Some(b_decl)) => match a_decl.cmp(&b_decl) {
std::cmp::Ordering::Less => {
push(&mut a_vis_constraints_iter, self);
opt_a_decl = a_decls_iter.next();
}
std::cmp::Ordering::Greater => {
push(&mut b_vis_constraints_iter, self);
opt_b_decl = b_decls_iter.next();
}
std::cmp::Ordering::Equal => {
push(&mut b_vis_constraints_iter, self);
let a_vis_constraint = a_vis_constraints_iter
.next()
.expect("declarations and visibility_constraints length mismatch");
let current = self.declarations.visibility_constraints.last_mut().unwrap();
*current =
visibility_constraints.add_or_constraint(*current, a_vis_constraint);
opt_a_decl = a_decls_iter.next();
opt_b_decl = b_decls_iter.next();
}
},
(Some(_), None) => {
push(&mut a_vis_constraints_iter, self);
opt_a_decl = a_decls_iter.next();
}
(None, Some(_)) => {
push(&mut b_vis_constraints_iter, self);
opt_b_decl = b_decls_iter.next();
}
(None, None) => break,
}
}
}
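The loop above is a hand-rolled merge-join over two sorted definition-ID streams with parallel constraint columns. A standalone sketch of the same strategy on plain slices and `BTreeSet`s (the real code uses `BitSet`s and `SmallVec`s, and additionally folds in visibility constraints):

```rust
use std::collections::BTreeSet;

// Merge two sorted (id, constraints) streams: keep every id from either
// side; when the same id is live on both paths, intersect the constraints,
// since a constraint holding on only one path is irrelevant to the union.
fn merge(
    a: &[(u32, BTreeSet<u32>)],
    b: &[(u32, BTreeSet<u32>)],
) -> Vec<(u32, BTreeSet<u32>)> {
    let (mut ia, mut ib) = (a.iter(), b.iter());
    let (mut next_a, mut next_b) = (ia.next(), ib.next());
    let mut merged = Vec::new();
    loop {
        match (next_a, next_b) {
            (Some((da, ca)), Some((db, cb))) => match da.cmp(db) {
                std::cmp::Ordering::Less => {
                    merged.push((*da, ca.clone()));
                    next_a = ia.next();
                }
                std::cmp::Ordering::Greater => {
                    merged.push((*db, cb.clone()));
                    next_b = ib.next();
                }
                std::cmp::Ordering::Equal => {
                    merged.push((*da, ca.intersection(cb).cloned().collect()));
                    next_a = ia.next();
                    next_b = ib.next();
                }
            },
            (Some((d, c)), None) => {
                merged.push((*d, c.clone()));
                next_a = ia.next();
            }
            (None, Some((d, c))) => {
                merged.push((*d, c.clone()));
                next_b = ib.next();
            }
            (None, None) => break,
        }
    }
    merged
}

fn main() {
    let a = vec![(0, BTreeSet::from([1, 2])), (2, BTreeSet::from([3]))];
    let b = vec![(0, BTreeSet::from([2])), (1, BTreeSet::from([4]))];
    let merged = merge(&a, &b);
    // Binding 0 is live on both paths: constraints intersect to {2}.
    assert_eq!(merged[0], (0, BTreeSet::from([2])));
    assert_eq!(merged.len(), 3);
}
```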
pub(super) fn bindings(&self) -> &SymbolBindings {
@@ -425,12 +514,61 @@ impl SymbolState {
/// narrowing constraints ([`ScopedConstraintId`]) and a corresponding
/// visibility constraint ([`ScopedVisibilityConstraintId`]).
#[derive(Debug)]
pub(super) struct BindingIdWithConstraints<I> {
pub(super) struct BindingIdWithConstraints<'map> {
pub(super) definition: ScopedDefinitionId,
pub(super) constraint_ids: I,
pub(super) constraint_ids: ConstraintIdIterator<'map>,
pub(super) visibility_constraint: ScopedVisibilityConstraintId,
}
#[derive(Debug)]
pub(super) struct BindingIdWithConstraintsIterator<'map> {
definitions: BindingsIterator<'map>,
constraints: ConstraintsIterator<'map>,
visibility_constraints: VisibilityConstraintsIterator<'map>,
}
impl<'map> Iterator for BindingIdWithConstraintsIterator<'map> {
type Item = BindingIdWithConstraints<'map>;
fn next(&mut self) -> Option<Self::Item> {
match (
self.definitions.next(),
self.constraints.next(),
self.visibility_constraints.next(),
) {
(None, None, None) => None,
(Some(def), Some(constraints), Some(visibility_constraint_id)) => {
Some(BindingIdWithConstraints {
definition: ScopedDefinitionId::from_u32(def),
constraint_ids: ConstraintIdIterator {
wrapped: constraints.iter(),
},
visibility_constraint: *visibility_constraint_id,
})
}
// SAFETY: see above.
_ => unreachable!("definitions and constraints length mismatch"),
}
}
}
impl std::iter::FusedIterator for BindingIdWithConstraintsIterator<'_> {}
#[derive(Debug)]
pub(super) struct ConstraintIdIterator<'a> {
wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
}
impl Iterator for ConstraintIdIterator<'_> {
type Item = ScopedConstraintId;
fn next(&mut self) -> Option<Self::Item> {
self.wrapped.next().map(ScopedConstraintId::from_u32)
}
}
impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}
/// A single declaration (as [`ScopedDefinitionId`]) with a corresponding
/// visibility constraint ([`ScopedVisibilityConstraintId`]).
#[derive(Debug)]
@@ -439,6 +577,31 @@ pub(super) struct DeclarationIdWithConstraint {
pub(super) visibility_constraint: ScopedVisibilityConstraintId,
}
pub(super) struct DeclarationIdIterator<'map> {
pub(crate) declarations: DeclarationsIterator<'map>,
pub(crate) visibility_constraints: VisibilityConstraintsIterator<'map>,
}
impl Iterator for DeclarationIdIterator<'_> {
type Item = DeclarationIdWithConstraint;
fn next(&mut self) -> Option<Self::Item> {
match (self.declarations.next(), self.visibility_constraints.next()) {
(None, None) => None,
(Some(declaration), Some(&visibility_constraint)) => {
Some(DeclarationIdWithConstraint {
definition: ScopedDefinitionId::from_u32(declaration),
visibility_constraint,
})
}
// SAFETY: see above.
_ => unreachable!("declarations and visibility_constraints length mismatch"),
}
}
}
impl std::iter::FusedIterator for DeclarationIdIterator<'_> {}
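Both iterators in this hunk walk parallel columns whose lengths are kept equal by the invariants documented on `SymbolBindings`/`SymbolDeclarations`; a length mismatch is an internal bug, not a normal end of iteration. A minimal standalone sketch of that lockstep pattern, with hypothetical slice inputs:

```rust
// Zip two parallel columns that must stay the same length, and treat a
// mismatch as a violated invariant rather than silently truncating
// (which is what `Iterator::zip` would do).
fn pairs<'a>(
    ids: &'a [u32],
    constraints: &'a [&'a str],
) -> impl Iterator<Item = (u32, &'a str)> + 'a {
    let mut ids = ids.iter();
    let mut constraints = constraints.iter();
    std::iter::from_fn(move || match (ids.next(), constraints.next()) {
        (None, None) => None,
        (Some(&id), Some(&constraint)) => Some((id, constraint)),
        _ => unreachable!("ids and constraints length mismatch"),
    })
}

fn main() {
    let out: Vec<_> = pairs(&[0, 1], &["always", "never"]).collect();
    assert_eq!(out, vec![(0, "always"), (1, "never")]);
}
```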
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -8,7 +8,7 @@ use crate::module_name::ModuleName;
use crate::module_resolver::{resolve_module, Module};
use crate::semantic_index::ast_ids::HasScopedExpressionId;
use crate::semantic_index::semantic_index;
use crate::types::{binding_type, infer_scope_types, Type};
use crate::types::{binding_ty, infer_scope_types, Type};
use crate::Db;
pub struct SemanticModel<'db> {
@@ -40,117 +40,117 @@ impl<'db> SemanticModel<'db> {
}
}
pub trait HasType {
pub trait HasTy {
/// Returns the inferred type of `self`.
///
/// ## Panics
/// May panic if `self` is from another file than `model`.
fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db>;
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db>;
}
impl HasType for ast::ExprRef<'_> {
fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
impl HasTy for ast::ExprRef<'_> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let file_scope = index.expression_scope_id(*self);
let scope = file_scope.to_scope_id(model.db, model.file);
let expression_id = self.scoped_expression_id(model.db, scope);
infer_scope_types(model.db, scope).expression_type(expression_id)
infer_scope_types(model.db, scope).expression_ty(expression_id)
}
}
macro_rules! impl_expression_has_type {
macro_rules! impl_expression_has_ty {
($ty: ty) => {
impl HasType for $ty {
impl HasTy for $ty {
#[inline]
fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let expression_ref = ExprRef::from(self);
expression_ref.inferred_type(model)
expression_ref.ty(model)
}
}
};
}
impl_expression_has_type!(ast::ExprBoolOp);
impl_expression_has_type!(ast::ExprNamed);
impl_expression_has_type!(ast::ExprBinOp);
impl_expression_has_type!(ast::ExprUnaryOp);
impl_expression_has_type!(ast::ExprLambda);
impl_expression_has_type!(ast::ExprIf);
impl_expression_has_type!(ast::ExprDict);
impl_expression_has_type!(ast::ExprSet);
impl_expression_has_type!(ast::ExprListComp);
impl_expression_has_type!(ast::ExprSetComp);
impl_expression_has_type!(ast::ExprDictComp);
impl_expression_has_type!(ast::ExprGenerator);
impl_expression_has_type!(ast::ExprAwait);
impl_expression_has_type!(ast::ExprYield);
impl_expression_has_type!(ast::ExprYieldFrom);
impl_expression_has_type!(ast::ExprCompare);
impl_expression_has_type!(ast::ExprCall);
impl_expression_has_type!(ast::ExprFString);
impl_expression_has_type!(ast::ExprStringLiteral);
impl_expression_has_type!(ast::ExprBytesLiteral);
impl_expression_has_type!(ast::ExprNumberLiteral);
impl_expression_has_type!(ast::ExprBooleanLiteral);
impl_expression_has_type!(ast::ExprNoneLiteral);
impl_expression_has_type!(ast::ExprEllipsisLiteral);
impl_expression_has_type!(ast::ExprAttribute);
impl_expression_has_type!(ast::ExprSubscript);
impl_expression_has_type!(ast::ExprStarred);
impl_expression_has_type!(ast::ExprName);
impl_expression_has_type!(ast::ExprList);
impl_expression_has_type!(ast::ExprTuple);
impl_expression_has_type!(ast::ExprSlice);
impl_expression_has_type!(ast::ExprIpyEscapeCommand);
impl_expression_has_ty!(ast::ExprBoolOp);
impl_expression_has_ty!(ast::ExprNamed);
impl_expression_has_ty!(ast::ExprBinOp);
impl_expression_has_ty!(ast::ExprUnaryOp);
impl_expression_has_ty!(ast::ExprLambda);
impl_expression_has_ty!(ast::ExprIf);
impl_expression_has_ty!(ast::ExprDict);
impl_expression_has_ty!(ast::ExprSet);
impl_expression_has_ty!(ast::ExprListComp);
impl_expression_has_ty!(ast::ExprSetComp);
impl_expression_has_ty!(ast::ExprDictComp);
impl_expression_has_ty!(ast::ExprGenerator);
impl_expression_has_ty!(ast::ExprAwait);
impl_expression_has_ty!(ast::ExprYield);
impl_expression_has_ty!(ast::ExprYieldFrom);
impl_expression_has_ty!(ast::ExprCompare);
impl_expression_has_ty!(ast::ExprCall);
impl_expression_has_ty!(ast::ExprFString);
impl_expression_has_ty!(ast::ExprStringLiteral);
impl_expression_has_ty!(ast::ExprBytesLiteral);
impl_expression_has_ty!(ast::ExprNumberLiteral);
impl_expression_has_ty!(ast::ExprBooleanLiteral);
impl_expression_has_ty!(ast::ExprNoneLiteral);
impl_expression_has_ty!(ast::ExprEllipsisLiteral);
impl_expression_has_ty!(ast::ExprAttribute);
impl_expression_has_ty!(ast::ExprSubscript);
impl_expression_has_ty!(ast::ExprStarred);
impl_expression_has_ty!(ast::ExprName);
impl_expression_has_ty!(ast::ExprList);
impl_expression_has_ty!(ast::ExprTuple);
impl_expression_has_ty!(ast::ExprSlice);
impl_expression_has_ty!(ast::ExprIpyEscapeCommand);
impl HasType for ast::Expr {
fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
impl HasTy for ast::Expr {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
match self {
Expr::BoolOp(inner) => inner.inferred_type(model),
Expr::Named(inner) => inner.inferred_type(model),
Expr::BinOp(inner) => inner.inferred_type(model),
Expr::UnaryOp(inner) => inner.inferred_type(model),
Expr::Lambda(inner) => inner.inferred_type(model),
Expr::If(inner) => inner.inferred_type(model),
Expr::Dict(inner) => inner.inferred_type(model),
Expr::Set(inner) => inner.inferred_type(model),
Expr::ListComp(inner) => inner.inferred_type(model),
Expr::SetComp(inner) => inner.inferred_type(model),
Expr::DictComp(inner) => inner.inferred_type(model),
Expr::Generator(inner) => inner.inferred_type(model),
Expr::Await(inner) => inner.inferred_type(model),
Expr::Yield(inner) => inner.inferred_type(model),
Expr::YieldFrom(inner) => inner.inferred_type(model),
Expr::Compare(inner) => inner.inferred_type(model),
Expr::Call(inner) => inner.inferred_type(model),
Expr::FString(inner) => inner.inferred_type(model),
Expr::StringLiteral(inner) => inner.inferred_type(model),
Expr::BytesLiteral(inner) => inner.inferred_type(model),
Expr::NumberLiteral(inner) => inner.inferred_type(model),
Expr::BooleanLiteral(inner) => inner.inferred_type(model),
Expr::NoneLiteral(inner) => inner.inferred_type(model),
Expr::EllipsisLiteral(inner) => inner.inferred_type(model),
Expr::Attribute(inner) => inner.inferred_type(model),
Expr::Subscript(inner) => inner.inferred_type(model),
Expr::Starred(inner) => inner.inferred_type(model),
Expr::Name(inner) => inner.inferred_type(model),
Expr::List(inner) => inner.inferred_type(model),
Expr::Tuple(inner) => inner.inferred_type(model),
Expr::Slice(inner) => inner.inferred_type(model),
Expr::IpyEscapeCommand(inner) => inner.inferred_type(model),
Expr::BoolOp(inner) => inner.ty(model),
Expr::Named(inner) => inner.ty(model),
Expr::BinOp(inner) => inner.ty(model),
Expr::UnaryOp(inner) => inner.ty(model),
Expr::Lambda(inner) => inner.ty(model),
Expr::If(inner) => inner.ty(model),
Expr::Dict(inner) => inner.ty(model),
Expr::Set(inner) => inner.ty(model),
Expr::ListComp(inner) => inner.ty(model),
Expr::SetComp(inner) => inner.ty(model),
Expr::DictComp(inner) => inner.ty(model),
Expr::Generator(inner) => inner.ty(model),
Expr::Await(inner) => inner.ty(model),
Expr::Yield(inner) => inner.ty(model),
Expr::YieldFrom(inner) => inner.ty(model),
Expr::Compare(inner) => inner.ty(model),
Expr::Call(inner) => inner.ty(model),
Expr::FString(inner) => inner.ty(model),
Expr::StringLiteral(inner) => inner.ty(model),
Expr::BytesLiteral(inner) => inner.ty(model),
Expr::NumberLiteral(inner) => inner.ty(model),
Expr::BooleanLiteral(inner) => inner.ty(model),
Expr::NoneLiteral(inner) => inner.ty(model),
Expr::EllipsisLiteral(inner) => inner.ty(model),
Expr::Attribute(inner) => inner.ty(model),
Expr::Subscript(inner) => inner.ty(model),
Expr::Starred(inner) => inner.ty(model),
Expr::Name(inner) => inner.ty(model),
Expr::List(inner) => inner.ty(model),
Expr::Tuple(inner) => inner.ty(model),
Expr::Slice(inner) => inner.ty(model),
Expr::IpyEscapeCommand(inner) => inner.ty(model),
}
}
}
macro_rules! impl_binding_has_ty {
($ty: ty) => {
impl HasType for $ty {
impl HasTy for $ty {
#[inline]
fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let binding = index.definition(self);
binding_type(model.db, binding)
binding_ty(model.db, binding)
}
}
};
@@ -168,10 +168,10 @@ mod tests {
use ruff_db::parsed::parsed_module;
use crate::db::tests::TestDbBuilder;
use crate::{HasType, SemanticModel};
use crate::{HasTy, SemanticModel};
#[test]
fn function_type() -> anyhow::Result<()> {
fn function_ty() -> anyhow::Result<()> {
let db = TestDbBuilder::new()
.with_file("/src/foo.py", "def test(): pass")
.build()?;
@@ -182,7 +182,7 @@ mod tests {
let function = ast.suite()[0].as_function_def_stmt().unwrap();
let model = SemanticModel::new(&db, foo);
let ty = function.inferred_type(&model);
let ty = function.ty(&model);
assert!(ty.is_function_literal());
@@ -190,7 +190,7 @@ mod tests {
}
#[test]
fn class_type() -> anyhow::Result<()> {
fn class_ty() -> anyhow::Result<()> {
let db = TestDbBuilder::new()
.with_file("/src/foo.py", "class Test: pass")
.build()?;
@@ -201,7 +201,7 @@ mod tests {
let class = ast.suite()[0].as_class_def_stmt().unwrap();
let model = SemanticModel::new(&db, foo);
let ty = class.inferred_type(&model);
let ty = class.ty(&model);
assert!(ty.is_class_literal());
@@ -209,7 +209,7 @@ mod tests {
}
#[test]
fn alias_type() -> anyhow::Result<()> {
fn alias_ty() -> anyhow::Result<()> {
let db = TestDbBuilder::new()
.with_file("/src/foo.py", "class Test: pass")
.with_file("/src/bar.py", "from foo import Test")
@@ -222,7 +222,7 @@ mod tests {
let import = ast.suite()[0].as_import_from_stmt().unwrap();
let alias = &import.names[0];
let model = SemanticModel::new(&db, bar);
let ty = alias.inferred_type(&model);
let ty = alias.ty(&model);
assert!(ty.is_class_literal());

View File

@@ -163,17 +163,6 @@ fn check_unknown_rule(context: &mut CheckSuppressionsContext) {
format_args!("Unknown rule `{rule}`"),
);
}
GetLintError::PrefixedWithCategory {
prefixed,
suggestion,
} => {
context.report_lint(
&UNKNOWN_RULE,
unknown.range,
format_args!("Unknown rule `{prefixed}`. Did you mean `{suggestion}`?"),
);
}
};
}
}
@@ -214,7 +203,7 @@ fn check_unused_suppressions(context: &mut CheckSuppressionsContext) {
);
// Collect all suppressions that are unused after type-checking.
for suppression in all.iter() {
for suppression in all {
if context.diagnostics.is_used(suppression.id()) {
continue;
}
@@ -401,11 +390,23 @@ impl Suppressions {
})
}
fn iter(&self) -> impl Iterator<Item = &Suppression> + '_ {
fn iter(&self) -> SuppressionsIter {
self.file.iter().chain(&self.line)
}
}
pub(crate) type SuppressionsIter<'a> =
std::iter::Chain<std::slice::Iter<'a, Suppression>, std::slice::Iter<'a, Suppression>>;
impl<'a> IntoIterator for &'a Suppressions {
type Item = &'a Suppression;
type IntoIter = SuppressionsIter<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
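Naming the chained iterator through the `SuppressionsIter` alias is what makes the `IntoIterator` impl for `&Suppressions` possible, since `impl Trait` cannot appear in an associated type on stable Rust. A standalone sketch of the same pattern on a hypothetical `Pair` type:

```rust
// Name the concrete chained iterator so it can be used as an associated type.
type PairIter<'a> = std::iter::Chain<std::slice::Iter<'a, i32>, std::slice::Iter<'a, i32>>;

struct Pair {
    first: Vec<i32>,
    second: Vec<i32>,
}

impl Pair {
    fn iter(&self) -> PairIter<'_> {
        self.first.iter().chain(&self.second)
    }
}

impl<'a> IntoIterator for &'a Pair {
    type Item = &'a i32;
    type IntoIter = PairIter<'a>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

fn main() {
    let pair = Pair { first: vec![1, 2], second: vec![3] };
    // `for x in &pair { ... }` now works directly.
    let collected: Vec<i32> = (&pair).into_iter().copied().collect();
    assert_eq!(collected, vec![1, 2, 3]);
}
```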
/// A `type: ignore` or `knot: ignore` suppression.
///
/// Suppression comments that suppress multiple codes
@@ -764,9 +765,8 @@ impl<'src> SuppressionParser<'src> {
fn eat_word(&mut self) -> bool {
if self.cursor.eat_if(char::is_alphabetic) {
// Allow `:` for better error recovery when someone uses `lint:code` instead of just `code`.
self.cursor
.eat_while(|c| c.is_alphanumeric() || matches!(c, '_' | '-' | ':'));
.eat_while(|c| c.is_alphanumeric() || matches!(c, '_' | '-'));
true
} else {
false

View File

@@ -85,14 +85,6 @@ impl<'db> Symbol<'db> {
Symbol::Unbound => self,
}
}
#[must_use]
pub(crate) fn map_type(self, f: impl FnOnce(Type<'db>) -> Type<'db>) -> Symbol<'db> {
match self {
Symbol::Type(ty, boundness) => Symbol::Type(f(ty), boundness),
Symbol::Unbound => Symbol::Unbound,
}
}
}
#[cfg(test)]

View File

@@ -23,12 +23,12 @@ pub use self::subclass_of::SubclassOfType;
use crate::module_name::ModuleName;
use crate::module_resolver::{file_to_module, resolve_module, KnownModule};
use crate::semantic_index::ast_ids::HasScopedExpressionId;
use crate::semantic_index::constraint::Constraint;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{self as symbol, ScopeId, ScopedSymbolId};
use crate::semantic_index::{
global_scope, imported_modules, semantic_index, symbol_table, use_def_map,
BindingWithConstraints, DeclarationWithConstraint, UseDefMap,
BindingWithConstraints, BindingWithConstraintsIterator, DeclarationWithConstraint,
DeclarationsIterator,
};
use crate::stdlib::{builtins_symbol, known_module_symbol, typing_extensions_symbol};
use crate::suppression::check_suppressions;
@@ -80,55 +80,30 @@ pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
diagnostics
}
/// Computes a possibly-widened type `Unknown | T_inferred` from the inferred type `T_inferred`
/// of a symbol, unless the type is a known-instance type (e.g. `typing.Any`) or the symbol is
/// considered non-modifiable (e.g. when the symbol is `@Final`). We need this for public uses
/// of symbols that have no declared type.
fn widen_type_for_undeclared_public_symbol<'db>(
db: &'db dyn Db,
inferred: Symbol<'db>,
is_considered_non_modifiable: bool,
) -> Symbol<'db> {
// We special-case known-instance types here since symbols like `typing.Any` are typically
// not declared in the stubs (e.g. `Any = object()`), but we still want to treat them as
// such.
let is_known_instance = inferred
.ignore_possibly_unbound()
.is_some_and(|ty| matches!(ty, Type::KnownInstance(_)));
if is_considered_non_modifiable || is_known_instance {
inferred
} else {
inferred.map_type(|ty| UnionType::from_elements(db, [Type::unknown(), ty]))
}
}
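A standalone sketch of the widening rule described in the doc comment above, using a hypothetical `Ty` enum (the real code goes through `Symbol` and `UnionType::from_elements`):

```rust
// Widening for undeclared public symbols: report `Unknown | T` unless the
// symbol is non-modifiable (`Final`, `__slots__`) or a known-instance type
// such as `typing.Any`.
#[derive(Debug, PartialEq)]
enum Ty {
    Unknown,
    Int,
    KnownInstanceAny,
    Union(Vec<Ty>),
}

fn widen_undeclared(inferred: Ty, non_modifiable: bool) -> Ty {
    let is_known_instance = matches!(&inferred, Ty::KnownInstanceAny);
    if non_modifiable || is_known_instance {
        inferred
    } else {
        Ty::Union(vec![Ty::Unknown, inferred])
    }
}

fn main() {
    // `x = 1` with no declaration: the public type is `Unknown | int`,
    // because another module could reassign `x` to anything.
    assert_eq!(
        widen_undeclared(Ty::Int, false),
        Ty::Union(vec![Ty::Unknown, Ty::Int])
    );
    // `x: Final = 1` cannot be modified externally: keep the narrow type.
    assert_eq!(widen_undeclared(Ty::Int, true), Ty::Int);
}
```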
/// Infer the public type of a symbol (its type as seen from outside its scope).
fn symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str) -> Symbol<'db> {
#[salsa::tracked]
fn symbol_by_id<'db>(
db: &'db dyn Db,
scope: ScopeId<'db>,
is_dunder_slots: bool,
symbol_id: ScopedSymbolId,
symbol: ScopedSymbolId,
) -> Symbol<'db> {
let use_def = use_def_map(db, scope);
// If the symbol is declared, the public type is based on declarations; otherwise, it's based
// on inference from bindings.
let declarations = use_def.public_declarations(symbol_id);
let declared = symbol_from_declarations(db, use_def.as_ref(), declarations);
let is_final = declared.as_ref().is_ok_and(SymbolAndQualifiers::is_final);
let declared = declared.map(|SymbolAndQualifiers(symbol, _)| symbol);
let declarations = use_def.public_declarations(symbol);
let declared =
symbol_from_declarations(db, declarations).map(|SymbolAndQualifiers(ty, _)| ty);
match declared {
// Symbol is declared, trust the declared type
Ok(symbol @ Symbol::Type(_, Boundness::Bound)) => symbol,
// Symbol is possibly declared
Ok(Symbol::Type(declared_ty, Boundness::PossiblyUnbound)) => {
let bindings = use_def.public_bindings(symbol_id);
let inferred = symbol_from_bindings(db, use_def.as_ref(), bindings);
let bindings = use_def.public_bindings(symbol);
let inferred = symbol_from_bindings(db, bindings);
match inferred {
// Symbol is possibly undeclared and definitely unbound
@@ -145,18 +120,16 @@ fn symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str) -> Symbol<'db>
),
}
}
// Symbol is undeclared, return the union of `Unknown` with the inferred type
// Symbol is undeclared, return the inferred type
Ok(Symbol::Unbound) => {
let bindings = use_def.public_bindings(symbol_id);
let inferred = symbol_from_bindings(db, use_def.as_ref(), bindings);
widen_type_for_undeclared_public_symbol(db, inferred, is_dunder_slots || is_final)
let bindings = use_def.public_bindings(symbol);
symbol_from_bindings(db, bindings)
}
// Symbol has conflicting declared types
// Symbol is possibly undeclared
Err((declared_ty, _)) => {
// Intentionally ignore conflicting declared types; that's not our problem,
// it's the problem of the module we are importing from.
declared_ty.inner_type().into()
declared_ty.inner_ty().into()
}
}
@@ -204,15 +177,9 @@ fn symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str) -> Symbol<'db>
}
let table = symbol_table(db, scope);
// `__slots__` is a symbol with special behavior in Python's runtime. It can be
// modified externally, but those changes do not take effect. We therefore issue
// a diagnostic if we see it being modified externally. In type inference, we
// can assign a "narrow" type to it even if it is not *declared*. This means, we
// do not have to call [`widen_type_for_undeclared_public_symbol`].
let is_dunder_slots = name == "__slots__";
table
.symbol_id_by_name(name)
.map(|symbol| symbol_by_id(db, scope, is_dunder_slots, symbol))
.map(|symbol| symbol_by_id(db, scope, symbol))
.unwrap_or(Symbol::Unbound)
}
@@ -276,15 +243,15 @@ pub(crate) fn global_symbol<'db>(db: &'db dyn Db, file: File, name: &str) -> Sym
}
/// Infer the type of a binding.
pub(crate) fn binding_type<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
pub(crate) fn binding_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
let inference = infer_definition_types(db, definition);
inference.binding_type(definition)
inference.binding_ty(definition)
}
/// Infer the type of a declaration.
fn declaration_type<'db>(db: &'db dyn Db, definition: Definition<'db>) -> TypeAndQualifiers<'db> {
fn declaration_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> TypeAndQualifiers<'db> {
let inference = infer_definition_types(db, definition);
inference.declaration_type(definition)
inference.declaration_ty(definition)
}
/// Infer the type of a (possibly deferred) sub-expression of a [`Definition`].
@@ -293,7 +260,7 @@ fn declaration_type<'db>(db: &'db dyn Db, definition: Definition<'db>) -> TypeAn
///
/// ## Panics
/// If the given expression is not a sub-expression of the given [`Definition`].
fn definition_expression_type<'db>(
fn definition_expression_ty<'db>(
db: &'db dyn Db,
definition: Definition<'db>,
expression: &ast::Expr,
@@ -306,14 +273,14 @@ fn definition_expression_type<'db>(
if scope == definition.scope(db) {
// expression is in the definition scope
let inference = infer_definition_types(db, definition);
if let Some(ty) = inference.try_expression_type(expr_id) {
if let Some(ty) = inference.try_expression_ty(expr_id) {
ty
} else {
infer_deferred_types(db, definition).expression_type(expr_id)
infer_deferred_types(db, definition).expression_ty(expr_id)
}
} else {
// expression is in a type-params sub-scope
infer_scope_types(db, scope).expression_type(expr_id)
infer_scope_types(db, scope).expression_ty(expr_id)
}
}
@@ -321,14 +288,11 @@ fn definition_expression_type<'db>(
/// together with boundness information in a [`Symbol`].
///
/// The type will be a union if there are multiple bindings with different types.
fn symbol_from_bindings<'map, 'db: 'map>(
fn symbol_from_bindings<'db>(
db: &'db dyn Db,
use_def: &UseDefMap<'map>,
bindings_with_constraints: impl Iterator<
Item = BindingWithConstraints<'db, impl Iterator<Item = Constraint<'db>>>,
>,
bindings_with_constraints: BindingWithConstraintsIterator<'_, 'db>,
) -> Symbol<'db> {
let visibility_constraints = &use_def.visibility_constraints;
let visibility_constraints = bindings_with_constraints.visibility_constraints;
let mut bindings_with_constraints = bindings_with_constraints.peekable();
let unbound_visibility = if let Some(BindingWithConstraints {
@@ -359,7 +323,7 @@ fn symbol_from_bindings<'map, 'db: 'map>(
.filter_map(|constraint| narrowing_constraint(db, constraint, binding))
.peekable();
let binding_ty = binding_type(db, binding);
let binding_ty = binding_ty(db, binding);
if constraint_tys.peek().is_some() {
let intersection_ty = constraint_tys
.fold(
@@ -414,10 +378,6 @@ impl SymbolAndQualifiers<'_> {
fn is_class_var(&self) -> bool {
self.1.contains(TypeQualifiers::CLASS_VAR)
}
fn is_final(&self) -> bool {
self.1.contains(TypeQualifiers::FINAL)
}
}
impl<'db> From<Symbol<'db>> for SymbolAndQualifiers<'db> {
@@ -436,7 +396,7 @@ impl<'db> From<Type<'db>> for SymbolAndQualifiers<'db> {
type SymbolFromDeclarationsResult<'db> =
Result<SymbolAndQualifiers<'db>, (TypeAndQualifiers<'db>, Box<[Type<'db>]>)>;
/// Build a declared type from an iterator of [`DeclarationWithConstraint`]s.
/// Build a declared type from a [`DeclarationsIterator`].
///
/// If there is only one declaration, or all declarations declare the same type, returns
/// `Ok(..)`. If there are conflicting declarations, returns an `Err(..)` variant with
@@ -446,10 +406,9 @@ type SymbolFromDeclarationsResult<'db> =
/// [`TypeQualifiers`] that have been specified on the declaration(s).
fn symbol_from_declarations<'db>(
db: &'db dyn Db,
use_def: &UseDefMap,
declarations: impl Iterator<Item = DeclarationWithConstraint<'db>>,
declarations: DeclarationsIterator<'_, 'db>,
) -> SymbolFromDeclarationsResult<'db> {
let visibility_constraints = &use_def.visibility_constraints;
let visibility_constraints = declarations.visibility_constraints;
let mut declarations = declarations.peekable();
let undeclared_visibility = if let Some(DeclarationWithConstraint {
@@ -473,7 +432,7 @@ fn symbol_from_declarations<'db>(
if static_visibility.is_always_false() {
None
} else {
Some(declaration_type(db, declaration))
Some(declaration_ty(db, declaration))
}
},
);
@@ -481,12 +440,12 @@ fn symbol_from_declarations<'db>(
if let Some(first) = types.next() {
let mut conflicting: Vec<Type<'db>> = vec![];
let declared_ty = if let Some(second) = types.next() {
let ty_first = first.inner_type();
let ty_first = first.inner_ty();
let mut qualifiers = first.qualifiers();
let mut builder = UnionBuilder::new(db).add(ty_first);
for other in std::iter::once(second).chain(types) {
let other_ty = other.inner_type();
let other_ty = other.inner_ty();
if !ty_first.is_equivalent_to(db, other_ty) {
conflicting.push(other_ty);
}
@@ -507,13 +466,13 @@ fn symbol_from_declarations<'db>(
};
Ok(SymbolAndQualifiers(
Symbol::Type(declared_ty.inner_type(), boundness),
Symbol::Type(declared_ty.inner_ty(), boundness),
declared_ty.qualifiers(),
))
} else {
Err((
declared_ty,
std::iter::once(first.inner_type())
std::iter::once(first.inner_ty())
.chain(conflicting)
.collect(),
))
@@ -1049,7 +1008,7 @@ impl<'db> Type<'db> {
///
/// [assignable to]: https://typing.readthedocs.io/en/latest/spec/concepts.html#the-assignable-to-or-consistent-subtyping-relation
pub(crate) fn is_assignable_to(self, db: &'db dyn Db, target: Type<'db>) -> bool {
if self.is_gradual_equivalent_to(db, target) {
if self.is_equivalent_to(db, target) {
return true;
}
match (self, target) {
@@ -1828,7 +1787,7 @@ impl<'db> Type<'db> {
if let Some(Type::BooleanLiteral(bool_val)) = bool_method
.call(db, &CallArguments::positional([*instance_ty]))
.return_type(db)
.return_ty(db)
{
bool_val.into()
} else {
@@ -1905,7 +1864,7 @@ impl<'db> Type<'db> {
CallDunderResult::MethodNotAvailable => return None,
CallDunderResult::CallOutcome(outcome) | CallDunderResult::PossiblyUnbound(outcome) => {
outcome.return_type(db)?
outcome.return_ty(db)?
}
};
@@ -1920,11 +1879,11 @@ impl<'db> Type<'db> {
let mut binding = bind_call(db, arguments, function_type.signature(db), Some(self));
match function_type.known(db) {
Some(KnownFunction::RevealType) => {
let revealed_ty = binding.one_parameter_type().unwrap_or(Type::unknown());
let revealed_ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
CallOutcome::revealed(binding, revealed_ty)
}
Some(KnownFunction::StaticAssert) => {
if let Some((parameter_ty, message)) = binding.two_parameter_types() {
if let Some((parameter_ty, message)) = binding.two_parameter_tys() {
let truthiness = parameter_ty.bool(db);
if truthiness.is_always_true() {
@@ -1955,64 +1914,64 @@ impl<'db> Type<'db> {
}
Some(KnownFunction::IsEquivalentTo) => {
let (ty_a, ty_b) = binding
.two_parameter_types()
.two_parameter_tys()
.unwrap_or((Type::unknown(), Type::unknown()));
binding
.set_return_type(Type::BooleanLiteral(ty_a.is_equivalent_to(db, ty_b)));
.set_return_ty(Type::BooleanLiteral(ty_a.is_equivalent_to(db, ty_b)));
CallOutcome::callable(binding)
}
Some(KnownFunction::IsSubtypeOf) => {
let (ty_a, ty_b) = binding
.two_parameter_types()
.two_parameter_tys()
.unwrap_or((Type::unknown(), Type::unknown()));
binding.set_return_type(Type::BooleanLiteral(ty_a.is_subtype_of(db, ty_b)));
binding.set_return_ty(Type::BooleanLiteral(ty_a.is_subtype_of(db, ty_b)));
CallOutcome::callable(binding)
}
Some(KnownFunction::IsAssignableTo) => {
let (ty_a, ty_b) = binding
.two_parameter_types()
.two_parameter_tys()
.unwrap_or((Type::unknown(), Type::unknown()));
binding
.set_return_type(Type::BooleanLiteral(ty_a.is_assignable_to(db, ty_b)));
.set_return_ty(Type::BooleanLiteral(ty_a.is_assignable_to(db, ty_b)));
CallOutcome::callable(binding)
}
Some(KnownFunction::IsDisjointFrom) => {
let (ty_a, ty_b) = binding
.two_parameter_types()
.two_parameter_tys()
.unwrap_or((Type::unknown(), Type::unknown()));
binding
.set_return_type(Type::BooleanLiteral(ty_a.is_disjoint_from(db, ty_b)));
.set_return_ty(Type::BooleanLiteral(ty_a.is_disjoint_from(db, ty_b)));
CallOutcome::callable(binding)
}
Some(KnownFunction::IsGradualEquivalentTo) => {
let (ty_a, ty_b) = binding
.two_parameter_types()
.two_parameter_tys()
.unwrap_or((Type::unknown(), Type::unknown()));
binding.set_return_type(Type::BooleanLiteral(
binding.set_return_ty(Type::BooleanLiteral(
ty_a.is_gradual_equivalent_to(db, ty_b),
));
CallOutcome::callable(binding)
}
Some(KnownFunction::IsFullyStatic) => {
let ty = binding.one_parameter_type().unwrap_or(Type::unknown());
binding.set_return_type(Type::BooleanLiteral(ty.is_fully_static(db)));
let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
binding.set_return_ty(Type::BooleanLiteral(ty.is_fully_static(db)));
CallOutcome::callable(binding)
}
Some(KnownFunction::IsSingleton) => {
let ty = binding.one_parameter_type().unwrap_or(Type::unknown());
binding.set_return_type(Type::BooleanLiteral(ty.is_singleton(db)));
let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
binding.set_return_ty(Type::BooleanLiteral(ty.is_singleton(db)));
CallOutcome::callable(binding)
}
Some(KnownFunction::IsSingleValued) => {
let ty = binding.one_parameter_type().unwrap_or(Type::unknown());
binding.set_return_type(Type::BooleanLiteral(ty.is_single_valued(db)));
let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
binding.set_return_ty(Type::BooleanLiteral(ty.is_single_valued(db)));
CallOutcome::callable(binding)
}
Some(KnownFunction::Len) => {
if let Some(first_arg) = binding.one_parameter_type() {
if let Some(first_arg) = binding.one_parameter_ty() {
if let Some(len_ty) = first_arg.len(db) {
binding.set_return_type(len_ty);
binding.set_return_ty(len_ty);
}
};
@@ -2020,15 +1979,15 @@ impl<'db> Type<'db> {
}
Some(KnownFunction::Repr) => {
if let Some(first_arg) = binding.one_parameter_type() {
binding.set_return_type(first_arg.repr(db));
if let Some(first_arg) = binding.one_parameter_ty() {
binding.set_return_ty(first_arg.repr(db));
};
CallOutcome::callable(binding)
}
Some(KnownFunction::AssertType) => {
let Some((_, asserted_ty)) = binding.two_parameter_types() else {
let Some((_, asserted_ty)) = binding.two_parameter_tys() else {
return CallOutcome::callable(binding);
};
@@ -2038,12 +1997,12 @@ impl<'db> Type<'db> {
Some(KnownFunction::Cast) => {
// TODO: Use `.two_parameter_tys()` exclusively
// when overloads are supported.
if binding.two_parameter_types().is_none() {
if binding.two_parameter_tys().is_none() {
return CallOutcome::callable(binding);
};
if let Some(casted_ty) = arguments.first_argument() {
binding.set_return_type(casted_ty);
binding.set_return_ty(casted_ty);
};
CallOutcome::callable(binding)
@@ -2056,7 +2015,7 @@ impl<'db> Type<'db> {
// TODO annotated return type on `__new__` or metaclass `__call__`
// TODO check call vs signatures of `__new__` and/or `__init__`
Type::ClassLiteral(ClassLiteralType { class }) => {
CallOutcome::callable(CallBinding::from_return_type(match class.known(db) {
CallOutcome::callable(CallBinding::from_return_ty(match class.known(db) {
// If the class is the builtin-bool class (for example `bool(1)`), we try to
// return the specific truthiness value of the input arg, `Literal[True]` for
// the example above.
@@ -2102,7 +2061,7 @@ impl<'db> Type<'db> {
}
// Dynamic types are callable, and the return type is the same dynamic type
Type::Dynamic(_) => CallOutcome::callable(CallBinding::from_return_type(self)),
Type::Dynamic(_) => CallOutcome::callable(CallBinding::from_return_ty(self)),
Type::Union(union) => CallOutcome::union(
self,
@@ -2112,7 +2071,7 @@ impl<'db> Type<'db> {
.map(|elem| elem.call(db, arguments)),
),
Type::Intersection(_) => CallOutcome::callable(CallBinding::from_return_type(
Type::Intersection(_) => CallOutcome::callable(CallBinding::from_return_ty(
todo_type!("Type::Intersection.call()"),
)),
@@ -2158,7 +2117,7 @@ impl<'db> Type<'db> {
match dunder_iter_result {
CallDunderResult::CallOutcome(ref call_outcome)
| CallDunderResult::PossiblyUnbound(ref call_outcome) => {
let Some(iterator_ty) = call_outcome.return_type(db) else {
let Some(iterator_ty) = call_outcome.return_ty(db) else {
return IterationOutcome::NotIterable {
not_iterable_ty: self,
};
@@ -2166,7 +2125,7 @@ impl<'db> Type<'db> {
return if let Some(element_ty) = iterator_ty
.call_dunder(db, "__next__", &CallArguments::positional([iterator_ty]))
.return_type(db)
.return_ty(db)
{
if matches!(dunder_iter_result, CallDunderResult::PossiblyUnbound(..)) {
IterationOutcome::PossiblyUnboundDunderIter {
@@ -2197,7 +2156,7 @@ impl<'db> Type<'db> {
"__getitem__",
&CallArguments::positional([self, KnownClass::Int.to_instance(db)]),
)
.return_type(db)
.return_ty(db)
{
IterationOutcome::Iterable { element_ty }
} else {
@@ -2303,9 +2262,7 @@ impl<'db> Type<'db> {
Type::Dynamic(_) => Ok(*self),
// TODO map this to a new `Type::TypeVar` variant
Type::KnownInstance(KnownInstanceType::TypeVar(_)) => Ok(*self),
Type::KnownInstance(KnownInstanceType::TypeAliasType(alias)) => {
Ok(alias.value_type(db))
}
Type::KnownInstance(KnownInstanceType::TypeAliasType(alias)) => Ok(alias.value_ty(db)),
Type::KnownInstance(KnownInstanceType::Never | KnownInstanceType::NoReturn) => {
Ok(Type::Never)
}
@@ -2401,8 +2358,7 @@ impl<'db> Type<'db> {
ClassBase::Dynamic(_) => *self,
ClassBase::Class(class) => SubclassOfType::from(
db,
ClassBase::try_from_type(db, class.metaclass(db))
.unwrap_or(ClassBase::unknown()),
ClassBase::try_from_ty(db, class.metaclass(db)).unwrap_or(ClassBase::unknown()),
),
},
@@ -2411,7 +2367,7 @@ impl<'db> Type<'db> {
// TODO intersections
Type::Intersection(_) => SubclassOfType::from(
db,
ClassBase::try_from_type(db, todo_type!("Intersection meta-type"))
ClassBase::try_from_ty(db, todo_type!("Intersection meta-type"))
.expect("Type::Todo should be a valid ClassBase"),
),
Type::AlwaysTruthy | Type::AlwaysFalsy => KnownClass::Type.to_instance(db),
@@ -2535,7 +2491,7 @@ impl<'db> TypeAndQualifiers<'db> {
}
/// Forget about type qualifiers and only return the inner type.
pub(crate) fn inner_type(&self) -> Type<'db> {
pub(crate) fn inner_ty(&self) -> Type<'db> {
self.inner
}
@@ -3354,14 +3310,6 @@ impl<'db> IterationOutcome<'db> {
}
}
}
fn unwrap_without_diagnostic(self) -> Type<'db> {
match self {
Self::Iterable { element_ty } => element_ty,
Self::NotIterable { .. } => Type::unknown(),
Self::PossiblyUnboundDunderIter { element_ty, .. } => element_ty,
}
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@@ -3830,7 +3778,7 @@ impl<'db> Class<'db> {
class_stmt
.bases()
.iter()
.map(|base_node| definition_expression_type(db, class_definition, base_node))
.map(|base_node| definition_expression_ty(db, class_definition, base_node))
.collect()
}
@@ -3859,7 +3807,7 @@ impl<'db> Class<'db> {
.decorator_list
.iter()
.map(|decorator_node| {
definition_expression_type(db, class_definition, &decorator_node.expression)
definition_expression_ty(db, class_definition, &decorator_node.expression)
})
.collect()
}
@@ -3918,7 +3866,7 @@ impl<'db> Class<'db> {
.find_keyword("metaclass")?
.value;
let class_definition = semantic_index(db, self.file(db)).definition(class_stmt);
let metaclass_ty = definition_expression_type(db, class_definition, metaclass_node);
let metaclass_ty = definition_expression_ty(db, class_definition, metaclass_node);
Some(metaclass_ty)
}
@@ -3978,7 +3926,7 @@ impl<'db> Class<'db> {
let return_ty = outcomes
.iter()
.fold(None, |acc, outcome| {
let ty = outcome.return_type(db);
let ty = outcome.return_ty(db);
match (acc, ty) {
(acc, None) => {
@@ -4009,7 +3957,7 @@ impl<'db> Class<'db> {
CallOutcome::Callable { binding }
| CallOutcome::RevealType { binding, .. }
| CallOutcome::StaticAssertionError { binding, .. }
| CallOutcome::AssertType { binding, .. } => Ok(binding.return_type()),
| CallOutcome::AssertType { binding, .. } => Ok(binding.return_ty()),
};
return return_ty_result.map(|ty| ty.to_meta_type(db));
@@ -4117,7 +4065,7 @@ impl<'db> Class<'db> {
/// this class, not on its superclasses.
fn own_instance_member(self, db: &'db dyn Db, name: &str) -> SymbolAndQualifiers<'db> {
// TODO: There are many things that are not yet implemented here:
// - `typing.Final`
// - `typing.ClassVar` and `typing.Final`
// - Proper diagnostics
// - Handling of possibly-undeclared/possibly-unbound attributes
// - The descriptor protocol
@@ -4125,12 +4073,12 @@ impl<'db> Class<'db> {
let body_scope = self.body_scope(db);
let table = symbol_table(db, body_scope);
if let Some(symbol_id) = table.symbol_id_by_name(name) {
if let Some(symbol) = table.symbol_id_by_name(name) {
let use_def = use_def_map(db, body_scope);
let declarations = use_def.public_declarations(symbol_id);
let declarations = use_def.public_declarations(symbol);
match symbol_from_declarations(db, use_def.as_ref(), declarations) {
match symbol_from_declarations(db, declarations) {
Ok(SymbolAndQualifiers(Symbol::Type(declared_ty, _), qualifiers)) => {
if let Some(function) = declared_ty.into_function_literal() {
// TODO: Eventually, we are going to process all decorators correctly. This is
@@ -4145,18 +4093,24 @@ impl<'db> Class<'db> {
SymbolAndQualifiers(Symbol::Type(declared_ty, Boundness::Bound), qualifiers)
}
}
Ok(symbol @ SymbolAndQualifiers(Symbol::Unbound, qualifiers)) => {
let bindings = use_def.public_bindings(symbol_id);
let inferred = symbol_from_bindings(db, use_def.as_ref(), bindings);
Ok(SymbolAndQualifiers(Symbol::Unbound, qualifiers)) => {
let bindings = use_def.public_bindings(symbol);
let inferred = symbol_from_bindings(db, bindings);
SymbolAndQualifiers(
widen_type_for_undeclared_public_symbol(db, inferred, symbol.is_final()),
qualifiers,
)
match inferred {
Symbol::Type(ty, _) => SymbolAndQualifiers(
Symbol::Type(
UnionType::from_elements(db, [Type::unknown(), ty]),
Boundness::Bound,
),
qualifiers,
),
Symbol::Unbound => SymbolAndQualifiers(Symbol::Unbound, qualifiers),
}
}
Err((declared_ty, _conflicting_declarations)) => {
// Ignore conflicting declarations
SymbolAndQualifiers(declared_ty.inner_type().into(), declared_ty.qualifiers())
SymbolAndQualifiers(declared_ty.inner_ty().into(), declared_ty.qualifiers())
}
}
} else {
@@ -4222,13 +4176,13 @@ pub struct TypeAliasType<'db> {
#[salsa::tracked]
impl<'db> TypeAliasType<'db> {
#[salsa::tracked]
pub fn value_type(self, db: &'db dyn Db) -> Type<'db> {
pub fn value_ty(self, db: &'db dyn Db) -> Type<'db> {
let scope = self.rhs_scope(db);
let type_alias_stmt_node = scope.node(db).expect_type_alias();
let definition = semantic_index(db, scope.file(db)).definition(type_alias_stmt_node);
definition_expression_type(db, definition, &type_alias_stmt_node.value)
definition_expression_ty(db, definition, &type_alias_stmt_node.value)
}
}
@@ -4729,10 +4683,7 @@ pub(crate) mod tests {
let bar = system_path_to_file(&db, "src/bar.py")?;
let a = global_symbol(&db, bar, "a");
assert_eq!(
a.expect_type(),
UnionType::from_elements(&db, [Type::unknown(), KnownClass::Int.to_instance(&db)])
);
assert_eq!(a.expect_type(), KnownClass::Int.to_instance(&db));
// Add a docstring to foo to trigger a re-run.
// The bar-call site of foo should not be re-run because of that
@@ -4748,10 +4699,7 @@ pub(crate) mod tests {
let a = global_symbol(&db, bar, "a");
assert_eq!(
a.expect_type(),
UnionType::from_elements(&db, [Type::unknown(), KnownClass::Int.to_instance(&db)])
);
assert_eq!(a.expect_type(), KnownClass::Int.to_instance(&db));
let events = db.take_salsa_events();
let call = &*parsed_module(&db, bar).syntax().body[1]


@@ -89,13 +89,13 @@ impl<'db> CallOutcome<'db> {
}
/// Get the return type of the call, or `None` if not callable.
pub(super) fn return_type(&self, db: &'db dyn Db) -> Option<Type<'db>> {
pub(super) fn return_ty(&self, db: &'db dyn Db) -> Option<Type<'db>> {
match self {
Self::Callable { binding } => Some(binding.return_type()),
Self::Callable { binding } => Some(binding.return_ty()),
Self::RevealType {
binding,
revealed_ty: _,
} => Some(binding.return_type()),
} => Some(binding.return_ty()),
Self::NotCallable { not_callable_ty: _ } => None,
Self::Union {
outcomes,
@@ -105,7 +105,7 @@ impl<'db> CallOutcome<'db> {
// If all outcomes are NotCallable, we return None; if some outcomes are callable
// and some are not, we return a union including Unknown.
.fold(None, |acc, outcome| {
let ty = outcome.return_type(db);
let ty = outcome.return_ty(db);
match (acc, ty) {
(None, None) => None,
(None, Some(ty)) => Some(UnionBuilder::new(db).add(ty)),
@@ -113,12 +113,12 @@ impl<'db> CallOutcome<'db> {
}
})
.map(UnionBuilder::build),
Self::PossiblyUnboundDunderCall { call_outcome, .. } => call_outcome.return_type(db),
Self::PossiblyUnboundDunderCall { call_outcome, .. } => call_outcome.return_ty(db),
Self::StaticAssertionError { .. } => Some(Type::none(db)),
Self::AssertType {
binding,
asserted_ty: _,
} => Some(binding.return_type()),
} => Some(binding.return_ty()),
}
}
@@ -128,7 +128,7 @@ impl<'db> CallOutcome<'db> {
context: &InferContext<'db>,
node: ast::AnyNodeRef,
) -> Type<'db> {
match self.return_type_result(context, node) {
match self.return_ty_result(context, node) {
Ok(return_ty) => return_ty,
Err(NotCallableError::Type {
not_callable_ty,
@@ -194,7 +194,7 @@ impl<'db> CallOutcome<'db> {
}
/// Get the return type of the call as a result.
pub(super) fn return_type_result(
pub(super) fn return_ty_result(
&self,
context: &InferContext<'db>,
node: ast::AnyNodeRef,
@@ -205,7 +205,7 @@ impl<'db> CallOutcome<'db> {
match self {
Self::Callable { binding } => {
binding.report_diagnostics(context, node);
Ok(binding.return_type())
Ok(binding.return_ty())
}
Self::RevealType {
binding,
@@ -218,7 +218,7 @@ impl<'db> CallOutcome<'db> {
Severity::Info,
format_args!("Revealed type is `{}`", revealed_ty.display(context.db())),
);
Ok(binding.return_type())
Ok(binding.return_ty())
}
Self::NotCallable { not_callable_ty } => Err(NotCallableError::Type {
not_callable_ty: *not_callable_ty,
@@ -230,7 +230,7 @@ impl<'db> CallOutcome<'db> {
} => Err(NotCallableError::PossiblyUnboundDunderCall {
callable_ty: *called_ty,
return_ty: call_outcome
.return_type(context.db())
.return_ty(context.db())
.unwrap_or(Type::unknown()),
}),
Self::Union {
@@ -251,7 +251,7 @@ impl<'db> CallOutcome<'db> {
revealed_ty: _,
} => {
if revealed {
binding.return_type()
binding.return_ty()
} else {
revealed = true;
outcome.unwrap_with_diagnostic(context, node)
@@ -329,8 +329,8 @@ impl<'db> CallOutcome<'db> {
binding,
asserted_ty,
} => {
let [actual_ty, _asserted] = binding.parameter_types() else {
return Ok(binding.return_type());
let [actual_ty, _asserted] = binding.parameter_tys() else {
return Ok(binding.return_ty());
};
if !actual_ty.is_gradual_equivalent_to(context.db(), *asserted_ty) {
@@ -345,7 +345,7 @@ impl<'db> CallOutcome<'db> {
);
}
Ok(binding.return_type())
Ok(binding.return_ty())
}
}
}
@@ -358,9 +358,9 @@ pub(super) enum CallDunderResult<'db> {
}
impl<'db> CallDunderResult<'db> {
pub(super) fn return_type(&self, db: &'db dyn Db) -> Option<Type<'db>> {
pub(super) fn return_ty(&self, db: &'db dyn Db) -> Option<Type<'db>> {
match self {
Self::CallOutcome(outcome) => outcome.return_type(db),
Self::CallOutcome(outcome) => outcome.return_ty(db),
Self::PossiblyUnbound { .. } => None,
Self::MethodNotAvailable => None,
}
@@ -394,7 +394,7 @@ pub(super) enum NotCallableError<'db> {
impl<'db> NotCallableError<'db> {
/// The return type that should be used when a call is not callable.
pub(super) fn return_type(&self) -> Type<'db> {
pub(super) fn return_ty(&self) -> Type<'db> {
match self {
Self::Type { return_ty, .. } => *return_ty,
Self::UnionElement { return_ty, .. } => *return_ty,
@@ -407,7 +407,7 @@ impl<'db> NotCallableError<'db> {
///
/// For unions, returns the union type itself, which may contain a mix of callable and
/// non-callable types.
pub(super) fn called_type(&self) -> Type<'db> {
pub(super) fn called_ty(&self) -> Type<'db> {
match self {
Self::Type {
not_callable_ty, ..


@@ -73,7 +73,7 @@ pub(crate) fn bind_call<'db>(
continue;
}
};
if let Some(expected_ty) = parameter.annotated_type() {
if let Some(expected_ty) = parameter.annotated_ty() {
if !argument_ty.is_assignable_to(db, expected_ty) {
errors.push(CallBindingError::InvalidArgumentType {
parameter: ParameterContext::new(parameter, index, positional),
@@ -109,8 +109,7 @@ pub(crate) fn bind_call<'db>(
for (index, bound_ty) in parameter_tys.iter().enumerate() {
if bound_ty.is_none() {
let param = &parameters[index];
if param.is_variadic() || param.is_keyword_variadic() || param.default_type().is_some()
{
if param.is_variadic() || param.is_keyword_variadic() || param.default_ty().is_some() {
// variadic/keywords and defaulted arguments are not required
continue;
}
@@ -152,7 +151,7 @@ pub(crate) struct CallBinding<'db> {
impl<'db> CallBinding<'db> {
// TODO remove this constructor and construct always from `bind_call`
pub(crate) fn from_return_type(return_ty: Type<'db>) -> Self {
pub(crate) fn from_return_ty(return_ty: Type<'db>) -> Self {
Self {
callable_ty: None,
return_ty,
@@ -161,27 +160,27 @@ impl<'db> CallBinding<'db> {
}
}
pub(crate) fn set_return_type(&mut self, return_ty: Type<'db>) {
pub(crate) fn set_return_ty(&mut self, return_ty: Type<'db>) {
self.return_ty = return_ty;
}
pub(crate) fn return_type(&self) -> Type<'db> {
pub(crate) fn return_ty(&self) -> Type<'db> {
self.return_ty
}
pub(crate) fn parameter_types(&self) -> &[Type<'db>] {
pub(crate) fn parameter_tys(&self) -> &[Type<'db>] {
&self.parameter_tys
}
pub(crate) fn one_parameter_type(&self) -> Option<Type<'db>> {
match self.parameter_types() {
pub(crate) fn one_parameter_ty(&self) -> Option<Type<'db>> {
match self.parameter_tys() {
[ty] => Some(*ty),
_ => None,
}
}
pub(crate) fn two_parameter_types(&self) -> Option<(Type<'db>, Type<'db>)> {
match self.parameter_types() {
pub(crate) fn two_parameter_tys(&self) -> Option<(Type<'db>, Type<'db>)> {
match self.parameter_tys() {
[first, second] => Some((*first, *second)),
_ => None,
}


@@ -62,7 +62,7 @@ impl<'db> ClassBase<'db> {
/// Attempt to resolve `ty` into a `ClassBase`.
///
/// Return `None` if `ty` is not an acceptable type for a class base.
pub(super) fn try_from_type(db: &'db dyn Db, ty: Type<'db>) -> Option<Self> {
pub(super) fn try_from_ty(db: &'db dyn Db, ty: Type<'db>) -> Option<Self> {
match ty {
Type::Dynamic(dynamic) => Some(Self::Dynamic(dynamic)),
Type::ClassLiteral(ClassLiteralType { class }) => Some(Self::Class(class)),
@@ -112,40 +112,40 @@ impl<'db> ClassBase<'db> {
KnownInstanceType::Any => Some(Self::any()),
// TODO: Classes inheriting from `typing.Type` et al. also have `Generic` in their MRO
KnownInstanceType::Dict => {
Self::try_from_type(db, KnownClass::Dict.to_class_literal(db))
Self::try_from_ty(db, KnownClass::Dict.to_class_literal(db))
}
KnownInstanceType::List => {
Self::try_from_type(db, KnownClass::List.to_class_literal(db))
Self::try_from_ty(db, KnownClass::List.to_class_literal(db))
}
KnownInstanceType::Type => {
Self::try_from_type(db, KnownClass::Type.to_class_literal(db))
Self::try_from_ty(db, KnownClass::Type.to_class_literal(db))
}
KnownInstanceType::Tuple => {
Self::try_from_type(db, KnownClass::Tuple.to_class_literal(db))
Self::try_from_ty(db, KnownClass::Tuple.to_class_literal(db))
}
KnownInstanceType::Set => {
Self::try_from_type(db, KnownClass::Set.to_class_literal(db))
Self::try_from_ty(db, KnownClass::Set.to_class_literal(db))
}
KnownInstanceType::FrozenSet => {
Self::try_from_type(db, KnownClass::FrozenSet.to_class_literal(db))
Self::try_from_ty(db, KnownClass::FrozenSet.to_class_literal(db))
}
KnownInstanceType::ChainMap => {
Self::try_from_type(db, KnownClass::ChainMap.to_class_literal(db))
Self::try_from_ty(db, KnownClass::ChainMap.to_class_literal(db))
}
KnownInstanceType::Counter => {
Self::try_from_type(db, KnownClass::Counter.to_class_literal(db))
Self::try_from_ty(db, KnownClass::Counter.to_class_literal(db))
}
KnownInstanceType::DefaultDict => {
Self::try_from_type(db, KnownClass::DefaultDict.to_class_literal(db))
Self::try_from_ty(db, KnownClass::DefaultDict.to_class_literal(db))
}
KnownInstanceType::Deque => {
Self::try_from_type(db, KnownClass::Deque.to_class_literal(db))
Self::try_from_ty(db, KnownClass::Deque.to_class_literal(db))
}
KnownInstanceType::OrderedDict => {
Self::try_from_type(db, KnownClass::OrderedDict.to_class_literal(db))
Self::try_from_ty(db, KnownClass::OrderedDict.to_class_literal(db))
}
KnownInstanceType::Callable => {
Self::try_from_type(db, todo_type!("Support for Callable as a base class"))
Self::try_from_ty(db, todo_type!("Support for Callable as a base class"))
}
},
}


@@ -8,7 +8,7 @@ use ruff_db::{
use ruff_python_ast::AnyNodeRef;
use ruff_text_size::Ranged;
use super::{binding_type, KnownFunction, TypeCheckDiagnostic, TypeCheckDiagnostics};
use super::{binding_ty, KnownFunction, TypeCheckDiagnostic, TypeCheckDiagnostics};
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::ScopeId;
@@ -144,7 +144,7 @@ impl<'db> InferContext<'db> {
.ancestor_scopes(scope_id)
.filter_map(|(_, scope)| scope.node().as_function())
.filter_map(|function| {
binding_type(self.db, index.definition(function)).into_function_literal()
binding_ty(self.db, index.definition(function)).into_function_literal()
});
// Iterate over all functions and test if any is decorated with `@no_type_check`.


@@ -802,8 +802,8 @@ impl Diagnostic for TypeCheckDiagnostic {
TypeCheckDiagnostic::message(self).into()
}
fn file(&self) -> Option<File> {
Some(TypeCheckDiagnostic::file(self))
fn file(&self) -> File {
TypeCheckDiagnostic::file(self)
}
fn range(&self) -> Option<TextRange> {
@@ -984,13 +984,13 @@ pub(super) fn report_slice_step_size_zero(context: &InferContext, node: AnyNodeR
);
}
fn report_invalid_assignment_with_message(
pub(super) fn report_invalid_assignment(
context: &InferContext,
node: AnyNodeRef,
target_ty: Type,
message: std::fmt::Arguments,
declared_ty: Type,
assigned_ty: Type,
) {
match target_ty {
match declared_ty {
Type::ClassLiteral(ClassLiteralType { class }) => {
context.report_lint(&INVALID_ASSIGNMENT, node, format_args!(
"Implicit shadowing of class `{}`; annotate to make it explicit if this is intentional",
@@ -1002,48 +1002,19 @@ fn report_invalid_assignment_with_message(
function.name(context.db())));
}
_ => {
context.report_lint(&INVALID_ASSIGNMENT, node, message);
context.report_lint(
&INVALID_ASSIGNMENT,
node,
format_args!(
"Object of type `{}` is not assignable to `{}`",
assigned_ty.display(context.db()),
declared_ty.display(context.db()),
),
);
}
}
}
pub(super) fn report_invalid_assignment(
context: &InferContext,
node: AnyNodeRef,
target_ty: Type,
source_ty: Type,
) {
report_invalid_assignment_with_message(
context,
node,
target_ty,
format_args!(
"Object of type `{}` is not assignable to `{}`",
source_ty.display(context.db()),
target_ty.display(context.db()),
),
);
}
pub(super) fn report_invalid_attribute_assignment(
context: &InferContext,
node: AnyNodeRef,
target_ty: Type,
source_ty: Type,
attribute_name: &'_ str,
) {
report_invalid_assignment_with_message(
context,
node,
target_ty,
format_args!(
"Object of type `{}` is not assignable to attribute `{attribute_name}` of type `{}`",
source_ty.display(context.db()),
target_ty.display(context.db()),
),
);
}
pub(super) fn report_possibly_unresolved_reference(
context: &InferContext,
expr_name_node: &ast::ExprName,

File diff suppressed because it is too large.


@@ -76,7 +76,7 @@ impl<'db> Mro<'db> {
// This *could* theoretically be handled by the final branch below,
// but it's a common case (i.e., worth optimizing for),
// and the `c3_merge` function requires lots of allocations.
[single_base] => ClassBase::try_from_type(db, *single_base).map_or_else(
[single_base] => ClassBase::try_from_ty(db, *single_base).map_or_else(
|| Err(MroErrorKind::InvalidBases(Box::from([(0, *single_base)]))),
|single_base| {
Ok(std::iter::once(ClassBase::Class(class))
@@ -95,7 +95,7 @@ impl<'db> Mro<'db> {
let mut invalid_bases = vec![];
for (i, base) in multiple_bases.iter().enumerate() {
match ClassBase::try_from_type(db, *base) {
match ClassBase::try_from_ty(db, *base) {
Some(valid_base) => valid_bases.push(valid_base),
None => invalid_bases.push((i, *base)),
}


@@ -322,9 +322,9 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
for (op, (left, right)) in std::iter::zip(&**ops, comparator_tuples) {
let lhs_ty = last_rhs_ty.unwrap_or_else(|| {
inference.expression_type(left.scoped_expression_id(self.db, scope))
inference.expression_ty(left.scoped_expression_id(self.db, scope))
});
let rhs_ty = inference.expression_type(right.scoped_expression_id(self.db, scope));
let rhs_ty = inference.expression_ty(right.scoped_expression_id(self.db, scope));
last_rhs_ty = Some(rhs_ty);
match left {
@@ -393,7 +393,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
}
let callable_ty =
inference.expression_type(callable.scoped_expression_id(self.db, scope));
inference.expression_ty(callable.scoped_expression_id(self.db, scope));
if callable_ty
.into_class_literal()
@@ -422,7 +422,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
let inference = infer_expression_types(self.db, expression);
let callable_ty =
inference.expression_type(expr_call.func.scoped_expression_id(self.db, scope));
inference.expression_ty(expr_call.func.scoped_expression_id(self.db, scope));
// TODO: add support for PEP 604 union types on the right hand side of `isinstance`
// and `issubclass`, for example `isinstance(x, str | (int | float))`.
@@ -441,7 +441,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
let class_info_ty =
inference.expression_type(class_info.scoped_expression_id(self.db, scope));
inference.expression_ty(class_info.scoped_expression_id(self.db, scope));
function
.generate_constraint(self.db, class_info_ty)
@@ -500,7 +500,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
let scope = self.scope();
let inference = infer_expression_types(self.db, cls);
let ty = inference
.expression_type(cls.node_ref(self.db).scoped_expression_id(self.db, scope))
.expression_ty(cls.node_ref(self.db).scoped_expression_id(self.db, scope))
.to_instance(self.db);
let mut constraints = NarrowingConstraints::default();
constraints.insert(symbol, ty);
@@ -524,7 +524,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
// filter our arms with statically known truthiness
.filter(|expr| {
inference
.expression_type(expr.scoped_expression_id(self.db, scope))
.expression_ty(expr.scoped_expression_id(self.db, scope))
.bool(self.db)
!= match expr_bool_op.op {
BoolOp::And => Truthiness::AlwaysTrue,


@@ -461,12 +461,6 @@ mod stable {
forall types s, t.
s.is_fully_static(db) && s.is_gradual_equivalent_to(db, t) => s.is_equivalent_to(db, t)
);
// `T` can be assigned to itself.
type_property_test!(
assignable_to_is_reflexive, db,
forall types t. t.is_assignable_to(db, t)
);
}
/// This module contains property tests that currently lead to many false positives.
@@ -481,6 +475,13 @@ mod flaky {
use super::{intersection, union};
// Currently fails due to https://github.com/astral-sh/ruff/issues/14899
// `T` can be assigned to itself.
type_property_test!(
assignable_to_is_reflexive, db,
forall types t. t.is_assignable_to(db, t)
);
// Negating `T` twice is equivalent to `T`.
type_property_test!(
double_negation_is_identity, db,


@@ -1,4 +1,4 @@
use super::{definition_expression_type, Type};
use super::{definition_expression_ty, Type};
use crate::Db;
use crate::{semantic_index::definition::Definition, types::todo_type};
use ruff_python_ast::{self as ast, name::Name};
@@ -39,7 +39,7 @@ impl<'db> Signature<'db> {
if function_node.is_async {
todo_type!("generic types.CoroutineType")
} else {
definition_expression_type(db, definition, returns.as_ref())
definition_expression_ty(db, definition, returns.as_ref())
}
});
@@ -97,7 +97,7 @@ impl<'db> Parameters<'db> {
parameter_with_default
.default
.as_deref()
.map(|default| definition_expression_type(db, definition, default))
.map(|default| definition_expression_ty(db, definition, default))
};
let positional_only = posonlyargs.iter().map(|arg| {
Parameter::from_node_and_kind(
@@ -245,7 +245,7 @@ impl<'db> Parameter<'db> {
annotated_ty: parameter
.annotation
.as_deref()
.map(|annotation| definition_expression_type(db, definition, annotation)),
.map(|annotation| definition_expression_ty(db, definition, annotation)),
kind,
}
}
@@ -276,7 +276,7 @@ impl<'db> Parameter<'db> {
}
/// Annotated type of the parameter, if annotated.
pub(crate) fn annotated_type(&self) -> Option<Type<'db>> {
pub(crate) fn annotated_ty(&self) -> Option<Type<'db>> {
self.annotated_ty
}
@@ -295,7 +295,7 @@ impl<'db> Parameter<'db> {
}
/// Default-value type of the parameter, if any.
pub(crate) fn default_type(&self) -> Option<Type<'db>> {
pub(crate) fn default_ty(&self) -> Option<Type<'db>> {
match self.kind {
ParameterKind::PositionalOnly { default_ty } => default_ty,
ParameterKind::PositionalOrKeyword { default_ty } => default_ty,
@@ -543,10 +543,7 @@ mod tests {
assert_eq!(a_name, "a");
assert_eq!(b_name, "b");
// TODO resolution should not be deferred; we should see A not B
assert_eq!(
a_annotated_ty.unwrap().display(&db).to_string(),
"Unknown | B"
);
assert_eq!(a_annotated_ty.unwrap().display(&db).to_string(), "B");
assert_eq!(b_annotated_ty.unwrap().display(&db).to_string(), "T");
}
@@ -586,10 +583,7 @@ mod tests {
assert_eq!(a_name, "a");
assert_eq!(b_name, "b");
// Parameter resolution deferred; we should see B
assert_eq!(
a_annotated_ty.unwrap().display(&db).to_string(),
"Unknown | B"
);
assert_eq!(a_annotated_ty.unwrap().display(&db).to_string(), "B");
assert_eq!(b_annotated_ty.unwrap().display(&db).to_string(), "T");
}


@@ -43,7 +43,7 @@ impl<'db> Unpacker<'db> {
);
let mut value_ty = infer_expression_types(self.db(), value.expression())
.expression_type(value.scoped_expression_id(self.db(), self.scope));
.expression_ty(value.scoped_expression_id(self.db(), self.scope));
if value.is_assign()
&& self.context.in_stub()


@@ -294,8 +294,8 @@ impl<'db> VisibilityConstraints<'db> {
ConstraintNode::Expression(test_expr) => {
let inference = infer_expression_types(db, test_expr);
let scope = test_expr.scope(db);
let ty = inference
.expression_type(test_expr.node_ref(db).scoped_expression_id(db, scope));
let ty =
inference.expression_ty(test_expr.node_ref(db).scoped_expression_id(db, scope));
ty.bool(db).negate_if(!constraint.is_positive)
}
@@ -304,7 +304,7 @@ impl<'db> VisibilityConstraints<'db> {
let subject_expression = inner.subject(db);
let inference = infer_expression_types(db, *subject_expression);
let scope = subject_expression.scope(db);
let subject_ty = inference.expression_type(
let subject_ty = inference.expression_ty(
subject_expression
.node_ref(db)
.scoped_expression_id(db, scope),
@@ -312,8 +312,8 @@ impl<'db> VisibilityConstraints<'db> {
let inference = infer_expression_types(db, *value);
let scope = value.scope(db);
let value_ty = inference
.expression_type(value.node_ref(db).scoped_expression_id(db, scope));
let value_ty =
inference.expression_ty(value.node_ref(db).scoped_expression_id(db, scope));
if subject_ty.is_single_valued(db) {
let truthiness =


@@ -75,9 +75,9 @@ fn to_lsp_diagnostic(
diagnostic: &dyn ruff_db::diagnostic::Diagnostic,
encoding: crate::PositionEncoding,
) -> Diagnostic {
let range = if let (Some(file), Some(range)) = (diagnostic.file(), diagnostic.range()) {
let index = line_index(db.upcast(), file);
let source = source_text(db.upcast(), file);
let range = if let Some(range) = diagnostic.range() {
let index = line_index(db.upcast(), diagnostic.file());
let source = source_text(db.upcast(), diagnostic.file());
range.to_range(&source, &index, encoding)
} else {


@@ -8,8 +8,8 @@ under a certain directory as test suites.
A Markdown test suite can contain any number of tests. A test consists of one or more embedded
"files", each defined by a triple-backticks fenced code block. The code block must have a tag string
specifying its language. We currently support `py` (Python files) and `pyi` (type stub files), as
well as [typeshed `VERSIONS`] files and `toml` for configuration.
specifying its language; currently only `py` (Python files) and `pyi` (type stub files) are
supported.
The simplest possible test suite consists of just a single test, with a single embedded file:
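A minimal sketch of such a suite, for illustration (it uses the `# revealed:` assertion-comment form that also appears in the typeshed example further down; the revealed type shown here is indicative):

````markdown
```py
reveal_type(1)  # revealed: Literal[1]
```
````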
@@ -243,20 +243,6 @@ section. Nested sections can override configurations from their parent sections.
See [`MarkdownTestConfig`](https://github.com/astral-sh/ruff/blob/main/crates/red_knot_test/src/config.rs) for the full list of supported configuration options.
### Specifying a custom typeshed
Some tests will need to override the default typeshed with custom files. The `[environment]`
configuration option `typeshed` can be used to do this:
````markdown
```toml
[environment]
typeshed = "/typeshed"
```
````
For more details, take a look at the [custom-typeshed Markdown test].
## Documentation of tests
Arbitrary Markdown syntax (including of course normal prose paragraphs) is permitted (and ignored by
@@ -308,6 +294,36 @@ The column assertion `6` on the ending line should be optional.
In cases where such assertions overlap, resolve the ambiguity using more angle brackets: `<<<<` begins
an assertion ended by `>>>>`, etc.
### Non-Python files
Some tests may need to specify non-Python embedded files: typeshed `stdlib/VERSIONS`, `pth` files,
`py.typed` files, `pyvenv.cfg` files...
We will allow specifying any of these using the `text` language in the code block tag string:
````markdown
```text path=/third-party/foo/py.typed
partial
```
````
We may want to also support testing Jupyter notebooks as embedded files; exact syntax for this is
yet to be determined.
Of course, red-knot is only run directly on `py` and `pyi` files, and assertion comments are only
possible in these files.
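For illustration, assertion comments come in a `# revealed:` form and an `# error:` form. A hedged sketch (the `unsupported-operator` rule name is an assumption, and the exact revealed types may differ):

```py
reveal_type(1 + 2)  # revealed: Literal[3]
1 + "a"  # error: [unsupported-operator]
```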
A fenced code block with no language will always be an error.
### Running just a single test from a suite
Having each test in a suite always run as a distinct Rust test would require writing our own test
runner or code-generating tests in a build script; neither of these is planned.
We could still allow running just a single test from a suite, for debugging purposes, either via
some "focus" syntax that could be easily temporarily added to a test, or via an environment
variable.
### Configuring search paths and kinds
The red-knot TOML configuration format hasn't been finalized, and we may want to implement
@@ -330,6 +346,38 @@ Paths for `workspace-root` and `third-party-root` must be absolute.
Relative embedded-file paths are relative to the workspace root, even if it is explicitly set to a
non-default value using the `workspace-root` config.
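One hedged sketch of overriding the root (placing `workspace-root` under `[environment]` is an assumption here, since this part of the format is explicitly not final):

````markdown
```toml
[environment]
workspace-root = "/app"
```

```py path=foo.py
# This file would be written to /app/foo.py.
```
````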
### Specifying a custom typeshed
Some tests will need to override the default typeshed with custom files. The `[environment]`
configuration option `typeshed-path` can be used to do this:
````markdown
```toml
[environment]
typeshed-path = "/typeshed"
```
This file is importable as part of our custom typeshed, because it is within `/typeshed`, which we
configured above as our custom typeshed root:
```py path=/typeshed/stdlib/builtins.pyi
I_AM_THE_ONLY_BUILTIN = 1
```
This file is written to `/src/test.py`, because the default workspace root is `/src/` and the default
file path is `test.py`:
```py
reveal_type(I_AM_THE_ONLY_BUILTIN) # revealed: Literal[1]
```
````
A fenced code block with language `text` can be used to provide a `stdlib/VERSIONS` file in the
custom typeshed root. If no such file is created explicitly, one should be created implicitly
including entries enabling all specified `<typeshed-root>/stdlib` files for all supported Python
versions.
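The implicitly created file would follow typeshed's `VERSIONS` syntax, one `module: version-range` entry per line. The generator in this diff emits `3.8-` for every module, so for embedded files `foo/__init__.pyi` and `foo/bar.pyi` the generated file would be roughly:

```text
foo: 3.8-
foo.bar: 3.8-
```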
### I/O errors
We could use an `error=` configuration option in the tag string to make an embedded file cause an
@@ -432,6 +480,3 @@ cold, to validate equivalence of cold and incremental check results.
[^extensions]: `typing-extensions` is a third-party module, but typeshed, and thus type checkers
also, treat it as part of the standard library.
[custom-typeshed Markdown test]: ../red_knot_python_semantic/resources/mdtest/mdtest_custom_typeshed.md
[typeshed `VERSIONS`]: https://github.com/python/typeshed/blob/c546278aae47de0b2b664973da4edb613400f6ce/stdlib/VERSIONS#L1-L18


@@ -34,12 +34,6 @@ impl MarkdownTestConfig {
.as_ref()
.and_then(|env| env.python_platform.clone())
}
pub(crate) fn typeshed(&self) -> Option<&str> {
self.environment
.as_ref()
.and_then(|env| env.typeshed.as_deref())
}
}
#[derive(Deserialize, Debug, Default, Clone)]
@@ -50,9 +44,6 @@ pub(crate) struct Environment {
/// Target platform to assume when resolving types.
pub(crate) python_platform: Option<PythonPlatform>,
/// Path to a custom typeshed directory.
pub(crate) typeshed: Option<String>,
}
#[derive(Deserialize, Debug, Clone)]


@@ -198,8 +198,8 @@ mod tests {
"dummy".into()
}
fn file(&self) -> Option<File> {
Some(self.file)
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {


@@ -3,7 +3,7 @@ use camino::Utf8Path;
use colored::Colorize;
use parser as test_parser;
use red_knot_python_semantic::types::check_types;
use red_knot_python_semantic::{Program, ProgramSettings, SearchPathSettings, SitePackages};
use red_knot_python_semantic::Program;
use ruff_db::diagnostic::{Diagnostic, ParseDiagnostic};
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::panic::catch_unwind;
@@ -12,7 +12,7 @@ use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use ruff_db::testing::{setup_logging, setup_logging_with_filter};
use ruff_source_file::{LineIndex, OneIndexed};
use ruff_text_size::TextSize;
use std::fmt::Write;
use salsa::Setter;
mod assertion;
mod config;
@@ -50,6 +50,13 @@ pub fn run(path: &Utf8Path, long_title: &str, short_title: &str, test_name: &str
Log::Filter(filter) => setup_logging_with_filter(filter),
});
Program::get(&db)
.set_python_version(&mut db)
.to(test.configuration().python_version().unwrap_or_default());
Program::get(&db)
.set_python_platform(&mut db)
.to(test.configuration().python_platform().unwrap_or_default());
// Remove all files so that the db is in a "fresh" state.
db.memory_file_system().remove_all();
Files::sync_all(&mut db);
@@ -91,10 +98,6 @@ pub fn run(path: &Utf8Path, long_title: &str, short_title: &str, test_name: &str
fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures> {
let project_root = db.project_root().to_path_buf();
let src_path = SystemPathBuf::from("/src");
let custom_typeshed_path = test.configuration().typeshed().map(SystemPathBuf::from);
let mut typeshed_files = vec![];
let mut has_custom_versions_file = false;
let test_files: Vec<_> = test
.files()
@@ -104,33 +107,11 @@ fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures
}
assert!(
matches!(embedded.lang, "py" | "pyi" | "text"),
"Supported file types are: py, pyi, text"
matches!(embedded.lang, "py" | "pyi"),
"Non-Python files not supported yet."
);
let full_path = if embedded.path.starts_with('/') {
SystemPathBuf::from(embedded.path)
} else {
project_root.join(embedded.path)
};
if let Some(ref typeshed_path) = custom_typeshed_path {
if let Ok(relative_path) = full_path.strip_prefix(typeshed_path.join("stdlib")) {
if relative_path.as_str() == "VERSIONS" {
has_custom_versions_file = true;
} else if relative_path.extension().is_some_and(|ext| ext == "pyi") {
typeshed_files.push(relative_path.to_path_buf());
}
}
}
let full_path = project_root.join(embedded.path);
db.write_file(&full_path, embedded.code).unwrap();
if !full_path.starts_with(&src_path) || embedded.lang == "text" {
// These files need to be written to the file system (above), but we don't run any checks on them.
return None;
}
let file = system_path_to_file(db, full_path).unwrap();
Some(TestFile {
@@ -140,42 +121,6 @@ fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures
})
.collect();
// Create a custom typeshed `VERSIONS` file if none was provided.
if let Some(ref typeshed_path) = custom_typeshed_path {
if !has_custom_versions_file {
let versions_file = typeshed_path.join("stdlib/VERSIONS");
let contents = typeshed_files
.iter()
.fold(String::new(), |mut content, path| {
// This is intentionally kept simple:
let module_name = path
.as_str()
.trim_end_matches(".pyi")
.trim_end_matches("/__init__")
.replace('/', ".");
let _ = writeln!(content, "{module_name}: 3.8-");
content
});
db.write_file(&versions_file, contents).unwrap();
}
}
Program::get(db)
.update_from_settings(
db,
ProgramSettings {
python_version: test.configuration().python_version().unwrap_or_default(),
python_platform: test.configuration().python_platform().unwrap_or_default(),
search_paths: SearchPathSettings {
src_roots: vec![src_path],
extra_paths: vec![],
custom_typeshed: custom_typeshed_path,
site_packages: SitePackages::Known(vec![]),
},
},
)
.expect("Failed to update Program settings in TestDb");
let failures: Failures = test_files
.into_iter()
.filter_map(|test_file| {


@@ -385,8 +385,8 @@ mod tests {
self.message.into()
}
fn file(&self) -> Option<File> {
Some(self.file)
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {


@@ -133,17 +133,12 @@ struct EmbeddedFileId;
/// A single file embedded in a [`Section`] as a fenced code block.
///
/// Currently must be a Python file (`py` language), a type stub (`pyi`) or a [typeshed `VERSIONS`]
/// file.
///
/// TOML configuration blocks are also supported, but are not stored as `EmbeddedFile`s. In the
/// future we plan to support `pth` files as well.
/// Currently must be a Python file (`py` language) or type stub (`pyi`). In the future we plan
/// support other kinds of files as well (TOML configuration, typeshed VERSIONS, `pth` files...).
///
/// A Python embedded file makes its containing [`Section`] into a [`MarkdownTest`], and will be
/// type-checked and searched for inline-comment assertions to match against the diagnostics from
/// type checking.
///
/// [typeshed `VERSIONS`]: https://github.com/python/typeshed/blob/c546278aae47de0b2b664973da4edb613400f6ce/stdlib/VERSIONS#L1-L18
#[derive(Debug)]
pub(crate) struct EmbeddedFile<'s> {
section: SectionId,


@@ -4,7 +4,6 @@ use js_sys::Error;
use wasm_bindgen::prelude::*;
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
use red_knot_project::metadata::value::RangedValue;
use red_knot_project::ProjectMetadata;
use red_knot_project::{Db, ProjectDatabase};
use ruff_db::diagnostic::Diagnostic;
@@ -49,7 +48,7 @@ impl Workspace {
workspace.apply_cli_options(Options {
environment: Some(EnvironmentOptions {
python_version: Some(RangedValue::cli(settings.python_version.into())),
python_version: Some(settings.python_version.into()),
..EnvironmentOptions::default()
}),
..Options::default()

Some files were not shown because too many files have changed in this diff.