Compare commits

0.11.6...dcreager/f — 40 commits

| SHA1 |
|---|
| 7c976dc570 |
| b44fb47f25 |
| a4531bf865 |
| be54b840e9 |
| 45b5dedee2 |
| 9ff4772a2c |
| c077b109ce |
| 8a2dd01db4 |
| f888e51a34 |
| d11e959ad5 |
| a56eef444a |
| 14ff67fd46 |
| ada7d4da0d |
| 4cafb44ba7 |
| 1445836872 |
| da6b68cb58 |
| 2a478ce1b2 |
| 8fe2dd5e03 |
| 0a4dec0323 |
| 454ad15aee |
| fd3fc34a9e |
| c550b4d565 |
| f8061e8b99 |
| 27a315b740 |
| 08221454f6 |
| 5fec1039ed |
| 787bcd1c6a |
| 5853eb28dd |
| 84d064a14c |
| e4e405d2a1 |
| 1918c61623 |
| 44ad201262 |
| c7372d218d |
| de8f4e62e2 |
| edfa03a692 |
| 9965cee998 |
| 58807b2980 |
| 9c47b6dbb0 |
| d2ebfd6ed7 |
| c36f3f5304 |
.github/workflows/build-binaries.yml (2 changes)

@@ -377,7 +377,7 @@ jobs:
 args: --release --locked --out dist
 - name: "Test wheel"
 if: matrix.target == 'x86_64-unknown-linux-musl'
- uses: addnab/docker-run-action@v3
+ uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3
 with:
 image: alpine:latest
 options: -v ${{ github.workspace }}:/io -w /io
.github/workflows/ci.yaml (34 changes)

@@ -237,13 +237,13 @@ jobs:
 - name: "Install Rust toolchain"
 run: rustup show
 - name: "Install mold"
- uses: rui314/setup-mold@v1
+ uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
 - name: "Install cargo nextest"
- uses: taiki-e/install-action@be7c31b6745feec79dec5eb79178466c0670bb2d # v2
+ uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
 with:
 tool: cargo-nextest
 - name: "Install cargo insta"
- uses: taiki-e/install-action@be7c31b6745feec79dec5eb79178466c0670bb2d # v2
+ uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
 with:
 tool: cargo-insta
 - name: Red-knot mdtests (GitHub annotations)

@@ -291,13 +291,13 @@ jobs:
 - name: "Install Rust toolchain"
 run: rustup show
 - name: "Install mold"
- uses: rui314/setup-mold@v1
+ uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
 - name: "Install cargo nextest"
- uses: taiki-e/install-action@be7c31b6745feec79dec5eb79178466c0670bb2d # v2
+ uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
 with:
 tool: cargo-nextest
 - name: "Install cargo insta"
- uses: taiki-e/install-action@be7c31b6745feec79dec5eb79178466c0670bb2d # v2
+ uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
 with:
 tool: cargo-insta
 - name: "Run tests"

@@ -320,7 +320,7 @@ jobs:
 - name: "Install Rust toolchain"
 run: rustup show
 - name: "Install cargo nextest"
- uses: taiki-e/install-action@be7c31b6745feec79dec5eb79178466c0670bb2d # v2
+ uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
 with:
 tool: cargo-nextest
 - name: "Run tests"

@@ -376,7 +376,7 @@ jobs:
 - name: "Install Rust toolchain"
 run: rustup show
 - name: "Install mold"
- uses: rui314/setup-mold@v1
+ uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
 - name: "Build"
 run: cargo build --release --locked

@@ -401,13 +401,13 @@ jobs:
 MSRV: ${{ steps.msrv.outputs.value }}
 run: rustup default "${MSRV}"
 - name: "Install mold"
- uses: rui314/setup-mold@v1
+ uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
 - name: "Install cargo nextest"
- uses: taiki-e/install-action@be7c31b6745feec79dec5eb79178466c0670bb2d # v2
+ uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
 with:
 tool: cargo-nextest
 - name: "Install cargo insta"
- uses: taiki-e/install-action@be7c31b6745feec79dec5eb79178466c0670bb2d # v2
+ uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
 with:
 tool: cargo-insta
 - name: "Run tests"

@@ -433,7 +433,7 @@ jobs:
 - name: "Install Rust toolchain"
 run: rustup show
 - name: "Install cargo-binstall"
- uses: cargo-bins/cargo-binstall@main
+ uses: cargo-bins/cargo-binstall@63aaa5c1932cebabc34eceda9d92a70215dcead6 # v1.12.3
 with:
 tool: cargo-fuzz@0.11.2
 - name: "Install cargo-fuzz"

@@ -455,7 +455,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 persist-credentials: false
- - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+ - uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
 - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
 name: Download Ruff binary to test
 id: download-cached-binary

@@ -641,7 +641,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 persist-credentials: false
- - uses: cargo-bins/cargo-binstall@main
+ - uses: cargo-bins/cargo-binstall@63aaa5c1932cebabc34eceda9d92a70215dcead6 # v1.12.3
 - run: cargo binstall --no-confirm cargo-shear
 - run: cargo shear

@@ -681,7 +681,7 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 persist-credentials: false
- - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+ - uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
 - name: "Cache pre-commit"
 uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
 with:

@@ -720,7 +720,7 @@ jobs:
 - name: "Install Rust toolchain"
 run: rustup show
 - name: Install uv
- uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+ uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
 - name: "Install Insiders dependencies"
 if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
 run: uv pip install -r docs/requirements-insiders.txt --system

@@ -857,7 +857,7 @@ jobs:
 run: rustup show
 - name: "Install codspeed"
- uses: taiki-e/install-action@be7c31b6745feec79dec5eb79178466c0670bb2d # v2
+ uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
 with:
 tool: cargo-codspeed
.github/workflows/daily_fuzz.yaml (4 changes)

@@ -34,11 +34,11 @@ jobs:
 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 persist-credentials: false
- - uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+ - uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
 - name: "Install Rust toolchain"
 run: rustup show
 - name: "Install mold"
- uses: rui314/setup-mold@v1
+ uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
 - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: Build ruff
 # A debug build means the script runs slower once it gets started,
.github/workflows/daily_property_tests.yaml (2 changes)

@@ -36,7 +36,7 @@ jobs:
 - name: "Install Rust toolchain"
 run: rustup show
 - name: "Install mold"
- uses: rui314/setup-mold@v1
+ uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
 - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 - name: Build Red Knot
 # A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
.github/workflows/mypy_primer.yaml (7 changes)

@@ -35,7 +35,7 @@ jobs:
 persist-credentials: false
 - name: Install the latest version of uv
- uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+ uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
 - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
 with:

@@ -52,6 +52,8 @@ jobs:
 run: |
 cd ruff
 PRIMER_SELECTOR="$(paste -s -d'|' crates/red_knot_python_semantic/resources/primer/good.txt)"
 echo "new commit"
 git rev-list --format=%s --max-count=1 "$GITHUB_SHA"

@@ -62,13 +64,14 @@ jobs:
 cd ..
 echo "Project selector: $PRIMER_SELECTOR"
 # Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
 uvx mypy_primer \
 --repo ruff \
 --type-checker knot \
 --old base_commit \
 --new "$GITHUB_SHA" \
- --project-selector '/(mypy_primer|black|pyp|git-revise|zipp|arrow|isort|itsdangerous|rich|packaging|pybind11|pyinstrument|typeshed-stats|scrapy|werkzeug|bidict|async-utils|python-chess|dacite|python-htmlgen|paroxython|porcupine|psycopg)$' \
+ --project-selector "/($PRIMER_SELECTOR)\$" \
 --output concise \
 --debug > mypy_primer.diff || [ $? -eq 1 ]
.github/workflows/publish-pypi.yml (2 changes)

@@ -22,7 +22,7 @@ jobs:
 id-token: write
 steps:
 - name: "Install uv"
- uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
+ uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
 - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
 with:
 pattern: wheels-*
@@ -79,7 +79,7 @@ repos:
 pass_filenames: false # This makes it a lot faster
 - repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.11.5
+ rev: v0.11.6
 hooks:
 - id: ruff-format
 - id: ruff

@@ -97,7 +97,7 @@ repos:
 # zizmor detects security vulnerabilities in GitHub Actions workflows.
 # Additional configuration for the tool is found in `.github/zizmor.yml`
 - repo: https://github.com/woodruffw/zizmor-pre-commit
- rev: v1.5.2
+ rev: v1.6.0
 hooks:
 - id: zizmor
Cargo.lock (generated, 55 changes)

@@ -334,9 +334,9 @@ dependencies = [
 [[package]]
 name = "clap"
- version = "4.5.36"
+ version = "4.5.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "2df961d8c8a0d08aa9945718ccf584145eee3f3aa06cddbeac12933781102e04"
+ checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071"
 dependencies = [
 "clap_builder",
 "clap_derive",

@@ -344,9 +344,9 @@ dependencies = [
 [[package]]
 name = "clap_builder"
- version = "4.5.36"
+ version = "4.5.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "132dbda40fb6753878316a489d5a1242a8ef2f0d9e47ba01c951ea8aa7d013a5"
+ checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2"
 dependencies = [
 "anstream",
 "anstyle",

@@ -478,7 +478,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
 dependencies = [
 "lazy_static",
- "windows-sys 0.48.0",
+ "windows-sys 0.52.0",
 ]
 [[package]]

@@ -487,7 +487,7 @@ version = "3.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
 dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys 0.52.0",
 ]
 [[package]]

@@ -918,7 +918,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
 dependencies = [
 "libc",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]
 [[package]]

@@ -1499,7 +1499,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
 dependencies = [
 "hermit-abi 0.5.0",
 "libc",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]
 [[package]]

@@ -1553,9 +1553,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
 [[package]]
 name = "jiff"
- version = "0.2.4"
+ version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "d699bc6dfc879fb1bf9bdff0d4c56f0884fc6f0d0eb0fba397a6d00cd9a6b85e"
+ checksum = "59ec30f7142be6fe14e1b021f50b85db8df2d4324ea6e91ec3e5dcde092021d0"
 dependencies = [
 "jiff-static",
 "jiff-tzdb-platform",

@@ -1563,14 +1563,14 @@ dependencies = [
 "portable-atomic",
 "portable-atomic-util",
 "serde",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 [[package]]
 name = "jiff-static"
- version = "0.2.4"
+ version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "8d16e75759ee0aa64c57a56acbf43916987b20c77373cb7e808979e02b93c9f9"
+ checksum = "526b834d727fd59d37b076b0c3236d9adde1b1729a4361e20b2026f738cc1dbe"
 dependencies = [
 "proc-macro2",
 "quote",

@@ -1645,9 +1645,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
 [[package]]
 name = "libc"
- version = "0.2.171"
+ version = "0.2.172"
 source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6"
+ checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
 [[package]]
 name = "libcst"

@@ -2327,9 +2327,9 @@ dependencies = [
 [[package]]
 name = "proc-macro2"
- version = "1.0.94"
+ version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
+ checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
 dependencies = [
 "unicode-ident",
 ]

@@ -2420,13 +2420,12 @@ dependencies = [
 [[package]]
 name = "rand"
- version = "0.9.0"
+ version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
+ checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
 dependencies = [
 "rand_chacha 0.9.0",
 "rand_core 0.9.3",
 "zerocopy",
 ]
 [[package]]

@@ -3084,7 +3083,7 @@ version = "0.0.0"
 dependencies = [
 "anyhow",
 "itertools 0.14.0",
- "rand 0.9.0",
+ "rand 0.9.1",
 "ruff_diagnostics",
 "ruff_source_file",
 "ruff_text_size",

@@ -3428,7 +3427,7 @@ dependencies = [
 "errno",
 "libc",
 "linux-raw-sys 0.4.15",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]
 [[package]]

@@ -3441,7 +3440,7 @@ dependencies = [
 "errno",
 "libc",
 "linux-raw-sys 0.9.3",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]
 [[package]]

@@ -3675,9 +3674,9 @@ dependencies = [
 [[package]]
 name = "shellexpand"
- version = "3.1.0"
+ version = "3.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b"
+ checksum = "8b1fdf65dd6331831494dd616b30351c38e96e45921a27745cf98490458b90bb"
 dependencies = [
 "dirs",
 ]

@@ -3827,7 +3826,7 @@ dependencies = [
 "getrandom 0.3.2",
 "once_cell",
 "rustix 1.0.2",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]
 [[package]]

@@ -4328,7 +4327,7 @@ checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9"
 dependencies = [
 "getrandom 0.3.2",
 "js-sys",
- "rand 0.9.0",
+ "rand 0.9.1",
 "uuid-macro-internal",
 "wasm-bindgen",
 ]

@@ -4599,7 +4598,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys 0.52.0",
 ]
 [[package]]
@@ -31,7 +31,7 @@ mypy_primer \
 ```
 This will show the diagnostics diff for the `black` project between the `main` branch and your `my/feature` branch. To run the
- diff for all projects, you currently need to copy the project-selector regex from the CI pipeline in `.github/workflows/mypy_primer.yaml`.
+ diff for all projects we currently enable in CI, use `--project-selector "/($(paste -s -d'|' crates/red_knot_python_semantic/resources/primer/good.txt))\$"`.
 You can also take a look at the [full list of ecosystem projects]. Note that some of them might still need a `knot_paths` configuration
 option to work correctly.
@@ -252,7 +252,7 @@ fn configuration_rule_severity() -> anyhow::Result<()> {
 r#"
 y = 4 / 0
- for a in range(0, y):
+ for a in range(0, int(y)):
 x = a
 print(x) # possibly-unresolved-reference

@@ -271,7 +271,7 @@ fn configuration_rule_severity() -> anyhow::Result<()> {
 2 | y = 4 / 0
 | ^^^^^ Cannot divide object of type `Literal[4]` by zero
 3 |
- 4 | for a in range(0, y):
+ 4 | for a in range(0, int(y)):
 |
 warning: lint:possibly-unresolved-reference

@@ -307,7 +307,7 @@ fn configuration_rule_severity() -> anyhow::Result<()> {
 2 | y = 4 / 0
 | ^^^^^ Cannot divide object of type `Literal[4]` by zero
 3 |
- 4 | for a in range(0, y):
+ 4 | for a in range(0, int(y)):
 |
 Found 1 diagnostic

@@ -328,7 +328,7 @@ fn cli_rule_severity() -> anyhow::Result<()> {
 y = 4 / 0
- for a in range(0, y):
+ for a in range(0, int(y)):
 x = a
 print(x) # possibly-unresolved-reference

@@ -358,7 +358,7 @@ fn cli_rule_severity() -> anyhow::Result<()> {
 4 | y = 4 / 0
 | ^^^^^ Cannot divide object of type `Literal[4]` by zero
 5 |
- 6 | for a in range(0, y):
+ 6 | for a in range(0, int(y)):
 |
 warning: lint:possibly-unresolved-reference

@@ -405,7 +405,7 @@ fn cli_rule_severity() -> anyhow::Result<()> {
 4 | y = 4 / 0
 | ^^^^^ Cannot divide object of type `Literal[4]` by zero
 5 |
- 6 | for a in range(0, y):
+ 6 | for a in range(0, int(y)):
 |
 Found 2 diagnostics

@@ -426,7 +426,7 @@ fn cli_rule_severity_precedence() -> anyhow::Result<()> {
 r#"
 y = 4 / 0
- for a in range(0, y):
+ for a in range(0, int(y)):
 x = a
 print(x) # possibly-unresolved-reference

@@ -445,7 +445,7 @@ fn cli_rule_severity_precedence() -> anyhow::Result<()> {
 2 | y = 4 / 0
 | ^^^^^ Cannot divide object of type `Literal[4]` by zero
 3 |
- 4 | for a in range(0, y):
+ 4 | for a in range(0, int(y)):
 |
 warning: lint:possibly-unresolved-reference

@@ -482,7 +482,7 @@ fn cli_rule_severity_precedence() -> anyhow::Result<()> {
 2 | y = 4 / 0
 | ^^^^^ Cannot divide object of type `Literal[4]` by zero
 3 |
- 4 | for a in range(0, y):
+ 4 | for a in range(0, int(y)):
 |
 Found 1 diagnostic

@@ -814,7 +814,7 @@ fn user_configuration() -> anyhow::Result<()> {
 r#"
 y = 4 / 0
- for a in range(0, y):
+ for a in range(0, int(y)):
 x = a
 print(x)

@@ -841,7 +841,7 @@ fn user_configuration() -> anyhow::Result<()> {
 2 | y = 4 / 0
 | ^^^^^ Cannot divide object of type `Literal[4]` by zero
 3 |
- 4 | for a in range(0, y):
+ 4 | for a in range(0, int(y)):
 |
 warning: lint:possibly-unresolved-reference

@@ -883,7 +883,7 @@ fn user_configuration() -> anyhow::Result<()> {
 2 | y = 4 / 0
 | ^^^^^ Cannot divide object of type `Literal[4]` by zero
 3 |
- 4 | for a in range(0, y):
+ 4 | for a in range(0, int(y)):
 |
 error: lint:possibly-unresolved-reference
@@ -10,7 +10,7 @@ pub(crate) mod tests {
 use super::Db;
 use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
- use red_knot_python_semantic::{default_lint_registry, Db as SemanticDb};
+ use red_knot_python_semantic::{default_lint_registry, Db as SemanticDb, Program};
 use ruff_db::files::{File, Files};
 use ruff_db::system::{DbWithTestSystem, System, TestSystem};
 use ruff_db::vendored::VendoredFileSystem;

@@ -83,6 +83,10 @@ pub(crate) mod tests {
 fn files(&self) -> &Files {
 &self.files
 }
+ fn python_version(&self) -> ruff_python_ast::PythonVersion {
+ Program::get(self).python_version(self)
+ }
 }
 impl Upcast<dyn SourceDb> for TestDb {

@@ -149,6 +149,10 @@ impl SourceDb for ProjectDatabase {
 fn files(&self) -> &Files {
 &self.files
 }
+ fn python_version(&self) -> ruff_python_ast::PythonVersion {
+ Program::get(self).python_version(self)
+ }
 }
 #[salsa::db]

@@ -207,7 +211,7 @@ pub(crate) mod tests {
 use salsa::Event;
 use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
- use red_knot_python_semantic::Db as SemanticDb;
+ use red_knot_python_semantic::{Db as SemanticDb, Program};
 use ruff_db::files::Files;
 use ruff_db::system::{DbWithTestSystem, System, TestSystem};
 use ruff_db::vendored::VendoredFileSystem;

@@ -281,6 +285,10 @@ pub(crate) mod tests {
 fn files(&self) -> &Files {
 &self.files
 }
+ fn python_version(&self) -> ruff_python_ast::PythonVersion {
+ Program::get(self).python_version(self)
+ }
 }
 impl Upcast<dyn SemanticDb> for TestDb {
@@ -10,7 +10,8 @@ use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder, RuleSele
 use red_knot_python_semantic::register_lints;
 use red_knot_python_semantic::types::check_types;
 use ruff_db::diagnostic::{
- create_parse_diagnostic, Annotation, Diagnostic, DiagnosticId, Severity, Span,
+ create_parse_diagnostic, create_unsupported_syntax_diagnostic, Annotation, Diagnostic,
+ DiagnosticId, Severity, Span,
 };
 use ruff_db::files::File;
 use ruff_db::parsed::parsed_module;

@@ -424,6 +425,13 @@ fn check_file_impl(db: &dyn Db, file: File) -> Vec<Diagnostic> {
 .map(|error| create_parse_diagnostic(file, error)),
 );
+ diagnostics.extend(
+ parsed
+ .unsupported_syntax_errors()
+ .iter()
+ .map(|error| create_unsupported_syntax_diagnostic(file, error)),
+ );
 diagnostics.extend(check_types(db.upcast(), file).into_iter().cloned());
 diagnostics.sort_unstable_by_key(|diagnostic| {

@@ -520,11 +528,13 @@ mod tests {
 use crate::db::tests::TestDb;
 use crate::{check_file_impl, ProjectMetadata};
 use red_knot_python_semantic::types::check_types;
 use red_knot_python_semantic::{Program, ProgramSettings, PythonPlatform, SearchPathSettings};
 use ruff_db::files::system_path_to_file;
 use ruff_db::source::source_text;
 use ruff_db::system::{DbWithTestSystem, DbWithWritableSystem as _, SystemPath, SystemPathBuf};
 use ruff_db::testing::assert_function_query_was_not_run;
 use ruff_python_ast::name::Name;
 use ruff_python_ast::PythonVersion;
 #[test]
 fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {

@@ -532,6 +542,16 @@ mod tests {
 let mut db = TestDb::new(project);
 let path = SystemPath::new("test.py");
+ Program::from_settings(
+ &db,
+ ProgramSettings {
+ python_version: PythonVersion::default(),
+ python_platform: PythonPlatform::default(),
+ search_paths: SearchPathSettings::new(vec![SystemPathBuf::from(".")]),
+ },
+ )
+ .expect("Failed to configure program settings");
 db.write_file(path, "x = 10")?;
 let file = system_path_to_file(&db, path).unwrap();
@@ -6,7 +6,9 @@ use ruff_db::parsed::parsed_module;
 use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
 use ruff_python_ast::visitor::source_order;
 use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
- use ruff_python_ast::{self as ast, Alias, Expr, Parameter, ParameterWithDefault, Stmt};
+ use ruff_python_ast::{
+ self as ast, Alias, Comprehension, Expr, Parameter, ParameterWithDefault, Stmt,
+ };
 fn setup_db(project_root: &SystemPath, system: TestSystem) -> anyhow::Result<ProjectDatabase> {
 let project = ProjectMetadata::discover(project_root, &system)?;

@@ -258,6 +260,14 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
 source_order::walk_expr(self, expr);
 }
+ fn visit_comprehension(&mut self, comprehension: &Comprehension) {
+ self.visit_expr(&comprehension.iter);
+ self.visit_target(&comprehension.target);
+ for if_expr in &comprehension.ifs {
+ self.visit_expr(if_expr);
+ }
+ }
 fn visit_parameter(&mut self, parameter: &Parameter) {
 let _ty = parameter.inferred_type(&self.model);
@@ -237,6 +237,11 @@ def _(c: Callable[[Concatenate[int, str, ...], int], int]):
|
||||
|
||||
## Using `typing.ParamSpec`
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
Using a `ParamSpec` in a `Callable` annotation:
|
||||
|
||||
```py
|
||||
|
||||
@@ -48,6 +48,11 @@ reveal_type(get_foo()) # revealed: Foo
|
||||
|
||||
## Deferred self-reference annotations in a class definition
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -94,6 +99,11 @@ class Foo:
|
||||
|
||||
## Non-deferred self-reference annotations in a class definition
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
# error: [unresolved-reference]
|
||||
@@ -146,3 +156,24 @@ def _():
|
||||
def f(self) -> C:
|
||||
return self
|
||||
```
|
||||
|
||||
## Base class references
|
||||
|
||||
### Not deferred by __future__.annotations
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A(B): # error: [unresolved-reference]
|
||||
pass
|
||||
|
||||
class B:
|
||||
pass
|
||||
```
|
||||
|
||||
### Deferred in stub files
|
||||
|
||||
```pyi
|
||||
class A(B): ...
|
||||
class B: ...
|
||||
```
|
||||
|
||||
@@ -89,7 +89,7 @@ def _(
|
||||
reveal_type(k) # revealed: Unknown
|
||||
reveal_type(p) # revealed: Unknown
|
||||
reveal_type(q) # revealed: int | Unknown
|
||||
reveal_type(r) # revealed: @Todo(generics)
|
||||
reveal_type(r) # revealed: @Todo(unknown type subscript)
|
||||
```
|
||||
|
||||
## Invalid Collection based AST nodes
|
||||
|
||||
@@ -137,7 +137,7 @@ from other import Literal
|
||||
a1: Literal[26]
|
||||
|
||||
def f():
|
||||
reveal_type(a1) # revealed: @Todo(generics)
|
||||
reveal_type(a1) # revealed: @Todo(unknown type subscript)
|
||||
```
|
||||
|
||||
## Detecting typing_extensions.Literal
|
||||
|
||||
@@ -72,13 +72,11 @@ reveal_type(baz) # revealed: Literal["bazfoo"]
|
||||
qux = (foo, bar)
|
||||
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
|
||||
|
||||
# TODO: Infer "LiteralString"
|
||||
reveal_type(foo.join(qux)) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(foo.join(qux)) # revealed: LiteralString
|
||||
|
||||
template: LiteralString = "{}, {}"
|
||||
reveal_type(template) # revealed: Literal["{}, {}"]
|
||||
# TODO: Infer `LiteralString`
|
||||
reveal_type(template.format(foo, bar)) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(template.format(foo, bar)) # revealed: LiteralString
|
||||
```
|
||||
|
||||
### Assignability
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# Starred expression annotations
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.11"
|
||||
```
|
||||
|
||||
Type annotations for `*args` can be starred expressions themselves:
|
||||
|
||||
```py
|
||||
|
||||
@@ -67,21 +67,24 @@ import typing
|
||||
|
||||
####################
|
||||
### Built-ins
|
||||
####################
|
||||
|
||||
class ListSubclass(typing.List): ...
|
||||
|
||||
# revealed: tuple[Literal[ListSubclass], Literal[list], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
||||
# TODO: generic protocols
|
||||
# revealed: tuple[Literal[ListSubclass], Literal[list], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
|
||||
reveal_type(ListSubclass.__mro__)
|
||||
|
||||
class DictSubclass(typing.Dict): ...
|
||||
|
||||
# TODO: should have `Generic`, should not have `Unknown`
|
||||
# revealed: tuple[Literal[DictSubclass], Literal[dict], Unknown, Literal[object]]
|
||||
# TODO: generic protocols
|
||||
# revealed: tuple[Literal[DictSubclass], Literal[dict], Literal[MutableMapping], Literal[Mapping], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
|
||||
reveal_type(DictSubclass.__mro__)
|
||||
|
||||
class SetSubclass(typing.Set): ...
|
||||
|
||||
# revealed: tuple[Literal[SetSubclass], Literal[set], Literal[MutableSet], Literal[AbstractSet], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
||||
# TODO: generic protocols
|
||||
# revealed: tuple[Literal[SetSubclass], Literal[set], Literal[MutableSet], Literal[AbstractSet], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
|
||||
reveal_type(SetSubclass.__mro__)
|
||||
|
||||
class FrozenSetSubclass(typing.FrozenSet): ...
|
||||
@@ -92,11 +95,12 @@ reveal_type(FrozenSetSubclass.__mro__)
|
||||
|
||||
####################
|
||||
### `collections`
|
||||
####################
|
||||
|
||||
class ChainMapSubclass(typing.ChainMap): ...
|
||||
|
||||
# TODO: Should be (ChainMapSubclass, ChainMap, MutableMapping, Mapping, Collection, Sized, Iterable, Container, Generic, object)
|
||||
# revealed: tuple[Literal[ChainMapSubclass], Literal[ChainMap], Unknown, Literal[object]]
|
||||
# TODO: generic protocols
|
||||
# revealed: tuple[Literal[ChainMapSubclass], Literal[ChainMap], Literal[MutableMapping], Literal[Mapping], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
|
||||
reveal_type(ChainMapSubclass.__mro__)
|
||||
|
||||
class CounterSubclass(typing.Counter): ...
|
||||
@@ -113,7 +117,8 @@ reveal_type(DefaultDictSubclass.__mro__)
|
||||
|
||||
class DequeSubclass(typing.Deque): ...
|
||||
|
||||
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
||||
# TODO: generic protocols
|
||||
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
|
||||
reveal_type(DequeSubclass.__mro__)
|
||||
|
||||
class OrderedDictSubclass(typing.OrderedDict): ...
|
||||
|
||||
@@ -41,7 +41,7 @@ class Foo:
|
||||
One thing that is supported is error messages for using special forms in type expressions.
|
||||
|
||||
```py
|
||||
from typing_extensions import Unpack, TypeGuard, TypeIs, Concatenate, ParamSpec
|
||||
from typing_extensions import Unpack, TypeGuard, TypeIs, Concatenate, ParamSpec, Generic
|
||||
|
||||
def _(
|
||||
a: Unpack, # error: [invalid-type-form] "`typing.Unpack` requires exactly one argument when used in a type expression"
|
||||
@@ -49,6 +49,7 @@ def _(
|
||||
c: TypeIs, # error: [invalid-type-form] "`typing.TypeIs` requires exactly one argument when used in a type expression"
|
||||
d: Concatenate, # error: [invalid-type-form] "`typing.Concatenate` requires at least two arguments when used in a type expression"
|
||||
e: ParamSpec,
|
||||
f: Generic, # error: [invalid-type-form] "`typing.Generic` is not allowed in type expressions"
|
||||
) -> None:
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
@@ -65,7 +66,7 @@ You can't inherit from most of these. `typing.Callable` is an exception.
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
from typing_extensions import Self, Unpack, TypeGuard, TypeIs, Concatenate
|
||||
from typing_extensions import Self, Unpack, TypeGuard, TypeIs, Concatenate, Generic
|
||||
|
||||
class A(Self): ... # error: [invalid-base]
|
||||
class B(Unpack): ... # error: [invalid-base]
|
||||
@@ -73,12 +74,18 @@ class C(TypeGuard): ... # error: [invalid-base]
|
||||
class D(TypeIs): ... # error: [invalid-base]
|
||||
class E(Concatenate): ... # error: [invalid-base]
|
||||
class F(Callable): ...
|
||||
class G(Generic): ... # error: [invalid-base] "Cannot inherit from plain `Generic`"
|
||||
|
||||
reveal_type(F.__mro__) # revealed: tuple[Literal[F], @Todo(Support for Callable as a base class), Literal[object]]
|
||||
```
|
||||
|
||||
## Subscriptability
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
Some of these are not subscriptable:
|
||||
|
||||
```py
|
||||
|
||||
@@ -25,6 +25,11 @@ x = "foo" # error: [invalid-assignment] "Object of type `Literal["foo"]` is not
|
||||
|
||||
## Tuple annotations are understood
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
`module.py`:
|
||||
|
||||
```py
|
||||
@@ -56,7 +61,7 @@ reveal_type(d) # revealed: tuple[tuple[str, str], tuple[int, int]]
|
||||
reveal_type(e) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(f) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(g) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(h) # revealed: tuple[@Todo(generics), @Todo(generics)]
|
||||
reveal_type(h) # revealed: tuple[@Todo(specialized non-generic class), @Todo(specialized non-generic class)]
|
||||
|
||||
reveal_type(i) # revealed: tuple[str | int, str | int]
|
||||
reveal_type(j) # revealed: tuple[str | int]
|
||||
|
||||
@@ -302,7 +302,7 @@ class C:
|
||||
|
||||
c_instance = C()
|
||||
reveal_type(c_instance.a) # revealed: Unknown | Literal[1]
|
||||
reveal_type(c_instance.b) # revealed: Unknown | @Todo(starred unpacking)
|
||||
reveal_type(c_instance.b) # revealed: Unknown
|
||||
```
|
||||
|
||||
#### Attributes defined in for-loop (unpacking)
|
||||
@@ -397,15 +397,27 @@ class IntIterable:
|
||||
def __iter__(self) -> IntIterator:
|
||||
return IntIterator()
|
||||
|
||||
class TupleIterator:
|
||||
def __next__(self) -> tuple[int, str]:
|
||||
return (1, "a")
|
||||
|
||||
class TupleIterable:
|
||||
def __iter__(self) -> TupleIterator:
|
||||
return TupleIterator()
|
||||
|
||||
class C:
|
||||
def __init__(self) -> None:
|
||||
[... for self.a in IntIterable()]
|
||||
[... for (self.b, self.c) in TupleIterable()]
|
||||
[... for self.d in IntIterable() for self.e in IntIterable()]
|
||||
|
||||
c_instance = C()
|
||||
|
||||
# TODO: Should be `Unknown | int`
|
||||
# error: [unresolved-attribute]
|
||||
reveal_type(c_instance.a) # revealed: Unknown
|
||||
reveal_type(c_instance.a) # revealed: Unknown | int
|
||||
reveal_type(c_instance.b) # revealed: Unknown | int
|
||||
reveal_type(c_instance.c) # revealed: Unknown | str
|
||||
reveal_type(c_instance.d) # revealed: Unknown | int
|
||||
reveal_type(c_instance.e) # revealed: Unknown | int
|
||||
```
|
||||
|
||||
#### Conditionally declared / bound attributes
|
||||
@@ -1665,7 +1677,7 @@ functions are instances of that class:
|
||||
def f(): ...
|
||||
|
||||
reveal_type(f.__defaults__) # revealed: @Todo(full tuple[...] support) | None
|
||||
reveal_type(f.__kwdefaults__) # revealed: @Todo(generics) | None
|
||||
reveal_type(f.__kwdefaults__) # revealed: @Todo(specialized non-generic class) | None
|
||||
```
|
||||
|
||||
Some attributes are special-cased, however:
|
||||
@@ -1698,9 +1710,9 @@ Most attribute accesses on bool-literal types are delegated to `builtins.bool`,
|
||||
bools are instances of that class:
|
||||
|
||||
```py
|
||||
# revealed: bound method Literal[True].__and__(**kwargs: @Todo(todo signature **kwargs)) -> @Todo(return type of overloaded function)
|
||||
# revealed: Overload[(value: bool, /) -> bool, (value: int, /) -> int]
|
||||
reveal_type(True.__and__)
|
||||
# revealed: bound method Literal[False].__or__(**kwargs: @Todo(todo signature **kwargs)) -> @Todo(return type of overloaded function)
|
||||
# revealed: Overload[(value: bool, /) -> bool, (value: int, /) -> int]
|
||||
reveal_type(False.__or__)
|
||||
```
|
||||
|
||||
@@ -1716,7 +1728,8 @@ reveal_type(False.real) # revealed: Literal[0]
|
||||
All attribute access on literal `bytes` types is currently delegated to `builtins.bytes`:
|
||||
|
||||
```py
|
||||
reveal_type(b"foo".join) # revealed: bound method Literal[b"foo"].join(iterable_of_bytes: @Todo(generics), /) -> bytes
|
||||
# revealed: bound method Literal[b"foo"].join(iterable_of_bytes: @Todo(specialized non-generic class), /) -> bytes
|
||||
reveal_type(b"foo".join)
|
||||
# revealed: bound method Literal[b"foo"].endswith(suffix: @Todo(Support for `typing.TypeAlias`), start: SupportsIndex | None = ellipsis, end: SupportsIndex | None = ellipsis, /) -> bool
|
||||
reveal_type(b"foo".endswith)
|
||||
```
|
||||
@@ -1819,6 +1832,89 @@ def f(never: Never):
|
||||
never.another_attribute = never
|
||||
```
|
||||
|
||||
### Cyclic implicit attributes
|
||||
|
||||
Inferring types for undeclared implicit attributes can be cyclic:
|
||||
|
||||
```py
|
||||
class C:
|
||||
def __init__(self):
|
||||
self.x = 1
|
||||
|
||||
def copy(self, other: "C"):
|
||||
self.x = other.x
|
||||
|
||||
reveal_type(C().x) # revealed: Unknown | Literal[1]
|
||||
```
|
||||
|
||||
If the only assignment to a name is cyclic, we just infer `Unknown` for that attribute:
|
||||
|
||||
```py
|
||||
class D:
|
||||
def copy(self, other: "D"):
|
||||
self.x = other.x
|
||||
|
||||
reveal_type(D().x) # revealed: Unknown
|
||||
```
|
||||
|
||||
If there is an annotation for a name, we don't try to infer any type from the RHS of assignments to
|
||||
that name, so these cases don't trigger any cycle:
|
||||
|
||||
```py
|
||||
class E:
|
||||
def __init__(self):
|
||||
self.x: int = 1
|
||||
|
||||
def copy(self, other: "E"):
|
||||
self.x = other.x
|
||||
|
||||
reveal_type(E().x) # revealed: int
|
||||
|
||||
class F:
|
||||
def __init__(self):
|
||||
self.x = 1
|
||||
|
||||
def copy(self, other: "F"):
|
||||
self.x: int = other.x
|
||||
|
||||
reveal_type(F().x) # revealed: int
|
||||
|
||||
class G:
|
||||
def copy(self, other: "G"):
|
||||
self.x: int = other.x
|
||||
|
||||
reveal_type(G().x) # revealed: int
|
||||
```
|
||||
|
||||
We can even handle cycles involving multiple classes:
|
||||
|
||||
```py
|
||||
class A:
|
||||
def __init__(self):
|
||||
self.x = 1
|
||||
|
||||
def copy(self, other: "B"):
|
||||
self.x = other.x
|
||||
|
||||
class B:
|
||||
def copy(self, other: "A"):
|
||||
self.x = other.x
|
||||
|
||||
reveal_type(B().x) # revealed: Unknown | Literal[1]
|
||||
reveal_type(A().x) # revealed: Unknown | Literal[1]
|
||||
```
|
||||
|
||||
This case additionally tests our union/intersection simplification logic:
|
||||
|
||||
```py
|
||||
class H:
|
||||
def __init__(self):
|
||||
self.x = 1
|
||||
|
||||
def copy(self, other: "H"):
|
||||
self.x = other.x or self.x
|
||||
```
|
||||
|
||||
### Builtin types attributes
|
||||
|
||||
This test can probably be removed eventually, but we currently include it because we do not yet
|
||||
|
||||
@@ -310,9 +310,7 @@ reveal_type(A() + 1) # revealed: A
|
||||
reveal_type(1 + A()) # revealed: A
|
||||
|
||||
reveal_type(A() + "foo") # revealed: A
|
||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||
# TODO overloads
|
||||
reveal_type("foo" + A()) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type("foo" + A()) # revealed: A
|
||||
|
||||
reveal_type(A() + b"foo") # revealed: A
|
||||
# TODO should be `A` since `bytes.__add__` doesn't support `A` instances
|
||||
@@ -320,16 +318,14 @@ reveal_type(b"foo" + A()) # revealed: bytes
|
||||
|
||||
reveal_type(A() + ()) # revealed: A
|
||||
# TODO this should be `A`, since `tuple.__add__` doesn't support `A` instances
|
||||
reveal_type(() + A()) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(() + A()) # revealed: @Todo(full tuple[...] support)
|
||||
|
||||
literal_string_instance = "foo" * 1_000_000_000
|
||||
# the test is not testing what it's meant to be testing if this isn't a `LiteralString`:
|
||||
reveal_type(literal_string_instance) # revealed: LiteralString
|
||||
|
||||
reveal_type(A() + literal_string_instance) # revealed: A
|
||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||
# TODO overloads
|
||||
reveal_type(literal_string_instance + A()) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(literal_string_instance + A()) # revealed: A
|
||||
```
|
||||
|
||||
## Operations involving instances of classes inheriting from `Any`
|
||||
|
||||
@@ -50,9 +50,11 @@ reveal_type(1 ** (largest_u32 + 1)) # revealed: int
|
||||
reveal_type(2**largest_u32) # revealed: int
|
||||
|
||||
def variable(x: int):
|
||||
reveal_type(x**2) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(2**x) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(x**x) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(x**2) # revealed: int
|
||||
# TODO: should be `Any` (overload 5 on `__pow__`), requires correct overload matching
|
||||
reveal_type(2**x) # revealed: int
|
||||
# TODO: should be `Any` (overload 5 on `__pow__`), requires correct overload matching
|
||||
reveal_type(x**x) # revealed: int
|
||||
```
|
||||
|
||||
If the second argument is \<0, a `float` is returned at runtime. If the first argument is \<0 but
|
||||
|
||||
@@ -43,7 +43,7 @@ if True and (x := 1):
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
flag or (x := 1) or reveal_type(x) # revealed: Literal[1]
|
||||
flag or (x := 1) or reveal_type(x) # revealed: Never
|
||||
|
||||
# error: [unresolved-reference]
|
||||
flag or reveal_type(y) or (y := 1) # revealed: Unknown
|
||||
|
||||
@@ -21,6 +21,11 @@ reveal_type(get_int_async()) # revealed: @Todo(generic types.CoroutineType)
|
||||
|
||||
## Generic
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
def get_int[T]() -> int:
|
||||
return 42
|
||||
|
||||
@@ -94,7 +94,7 @@ function object. We model this explicitly, which means that we can access `__kwd
|
||||
methods, even though it is not available on `types.MethodType`:
|
||||
|
||||
```py
|
||||
reveal_type(bound_method.__kwdefaults__) # revealed: @Todo(generics) | None
|
||||
reveal_type(bound_method.__kwdefaults__) # revealed: @Todo(specialized non-generic class) | None
|
||||
```
|
||||
|
||||
## Basic method calls on class objects and instances
|
||||
@@ -399,6 +399,11 @@ reveal_type(getattr_static(C, "f").__get__("dummy", C)) # revealed: bound metho
|
||||
|
||||
### Classmethods mixed with other decorators
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
When a `@classmethod` is additionally decorated with another decorator, it is still treated as a
|
||||
class method:
|
||||
|
||||
|
||||
@@ -201,3 +201,15 @@ def _(literals_2: Literal[0, 1], b: bool, flag: bool):
|
||||
# Now union the two:
|
||||
reveal_type(bool_and_literals_128 if flag else literals_128_shifted) # revealed: int
|
||||
```
|
||||
|
||||
## Simplifying gradually-equivalent types
|
||||
|
||||
If two types are gradually equivalent, we can keep just one of them in a union:
|
||||
|
||||
```py
|
||||
from typing import Any, Union
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
def _(x: Union[Intersection[Any, Not[int]], Intersection[Any, Not[int]]]):
|
||||
reveal_type(x) # revealed: Any & ~int
|
||||
```
|
||||
|
||||
@@ -265,6 +265,11 @@ def f(flag: bool):
|
||||
|
||||
## Supers with Generic Classes
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from knot_extensions import TypeOf, static_assert, is_subtype_of
|
||||
|
||||
@@ -316,6 +321,11 @@ class A:
|
||||
|
||||
### Failing Condition Checks
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
`super()` requires its first argument to be a valid class, and its second argument to be either an
|
||||
instance or a subclass of the first. If either condition is violated, a `TypeError` is raised at
|
||||
runtime.
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# Pattern matching
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
## With wildcard
|
||||
|
||||
```py
|
||||
|
||||
@@ -297,6 +297,11 @@ reveal_type(WithoutEq(1) == WithoutEq(2)) # revealed: bool
|
||||
|
||||
### `order`
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
`order` is set to `False` by default. If `order=True`, `__lt__`, `__le__`, `__gt__`, and `__ge__`
|
||||
methods will be generated:
|
||||
|
||||
@@ -471,6 +476,11 @@ reveal_type(C.__init__) # revealed: (x: int = Literal[15], y: int = Literal[0],
|
||||
|
||||
## Generic dataclasses
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from dataclasses import dataclass
|
||||
|
||||
@@ -537,14 +547,14 @@ the descriptor's `__get__` method as if it had been called on the class itself,
|
||||
for the `instance` argument.
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
from typing import Literal, overload
|
||||
from dataclasses import dataclass
|
||||
|
||||
class ConvertToLength:
|
||||
_len: int = 0
|
||||
|
||||
@overload
|
||||
def __get__(self, instance: None, owner: type) -> str: ...
|
||||
def __get__(self, instance: None, owner: type) -> Literal[""]: ...
|
||||
@overload
|
||||
def __get__(self, instance: object, owner: type | None) -> int: ...
|
||||
def __get__(self, instance: object | None, owner: type | None) -> str | int:
|
||||
@@ -560,12 +570,10 @@ class ConvertToLength:
|
||||
class C:
|
||||
converter: ConvertToLength = ConvertToLength()
|
||||
|
||||
# TODO: Should be `(converter: str = Literal[""]) -> None` once we understand overloads
|
||||
reveal_type(C.__init__) # revealed: (converter: str = str | int) -> None
|
||||
reveal_type(C.__init__) # revealed: (converter: str = Literal[""]) -> None
|
||||
|
||||
c = C("abc")
|
||||
# TODO: Should be `int` once we understand overloads
|
||||
reveal_type(c.converter) # revealed: str | int
|
||||
reveal_type(c.converter) # revealed: int
|
||||
|
||||
# This is also okay:
|
||||
C()
|
||||
@@ -601,8 +609,7 @@ class AcceptsStrAndInt:
|
||||
class C:
|
||||
field: AcceptsStrAndInt = AcceptsStrAndInt()
|
||||
|
||||
# TODO: Should be `field: str | int = int` once we understand overloads
|
||||
reveal_type(C.__init__) # revealed: (field: Unknown = int) -> None
|
||||
reveal_type(C.__init__) # revealed: (field: str | int = int) -> None
|
||||
```
|
||||
|
||||
## `dataclasses.field`
|
||||
|
||||
@@ -145,10 +145,10 @@ def f(x: int) -> int:
|
||||
return x**2
|
||||
|
||||
# TODO: Should be `_lru_cache_wrapper[int]`
|
||||
reveal_type(f) # revealed: @Todo(generics)
|
||||
reveal_type(f) # revealed: @Todo(specialized non-generic class)
|
||||
|
||||
# TODO: Should be `int`
|
||||
reveal_type(f(1)) # revealed: @Todo(generics)
|
||||
reveal_type(f(1)) # revealed: @Todo(specialized non-generic class)
|
||||
```
|
||||
|
||||
## Lambdas as decorators
|
||||
|
||||
@@ -459,11 +459,9 @@ class Descriptor:
|
||||
class C:
|
||||
d: Descriptor = Descriptor()
|
||||
|
||||
# TODO: should be `Literal["called on class object"]
|
||||
reveal_type(C.d) # revealed: LiteralString
|
||||
reveal_type(C.d) # revealed: Literal["called on class object"]
|
||||
|
||||
# TODO: should be `Literal["called on instance"]
|
||||
reveal_type(C().d) # revealed: LiteralString
|
||||
reveal_type(C().d) # revealed: Literal["called on instance"]
|
||||
```
|
||||
|
||||
## Descriptor protocol for dunder methods
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
# Version-related syntax error diagnostics
|
||||
|
||||
## `match` statement
|
||||
|
||||
The `match` statement was introduced in Python 3.10.
|
||||
|
||||
### Before 3.10
|
||||
|
||||
<!-- snapshot-diagnostics -->
|
||||
|
||||
We should emit a syntax error before 3.10.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.9"
|
||||
```
|
||||
|
||||
```py
|
||||
match 2: # error: 1 [invalid-syntax] "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
case 1:
|
||||
print("it's one")
|
||||
```
|
||||
|
||||
### After 3.10
|
||||
|
||||
On or after 3.10, no error should be reported.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
```py
|
||||
match 2:
|
||||
case 1:
|
||||
print("it's one")
|
||||
```
|
||||
@@ -26,6 +26,11 @@ def _(never: Never, any_: Any, unknown: Unknown, flag: bool):
|
||||
|
||||
## Use case: Type narrowing and exhaustiveness checking
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
`assert_never` can be used in combination with type narrowing as a way to make sure that all cases
|
||||
are handled in a series of `isinstance` checks or other narrowing patterns that are supported.
|
||||
|
||||
|
||||
@@ -61,7 +61,7 @@ from knot_extensions import Unknown
|
||||
|
||||
def f(x: Any, y: Unknown, z: Any | str | int):
|
||||
a = cast(dict[str, Any], x)
|
||||
reveal_type(a) # revealed: @Todo(generics)
|
||||
reveal_type(a) # revealed: @Todo(specialized non-generic class)
|
||||
|
||||
b = cast(Any, y)
|
||||
reveal_type(b) # revealed: Any
|
||||
|
||||
@@ -76,6 +76,11 @@ def g(x: Any = "foo"):
|
||||
|
||||
## Stub functions
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
### In Protocol
|
||||
|
||||
```py
|
||||
|
||||
@@ -56,6 +56,11 @@ def f() -> int:
|
||||
|
||||
### In Protocol
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from typing import Protocol, TypeVar
|
||||
|
||||
@@ -69,8 +74,6 @@ class Baz(Bar):
|
||||
T = TypeVar("T")
|
||||
|
||||
class Qux(Protocol[T]):
|
||||
# TODO: no error
|
||||
# error: [invalid-return-type]
|
||||
def f(self) -> int: ...
|
||||
|
||||
class Foo(Protocol):
|
||||
@@ -85,6 +88,11 @@ class Lorem(t[0]):
|
||||
|
||||
### In abstract method
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# Generic classes
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.13"
|
||||
```
|
||||
|
||||
## PEP 695 syntax
|
||||
|
||||
TODO: Add a `red_knot_extension` function that asserts whether a function or class is generic.
|
||||
@@ -40,8 +45,6 @@ from typing import Generic, TypeVar
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
# TODO: no error
|
||||
# error: [invalid-base]
|
||||
class C(Generic[T]): ...
|
||||
```
|
||||
|
||||
@@ -159,12 +162,12 @@ consistent with each other.
|
||||
|
||||
```py
|
||||
class C[T]:
|
||||
def __new__(cls, x: T) -> "C"[T]:
|
||||
def __new__(cls, x: T) -> "C[T]":
|
||||
return object.__new__(cls)
|
||||
|
||||
reveal_type(C(1)) # revealed: C[Literal[1]]
|
||||
|
||||
# TODO: error: [invalid-argument-type]
|
||||
# error: [invalid-assignment] "Object of type `C[Literal["five"]]` is not assignable to `C[int]`"
|
||||
wrong_innards: C[int] = C("five")
|
||||
```
|
||||
|
||||
@@ -176,7 +179,7 @@ class C[T]:
|
||||
|
||||
reveal_type(C(1)) # revealed: C[Literal[1]]
|
||||
|
||||
# TODO: error: [invalid-argument-type]
|
||||
# error: [invalid-assignment] "Object of type `C[Literal["five"]]` is not assignable to `C[int]`"
|
||||
wrong_innards: C[int] = C("five")
|
||||
```
|
||||
|
||||
@@ -184,14 +187,14 @@ wrong_innards: C[int] = C("five")
|
||||
|
||||
```py
|
||||
class C[T]:
|
||||
def __new__(cls, x: T) -> "C"[T]:
|
||||
def __new__(cls, x: T) -> "C[T]":
|
||||
return object.__new__(cls)
|
||||
|
||||
def __init__(self, x: T) -> None: ...
|
||||
|
||||
reveal_type(C(1)) # revealed: C[Literal[1]]
|
||||
|
||||
# TODO: error: [invalid-argument-type]
|
||||
# error: [invalid-assignment] "Object of type `C[Literal["five"]]` is not assignable to `C[int]`"
|
||||
wrong_innards: C[int] = C("five")
|
||||
```
|
||||
|
||||
@@ -199,25 +202,25 @@ wrong_innards: C[int] = C("five")
|
||||
|
||||
```py
|
||||
class C[T]:
|
||||
def __new__(cls, *args, **kwargs) -> "C"[T]:
|
||||
def __new__(cls, *args, **kwargs) -> "C[T]":
|
||||
return object.__new__(cls)
|
||||
|
||||
def __init__(self, x: T) -> None: ...
|
||||
|
||||
reveal_type(C(1)) # revealed: C[Literal[1]]
|
||||
|
||||
# TODO: error: [invalid-argument-type]
|
||||
# error: [invalid-assignment] "Object of type `C[Literal["five"]]` is not assignable to `C[int]`"
|
||||
wrong_innards: C[int] = C("five")
|
||||
|
||||
class D[T]:
|
||||
def __new__(cls, x: T) -> "D"[T]:
|
||||
def __new__(cls, x: T) -> "D[T]":
|
||||
return object.__new__(cls)
|
||||
|
||||
def __init__(self, *args, **kwargs) -> None: ...
|
||||
|
||||
reveal_type(D(1)) # revealed: D[Literal[1]]
|
||||
|
||||
# TODO: error: [invalid-argument-type]
|
||||
# error: [invalid-assignment] "Object of type `D[Literal["five"]]` is not assignable to `D[int]`"
|
||||
wrong_innards: D[int] = D("five")
|
||||
```
|
||||
|
||||
@@ -242,7 +245,7 @@ reveal_type(C(1, "string")) # revealed: C[Unknown]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(C(1, True)) # revealed: C[Unknown]
|
||||
|
||||
# TODO: error for the correct reason
|
||||
# TODO: [invalid-assignment] "Object of type `C[Literal["five"]]` is not assignable to `C[int]`"
|
||||
# error: [invalid-argument-type] "Argument to this function is incorrect: Expected `S`, found `Literal[1]`"
|
||||
wrong_innards: C[int] = C("five", 1)
|
||||
```
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# Generic functions
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
## Typevar must be used at least twice
|
||||
|
||||
If you're only using a typevar for a single parameter, you don't need the typevar — just use
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# PEP 695 Generics
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
[PEP 695] and Python 3.12 introduced new, more ergonomic syntax for type variables.
|
||||
|
||||
## Type variables
|
||||
@@ -59,19 +64,19 @@ is.)
|
||||
from knot_extensions import is_fully_static, static_assert
|
||||
from typing import Any
|
||||
|
||||
def unbounded_unconstrained[T](t: list[T]) -> None:
|
||||
def unbounded_unconstrained[T](t: T) -> None:
|
||||
static_assert(is_fully_static(T))
|
||||
|
||||
def bounded[T: int](t: list[T]) -> None:
|
||||
def bounded[T: int](t: T) -> None:
|
||||
static_assert(is_fully_static(T))
|
||||
|
||||
def bounded_by_gradual[T: Any](t: list[T]) -> None:
|
||||
def bounded_by_gradual[T: Any](t: T) -> None:
|
||||
static_assert(not is_fully_static(T))
|
||||
|
||||
def constrained[T: (int, str)](t: list[T]) -> None:
|
||||
def constrained[T: (int, str)](t: T) -> None:
|
||||
static_assert(is_fully_static(T))
|
||||
|
||||
def constrained_by_gradual[T: (int, Any)](t: list[T]) -> None:
|
||||
def constrained_by_gradual[T: (int, Any)](t: T) -> None:
|
||||
static_assert(not is_fully_static(T))
|
||||
```
|
||||
|
||||
@@ -94,7 +99,7 @@ class Base(Super): ...
|
||||
class Sub(Base): ...
|
||||
class Unrelated: ...
|
||||
|
||||
def unbounded_unconstrained[T, U](t: list[T], u: list[U]) -> None:
|
||||
def unbounded_unconstrained[T, U](t: T, u: U) -> None:
|
||||
static_assert(is_assignable_to(T, T))
|
||||
static_assert(is_assignable_to(T, object))
|
||||
static_assert(not is_assignable_to(T, Super))
|
||||
@@ -124,7 +129,7 @@ is a final class, since the typevar can still be specialized to `Never`.)
|
||||
from typing import Any
|
||||
from typing_extensions import final
|
||||
|
||||
def bounded[T: Super](t: list[T]) -> None:
|
||||
def bounded[T: Super](t: T) -> None:
|
||||
static_assert(is_assignable_to(T, Super))
|
||||
static_assert(not is_assignable_to(T, Sub))
|
||||
static_assert(not is_assignable_to(Super, T))
|
||||
@@ -135,7 +140,7 @@ def bounded[T: Super](t: list[T]) -> None:
|
||||
static_assert(not is_subtype_of(Super, T))
|
||||
static_assert(not is_subtype_of(Sub, T))
|
||||
|
||||
def bounded_by_gradual[T: Any](t: list[T]) -> None:
|
||||
def bounded_by_gradual[T: Any](t: T) -> None:
|
||||
static_assert(is_assignable_to(T, Any))
|
||||
static_assert(is_assignable_to(Any, T))
|
||||
static_assert(is_assignable_to(T, Super))
|
||||
@@ -153,7 +158,7 @@ def bounded_by_gradual[T: Any](t: list[T]) -> None:
|
||||
@final
|
||||
class FinalClass: ...
|
||||
|
||||
def bounded_final[T: FinalClass](t: list[T]) -> None:
|
||||
def bounded_final[T: FinalClass](t: T) -> None:
|
||||
static_assert(is_assignable_to(T, FinalClass))
|
||||
static_assert(not is_assignable_to(FinalClass, T))
|
||||
|
||||
@@ -167,14 +172,14 @@ true even if both typevars are bounded by the same final class, since you can sp
|
||||
typevars to `Never` in addition to that final class.
|
||||
|
||||
```py
|
||||
def two_bounded[T: Super, U: Super](t: list[T], u: list[U]) -> None:
|
||||
def two_bounded[T: Super, U: Super](t: T, u: U) -> None:
|
||||
static_assert(not is_assignable_to(T, U))
|
||||
static_assert(not is_assignable_to(U, T))
|
||||
|
||||
static_assert(not is_subtype_of(T, U))
|
||||
static_assert(not is_subtype_of(U, T))
|
||||
|
||||
def two_final_bounded[T: FinalClass, U: FinalClass](t: list[T], u: list[U]) -> None:
|
||||
def two_final_bounded[T: FinalClass, U: FinalClass](t: T, u: U) -> None:
|
||||
static_assert(not is_assignable_to(T, U))
|
||||
static_assert(not is_assignable_to(U, T))
|
||||
|
||||
@@ -189,7 +194,7 @@ intersection of all of its constraints is a subtype of the typevar.
|
||||
```py
|
||||
from knot_extensions import Intersection
|
||||
|
||||
def constrained[T: (Base, Unrelated)](t: list[T]) -> None:
|
||||
def constrained[T: (Base, Unrelated)](t: T) -> None:
|
||||
static_assert(not is_assignable_to(T, Super))
|
||||
static_assert(not is_assignable_to(T, Base))
|
||||
static_assert(not is_assignable_to(T, Sub))
|
||||
@@ -214,7 +219,7 @@ def constrained[T: (Base, Unrelated)](t: list[T]) -> None:
|
||||
static_assert(not is_subtype_of(Super | Unrelated, T))
|
||||
static_assert(is_subtype_of(Intersection[Base, Unrelated], T))
|
||||
|
||||
def constrained_by_gradual[T: (Base, Any)](t: list[T]) -> None:
|
||||
def constrained_by_gradual[T: (Base, Any)](t: T) -> None:
|
||||
static_assert(is_assignable_to(T, Super))
|
||||
static_assert(is_assignable_to(T, Base))
|
||||
static_assert(not is_assignable_to(T, Sub))
|
||||
@@ -256,7 +261,7 @@ distinct constraints, meaning that there is (still) no guarantee that they will
|
||||
the same type.
|
||||
|
||||
```py
|
||||
def two_constrained[T: (int, str), U: (int, str)](t: list[T], u: list[U]) -> None:
|
||||
def two_constrained[T: (int, str), U: (int, str)](t: T, u: U) -> None:
|
||||
static_assert(not is_assignable_to(T, U))
|
||||
static_assert(not is_assignable_to(U, T))
|
||||
|
||||
@@ -266,7 +271,7 @@ def two_constrained[T: (int, str), U: (int, str)](t: list[T], u: list[U]) -> Non
|
||||
@final
|
||||
class AnotherFinalClass: ...
|
||||
|
||||
def two_final_constrained[T: (FinalClass, AnotherFinalClass), U: (FinalClass, AnotherFinalClass)](t: list[T], u: list[U]) -> None:
|
||||
def two_final_constrained[T: (FinalClass, AnotherFinalClass), U: (FinalClass, AnotherFinalClass)](t: T, u: U) -> None:
|
||||
static_assert(not is_assignable_to(T, U))
|
||||
static_assert(not is_assignable_to(U, T))
|
||||
|
||||
@@ -285,7 +290,7 @@ non-singleton type.
|
||||
```py
|
||||
from knot_extensions import is_singleton, is_single_valued, static_assert
|
||||
|
||||
def unbounded_unconstrained[T](t: list[T]) -> None:
|
||||
def unbounded_unconstrained[T](t: T) -> None:
|
||||
static_assert(not is_singleton(T))
|
||||
static_assert(not is_single_valued(T))
|
||||
```
|
||||
@@ -294,7 +299,7 @@ A bounded typevar is not a singleton, even if its bound is a singleton, since it
|
||||
specialized to `Never`.
|
||||
|
||||
```py
|
||||
def bounded[T: None](t: list[T]) -> None:
|
||||
def bounded[T: None](t: T) -> None:
|
||||
static_assert(not is_singleton(T))
|
||||
static_assert(not is_single_valued(T))
|
||||
```
|
||||
@@ -305,14 +310,14 @@ specialize a constrained typevar to a subtype of a constraint.)
|
||||
```py
|
||||
from typing_extensions import Literal
|
||||
|
||||
def constrained_non_singletons[T: (int, str)](t: list[T]) -> None:
|
||||
def constrained_non_singletons[T: (int, str)](t: T) -> None:
|
||||
static_assert(not is_singleton(T))
|
||||
static_assert(not is_single_valued(T))
|
||||
|
||||
def constrained_singletons[T: (Literal[True], Literal[False])](t: list[T]) -> None:
|
||||
def constrained_singletons[T: (Literal[True], Literal[False])](t: T) -> None:
|
||||
static_assert(is_singleton(T))
|
||||
|
||||
def constrained_single_valued[T: (Literal[True], tuple[()])](t: list[T]) -> None:
|
||||
def constrained_single_valued[T: (Literal[True], tuple[()])](t: T) -> None:
|
||||
static_assert(is_single_valued(T))
|
||||
```
|
||||
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# Scoping rules for type variables
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
Most of these tests come from the [Scoping rules for type variables][scoping] section of the typing
|
||||
spec.
|
||||
|
||||
@@ -132,8 +137,6 @@ from typing import TypeVar, Generic
|
||||
T = TypeVar("T")
|
||||
S = TypeVar("S")
|
||||
|
||||
# TODO: no error
|
||||
# error: [invalid-base]
|
||||
class Legacy(Generic[T]):
|
||||
def m(self, x: T, y: S) -> S:
|
||||
return y
|
||||
@@ -169,13 +172,11 @@ S = TypeVar("S")
|
||||
|
||||
def f(x: T) -> None:
|
||||
x: list[T] = []
|
||||
# TODO: error
|
||||
# TODO: invalid-assignment error
|
||||
y: list[S] = []
|
||||
|
||||
# TODO: no error
|
||||
# error: [invalid-base]
|
||||
class C(Generic[T]):
|
||||
# TODO: error
|
||||
# TODO: error: cannot use S if it's not in the current generic context
|
||||
x: list[S] = []
|
||||
|
||||
# This is not an error, as shown in the previous test
|
||||
@@ -195,11 +196,11 @@ S = TypeVar("S")
|
||||
|
||||
def f[T](x: T) -> None:
|
||||
x: list[T] = []
|
||||
# TODO: error
|
||||
# TODO: invalid assignment error
|
||||
y: list[S] = []
|
||||
|
||||
class C[T]:
|
||||
# TODO: error
|
||||
# TODO: error: cannot use S if it's not in the current generic context
|
||||
x: list[S] = []
|
||||
|
||||
def m1(self, x: S) -> S:
|
||||
@@ -254,8 +255,7 @@ def f[T](x: T, y: T) -> None:
|
||||
class Ok[S]: ...
|
||||
# TODO: error for reuse of typevar
|
||||
class Bad1[T]: ...
|
||||
# TODO: no non-subscriptable error, error for reuse of typevar
|
||||
# error: [non-subscriptable]
|
||||
# TODO: error for reuse of typevar
|
||||
class Bad2(Iterable[T]): ...
|
||||
```
|
||||
|
||||
@@ -268,8 +268,7 @@ class C[T]:
|
||||
class Ok1[S]: ...
|
||||
# TODO: error for reuse of typevar
|
||||
class Bad1[T]: ...
|
||||
# TODO: no non-subscriptable error, error for reuse of typevar
|
||||
# error: [non-subscriptable]
|
||||
# TODO: error for reuse of typevar
|
||||
class Bad2(Iterable[T]): ...
|
||||
```
|
||||
|
||||
@@ -283,7 +282,7 @@ class C[T]:
|
||||
ok1: list[T] = []
|
||||
|
||||
class Bad:
|
||||
# TODO: error
|
||||
# TODO: error: cannot refer to T in nested scope
|
||||
bad: list[T] = []
|
||||
|
||||
class Inner[S]: ...
|
||||
|
||||
@@ -0,0 +1,277 @@
|
||||
# Variance
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
Type variables have a property called _variance_ that affects the subtyping and assignability
|
||||
relations. Much more detail can be found in the [spec]. To summarize, each typevar is either
|
||||
**covariant**, **contravariant**, **invariant**, or **bivariant**. (Note that bivariance is not
|
||||
currently mentioned in the typing spec, but is a fourth case that we must consider.)
|
||||
|
||||
For all of the examples below, we will consider a typevar `T`, a generic class using that typevar
|
||||
`C[T]`, and two types `A` and `B`.
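As a point of reference, here is a hedged illustration using familiar standard-library types (it is not one of the assertions in this file): `Sequence` is covariant, `Callable` is contravariant in its parameter types, and `list` is invariant.

```py
from typing import Callable, Sequence

def covariant_example(bools: Sequence[bool]) -> Sequence[int]:
    # `Sequence` is covariant: a producer of `bool`s is a valid producer of `int`s.
    return bools

def contravariant_example(consume_ints: Callable[[int], None]) -> Callable[[bool], None]:
    # `Callable` is contravariant in its parameters: a consumer of `int`s can also consume `bool`s.
    return consume_ints

def invariant_example(ints: list[int]) -> None:
    # `list` is invariant: a `list[bool]` is not accepted here, because this
    # function may append an `int` that is not a `bool`.
    ints.append(0)
```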
|
||||
|
||||
## Covariance
|
||||
|
||||
With a covariant typevar, subtyping is in "alignment": if `A <: B`, then `C[A] <: C[B]`.
|
||||
|
||||
Types that "produce" data on demand are covariant in their typevar. If you expect a sequence of
|
||||
`int`s, someone can safely provide a sequence of `bool`s, since each `bool` element that you would
|
||||
get from the sequence is a valid `int`.
|
||||
|
||||
```py
|
||||
from knot_extensions import is_assignable_to, is_equivalent_to, is_gradual_equivalent_to, is_subtype_of, static_assert, Unknown
|
||||
from typing import Any
|
||||
|
||||
class A: ...
|
||||
class B(A): ...
|
||||
|
||||
class C[T]:
|
||||
def receive(self) -> T:
|
||||
raise ValueError
|
||||
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_assignable_to(C[B], C[A]))
|
||||
static_assert(not is_assignable_to(C[A], C[B]))
|
||||
static_assert(is_assignable_to(C[A], C[Any]))
|
||||
static_assert(is_assignable_to(C[B], C[Any]))
|
||||
static_assert(is_assignable_to(C[Any], C[A]))
|
||||
static_assert(is_assignable_to(C[Any], C[B]))
|
||||
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_subtype_of(C[B], C[A]))
|
||||
static_assert(not is_subtype_of(C[A], C[B]))
|
||||
static_assert(not is_subtype_of(C[A], C[Any]))
|
||||
static_assert(not is_subtype_of(C[B], C[Any]))
|
||||
static_assert(not is_subtype_of(C[Any], C[A]))
|
||||
static_assert(not is_subtype_of(C[Any], C[B]))
|
||||
|
||||
static_assert(is_equivalent_to(C[A], C[A]))
|
||||
static_assert(is_equivalent_to(C[B], C[B]))
|
||||
static_assert(not is_equivalent_to(C[B], C[A]))
|
||||
static_assert(not is_equivalent_to(C[A], C[B]))
|
||||
static_assert(not is_equivalent_to(C[A], C[Any]))
|
||||
static_assert(not is_equivalent_to(C[B], C[Any]))
|
||||
static_assert(not is_equivalent_to(C[Any], C[A]))
|
||||
static_assert(not is_equivalent_to(C[Any], C[B]))
|
||||
|
||||
static_assert(is_gradual_equivalent_to(C[A], C[A]))
|
||||
static_assert(is_gradual_equivalent_to(C[B], C[B]))
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[Any]))
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[Unknown]))
|
||||
static_assert(not is_gradual_equivalent_to(C[B], C[A]))
|
||||
static_assert(not is_gradual_equivalent_to(C[A], C[B]))
|
||||
static_assert(not is_gradual_equivalent_to(C[A], C[Any]))
|
||||
static_assert(not is_gradual_equivalent_to(C[B], C[Any]))
|
||||
static_assert(not is_gradual_equivalent_to(C[Any], C[A]))
|
||||
static_assert(not is_gradual_equivalent_to(C[Any], C[B]))
|
||||
```
|
||||
|
||||
## Contravariance
|
||||
|
||||
With a contravariant typevar, subtyping is in "opposition": if `A <: B`, then `C[B] <: C[A]`.
|
||||
|
||||
Types that "consume" data are contravariant in their typevar. If you expect a consumer that receives
|
||||
`bool`s, someone can safely provide a consumer that expects to receive `int`s, since each `bool`
|
||||
that you pass into the consumer is a valid `int`.
|
||||
|
||||
```py
|
||||
from knot_extensions import is_assignable_to, is_equivalent_to, is_gradual_equivalent_to, is_subtype_of, static_assert, Unknown
|
||||
from typing import Any
|
||||
|
||||
class A: ...
|
||||
class B(A): ...
|
||||
|
||||
class C[T]:
|
||||
def send(self, value: T): ...
|
||||
|
||||
static_assert(not is_assignable_to(C[B], C[A]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_assignable_to(C[A], C[B]))
|
||||
static_assert(is_assignable_to(C[A], C[Any]))
|
||||
static_assert(is_assignable_to(C[B], C[Any]))
|
||||
static_assert(is_assignable_to(C[Any], C[A]))
|
||||
static_assert(is_assignable_to(C[Any], C[B]))
|
||||
|
||||
static_assert(not is_subtype_of(C[B], C[A]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_subtype_of(C[A], C[B]))
|
||||
static_assert(not is_subtype_of(C[A], C[Any]))
|
||||
static_assert(not is_subtype_of(C[B], C[Any]))
|
||||
static_assert(not is_subtype_of(C[Any], C[A]))
|
||||
static_assert(not is_subtype_of(C[Any], C[B]))
|
||||
|
||||
static_assert(is_equivalent_to(C[A], C[A]))
|
||||
static_assert(is_equivalent_to(C[B], C[B]))
|
||||
static_assert(not is_equivalent_to(C[B], C[A]))
|
||||
static_assert(not is_equivalent_to(C[A], C[B]))
|
||||
static_assert(not is_equivalent_to(C[A], C[Any]))
|
||||
static_assert(not is_equivalent_to(C[B], C[Any]))
|
||||
static_assert(not is_equivalent_to(C[Any], C[A]))
|
||||
static_assert(not is_equivalent_to(C[Any], C[B]))
|
||||
|
||||
static_assert(is_gradual_equivalent_to(C[A], C[A]))
|
||||
static_assert(is_gradual_equivalent_to(C[B], C[B]))
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[Any]))
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[Unknown]))
|
||||
static_assert(not is_gradual_equivalent_to(C[B], C[A]))
|
||||
static_assert(not is_gradual_equivalent_to(C[A], C[B]))
|
||||
static_assert(not is_gradual_equivalent_to(C[A], C[Any]))
|
||||
static_assert(not is_gradual_equivalent_to(C[B], C[Any]))
|
||||
static_assert(not is_gradual_equivalent_to(C[Any], C[A]))
|
||||
static_assert(not is_gradual_equivalent_to(C[Any], C[B]))
|
||||
```
|
||||
|
||||
## Invariance
|
||||
|
||||
With an invariant typevar, _no_ specializations of the generic class are subtypes of each other.
|
||||
|
||||
This often occurs for types that are both producers _and_ consumers, like a mutable `list`.
|
||||
Iterating over the elements in a list would work with a covariant typevar, just like with the
|
||||
"producer" type above. Appending elements to a list would work with a contravariant typevar, just
|
||||
like with the "consumer" type above. However, a typevar cannot be both covariant and contravariant
|
||||
at the same time!
|
||||
|
||||
If you expect a mutable list of `int`s, it's not safe for someone to provide you with a mutable list
|
||||
of `bool`s, since you might try to add an element to the list: if you try to add an `int`, the list
|
||||
would no longer only contain elements that are subtypes of `bool`.
|
||||
|
||||
Conversely, if you expect a mutable list of `bool`s, it's not safe for someone to provide you with a
|
||||
mutable list of `int`s, since you might try to extract elements from the list: you expect every
|
||||
element that you extract to be a subtype of `bool`, but the list can contain any `int`.
|
||||
|
||||
In the end, if you expect a mutable list, you must always be given a list of exactly that type,
|
||||
since we can't know in advance which of the allowed methods you'll want to use.
|
||||
|
||||
```py
|
||||
from knot_extensions import is_assignable_to, is_equivalent_to, is_gradual_equivalent_to, is_subtype_of, static_assert, Unknown
|
||||
from typing import Any
|
||||
|
||||
class A: ...
|
||||
class B(A): ...
|
||||
|
||||
class C[T]:
|
||||
def send(self, value: T): ...
|
||||
def receive(self) -> T:
|
||||
raise ValueError
|
||||
|
||||
static_assert(not is_assignable_to(C[B], C[A]))
|
||||
static_assert(not is_assignable_to(C[A], C[B]))
|
||||
static_assert(is_assignable_to(C[A], C[Any]))
|
||||
static_assert(is_assignable_to(C[B], C[Any]))
|
||||
static_assert(is_assignable_to(C[Any], C[A]))
|
||||
static_assert(is_assignable_to(C[Any], C[B]))
|
||||
|
||||
static_assert(not is_subtype_of(C[B], C[A]))
|
||||
static_assert(not is_subtype_of(C[A], C[B]))
|
||||
static_assert(not is_subtype_of(C[A], C[Any]))
|
||||
static_assert(not is_subtype_of(C[B], C[Any]))
|
||||
static_assert(not is_subtype_of(C[Any], C[A]))
|
||||
static_assert(not is_subtype_of(C[Any], C[B]))
|
||||
|
||||
static_assert(is_equivalent_to(C[A], C[A]))
|
||||
static_assert(is_equivalent_to(C[B], C[B]))
|
||||
static_assert(not is_equivalent_to(C[B], C[A]))
|
||||
static_assert(not is_equivalent_to(C[A], C[B]))
|
||||
static_assert(not is_equivalent_to(C[A], C[Any]))
|
||||
static_assert(not is_equivalent_to(C[B], C[Any]))
|
||||
static_assert(not is_equivalent_to(C[Any], C[A]))
|
||||
static_assert(not is_equivalent_to(C[Any], C[B]))
|
||||
|
||||
static_assert(is_gradual_equivalent_to(C[A], C[A]))
|
||||
static_assert(is_gradual_equivalent_to(C[B], C[B]))
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[Any]))
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[Unknown]))
|
||||
static_assert(not is_gradual_equivalent_to(C[B], C[A]))
|
||||
static_assert(not is_gradual_equivalent_to(C[A], C[B]))
|
||||
static_assert(not is_gradual_equivalent_to(C[A], C[Any]))
|
||||
static_assert(not is_gradual_equivalent_to(C[B], C[Any]))
|
||||
static_assert(not is_gradual_equivalent_to(C[Any], C[A]))
|
||||
static_assert(not is_gradual_equivalent_to(C[Any], C[B]))
|
||||
```
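To make the unsoundness described above concrete, here is a hypothetical sketch (separate from the assertions in this file) of what would go wrong if `list[bool]` were assignable to `list[int]`:

```py
def append_int(ints: list[int]) -> None:
    ints.append(0)

bools: list[bool] = [True, False]

# A type checker rejects this call precisely because `list` is invariant; if the call
# were allowed, `bools` would no longer contain only `bool`s afterwards.
append_int(bools)  # runs fine at runtime, but is a type error
print(bools)  # [True, False, 0]
```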
|
||||
|
||||
## Bivariance
|
||||
|
||||
With a bivariant typevar, _all_ specializations of the generic class are subtypes of (and in fact,
|
||||
equivalent to) each other.
|
||||
|
||||
This is a bit of a pathological case, which really only happens when the class doesn't use the typevar
|
||||
at all. (If it did, it would have to be covariant, contravariant, or invariant, depending on _how_
|
||||
the typevar was used.)
|
||||
|
||||
```py
|
||||
from knot_extensions import is_assignable_to, is_equivalent_to, is_gradual_equivalent_to, is_subtype_of, static_assert, Unknown
|
||||
from typing import Any
|
||||
|
||||
class A: ...
|
||||
class B(A): ...
|
||||
|
||||
class C[T]:
|
||||
pass
|
||||
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_assignable_to(C[B], C[A]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_assignable_to(C[A], C[B]))
|
||||
static_assert(is_assignable_to(C[A], C[Any]))
|
||||
static_assert(is_assignable_to(C[B], C[Any]))
|
||||
static_assert(is_assignable_to(C[Any], C[A]))
|
||||
static_assert(is_assignable_to(C[Any], C[B]))
|
||||
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_subtype_of(C[B], C[A]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_subtype_of(C[A], C[B]))
|
||||
static_assert(not is_subtype_of(C[A], C[Any]))
|
||||
static_assert(not is_subtype_of(C[B], C[Any]))
|
||||
static_assert(not is_subtype_of(C[Any], C[A]))
|
||||
static_assert(not is_subtype_of(C[Any], C[B]))
|
||||
|
||||
static_assert(is_equivalent_to(C[A], C[A]))
|
||||
static_assert(is_equivalent_to(C[B], C[B]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_equivalent_to(C[B], C[A]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_equivalent_to(C[A], C[B]))
|
||||
static_assert(not is_equivalent_to(C[A], C[Any]))
|
||||
static_assert(not is_equivalent_to(C[B], C[Any]))
|
||||
static_assert(not is_equivalent_to(C[Any], C[A]))
|
||||
static_assert(not is_equivalent_to(C[Any], C[B]))
|
||||
|
||||
static_assert(is_gradual_equivalent_to(C[A], C[A]))
|
||||
static_assert(is_gradual_equivalent_to(C[B], C[B]))
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[Any]))
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[Unknown]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_gradual_equivalent_to(C[B], C[A]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_gradual_equivalent_to(C[A], C[B]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_gradual_equivalent_to(C[A], C[Any]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_gradual_equivalent_to(C[B], C[Any]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[A]))
|
||||
# TODO: no error
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_gradual_equivalent_to(C[Any], C[B]))
|
||||
```
|
||||
|
||||
[spec]: https://typing.python.org/en/latest/spec/generics.html#variance
|
||||
@@ -122,6 +122,11 @@ from c import Y # error: [unresolved-import]
|
||||
|
||||
## Esoteric definitions and redefinitions
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
We understand all public symbols defined in an external module as being imported by a `*` import,
|
||||
not just those that are defined in `StmtAssign` nodes and `StmtAnnAssign` nodes. This section
|
||||
provides tests for definitions, and redefinitions, that use more esoteric AST nodes.
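For illustration, here is a hypothetical module of that kind (the module and symbol names are invented for this sketch), in which every public name is bound by something other than a plain assignment:

```py
# exporter.py (hypothetical): every statement below binds a public module-level
# name, yet none of them is a `StmtAssign` or `StmtAnnAssign` node.
import json as import_alias  # import alias

for loop_target in range(3):  # `for` loop target
    pass

with open(__file__) as with_target:  # `with ... as` target
    pass

def function_def() -> None: ...  # function definition

class ClassDef: ...  # class definition
```

A `from exporter import *` in another module would bring `import_alias`, `loop_target`, `with_target`, `function_def`, and `ClassDef` into scope.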
|
||||
|
||||
@@ -842,7 +842,7 @@ def unknown(
|
||||
|
||||
### Mixed dynamic types
|
||||
|
||||
We currently do not simplify mixed dynamic types, but might consider doing so in the future:
|
||||
Gradually-equivalent types can be simplified out of intersections:
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
@@ -854,10 +854,10 @@ def mixed(
|
||||
i3: Intersection[Not[Any], Unknown],
|
||||
i4: Intersection[Not[Any], Not[Unknown]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Any & Unknown
|
||||
reveal_type(i2) # revealed: Any & Unknown
|
||||
reveal_type(i3) # revealed: Any & Unknown
|
||||
reveal_type(i4) # revealed: Any & Unknown
|
||||
reveal_type(i1) # revealed: Any
|
||||
reveal_type(i2) # revealed: Any
|
||||
reveal_type(i3) # revealed: Any
|
||||
reveal_type(i4) # revealed: Any
|
||||
```
|
||||
|
||||
## Invalid
|
||||
|
||||
@@ -216,6 +216,11 @@ reveal_type(A.__class__) # revealed: type[Unknown]
|
||||
|
||||
## PEP 695 generic
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
class M(type): ...
|
||||
class A[T: str](metaclass=M): ...
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
# Narrowing with assert statements
|
||||
|
||||
## `assert` a value `is None` or `is not None`
|
||||
|
||||
```py
|
||||
def _(x: str | None, y: str | None):
|
||||
assert x is not None
|
||||
reveal_type(x) # revealed: str
|
||||
assert y is None
|
||||
reveal_type(y) # revealed: None
|
||||
```
|
||||
|
||||
## `assert` a value is truthy or falsy
|
||||
|
||||
```py
|
||||
def _(x: bool, y: bool):
|
||||
assert x
|
||||
reveal_type(x) # revealed: Literal[True]
|
||||
assert not y
|
||||
reveal_type(y) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
## `assert` with `is` and `==` for literals
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def _(x: Literal[1, 2, 3], y: Literal[1, 2, 3]):
|
||||
assert x is 2
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
assert y == 2
|
||||
reveal_type(y) # revealed: Literal[1, 2, 3]
|
||||
```
|
||||
|
||||
## `assert` with `isinstance`
|
||||
|
||||
```py
|
||||
def _(x: int | str):
|
||||
assert isinstance(x, int)
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
|
||||
## `assert` a value `in` a tuple
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def _(x: Literal[1, 2, 3], y: Literal[1, 2, 3]):
|
||||
assert x in (1, 2)
|
||||
reveal_type(x) # revealed: Literal[1, 2]
|
||||
assert y not in (1, 2)
|
||||
reveal_type(y) # revealed: Literal[3]
|
||||
```
|
||||
@@ -223,3 +223,15 @@ def _(x: str | None, y: str | None):
|
||||
if y is not x:
|
||||
reveal_type(y) # revealed: str | None
|
||||
```
|
||||
|
||||
## Assignment expressions
|
||||
|
||||
```py
|
||||
def f() -> bool:
|
||||
return True
|
||||
|
||||
if x := f():
|
||||
reveal_type(x) # revealed: Literal[True]
|
||||
else:
|
||||
reveal_type(x) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
@@ -47,3 +47,16 @@ def _(flag1: bool, flag2: bool):
|
||||
# TODO should be Never
|
||||
reveal_type(x) # revealed: Literal[1, 2]
|
||||
```
|
||||
|
||||
## Assignment expressions
|
||||
|
||||
```py
|
||||
def f() -> int | str | None: ...
|
||||
|
||||
if isinstance(x := f(), int):
|
||||
reveal_type(x) # revealed: int
|
||||
elif isinstance(x, str):
|
||||
reveal_type(x) # revealed: str & ~int
|
||||
else:
|
||||
reveal_type(x) # revealed: None
|
||||
```
|
||||
|
||||
@@ -78,3 +78,17 @@ def _(x: Literal[1, "a", "b", "c", "d"]):
|
||||
else:
|
||||
reveal_type(x) # revealed: Literal[1, "d"]
|
||||
```
|
||||
|
||||
## Assignment expressions
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f() -> Literal[1, 2, 3]:
|
||||
return 1
|
||||
|
||||
if (x := f()) in (1,):
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
else:
|
||||
reveal_type(x) # revealed: Literal[2, 3]
|
||||
```
|
||||
|
||||
@@ -100,3 +100,16 @@ def _(flag: bool):
|
||||
else:
|
||||
reveal_type(x) # revealed: Literal[42]
|
||||
```
|
||||
|
||||
## Assignment expressions
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f() -> Literal[1, 2] | None: ...
|
||||
|
||||
if (x := f()) is None:
|
||||
reveal_type(x) # revealed: None
|
||||
else:
|
||||
reveal_type(x) # revealed: Literal[1, 2]
|
||||
```
|
||||
|
||||
@@ -82,3 +82,14 @@ def _(x_flag: bool, y_flag: bool):
|
||||
reveal_type(x) # revealed: bool
|
||||
reveal_type(y) # revealed: bool
|
||||
```
|
||||
|
||||
## Assignment expressions
|
||||
|
||||
```py
|
||||
def f() -> int | str | None: ...
|
||||
|
||||
if (x := f()) is not None:
|
||||
reveal_type(x) # revealed: int | str
|
||||
else:
|
||||
reveal_type(x) # revealed: None
|
||||
```
|
||||
|
||||
@@ -89,3 +89,18 @@ def _(flag1: bool, flag2: bool, a: int):
|
||||
else:
|
||||
reveal_type(x) # revealed: Literal[1, 2]
|
||||
```
|
||||
|
||||
## Assignment expressions
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f() -> Literal[1, 2, 3]:
|
||||
return 1
|
||||
|
||||
if (x := f()) != 1:
|
||||
reveal_type(x) # revealed: Literal[2, 3]
|
||||
else:
|
||||
# TODO should be Literal[1]
|
||||
reveal_type(x) # revealed: Literal[1, 2, 3]
|
||||
```
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# Narrowing for `match` statements
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
## Single `match` pattern
|
||||
|
||||
```py
|
||||
@@ -34,8 +39,7 @@ match x:
|
||||
case A():
|
||||
reveal_type(x) # revealed: A
|
||||
case B():
|
||||
# TODO could be `B & ~A`
|
||||
reveal_type(x) # revealed: B
|
||||
reveal_type(x) # revealed: B & ~A
|
||||
|
||||
reveal_type(x) # revealed: object
|
||||
```
|
||||
@@ -83,7 +87,7 @@ match x:
|
||||
case 6.0:
|
||||
reveal_type(x) # revealed: float
|
||||
case 1j:
|
||||
reveal_type(x) # revealed: complex
|
||||
reveal_type(x) # revealed: complex & ~float
|
||||
case b"foo":
|
||||
reveal_type(x) # revealed: Literal[b"foo"]
|
||||
|
||||
@@ -129,11 +133,11 @@ match x:
|
||||
case "foo" | 42 | None:
|
||||
reveal_type(x) # revealed: Literal["foo", 42] | None
|
||||
case "foo" | tuple():
|
||||
reveal_type(x) # revealed: Literal["foo"] | tuple
|
||||
reveal_type(x) # revealed: tuple
|
||||
case True | False:
|
||||
reveal_type(x) # revealed: bool
|
||||
case 3.14 | 2.718 | 1.414:
|
||||
reveal_type(x) # revealed: float
|
||||
reveal_type(x) # revealed: float & ~tuple
|
||||
|
||||
reveal_type(x) # revealed: object
|
||||
```
|
||||
@@ -160,3 +164,49 @@ match x:
|
||||
|
||||
reveal_type(x) # revealed: object
|
||||
```
|
||||
|
||||
## Narrowing due to guard
|
||||
|
||||
```py
|
||||
def get_object() -> object:
|
||||
return object()
|
||||
|
||||
x = get_object()
|
||||
|
||||
reveal_type(x) # revealed: object
|
||||
|
||||
match x:
|
||||
case str() | float() if type(x) is str:
|
||||
reveal_type(x) # revealed: str
|
||||
case "foo" | 42 | None if isinstance(x, int):
|
||||
reveal_type(x) # revealed: Literal[42]
|
||||
case False if x:
|
||||
reveal_type(x) # revealed: Never
|
||||
case "foo" if x := "bar":
|
||||
reveal_type(x) # revealed: Literal["bar"]
|
||||
|
||||
reveal_type(x) # revealed: object
|
||||
```
|
||||
|
||||
## Guard and reveal_type in guard
|
||||
|
||||
```py
|
||||
def get_object() -> object:
|
||||
return object()
|
||||
|
||||
x = get_object()
|
||||
|
||||
reveal_type(x) # revealed: object
|
||||
|
||||
match x:
|
||||
case str() | float() if type(x) is str and reveal_type(x): # revealed: str
|
||||
pass
|
||||
case "foo" | 42 | None if isinstance(x, int) and reveal_type(x): # revealed: Literal[42]
|
||||
pass
|
||||
case False if x and reveal_type(x): # revealed: Never
|
||||
pass
|
||||
case "foo" if (x := "bar") and reveal_type(x): # revealed: Literal["bar"]
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: object
|
||||
```
|
||||
|
||||
@@ -246,7 +246,7 @@ class MetaTruthy(type):
|
||||
|
||||
class MetaDeferred(type):
|
||||
def __bool__(self) -> MetaAmbiguous:
|
||||
return MetaAmbiguous()
|
||||
raise NotImplementedError
|
||||
|
||||
class AmbiguousClass(metaclass=MetaAmbiguous): ...
|
||||
class FalsyClass(metaclass=MetaFalsy): ...
|
||||
|
||||
@@ -111,6 +111,11 @@ def _(x: A | B):
|
||||
|
||||
## Narrowing for generic classes
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.13"
|
||||
```
|
||||
|
||||
Note that `type` returns the runtime class of an object, which does _not_ include specializations in
|
||||
the case of a generic class. (The typevars are erased.) That means we cannot narrow the type to the
|
||||
specialization that we compare with; we must narrow to an unknown specialization of the generic
|
||||
@@ -139,3 +144,13 @@ def _(x: Base):
|
||||
# express a constraint like `Base & ~ProperSubtypeOf[Base]`.
|
||||
reveal_type(x) # revealed: Base
|
||||
```
|
||||
|
||||
## Assignment expressions
|
||||
|
||||
```py
|
||||
def _(x: object):
|
||||
if (y := type(x)) is bool:
|
||||
reveal_type(y) # revealed: Literal[bool]
|
||||
if (type(y := x)) is bool:
|
||||
reveal_type(y) # revealed: bool
|
||||
```
|
||||
|
||||
638
crates/red_knot_python_semantic/resources/mdtest/overloads.md
Normal file
@@ -0,0 +1,638 @@
|
||||
# Overloads
|
||||
|
||||
Reference: <https://typing.python.org/en/latest/spec/overload.html>
|
||||
|
||||
## `typing.overload`
|
||||
|
||||
The definition of `typing.overload` in typeshed is an identity function.
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
|
||||
def foo(x: int) -> int:
|
||||
return x
|
||||
|
||||
reveal_type(foo) # revealed: def foo(x: int) -> int
|
||||
bar = overload(foo)
|
||||
reveal_type(bar) # revealed: def foo(x: int) -> int
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
|
||||
@overload
|
||||
def add() -> None: ...
|
||||
@overload
|
||||
def add(x: int) -> int: ...
|
||||
@overload
|
||||
def add(x: int, y: int) -> int: ...
|
||||
def add(x: int | None = None, y: int | None = None) -> int | None:
|
||||
return (x or 0) + (y or 0)
|
||||
|
||||
reveal_type(add) # revealed: Overload[() -> None, (x: int) -> int, (x: int, y: int) -> int]
|
||||
reveal_type(add()) # revealed: None
|
||||
reveal_type(add(1)) # revealed: int
|
||||
reveal_type(add(1, 2)) # revealed: int
|
||||
```
|
||||
|
||||
## Overriding
|
||||
|
||||
These scenarios are to verify that the overloaded and non-overloaded definitions are correctly
|
||||
overridden by each other.
|
||||
|
||||
An overloaded function is overriding another overloaded function:
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
|
||||
@overload
|
||||
def foo() -> None: ...
|
||||
@overload
|
||||
def foo(x: int) -> int: ...
|
||||
def foo(x: int | None = None) -> int | None:
|
||||
return x
|
||||
|
||||
reveal_type(foo) # revealed: Overload[() -> None, (x: int) -> int]
|
||||
reveal_type(foo()) # revealed: None
|
||||
reveal_type(foo(1)) # revealed: int
|
||||
|
||||
@overload
|
||||
def foo() -> None: ...
|
||||
@overload
|
||||
def foo(x: str) -> str: ...
|
||||
def foo(x: str | None = None) -> str | None:
|
||||
return x
|
||||
|
||||
reveal_type(foo) # revealed: Overload[() -> None, (x: str) -> str]
|
||||
reveal_type(foo()) # revealed: None
|
||||
reveal_type(foo("")) # revealed: str
|
||||
```
|
||||
|
||||
A non-overloaded function is overriding an overloaded function:
|
||||
|
||||
```py
|
||||
def foo(x: int) -> int:
|
||||
return x
|
||||
|
||||
reveal_type(foo) # revealed: def foo(x: int) -> int
|
||||
```
|
||||
|
||||
An overloaded function is overriding a non-overloaded function:
|
||||
|
||||
```py
|
||||
reveal_type(foo) # revealed: def foo(x: int) -> int
|
||||
|
||||
@overload
|
||||
def foo() -> None: ...
|
||||
@overload
|
||||
def foo(x: bytes) -> bytes: ...
|
||||
def foo(x: bytes | None = None) -> bytes | None:
|
||||
return x
|
||||
|
||||
reveal_type(foo) # revealed: Overload[() -> None, (x: bytes) -> bytes]
|
||||
reveal_type(foo()) # revealed: None
|
||||
reveal_type(foo(b"")) # revealed: bytes
|
||||
```
|
||||
|
||||
## Methods
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
|
||||
class Foo1:
|
||||
@overload
|
||||
def method(self) -> None: ...
|
||||
@overload
|
||||
def method(self, x: int) -> int: ...
|
||||
def method(self, x: int | None = None) -> int | None:
|
||||
return x
|
||||
|
||||
foo1 = Foo1()
|
||||
reveal_type(foo1.method) # revealed: Overload[() -> None, (x: int) -> int]
|
||||
reveal_type(foo1.method()) # revealed: None
|
||||
reveal_type(foo1.method(1)) # revealed: int
|
||||
|
||||
class Foo2:
|
||||
@overload
|
||||
def method(self) -> None: ...
|
||||
@overload
|
||||
def method(self, x: str) -> str: ...
|
||||
def method(self, x: str | None = None) -> str | None:
|
||||
return x
|
||||
|
||||
foo2 = Foo2()
|
||||
reveal_type(foo2.method) # revealed: Overload[() -> None, (x: str) -> str]
|
||||
reveal_type(foo2.method()) # revealed: None
|
||||
reveal_type(foo2.method("")) # revealed: str
|
||||
```
|
||||
|
||||
## Constructor
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
|
||||
class Foo:
|
||||
@overload
|
||||
def __init__(self) -> None: ...
|
||||
@overload
|
||||
def __init__(self, x: int) -> None: ...
|
||||
def __init__(self, x: int | None = None) -> None:
|
||||
self.x = x
|
||||
|
||||
foo = Foo()
|
||||
reveal_type(foo) # revealed: Foo
|
||||
reveal_type(foo.x) # revealed: Unknown | int | None
|
||||
|
||||
foo1 = Foo(1)
|
||||
reveal_type(foo1) # revealed: Foo
|
||||
reveal_type(foo1.x) # revealed: Unknown | int | None
|
||||
```
|
||||
|
||||
## Version specific
|
||||
|
||||
Function definitions can vary between multiple Python versions.
|
||||
|
||||
### Overload and non-overload (3.9)
|
||||
|
||||
Here, the same function is overloaded in one version and not in another.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.9"
|
||||
```
|
||||
|
||||
```py
|
||||
import sys
|
||||
from typing import overload
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
def func(x: int) -> int:
|
||||
return x
|
||||
|
||||
elif sys.version_info <= (3, 12):
|
||||
@overload
|
||||
def func() -> None: ...
|
||||
@overload
|
||||
def func(x: int) -> int: ...
|
||||
def func(x: int | None = None) -> int | None:
|
||||
return x
|
||||
|
||||
reveal_type(func) # revealed: def func(x: int) -> int
|
||||
func() # error: [missing-argument]
|
||||
```
|
||||
|
||||
### Overload and non-overload (3.10)
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
```py
|
||||
import sys
|
||||
from typing import overload
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
def func(x: int) -> int:
|
||||
return x
|
||||
|
||||
elif sys.version_info <= (3, 12):
|
||||
@overload
|
||||
def func() -> None: ...
|
||||
@overload
|
||||
def func(x: int) -> int: ...
|
||||
def func(x: int | None = None) -> int | None:
|
||||
return x
|
||||
|
||||
reveal_type(func) # revealed: Overload[() -> None, (x: int) -> int]
|
||||
reveal_type(func()) # revealed: None
|
||||
reveal_type(func(1)) # revealed: int
|
||||
```
|
||||
|
||||
### Some overloads are version specific (3.9)
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.9"
|
||||
```
|
||||
|
||||
`overloaded.pyi`:
|
||||
|
||||
```pyi
|
||||
import sys
|
||||
from typing import overload
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
@overload
|
||||
def func() -> None: ...
|
||||
|
||||
@overload
|
||||
def func(x: int) -> int: ...
|
||||
@overload
|
||||
def func(x: str) -> str: ...
|
||||
```
|
||||
|
||||
`main.py`:
|
||||
|
||||
```py
|
||||
from overloaded import func
|
||||
|
||||
reveal_type(func) # revealed: Overload[(x: int) -> int, (x: str) -> str]
|
||||
func() # error: [no-matching-overload]
|
||||
reveal_type(func(1)) # revealed: int
|
||||
reveal_type(func("")) # revealed: str
|
||||
```
|
||||
|
||||
### Some overloads are version specific (3.10)
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
`overloaded.pyi`:
|
||||
|
||||
```pyi
|
||||
import sys
|
||||
from typing import overload
|
||||
|
||||
@overload
|
||||
def func() -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
@overload
|
||||
def func(x: int) -> int: ...
|
||||
|
||||
@overload
|
||||
def func(x: str) -> str: ...
|
||||
```
|
||||
|
||||
`main.py`:
|
||||
|
||||
```py
|
||||
from overloaded import func
|
||||
|
||||
reveal_type(func) # revealed: Overload[() -> None, (x: int) -> int, (x: str) -> str]
|
||||
reveal_type(func()) # revealed: None
|
||||
reveal_type(func(1)) # revealed: int
|
||||
reveal_type(func("")) # revealed: str
|
||||
```
|
||||
|
||||
## Generic
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
For an overloaded generic function, it's not necessary for all overloads to be generic.
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
|
||||
@overload
|
||||
def func() -> None: ...
|
||||
@overload
|
||||
def func[T](x: T) -> T: ...
|
||||
def func[T](x: T | None = None) -> T | None:
|
||||
return x
|
||||
|
||||
reveal_type(func) # revealed: Overload[() -> None, (x: T) -> T]
|
||||
reveal_type(func()) # revealed: None
|
||||
reveal_type(func(1)) # revealed: Literal[1]
|
||||
reveal_type(func("")) # revealed: Literal[""]
|
||||
```
|
||||
|
||||
## Invalid
|
||||
|
||||
### At least two overloads
|
||||
|
||||
At least two `@overload`-decorated definitions must be present.
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
|
||||
# TODO: error
|
||||
@overload
|
||||
def func(x: int) -> int: ...
|
||||
def func(x: int | str) -> int | str:
|
||||
return x
|
||||
```
|
||||
|
||||
### Overload without an implementation
|
||||
|
||||
#### Regular modules
|
||||
|
||||
In regular modules, a series of `@overload`-decorated definitions must be followed by exactly one
|
||||
non-`@overload`-decorated definition (for the same function/method).
|
||||
|
||||
```py
|
||||
from typing import overload
|
||||
|
||||
# TODO: error because implementation does not exist
|
||||
@overload
|
||||
def func(x: int) -> int: ...
|
||||
@overload
|
||||
def func(x: str) -> str: ...
|
||||
|
||||
class Foo:
|
||||
# TODO: error because implementation does not exist
|
||||
@overload
|
||||
def method(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method(self, x: str) -> str: ...
|
||||
```
|
||||
|
||||
#### Stub files
|
||||
|
||||
Overload definitions within stub files are exempt from this check.
|
||||
|
||||
```pyi
|
||||
from typing import overload
|
||||
|
||||
@overload
|
||||
def func(x: int) -> int: ...
|
||||
@overload
|
||||
def func(x: str) -> str: ...
|
||||
```
|
||||
|
||||
#### Protocols
|
||||
|
||||
Overload definitions within protocols are exempt from this check.
|
||||
|
||||
```py
|
||||
from typing import Protocol, overload
|
||||
|
||||
class Foo(Protocol):
|
||||
@overload
|
||||
def f(self, x: int) -> int: ...
|
||||
@overload
|
||||
def f(self, x: str) -> str: ...
|
||||
```
|
||||
|
||||
#### Abstract methods
|
||||
|
||||
Overload definitions within abstract base classes are exempt from this check.
|
||||
|
||||
```py
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import overload
|
||||
|
||||
class AbstractFoo(ABC):
|
||||
@overload
|
||||
@abstractmethod
|
||||
def f(self, x: int) -> int: ...
|
||||
@overload
|
||||
@abstractmethod
|
||||
def f(self, x: str) -> str: ...
|
||||
```
|
||||
|
||||
Using the `@abstractmethod` decorator requires that the class's metaclass is `ABCMeta` or is derived
|
||||
from it.
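As a runtime aside (a hedged sketch, not one of the test blocks): without `ABCMeta`, `@abstractmethod` is inert at instantiation time, which is why a type checker has to flag this pattern itself. The test block below exercises this case.

```py
from abc import ABC, abstractmethod

class WithABCMeta(ABC):
    @abstractmethod
    def f(self) -> int: ...

class WithoutABCMeta:  # metaclass is plain `type`, not `ABCMeta`
    @abstractmethod
    def f(self) -> int: ...

try:
    WithABCMeta()  # blocked: can't instantiate a class with abstract methods
except TypeError:
    pass

WithoutABCMeta()  # no runtime error: the decorator has no effect without `ABCMeta`
```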
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
# TODO: Error because implementation does not exist
|
||||
@overload
|
||||
@abstractmethod
|
||||
def f(self, x: int) -> int: ...
|
||||
@overload
|
||||
@abstractmethod
|
||||
def f(self, x: str) -> str: ...
|
||||
```
|
||||
|
||||
And, the `@abstractmethod` decorator must be present on all the `@overload`-ed methods.
|
||||
|
||||
```py
|
||||
class PartialFoo1(ABC):
|
||||
@overload
|
||||
@abstractmethod
|
||||
def f(self, x: int) -> int: ...
|
||||
@overload
|
||||
def f(self, x: str) -> str: ...
|
||||
|
||||
class PartialFoo(ABC):
|
||||
@overload
|
||||
def f(self, x: int) -> int: ...
|
||||
@overload
|
||||
@abstractmethod
|
||||
def f(self, x: str) -> str: ...
|
||||
```
|
||||
|
||||
### Inconsistent decorators
|
||||
|
||||
#### `@staticmethod` / `@classmethod`
|
||||
|
||||
If one overload signature is decorated with `@staticmethod` or `@classmethod`, all overload
|
||||
signatures must be similarly decorated. The implementation, if present, must also have a consistent
|
||||
decorator.
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import overload
|
||||
|
||||
class CheckStaticMethod:
|
||||
# TODO: error because `@staticmethod` does not exist on all overloads
|
||||
@overload
|
||||
def method1(x: int) -> int: ...
|
||||
@overload
|
||||
def method1(x: str) -> str: ...
|
||||
@staticmethod
|
||||
def method1(x: int | str) -> int | str:
|
||||
return x
|
||||
# TODO: error because `@staticmethod` does not exist on all overloads
|
||||
@overload
|
||||
def method2(x: int) -> int: ...
|
||||
@overload
|
||||
@staticmethod
|
||||
def method2(x: str) -> str: ...
|
||||
@staticmethod
|
||||
def method2(x: int | str) -> int | str:
|
||||
return x
|
||||
# TODO: error because `@staticmethod` does not exist on the implementation
|
||||
@overload
|
||||
@staticmethod
|
||||
def method3(x: int) -> int: ...
|
||||
@overload
|
||||
@staticmethod
|
||||
def method3(x: str) -> str: ...
|
||||
def method3(x: int | str) -> int | str:
|
||||
return x
|
||||
|
||||
@overload
|
||||
@staticmethod
|
||||
def method4(x: int) -> int: ...
|
||||
@overload
|
||||
@staticmethod
|
||||
def method4(x: str) -> str: ...
|
||||
@staticmethod
|
||||
def method4(x: int | str) -> int | str:
|
||||
return x
|
||||
|
||||
class CheckClassMethod:
|
||||
def __init__(self, x: int) -> None:
|
||||
self.x = x
|
||||
# TODO: error because `@classmethod` does not exist on all overloads
|
||||
@overload
|
||||
@classmethod
|
||||
def try_from1(cls, x: int) -> CheckClassMethod: ...
|
||||
@overload
|
||||
def try_from1(cls, x: str) -> None: ...
|
||||
@classmethod
|
||||
def try_from1(cls, x: int | str) -> CheckClassMethod | None:
|
||||
if isinstance(x, int):
|
||||
return cls(x)
|
||||
return None
|
||||
# TODO: error because `@classmethod` does not exist on all overloads
|
||||
@overload
|
||||
def try_from2(cls, x: int) -> CheckClassMethod: ...
|
||||
@overload
|
||||
@classmethod
|
||||
def try_from2(cls, x: str) -> None: ...
|
||||
@classmethod
|
||||
def try_from2(cls, x: int | str) -> CheckClassMethod | None:
|
||||
if isinstance(x, int):
|
||||
return cls(x)
|
||||
return None
|
||||
# TODO: error because `@classmethod` does not exist on the implementation
|
||||
@overload
|
||||
@classmethod
|
||||
def try_from3(cls, x: int) -> CheckClassMethod: ...
|
||||
@overload
|
||||
@classmethod
|
||||
def try_from3(cls, x: str) -> None: ...
|
||||
def try_from3(cls, x: int | str) -> CheckClassMethod | None:
|
||||
if isinstance(x, int):
|
||||
return cls(x)
|
||||
return None
|
||||
|
||||
@overload
|
||||
@classmethod
|
||||
def try_from4(cls, x: int) -> CheckClassMethod: ...
|
||||
@overload
|
||||
@classmethod
|
||||
def try_from4(cls, x: str) -> None: ...
|
||||
@classmethod
|
||||
def try_from4(cls, x: int | str) -> CheckClassMethod | None:
|
||||
if isinstance(x, int):
|
||||
return cls(x)
|
||||
return None
|
||||
```
|
||||
|
||||
#### `@final` / `@override`
|
||||
|
||||
If a `@final` or `@override` decorator is supplied for a function with overloads, the decorator
|
||||
should be applied only to the overload implementation if it is present.
|
||||
|
||||
```py
|
||||
from typing_extensions import final, overload, override
|
||||
|
||||
class Foo:
|
||||
@overload
|
||||
def method1(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method1(self, x: str) -> str: ...
|
||||
@final
|
||||
def method1(self, x: int | str) -> int | str:
|
||||
return x
|
||||
# TODO: error because `@final` is not on the implementation
|
||||
@overload
|
||||
@final
|
||||
def method2(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method2(self, x: str) -> str: ...
|
||||
def method2(self, x: int | str) -> int | str:
|
||||
return x
|
||||
# TODO: error because `@final` is not on the implementation
|
||||
@overload
|
||||
def method3(self, x: int) -> int: ...
|
||||
@overload
|
||||
@final
|
||||
def method3(self, x: str) -> str: ...
|
||||
def method3(self, x: int | str) -> int | str:
|
||||
return x
|
||||
|
||||
class Base:
|
||||
@overload
|
||||
def method(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method(self, x: str) -> str: ...
|
||||
def method(self, x: int | str) -> int | str:
|
||||
return x
|
||||
|
||||
class Sub1(Base):
|
||||
@overload
|
||||
def method(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method(self, x: str) -> str: ...
|
||||
@override
|
||||
def method(self, x: int | str) -> int | str:
|
||||
return x
|
||||
|
||||
class Sub2(Base):
|
||||
# TODO: error because `@override` is not on the implementation
|
||||
@overload
|
||||
def method(self, x: int) -> int: ...
|
||||
@overload
|
||||
@override
|
||||
def method(self, x: str) -> str: ...
|
||||
def method(self, x: int | str) -> int | str:
|
||||
return x
|
||||
|
||||
class Sub3(Base):
|
||||
# TODO: error because `@override` is not on the implementation
|
||||
@overload
|
||||
@override
|
||||
def method(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method(self, x: str) -> str: ...
|
||||
def method(self, x: int | str) -> int | str:
|
||||
return x
|
||||
```
|
||||
|
||||
#### `@final` / `@override` in stub files
|
||||
|
||||
If an overload implementation isn’t present (for example, in a stub file), the `@final` or
|
||||
`@override` decorator should be applied only to the first overload.
|
||||
|
||||
```pyi
|
||||
from typing_extensions import final, overload, override
|
||||
|
||||
class Foo:
|
||||
@overload
|
||||
@final
|
||||
def method1(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method1(self, x: str) -> str: ...
|
||||
|
||||
# TODO: error because `@final` is not on the first overload
|
||||
@overload
|
||||
def method2(self, x: int) -> int: ...
|
||||
@final
|
||||
@overload
|
||||
def method2(self, x: str) -> str: ...
|
||||
|
||||
class Base:
|
||||
@overload
|
||||
def method(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method(self, x: str) -> str: ...
|
||||
|
||||
class Sub1(Base):
|
||||
@overload
|
||||
@override
|
||||
def method(self, x: int) -> int: ...
|
||||
@overload
|
||||
def method(self, x: str) -> str: ...
|
||||
|
||||
class Sub2(Base):
|
||||
# TODO: error because `@override` is not on the first overload
|
||||
@overload
|
||||
def method(self, x: int) -> int: ...
|
||||
@overload
|
||||
@override
|
||||
def method(self, x: str) -> str: ...
|
||||
```
|
||||
@@ -15,6 +15,11 @@ types, on the other hand: a type which is defined by its properties and behaviou
|
||||
|
||||
## Defining a protocol
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
A protocol is defined by inheriting from the `Protocol` class, which is annotated as an instance of
|
||||
`_SpecialForm` in typeshed's stubs.
|
||||
|
||||
@@ -23,8 +28,7 @@ from typing import Protocol
|
||||
|
||||
class MyProtocol(Protocol): ...
|
||||
|
||||
# TODO: at runtime this is `(<class '__main__.MyProtocol'>, <class 'typing.Protocol'>, <class 'typing.Generic'>, <class 'object'>)`
|
||||
reveal_type(MyProtocol.__mro__) # revealed: tuple[Literal[MyProtocol], @Todo(protocol), Literal[object]]
|
||||
reveal_type(MyProtocol.__mro__) # revealed: tuple[Literal[MyProtocol], typing.Protocol, typing.Generic, Literal[object]]
|
||||
```
|
||||
|
||||
Just like for any other class base, it is an error for `Protocol` to appear multiple times in a
|
||||
@@ -36,27 +40,63 @@ class Foo(Protocol, Protocol): ... # error: [inconsistent-mro]
|
||||
reveal_type(Foo.__mro__) # revealed: tuple[Literal[Foo], Unknown, Literal[object]]
|
||||
```
|
||||
|
||||
Protocols can also be generic, either by including `Generic[]` in the bases list, subscripting
|
||||
`Protocol` directly in the bases list, using PEP-695 type parameters, or some combination of the
|
||||
above:
|
||||
|
||||
```py
|
||||
from typing import TypeVar, Generic
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
class Bar0(Protocol[T]):
|
||||
x: T
|
||||
|
||||
class Bar1(Protocol[T], Generic[T]):
|
||||
x: T
|
||||
|
||||
class Bar2[T](Protocol):
|
||||
x: T
|
||||
|
||||
class Bar3[T](Protocol[T]):
|
||||
x: T
|
||||
```
|
||||
|
||||
It's an error to include both bare `Protocol` and subscripted `Protocol[]` in the bases list
|
||||
simultaneously:
|
||||
|
||||
```py
|
||||
# TODO: should emit a `[duplicate-bases]` error here:
|
||||
class DuplicateBases(Protocol, Protocol[T]):
|
||||
x: T
|
||||
|
||||
# TODO: should not have `Generic` multiple times and `Protocol` multiple times
|
||||
# revealed: tuple[Literal[DuplicateBases], typing.Protocol, typing.Generic, @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
|
||||
reveal_type(DuplicateBases.__mro__)
|
||||
```
|
||||
|
||||
The introspection helper `typing(_extensions).is_protocol` can be used to verify whether a class is
|
||||
a protocol class or not:
|
||||
|
||||
```py
|
||||
from typing_extensions import is_protocol
|
||||
|
||||
# TODO: should be `Literal[True]`
|
||||
reveal_type(is_protocol(MyProtocol)) # revealed: bool
|
||||
reveal_type(is_protocol(MyProtocol)) # revealed: Literal[True]
|
||||
reveal_type(is_protocol(Bar0)) # revealed: Literal[True]
|
||||
reveal_type(is_protocol(Bar1)) # revealed: Literal[True]
|
||||
reveal_type(is_protocol(Bar2)) # revealed: Literal[True]
|
||||
reveal_type(is_protocol(Bar3)) # revealed: Literal[True]
|
||||
|
||||
class NotAProtocol: ...
|
||||
|
||||
# TODO: should be `Literal[False]`
|
||||
reveal_type(is_protocol(NotAProtocol)) # revealed: bool
|
||||
reveal_type(is_protocol(NotAProtocol)) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
A type checker should follow the typeshed stubs if a non-class is passed in, and typeshed's stubs
|
||||
indicate that the argument passed in must be an instance of `type`. `Literal[False]` should be
|
||||
inferred as the return type, however.
|
||||
indicate that the argument passed in must be an instance of `type`.
|
||||
|
||||
```py
|
||||
# TODO: the diagnostic is correct, but should infer `Literal[False]`
|
||||
# We could also reasonably infer `Literal[False]` here, but it probably doesn't matter that much:
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(is_protocol("not a class")) # revealed: bool
|
||||
```
|
||||
@@ -67,12 +107,10 @@ it is not sufficient for it to have `Protocol` in its MRO.
|
||||
```py
|
||||
class SubclassOfMyProtocol(MyProtocol): ...
|
||||
|
||||
# TODO
|
||||
# revealed: tuple[Literal[SubclassOfMyProtocol], Literal[MyProtocol], @Todo(protocol), Literal[object]]
|
||||
# revealed: tuple[Literal[SubclassOfMyProtocol], Literal[MyProtocol], typing.Protocol, typing.Generic, Literal[object]]
|
||||
reveal_type(SubclassOfMyProtocol.__mro__)
|
||||
|
||||
# TODO: should be `Literal[False]`
|
||||
reveal_type(is_protocol(SubclassOfMyProtocol)) # revealed: bool
|
||||
reveal_type(is_protocol(SubclassOfMyProtocol)) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
A protocol class may inherit from other protocols, however, as long as it re-inherits from
|
||||
@@ -81,38 +119,33 @@ A protocol class may inherit from other protocols, however, as long as it re-inh
|
||||
```py
|
||||
class SubProtocol(MyProtocol, Protocol): ...
|
||||
|
||||
# TODO: should be `Literal[True]`
|
||||
reveal_type(is_protocol(SubProtocol)) # revealed: bool
|
||||
reveal_type(is_protocol(SubProtocol)) # revealed: Literal[True]
|
||||
|
||||
class OtherProtocol(Protocol):
|
||||
some_attribute: str
|
||||
|
||||
class ComplexInheritance(SubProtocol, OtherProtocol, Protocol): ...
|
||||
|
||||
# TODO
|
||||
# revealed: tuple[Literal[ComplexInheritance], Literal[SubProtocol], Literal[MyProtocol], Literal[OtherProtocol], @Todo(protocol), Literal[object]]
|
||||
# revealed: tuple[Literal[ComplexInheritance], Literal[SubProtocol], Literal[MyProtocol], Literal[OtherProtocol], typing.Protocol, typing.Generic, Literal[object]]
|
||||
reveal_type(ComplexInheritance.__mro__)
|
||||
|
||||
# TODO: should be `Literal[True]`
|
||||
reveal_type(is_protocol(ComplexInheritance)) # revealed: bool
|
||||
reveal_type(is_protocol(ComplexInheritance)) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
If `Protocol` is present in the bases tuple, all other bases in the tuple must be protocol classes,
|
||||
or `TypeError` is raised at runtime when the class is created.
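As a runtime illustration of that `TypeError` (a hypothetical sketch, separate from the checker diagnostics asserted in the block that follows):

```py
from typing import Protocol

class PlainClass: ...

try:
    class BadProtocol(PlainClass, Protocol): ...
except TypeError as exc:
    # CPython raises something like:
    # "Protocols can only inherit from other protocols, got <class 'PlainClass'>"
    print(type(exc).__name__)  # TypeError
```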
|
||||
|
||||
```py
|
||||
# TODO: should emit `[invalid-protocol]`
|
||||
# error: [invalid-protocol] "Protocol class `Invalid` cannot inherit from non-protocol class `NotAProtocol`"
|
||||
class Invalid(NotAProtocol, Protocol): ...
|
||||
|
||||
# TODO
|
||||
# revealed: tuple[Literal[Invalid], Literal[NotAProtocol], @Todo(protocol), Literal[object]]
|
||||
# revealed: tuple[Literal[Invalid], Literal[NotAProtocol], typing.Protocol, typing.Generic, Literal[object]]
|
||||
reveal_type(Invalid.__mro__)
|
||||
|
||||
# TODO: should emit an `[invalid-protocol]` error
|
||||
# error: [invalid-protocol] "Protocol class `AlsoInvalid` cannot inherit from non-protocol class `NotAProtocol`"
|
||||
class AlsoInvalid(MyProtocol, OtherProtocol, NotAProtocol, Protocol): ...
|
||||
|
||||
# TODO
|
||||
# revealed: tuple[Literal[AlsoInvalid], Literal[MyProtocol], Literal[OtherProtocol], Literal[NotAProtocol], @Todo(protocol), Literal[object]]
|
||||
# revealed: tuple[Literal[AlsoInvalid], Literal[MyProtocol], Literal[OtherProtocol], Literal[NotAProtocol], typing.Protocol, typing.Generic, Literal[object]]
|
||||
reveal_type(AlsoInvalid.__mro__)
|
||||
```
|
||||
|
||||
@@ -130,12 +163,12 @@ T = TypeVar("T")
|
||||
# type checkers.
|
||||
class Fine(Protocol, object): ...
|
||||
|
||||
# TODO
|
||||
reveal_type(Fine.__mro__) # revealed: tuple[Literal[Fine], @Todo(protocol), Literal[object]]
|
||||
reveal_type(Fine.__mro__) # revealed: tuple[Literal[Fine], typing.Protocol, typing.Generic, Literal[object]]
|
||||
|
||||
# TODO: should not error
|
||||
class StillFine(Protocol, Generic[T], object): ... # error: [invalid-base]
|
||||
class StillFine(Protocol, Generic[T], object): ...
|
||||
class EvenThis[T](Protocol, object): ...
|
||||
class OrThis(Protocol[T], Generic[T]): ...
|
||||
class AndThis(Protocol[T], Generic[T], object): ...
|
||||
```
|
||||
|
||||
And multiple inheritance from a mix of protocol and non-protocol classes is fine as long as
|
||||
@@ -144,8 +177,7 @@ And multiple inheritance from a mix of protocol and non-protocol classes is fine
|
||||
```py
|
||||
class FineAndDandy(MyProtocol, OtherProtocol, NotAProtocol): ...
|
||||
|
||||
# TODO
|
||||
# revealed: tuple[Literal[FineAndDandy], Literal[MyProtocol], Literal[OtherProtocol], @Todo(protocol), Literal[NotAProtocol], Literal[object]]
|
||||
# revealed: tuple[Literal[FineAndDandy], Literal[MyProtocol], Literal[OtherProtocol], typing.Protocol, typing.Generic, Literal[NotAProtocol], Literal[object]]
|
||||
reveal_type(FineAndDandy.__mro__)
|
||||
```
|
||||
|
||||
@@ -153,8 +185,7 @@ But if `Protocol` is not present in the bases list, the resulting class doesn't
|
||||
class anymore:
|
||||
|
||||
```py
|
||||
# TODO: should reveal `Literal[False]`
|
||||
reveal_type(is_protocol(FineAndDandy)) # revealed: bool
|
||||
reveal_type(is_protocol(FineAndDandy)) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
A class does not *have* to inherit from a protocol class in order for it to be considered a subtype
|
||||
@@ -222,18 +253,21 @@ reveal_type(issubclass(MyProtocol, Protocol)) # revealed: bool
|
||||
```py
|
||||
import typing
|
||||
import typing_extensions
|
||||
from knot_extensions import static_assert, is_equivalent_to
|
||||
from knot_extensions import static_assert, is_equivalent_to, TypeOf
|
||||
|
||||
static_assert(is_equivalent_to(TypeOf[typing.Protocol], TypeOf[typing_extensions.Protocol]))
|
||||
static_assert(is_equivalent_to(int | str | TypeOf[typing.Protocol], TypeOf[typing_extensions.Protocol] | str | int))
|
||||
|
||||
class Foo(typing.Protocol):
|
||||
x: int
|
||||
|
||||
# TODO: should not error
|
||||
class Bar(typing_extensions.Protocol): # error: [invalid-base]
|
||||
class Bar(typing_extensions.Protocol):
|
||||
x: int
|
||||
|
||||
# TODO: these should pass
|
||||
static_assert(typing_extensions.is_protocol(Foo)) # error: [static-assert-error]
|
||||
static_assert(typing_extensions.is_protocol(Bar)) # error: [static-assert-error]
|
||||
static_assert(typing_extensions.is_protocol(Foo))
|
||||
static_assert(typing_extensions.is_protocol(Bar))
|
||||
|
||||
# TODO: should pass
|
||||
static_assert(is_equivalent_to(Foo, Bar)) # error: [static-assert-error]
|
||||
```
|
||||
|
||||
@@ -244,14 +278,14 @@ The same goes for `typing.runtime_checkable` and `typing_extensions.runtime_chec
|
||||
class RuntimeCheckableFoo(typing.Protocol):
|
||||
x: int
|
||||
|
||||
# TODO: should not error
|
||||
@typing.runtime_checkable
|
||||
class RuntimeCheckableBar(typing_extensions.Protocol): # error: [invalid-base]
|
||||
class RuntimeCheckableBar(typing_extensions.Protocol):
|
||||
x: int
|
||||
|
||||
# TODO: these should pass
|
||||
static_assert(typing_extensions.is_protocol(RuntimeCheckableFoo)) # error: [static-assert-error]
|
||||
static_assert(typing_extensions.is_protocol(RuntimeCheckableBar)) # error: [static-assert-error]
|
||||
static_assert(typing_extensions.is_protocol(RuntimeCheckableFoo))
|
||||
static_assert(typing_extensions.is_protocol(RuntimeCheckableBar))
|
||||
|
||||
# TODO: should pass
|
||||
static_assert(is_equivalent_to(RuntimeCheckableFoo, RuntimeCheckableBar)) # error: [static-assert-error]
|
||||
|
||||
# These should not error because the protocols are decorated with `@runtime_checkable`
|
||||
@@ -259,6 +293,15 @@ isinstance(object(), RuntimeCheckableFoo)
|
||||
isinstance(object(), RuntimeCheckableBar)
|
||||
```
|
||||
|
||||
However, we understand that they are not necessarily the same symbol at the same memory address at
|
||||
runtime -- these reveal `bool` rather than `Literal[True]` or `Literal[False]`, which would be
|
||||
incorrect:
|
||||
|
||||
```py
|
||||
reveal_type(typing.Protocol is typing_extensions.Protocol) # revealed: bool
|
||||
reveal_type(typing.Protocol is not typing_extensions.Protocol) # revealed: bool
|
||||
```
|
||||
|
||||
## Calls to protocol classes
|
||||
|
||||
Neither `Protocol`, nor any protocol class, can be directly instantiated:
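A rough sketch of the corresponding runtime behavior (assuming CPython's `typing` implementation; the names and messages below are illustrative):

```py
from typing import Protocol

class HasX(Protocol):
    x: int

try:
    Protocol()  # `Protocol` itself can only be used as a base class
except TypeError as exc:
    print(exc)  # e.g. "Type Protocol cannot be instantiated; it can only be used as a base class"

try:
    HasX()  # protocol classes cannot be instantiated directly either
except TypeError as exc:
    print(exc)  # e.g. "Protocols cannot be instantiated"
```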
@@ -304,8 +347,7 @@ via `typing_extensions`.
|
||||
```py
|
||||
from typing_extensions import Protocol, get_protocol_members
|
||||
|
||||
# TODO: should not error
|
||||
class Foo(Protocol): # error: [invalid-base]
|
||||
class Foo(Protocol):
|
||||
x: int
|
||||
|
||||
@property
|
||||
@@ -330,7 +372,7 @@ class Foo(Protocol): # error: [invalid-base]
|
||||
# `tuple[Literal["x"], Literal["y"], Literal["z"], Literal["method_member"]]`
|
||||
#
|
||||
# `frozenset[Literal["x", "y", "z", "method_member"]]`
|
||||
reveal_type(get_protocol_members(Foo)) # revealed: @Todo(generics)
|
||||
reveal_type(get_protocol_members(Foo)) # revealed: @Todo(specialized non-generic class)
|
||||
```
|
||||
|
||||
Calling `get_protocol_members` on a non-protocol class raises an error at runtime:
|
||||
@@ -339,15 +381,14 @@ Calling `get_protocol_members` on a non-protocol class raises an error at runtim
|
||||
class NotAProtocol: ...
|
||||
|
||||
# TODO: should emit `[invalid-protocol]` error, should reveal `Unknown`
|
||||
reveal_type(get_protocol_members(NotAProtocol)) # revealed: @Todo(generics)
|
||||
reveal_type(get_protocol_members(NotAProtocol)) # revealed: @Todo(specialized non-generic class)
|
||||
```
|
||||
|
||||
Certain special attributes and methods are not considered protocol members at runtime, and should
|
||||
not be considered protocol members by type checkers either:
|
||||
|
||||
```py
|
||||
# TODO: should not error
|
||||
class Lumberjack(Protocol): # error: [invalid-base]
|
||||
class Lumberjack(Protocol):
|
||||
__slots__ = ()
|
||||
__match_args__ = ()
|
||||
x: int
|
||||
@@ -359,7 +400,7 @@ class Lumberjack(Protocol): # error: [invalid-base]
|
||||
self.x = x
|
||||
|
||||
# TODO: `tuple[Literal["x"]]` or `frozenset[Literal["x"]]`
|
||||
reveal_type(get_protocol_members(Lumberjack)) # revealed: @Todo(generics)
|
||||
reveal_type(get_protocol_members(Lumberjack)) # revealed: @Todo(specialized non-generic class)
|
||||
```
|
||||
|
||||
## Subtyping of protocols with attribute members
|
||||
|
||||
@@ -14,7 +14,7 @@ reveal_type(__package__) # revealed: str | None
|
||||
reveal_type(__doc__) # revealed: str | None
|
||||
reveal_type(__spec__) # revealed: ModuleSpec | None
|
||||
|
||||
reveal_type(__path__) # revealed: @Todo(generics)
|
||||
reveal_type(__path__) # revealed: @Todo(specialized non-generic class)
|
||||
|
||||
class X:
|
||||
reveal_type(__name__) # revealed: str
|
||||
@@ -59,7 +59,7 @@ reveal_type(typing.__eq__) # revealed: bound method ModuleType.__eq__(value: ob
|
||||
reveal_type(typing.__class__) # revealed: Literal[ModuleType]
|
||||
|
||||
# TODO: needs support for generics; should be `dict[str, Any]`:
|
||||
reveal_type(typing.__dict__) # revealed: @Todo(generics)
|
||||
reveal_type(typing.__dict__) # revealed: @Todo(specialized non-generic class)
|
||||
```
|
||||
|
||||
Typeshed includes a fake `__getattr__` method in the stub for `types.ModuleType` to help out with
|
||||
@@ -92,8 +92,8 @@ import foo
|
||||
from foo import __dict__ as foo_dict
|
||||
|
||||
# TODO: needs support for generics; should be `dict[str, Any]` for both of these:
|
||||
reveal_type(foo.__dict__) # revealed: @Todo(generics)
|
||||
reveal_type(foo_dict) # revealed: @Todo(generics)
|
||||
reveal_type(foo.__dict__) # revealed: @Todo(specialized non-generic class)
|
||||
reveal_type(foo_dict) # revealed: @Todo(specialized non-generic class)
|
||||
```
|
||||
|
||||
## Conditionally global or `ModuleType` attribute
|
||||
|
||||
@@ -0,0 +1,32 @@
|
||||
---
|
||||
source: crates/red_knot_test/src/lib.rs
|
||||
expression: snapshot
|
||||
---
|
||||
---
|
||||
mdtest name: version_related_syntax_errors.md - Version-related syntax error diagnostics - `match` statement - Before 3.10
|
||||
mdtest path: crates/red_knot_python_semantic/resources/mdtest/diagnostics/version_related_syntax_errors.md
|
||||
---
|
||||
|
||||
# Python source files
|
||||
|
||||
## mdtest_snippet.py
|
||||
|
||||
```
|
||||
1 | match 2: # error: 1 [invalid-syntax] "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
2 | case 1:
|
||||
3 | print("it's one")
|
||||
```
|
||||
|
||||
# Diagnostics
|
||||
|
||||
```
|
||||
error: invalid-syntax
|
||||
--> /src/mdtest_snippet.py:1:1
|
||||
|
|
||||
1 | match 2: # error: 1 [invalid-syntax] "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
|
||||
| ^^^^^ Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
|
||||
2 | case 1:
|
||||
3 | print("it's one")
|
||||
|
|
||||
|
||||
```
|
||||
@@ -996,6 +996,11 @@ reveal_type(x) # revealed: Literal[1]
|
||||
|
||||
## `match` statements
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
### Single-valued types, always true
|
||||
|
||||
```py
|
||||
@@ -1118,6 +1123,7 @@ def _(s: str):
|
||||
```toml
|
||||
[environment]
|
||||
python-platform = "darwin"
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
```py
|
||||
|
||||
@@ -2,6 +2,11 @@
|
||||
|
||||
## Cyclical class definition
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
In type stubs, classes can reference themselves in their base class definitions. For example, in
|
||||
`typeshed`, we have `class str(Sequence[str]): ...`.
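A minimal stub exercising that pattern (a sketch mirroring the inline example above, not the actual test input):

```pyi
from typing import Sequence

class str(Sequence[str]): ...
```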
@@ -24,8 +24,7 @@ reveal_type(y) # revealed: Unknown
|
||||
|
||||
def _(n: int):
|
||||
a = b"abcde"[n]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
reveal_type(a) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(a) # revealed: int
|
||||
```
|
||||
|
||||
## Slices
|
||||
@@ -43,11 +42,9 @@ b[::0] # error: [zero-stepsize-in-slice]
|
||||
|
||||
def _(m: int, n: int):
|
||||
byte_slice1 = b[m:n]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
reveal_type(byte_slice1) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(byte_slice1) # revealed: bytes
|
||||
|
||||
def _(s: bytes) -> bytes:
|
||||
byte_slice2 = s[0:5]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
return reveal_type(byte_slice2) # revealed: @Todo(return type of overloaded function)
|
||||
return reveal_type(byte_slice2) # revealed: bytes
|
||||
```
|
||||
|
||||
@@ -12,13 +12,13 @@ x = [1, 2, 3]
|
||||
reveal_type(x) # revealed: list
|
||||
|
||||
# TODO reveal int
|
||||
reveal_type(x[0]) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(x[0]) # revealed: Unknown
|
||||
|
||||
# TODO reveal list
|
||||
reveal_type(x[0:1]) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(x[0:1]) # revealed: @Todo(specialized non-generic class)
|
||||
|
||||
# TODO error
|
||||
reveal_type(x["a"]) # revealed: @Todo(return type of overloaded function)
|
||||
# error: [call-non-callable]
|
||||
reveal_type(x["a"]) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Assignments within list assignment
|
||||
@@ -29,9 +29,11 @@ In assignment, we might also have a named assignment. This should also get type
|
||||
x = [1, 2, 3]
|
||||
x[0 if (y := 2) else 1] = 5
|
||||
|
||||
# TODO error? (indeterminite index type)
|
||||
# TODO: better error than "method `__getitem__` not callable on type `list`"
|
||||
# error: [call-non-callable]
|
||||
x["a" if (y := 2) else 1] = 6
|
||||
|
||||
# TODO error (can't index via string)
|
||||
# TODO: better error than "method `__getitem__` not callable on type `list`"
|
||||
# error: [call-non-callable]
|
||||
x["a" if (y := 2) else "b"] = 6
|
||||
```
|
||||
|
||||
@@ -21,8 +21,7 @@ reveal_type(b) # revealed: Unknown
|
||||
|
||||
def _(n: int):
|
||||
a = "abcde"[n]
|
||||
# TODO: Support overloads... Should be `str`
|
||||
reveal_type(a) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
```
|
||||
|
||||
## Slices
|
||||
@@ -75,12 +74,10 @@ def _(m: int, n: int, s2: str):
|
||||
s[::0] # error: [zero-stepsize-in-slice]
|
||||
|
||||
substring1 = s[m:n]
|
||||
# TODO: Support overloads... Should be `LiteralString`
|
||||
reveal_type(substring1) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(substring1) # revealed: LiteralString
|
||||
|
||||
substring2 = s2[0:5]
|
||||
# TODO: Support overloads... Should be `str`
|
||||
reveal_type(substring2) # revealed: @Todo(return type of overloaded function)
|
||||
reveal_type(substring2) # revealed: str
|
||||
```
|
||||
|
||||
## Unsupported slice types
|
||||
|
||||
@@ -69,8 +69,8 @@ def _(m: int, n: int):
|
||||
t[::0] # error: [zero-stepsize-in-slice]
|
||||
|
||||
tuple_slice = t[m:n]
|
||||
# TODO: Support overloads... Should be `tuple[Literal[1, 'a', b"b"] | None, ...]`
|
||||
reveal_type(tuple_slice) # revealed: @Todo(return type of overloaded function)
|
||||
# TODO: Should be `tuple[Literal[1, 'a', b"b"] | None, ...]`
|
||||
reveal_type(tuple_slice) # revealed: @Todo(full tuple[...] support)
|
||||
```
|
||||
|
||||
## Inheritance
|
||||
@@ -117,6 +117,7 @@ from typing import Tuple
|
||||
|
||||
class C(Tuple): ...
|
||||
|
||||
# revealed: tuple[Literal[C], Literal[tuple], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
||||
# TODO: generic protocols
|
||||
# revealed: tuple[Literal[C], Literal[tuple], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(`Protocol[]` subscript), @Todo(`Generic[]` subscript), Literal[object]]
|
||||
reveal_type(C.__mro__)
|
||||
```
|
||||
|
||||
@@ -42,6 +42,11 @@ def static_truthiness(not_one: Not[Literal[1]]) -> None:
|
||||
|
||||
### Intersection
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not, is_subtype_of, static_assert
|
||||
from typing_extensions import Literal, Never
|
||||
|
||||
@@ -522,4 +522,35 @@ c: Callable[[Any], str] = A().f
|
||||
c: Callable[[Any], str] = A().g
|
||||
```
|
||||
|
||||
### Overloads
|
||||
|
||||
`overloaded.pyi`:
|
||||
|
||||
```pyi
|
||||
from typing import Any, overload
|
||||
|
||||
@overload
|
||||
def overloaded() -> None: ...
|
||||
@overload
|
||||
def overloaded(a: str) -> str: ...
|
||||
@overload
|
||||
def overloaded(a: str, b: Any) -> str: ...
|
||||
```
|
||||
|
||||
```py
|
||||
from overloaded import overloaded
|
||||
from typing import Any, Callable
|
||||
|
||||
c: Callable[[], None] = overloaded
|
||||
c: Callable[[str], str] = overloaded
|
||||
c: Callable[[str, Any], Any] = overloaded
|
||||
c: Callable[..., str] = overloaded
|
||||
|
||||
# error: [invalid-assignment]
|
||||
c: Callable[..., int] = overloaded
|
||||
|
||||
# error: [invalid-assignment]
|
||||
c: Callable[[int], str] = overloaded
|
||||
```
|
||||
|
||||
[typing documentation]: https://typing.python.org/en/latest/spec/concepts.html#the-assignable-to-or-consistent-subtyping-relation
|
||||
|
||||
@@ -246,6 +246,11 @@ static_assert(is_disjoint_from(Intersection[LiteralString, Not[AlwaysFalsy]], No
|
||||
|
||||
### Class, module and function literals
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from types import ModuleType, FunctionType
|
||||
from knot_extensions import TypeOf, is_disjoint_from, static_assert
|
||||
|
||||
@@ -254,4 +254,8 @@ from knot_extensions import is_equivalent_to, static_assert
|
||||
static_assert(is_equivalent_to(int | Callable[[int | str], None], Callable[[str | int], None] | int))
|
||||
```
|
||||
|
||||
### Overloads
|
||||
|
||||
TODO
|
||||
|
||||
[the equivalence relation]: https://typing.python.org/en/latest/spec/glossary.html#term-equivalent
|
||||
|
||||
@@ -99,3 +99,34 @@ static_assert(not is_fully_static(CallableTypeOf[f13]))
|
||||
static_assert(not is_fully_static(CallableTypeOf[f14]))
|
||||
static_assert(not is_fully_static(CallableTypeOf[f15]))
|
||||
```
|
||||
|
||||
## Overloads
|
||||
|
||||
`overloaded.pyi`:
|
||||
|
||||
```pyi
|
||||
from typing import Any, overload
|
||||
|
||||
@overload
|
||||
def gradual() -> None: ...
|
||||
@overload
|
||||
def gradual(a: Any) -> None: ...
|
||||
|
||||
@overload
|
||||
def static() -> None: ...
|
||||
@overload
|
||||
def static(x: int) -> None: ...
|
||||
@overload
|
||||
def static(x: str) -> str: ...
|
||||
```
|
||||
|
||||
```py
|
||||
from knot_extensions import CallableTypeOf, TypeOf, is_fully_static, static_assert
|
||||
from overloaded import gradual, static
|
||||
|
||||
static_assert(is_fully_static(TypeOf[gradual]))
|
||||
static_assert(is_fully_static(TypeOf[static]))
|
||||
|
||||
static_assert(not is_fully_static(CallableTypeOf[gradual]))
|
||||
static_assert(is_fully_static(CallableTypeOf[static]))
|
||||
```
|
||||
|
||||
@@ -47,10 +47,7 @@ static_assert(is_gradual_equivalent_to(Intersection[str | int, Not[type[Any]]],
|
||||
static_assert(not is_gradual_equivalent_to(str | int, int | str | bytes))
|
||||
static_assert(not is_gradual_equivalent_to(str | int | bytes, int | str | dict))
|
||||
|
||||
# TODO: No errors
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_gradual_equivalent_to(Unknown, Unknown | Any))
|
||||
# error: [static-assert-error]
|
||||
static_assert(is_gradual_equivalent_to(Unknown, Intersection[Unknown, Any]))
|
||||
```
|
||||
|
||||
@@ -157,4 +154,6 @@ def f6(a, /): ...
|
||||
static_assert(not is_gradual_equivalent_to(CallableTypeOf[f1], CallableTypeOf[f6]))
|
||||
```
|
||||
|
||||
TODO: Overloads
|
||||
|
||||
[materializations]: https://typing.python.org/en/latest/spec/glossary.html#term-materialize
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# Subtype relation
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
The `is_subtype_of(S, T)` relation below checks if type `S` is a subtype of type `T`.
|
||||
|
||||
A fully static type `S` is a subtype of another fully static type `T` iff the set of values
|
||||
@@ -1148,5 +1153,187 @@ static_assert(not is_subtype_of(TypeOf[A.g], Callable[[], int]))
|
||||
static_assert(is_subtype_of(TypeOf[A.f], Callable[[A, int], int]))
|
||||
```
|
||||
|
||||
### Overloads
|
||||
|
||||
#### Subtype overloaded
|
||||
|
||||
For `B <: A`, if a callable `B` is overloaded with two or more signatures, it is a subtype of
|
||||
callable `A` if _at least one_ of the overloaded signatures in `B` is a subtype of `A`.
|
||||
|
||||
`overloaded.pyi`:
|
||||
|
||||
```pyi
|
||||
from typing import overload
|
||||
|
||||
class A: ...
|
||||
class B: ...
|
||||
class C: ...
|
||||
|
||||
@overload
|
||||
def overloaded(x: A) -> None: ...
|
||||
@overload
|
||||
def overloaded(x: B) -> None: ...
|
||||
```
|
||||
|
||||
```py
|
||||
from knot_extensions import CallableTypeOf, is_subtype_of, static_assert
|
||||
from overloaded import A, B, C, overloaded
|
||||
|
||||
def accepts_a(x: A) -> None: ...
|
||||
def accepts_b(x: B) -> None: ...
|
||||
def accepts_c(x: C) -> None: ...
|
||||
|
||||
static_assert(is_subtype_of(CallableTypeOf[overloaded], CallableTypeOf[accepts_a]))
|
||||
static_assert(is_subtype_of(CallableTypeOf[overloaded], CallableTypeOf[accepts_b]))
|
||||
static_assert(not is_subtype_of(CallableTypeOf[overloaded], CallableTypeOf[accepts_c]))
|
||||
```
|
||||
|
||||
#### Supertype overloaded
|
||||
|
||||
For `B <: A`, if a callable `A` is overloaded with two or more signatures, callable `B` is a subtype
|
||||
of `A` if `B` is a subtype of _all_ of the signatures in `A`.
|
||||
|
||||
`overloaded.pyi`:
|
||||
|
||||
```pyi
|
||||
from typing import overload
|
||||
|
||||
class Grandparent: ...
|
||||
class Parent(Grandparent): ...
|
||||
class Child(Parent): ...
|
||||
|
||||
@overload
|
||||
def overloaded(a: Child) -> None: ...
|
||||
@overload
|
||||
def overloaded(a: Parent) -> None: ...
|
||||
@overload
|
||||
def overloaded(a: Grandparent) -> None: ...
|
||||
```
|
||||
|
||||
```py
|
||||
from knot_extensions import CallableTypeOf, is_subtype_of, static_assert
|
||||
from overloaded import Grandparent, Parent, Child, overloaded
|
||||
|
||||
# This is a subtype of only the first overload
|
||||
def child(a: Child) -> None: ...
|
||||
|
||||
# This is a subtype of the first and second overload
|
||||
def parent(a: Parent) -> None: ...
|
||||
|
||||
# This is the only function that's a subtype of all overloads
|
||||
def grandparent(a: Grandparent) -> None: ...
|
||||
|
||||
static_assert(not is_subtype_of(CallableTypeOf[child], CallableTypeOf[overloaded]))
|
||||
static_assert(not is_subtype_of(CallableTypeOf[parent], CallableTypeOf[overloaded]))
|
||||
static_assert(is_subtype_of(CallableTypeOf[grandparent], CallableTypeOf[overloaded]))
|
||||
```
|
||||
|
||||
#### Both overloads
|
||||
|
||||
For `B <: A`, if both `A` and `B` are callables overloaded with two or more signatures, then
`B` is a subtype of `A` if, for _every_ signature in `A`, there is _at least one_ signature in
`B` that is a subtype of it.
|
||||
|
||||
`overloaded.pyi`:
|
||||
|
||||
```pyi
|
||||
from typing import overload
|
||||
|
||||
class Grandparent: ...
|
||||
class Parent(Grandparent): ...
|
||||
class Child(Parent): ...
|
||||
class Other: ...
|
||||
|
||||
@overload
|
||||
def pg(a: Parent) -> None: ...
|
||||
@overload
|
||||
def pg(a: Grandparent) -> None: ...
|
||||
|
||||
@overload
|
||||
def po(a: Parent) -> None: ...
|
||||
@overload
|
||||
def po(a: Other) -> None: ...
|
||||
|
||||
@overload
|
||||
def go(a: Grandparent) -> None: ...
|
||||
@overload
|
||||
def go(a: Other) -> None: ...
|
||||
|
||||
@overload
|
||||
def cpg(a: Child) -> None: ...
|
||||
@overload
|
||||
def cpg(a: Parent) -> None: ...
|
||||
@overload
|
||||
def cpg(a: Grandparent) -> None: ...
|
||||
|
||||
@overload
|
||||
def empty_go() -> Child: ...
|
||||
@overload
|
||||
def empty_go(a: Grandparent) -> None: ...
|
||||
@overload
|
||||
def empty_go(a: Other) -> Other: ...
|
||||
|
||||
@overload
|
||||
def empty_cp() -> Parent: ...
|
||||
@overload
|
||||
def empty_cp(a: Child) -> None: ...
|
||||
@overload
|
||||
def empty_cp(a: Parent) -> None: ...
|
||||
```
|
||||
|
||||
```py
|
||||
from knot_extensions import CallableTypeOf, is_subtype_of, static_assert
|
||||
from overloaded import pg, po, go, cpg, empty_go, empty_cp
|
||||
|
||||
static_assert(is_subtype_of(CallableTypeOf[pg], CallableTypeOf[cpg]))
|
||||
static_assert(is_subtype_of(CallableTypeOf[cpg], CallableTypeOf[pg]))
|
||||
|
||||
static_assert(not is_subtype_of(CallableTypeOf[po], CallableTypeOf[pg]))
|
||||
static_assert(not is_subtype_of(CallableTypeOf[pg], CallableTypeOf[po]))
|
||||
|
||||
static_assert(is_subtype_of(CallableTypeOf[go], CallableTypeOf[pg]))
|
||||
static_assert(not is_subtype_of(CallableTypeOf[pg], CallableTypeOf[go]))
|
||||
|
||||
# Overload 1 in `empty_go` is a subtype of overload 1 in `empty_cp`
|
||||
# Overload 2 in `empty_go` is a subtype of overload 2 in `empty_cp`
|
||||
# Overload 2 in `empty_go` is a subtype of overload 3 in `empty_cp`
|
||||
#
|
||||
# All overloads in `empty_cp` have a subtype in `empty_go`
|
||||
static_assert(is_subtype_of(CallableTypeOf[empty_go], CallableTypeOf[empty_cp]))
|
||||
|
||||
static_assert(not is_subtype_of(CallableTypeOf[empty_cp], CallableTypeOf[empty_go]))
|
||||
```
|
||||
|
||||
#### Order of overloads
|
||||
|
||||
Order of overloads is irrelevant for subtyping.
|
||||
|
||||
`overloaded.pyi`:
|
||||
|
||||
```pyi
|
||||
from typing import overload
|
||||
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
@overload
|
||||
def overload_ab(x: A) -> None: ...
|
||||
@overload
|
||||
def overload_ab(x: B) -> None: ...
|
||||
|
||||
@overload
|
||||
def overload_ba(x: B) -> None: ...
|
||||
@overload
|
||||
def overload_ba(x: A) -> None: ...
|
||||
```
|
||||
|
||||
```py
|
||||
from overloaded import overload_ab, overload_ba
|
||||
from knot_extensions import CallableTypeOf, is_subtype_of, static_assert
|
||||
|
||||
static_assert(is_subtype_of(CallableTypeOf[overload_ab], CallableTypeOf[overload_ba]))
|
||||
static_assert(is_subtype_of(CallableTypeOf[overload_ba], CallableTypeOf[overload_ab]))
|
||||
```
|
||||
|
||||
[special case for float and complex]: https://typing.python.org/en/latest/spec/special-types.html#special-cases-for-float-and-complex
|
||||
[typing documentation]: https://typing.python.org/en/latest/spec/concepts.html#subtype-supertype-and-type-equivalence
|
||||
|
||||
@@ -166,3 +166,51 @@ def _(
|
||||
reveal_type(i1) # revealed: P & Q
|
||||
reveal_type(i2) # revealed: P & Q
|
||||
```
|
||||
|
||||
## Unions of literals with `AlwaysTruthy` and `AlwaysFalsy`
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
from knot_extensions import AlwaysTruthy, AlwaysFalsy
|
||||
|
||||
type strings = Literal["foo", ""]
|
||||
type ints = Literal[0, 1]
|
||||
type bytes = Literal[b"foo", b""]
|
||||
|
||||
def _(
|
||||
strings_or_truthy: strings | AlwaysTruthy,
|
||||
truthy_or_strings: AlwaysTruthy | strings,
|
||||
strings_or_falsy: strings | AlwaysFalsy,
|
||||
falsy_or_strings: AlwaysFalsy | strings,
|
||||
ints_or_truthy: ints | AlwaysTruthy,
|
||||
truthy_or_ints: AlwaysTruthy | ints,
|
||||
ints_or_falsy: ints | AlwaysFalsy,
|
||||
falsy_or_ints: AlwaysFalsy | ints,
|
||||
bytes_or_truthy: bytes | AlwaysTruthy,
|
||||
truthy_or_bytes: AlwaysTruthy | bytes,
|
||||
bytes_or_falsy: bytes | AlwaysFalsy,
|
||||
falsy_or_bytes: AlwaysFalsy | bytes,
|
||||
):
|
||||
reveal_type(strings_or_truthy) # revealed: Literal[""] | AlwaysTruthy
|
||||
reveal_type(truthy_or_strings) # revealed: AlwaysTruthy | Literal[""]
|
||||
|
||||
reveal_type(strings_or_falsy) # revealed: Literal["foo"] | AlwaysFalsy
|
||||
reveal_type(falsy_or_strings) # revealed: AlwaysFalsy | Literal["foo"]
|
||||
|
||||
reveal_type(ints_or_truthy) # revealed: Literal[0] | AlwaysTruthy
|
||||
reveal_type(truthy_or_ints) # revealed: AlwaysTruthy | Literal[0]
|
||||
|
||||
reveal_type(ints_or_falsy) # revealed: Literal[1] | AlwaysFalsy
|
||||
reveal_type(falsy_or_ints) # revealed: AlwaysFalsy | Literal[1]
|
||||
|
||||
reveal_type(bytes_or_truthy) # revealed: Literal[b""] | AlwaysTruthy
|
||||
reveal_type(truthy_or_bytes) # revealed: AlwaysTruthy | Literal[b""]
|
||||
|
||||
reveal_type(bytes_or_falsy) # revealed: Literal[b"foo"] | AlwaysFalsy
|
||||
reveal_type(falsy_or_bytes) # revealed: AlwaysFalsy | Literal[b"foo"]
|
||||
```
|
||||
|
||||
@@ -708,3 +708,95 @@ with ContextManager() as (a, b, c):
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Comprehension
|
||||
|
||||
Unpacking in a comprehension.
|
||||
|
||||
### Same types
|
||||
|
||||
```py
|
||||
def _(arg: tuple[tuple[int, int], tuple[int, int]]):
|
||||
# revealed: tuple[int, int]
|
||||
[reveal_type((a, b)) for a, b in arg]
|
||||
```
|
||||
|
||||
### Mixed types (1)
|
||||
|
||||
```py
|
||||
def _(arg: tuple[tuple[int, int], tuple[int, str]]):
|
||||
# revealed: tuple[int, int | str]
|
||||
[reveal_type((a, b)) for a, b in arg]
|
||||
```
|
||||
|
||||
### Mixed types (2)
|
||||
|
||||
```py
|
||||
def _(arg: tuple[tuple[int, str], tuple[str, int]]):
|
||||
# revealed: tuple[int | str, str | int]
|
||||
[reveal_type((a, b)) for a, b in arg]
|
||||
```
|
||||
|
||||
### Mixed types (3)
|
||||
|
||||
```py
|
||||
def _(arg: tuple[tuple[int, int, int], tuple[int, str, bytes], tuple[int, int, str]]):
|
||||
# revealed: tuple[int, int | str, int | bytes | str]
|
||||
[reveal_type((a, b, c)) for a, b, c in arg]
|
||||
```
|
||||
|
||||
### Same literal values
|
||||
|
||||
```py
|
||||
# revealed: tuple[Literal[1, 3], Literal[2, 4]]
|
||||
[reveal_type((a, b)) for a, b in ((1, 2), (3, 4))]
|
||||
```
|
||||
|
||||
### Mixed literal values (1)
|
||||
|
||||
```py
|
||||
# revealed: tuple[Literal[1, "a"], Literal[2, "b"]]
|
||||
[reveal_type((a, b)) for a, b in ((1, 2), ("a", "b"))]
|
||||
```
|
||||
|
||||
### Mixed literals values (2)
|
||||
|
||||
```py
|
||||
# error: "Object of type `Literal[1]` is not iterable"
|
||||
# error: "Object of type `Literal[2]` is not iterable"
|
||||
# error: "Object of type `Literal[4]` is not iterable"
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 2, got 1)"
|
||||
# revealed: tuple[Unknown | Literal[3, 5], Unknown | Literal["a", "b"]]
|
||||
[reveal_type((a, b)) for a, b in (1, 2, (3, "a"), 4, (5, "b"), "c")]
|
||||
```
|
||||
|
||||
### Custom iterator (1)
|
||||
|
||||
```py
|
||||
class Iterator:
|
||||
def __next__(self) -> tuple[int, int]:
|
||||
return (1, 2)
|
||||
|
||||
class Iterable:
|
||||
def __iter__(self) -> Iterator:
|
||||
return Iterator()
|
||||
|
||||
# revealed: tuple[int, int]
|
||||
[reveal_type((a, b)) for a, b in Iterable()]
|
||||
```
|
||||
|
||||
### Custom iterator (2)
|
||||
|
||||
```py
|
||||
class Iterator:
|
||||
def __next__(self) -> bytes:
|
||||
return b""
|
||||
|
||||
class Iterable:
|
||||
def __iter__(self) -> Iterator:
|
||||
return Iterator()
|
||||
|
||||
def _(arg: tuple[tuple[int, str], Iterable]):
|
||||
# revealed: tuple[int | bytes, str | bytes]
|
||||
[reveal_type((a, b)) for a, b in arg]
|
||||
```
|
||||
|
||||
23
crates/red_knot_python_semantic/resources/primer/good.txt
Normal file
23
crates/red_knot_python_semantic/resources/primer/good.txt
Normal file
@@ -0,0 +1,23 @@
arrow
async-utils
bidict
black
dacite
git-revise
isort
itsdangerous
mypy_primer
packaging
paroxython
porcupine
psycopg
pybind11
pyinstrument
pyp
python-chess
python-htmlgen
rich
scrapy
typeshed-stats
werkzeug
zipp
@@ -98,6 +98,10 @@ pub(crate) mod tests {
|
||||
fn files(&self) -> &Files {
|
||||
&self.files
|
||||
}
|
||||
|
||||
fn python_version(&self) -> PythonVersion {
|
||||
Program::get(self).python_version(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDb> for TestDb {
|
||||
|
||||
@@ -497,11 +497,10 @@ impl FusedIterator for ChildrenIter<'_> {}
|
||||
mod tests {
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::DbWithWritableSystem as _;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::{self as ast};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::db::tests::{TestDb, TestDbBuilder};
|
||||
use crate::semantic_index::ast_ids::{HasScopedUseId, ScopedUseId};
|
||||
use crate::semantic_index::definition::{Definition, DefinitionKind};
|
||||
use crate::semantic_index::symbol::{
|
||||
@@ -528,11 +527,15 @@ mod tests {
|
||||
file: File,
|
||||
}
|
||||
|
||||
fn test_case(content: impl AsRef<str>) -> TestCase {
|
||||
let mut db = TestDb::new();
|
||||
db.write_file("test.py", content).unwrap();
|
||||
fn test_case(content: &str) -> TestCase {
|
||||
const FILENAME: &str = "test.py";
|
||||
|
||||
let file = system_path_to_file(&db, "test.py").unwrap();
|
||||
let db = TestDbBuilder::new()
|
||||
.with_file(FILENAME, content)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let file = system_path_to_file(&db, FILENAME).unwrap();
|
||||
|
||||
TestCase { db, file }
|
||||
}
|
||||
@@ -937,7 +940,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
|
||||
panic!("expected generator definition")
|
||||
};
|
||||
let target = comprehension.target();
|
||||
let name = target.id().as_str();
|
||||
let name = target.as_name_expr().unwrap().id().as_str();
|
||||
|
||||
assert_eq!(name, "x");
|
||||
assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
|
||||
|
||||
@@ -58,6 +58,13 @@ pub trait HasScopedUseId {
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId;
|
||||
}
|
||||
|
||||
impl HasScopedUseId for ast::Identifier {
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId {
|
||||
let ast_ids = ast_ids(db, scope);
|
||||
ast_ids.use_id(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasScopedUseId for ast::ExprName {
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId {
|
||||
let expression_ref = ExprRef::from(self);
|
||||
@@ -157,7 +164,7 @@ impl AstIdsBuilder {
|
||||
}
|
||||
|
||||
/// Adds `expr` to the use ids map and returns its id.
|
||||
pub(super) fn record_use(&mut self, expr: &ast::Expr) -> ScopedUseId {
|
||||
pub(super) fn record_use(&mut self, expr: impl Into<ExpressionNodeKey>) -> ScopedUseId {
|
||||
let use_id = self.uses_map.len().into();
|
||||
|
||||
self.uses_map.insert(expr.into(), use_id);
|
||||
@@ -196,4 +203,10 @@ pub(crate) mod node_key {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::Identifier> for ExpressionNodeKey {
|
||||
fn from(value: &ast::Identifier) -> Self {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,11 +18,12 @@ use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
|
||||
use crate::semantic_index::ast_ids::AstIdsBuilder;
|
||||
use crate::semantic_index::definition::{
|
||||
AnnotatedAssignmentDefinitionKind, AnnotatedAssignmentDefinitionNodeRef,
|
||||
AssignmentDefinitionKind, AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef,
|
||||
Definition, DefinitionCategory, DefinitionKind, DefinitionNodeKey, DefinitionNodeRef,
|
||||
Definitions, ExceptHandlerDefinitionNodeRef, ForStmtDefinitionKind, ForStmtDefinitionNodeRef,
|
||||
ImportDefinitionNodeRef, ImportFromDefinitionNodeRef, MatchPatternDefinitionNodeRef,
|
||||
StarImportDefinitionNodeRef, TargetKind, WithItemDefinitionKind, WithItemDefinitionNodeRef,
|
||||
AssignmentDefinitionKind, AssignmentDefinitionNodeRef, ComprehensionDefinitionKind,
|
||||
ComprehensionDefinitionNodeRef, Definition, DefinitionCategory, DefinitionKind,
|
||||
DefinitionNodeKey, DefinitionNodeRef, Definitions, ExceptHandlerDefinitionNodeRef,
|
||||
ForStmtDefinitionKind, ForStmtDefinitionNodeRef, ImportDefinitionNodeRef,
|
||||
ImportFromDefinitionNodeRef, MatchPatternDefinitionNodeRef, StarImportDefinitionNodeRef,
|
||||
TargetKind, WithItemDefinitionKind, WithItemDefinitionNodeRef,
|
||||
};
|
||||
use crate::semantic_index::expression::{Expression, ExpressionKind};
|
||||
use crate::semantic_index::predicate::{
|
||||
@@ -354,15 +355,14 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
self.current_use_def_map_mut().merge(state);
|
||||
}
|
||||
|
||||
/// Return a 2-element tuple, where the first element is the [`ScopedSymbolId`] of the
|
||||
/// symbol added, and the second element is a boolean indicating whether the symbol was *newly*
|
||||
/// added or not
|
||||
fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
|
||||
/// Add a symbol to the symbol table and the use-def map.
|
||||
/// Return the [`ScopedSymbolId`] that uniquely identifies the symbol in both.
|
||||
fn add_symbol(&mut self, name: Name) -> ScopedSymbolId {
|
||||
let (symbol_id, added) = self.current_symbol_table().add_symbol(name);
|
||||
if added {
|
||||
self.current_use_def_map_mut().add_symbol(symbol_id);
|
||||
}
|
||||
(symbol_id, added)
|
||||
symbol_id
|
||||
}
|
||||
|
||||
fn add_attribute(&mut self, name: Name) -> ScopedSymbolId {
|
||||
@@ -569,7 +569,6 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
}
|
||||
|
||||
/// Records a visibility constraint by applying it to all live bindings and declarations.
|
||||
#[must_use = "A visibility constraint must always be negated after it is added"]
|
||||
fn record_visibility_constraint(
|
||||
&mut self,
|
||||
predicate: Predicate<'db>,
|
||||
@@ -797,7 +796,7 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
..
|
||||
}) => (name, &None, default),
|
||||
};
|
||||
let (symbol, _) = self.add_symbol(name.id.clone());
|
||||
let symbol = self.add_symbol(name.id.clone());
|
||||
// TODO create Definition for PEP 695 typevars
|
||||
// note that the "bound" on the typevar is a totally different thing than whether
|
||||
// or not a name is "bound" by a typevar declaration; the latter is always true.
|
||||
@@ -851,31 +850,35 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
|
||||
// The `iter` of the first generator is evaluated in the outer scope, while all subsequent
|
||||
// nodes are evaluated in the inner scope.
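// For example, in `[x for x in data if x]`, `data` is resolved in the
// enclosing scope, while the target `x` and the `if x` condition are
// resolved in the comprehension's own scope.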
|
||||
self.add_standalone_expression(&generator.iter);
|
||||
let value = self.add_standalone_expression(&generator.iter);
|
||||
self.visit_expr(&generator.iter);
|
||||
self.push_scope(scope);
|
||||
|
||||
self.push_assignment(CurrentAssignment::Comprehension {
|
||||
node: generator,
|
||||
first: true,
|
||||
});
|
||||
self.visit_expr(&generator.target);
|
||||
self.pop_assignment();
|
||||
self.add_unpackable_assignment(
|
||||
&Unpackable::Comprehension {
|
||||
node: generator,
|
||||
first: true,
|
||||
},
|
||||
&generator.target,
|
||||
value,
|
||||
);
|
||||
|
||||
for expr in &generator.ifs {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
|
||||
for generator in generators_iter {
|
||||
self.add_standalone_expression(&generator.iter);
|
||||
let value = self.add_standalone_expression(&generator.iter);
|
||||
self.visit_expr(&generator.iter);
|
||||
|
||||
self.push_assignment(CurrentAssignment::Comprehension {
|
||||
node: generator,
|
||||
first: false,
|
||||
});
|
||||
self.visit_expr(&generator.target);
|
||||
self.pop_assignment();
|
||||
self.add_unpackable_assignment(
|
||||
&Unpackable::Comprehension {
|
||||
node: generator,
|
||||
first: false,
|
||||
},
|
||||
&generator.target,
|
||||
value,
|
||||
);
|
||||
|
||||
for expr in &generator.ifs {
|
||||
self.visit_expr(expr);
|
||||
@@ -891,20 +894,20 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
self.declare_parameter(parameter);
|
||||
}
|
||||
if let Some(vararg) = parameters.vararg.as_ref() {
|
||||
let (symbol, _) = self.add_symbol(vararg.name.id().clone());
|
||||
let symbol = self.add_symbol(vararg.name.id().clone());
|
||||
self.add_definition(
|
||||
symbol,
|
||||
DefinitionNodeRef::VariadicPositionalParameter(vararg),
|
||||
);
|
||||
}
|
||||
if let Some(kwarg) = parameters.kwarg.as_ref() {
|
||||
let (symbol, _) = self.add_symbol(kwarg.name.id().clone());
|
||||
let symbol = self.add_symbol(kwarg.name.id().clone());
|
||||
self.add_definition(symbol, DefinitionNodeRef::VariadicKeywordParameter(kwarg));
|
||||
}
|
||||
}
|
||||
|
||||
fn declare_parameter(&mut self, parameter: &'db ast::ParameterWithDefault) {
|
||||
let (symbol, _) = self.add_symbol(parameter.name().id().clone());
|
||||
let symbol = self.add_symbol(parameter.name().id().clone());
|
||||
|
||||
let definition = self.add_definition(symbol, parameter);
|
||||
|
||||
@@ -934,9 +937,30 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
|
||||
let current_assignment = match target {
|
||||
ast::Expr::List(_) | ast::Expr::Tuple(_) => {
|
||||
if matches!(unpackable, Unpackable::Comprehension { .. }) {
|
||||
debug_assert_eq!(
|
||||
self.scopes[self.current_scope()].node().scope_kind(),
|
||||
ScopeKind::Comprehension
|
||||
);
|
||||
}
|
||||
// The first iterator of the comprehension is evaluated in the outer scope, while all subsequent
|
||||
// nodes are evaluated in the inner scope.
|
||||
// SAFETY: The current scope is the comprehension, and the comprehension scope must have a parent scope.
|
||||
let value_file_scope =
|
||||
if let Unpackable::Comprehension { first: true, .. } = unpackable {
|
||||
self.scope_stack
|
||||
.iter()
|
||||
.rev()
|
||||
.nth(1)
|
||||
.expect("The comprehension scope must have a parent scope")
|
||||
.file_scope_id
|
||||
} else {
|
||||
self.current_scope()
|
||||
};
|
||||
let unpack = Some(Unpack::new(
|
||||
self.db,
|
||||
self.file,
|
||||
value_file_scope,
|
||||
self.current_scope(),
|
||||
// SAFETY: `target` belongs to the `self.module` tree
|
||||
#[allow(unsafe_code)]
|
||||
@@ -1114,7 +1138,18 @@ where
|
||||
// The symbol for the function name itself has to be evaluated
|
||||
// at the end to match the runtime evaluation of parameter defaults
|
||||
// and return-type annotations.
|
||||
let (symbol, _) = self.add_symbol(name.id.clone());
|
||||
let symbol = self.add_symbol(name.id.clone());
|
||||
|
||||
// Record a use of the function name in the scope that it is defined in, so that it
|
||||
// can be used to find previously defined functions with the same name. This is
|
||||
// used to collect all the overloaded definitions of a function. This needs to be
|
||||
// done on the `Identifier` node as opposed to `ExprName` because that's what the
|
||||
// AST uses.
|
||||
self.mark_symbol_used(symbol);
|
||||
let use_id = self.current_ast_ids().record_use(name);
|
||||
self.current_use_def_map_mut()
|
||||
.record_use(symbol, use_id, NodeKey::from_node(name));
|
||||
|
||||
self.add_definition(symbol, function_def);
|
||||
}
|
||||
ast::Stmt::ClassDef(class) => {
|
||||
@@ -1138,11 +1173,11 @@ where
|
||||
);
|
||||
|
||||
// In Python runtime semantics, a class is registered after its scope is evaluated.
|
||||
let (symbol, _) = self.add_symbol(class.name.id.clone());
|
||||
let symbol = self.add_symbol(class.name.id.clone());
|
||||
self.add_definition(symbol, class);
|
||||
}
|
||||
ast::Stmt::TypeAlias(type_alias) => {
|
||||
let (symbol, _) = self.add_symbol(
|
||||
let symbol = self.add_symbol(
|
||||
type_alias
|
||||
.name
|
||||
.as_name_expr()
|
||||
@@ -1179,7 +1214,7 @@ where
|
||||
(Name::new(alias.name.id.split('.').next().unwrap()), false)
|
||||
};
|
||||
|
||||
let (symbol, _) = self.add_symbol(symbol_name);
|
||||
let symbol = self.add_symbol(symbol_name);
|
||||
self.add_definition(
|
||||
symbol,
|
||||
ImportDefinitionNodeRef {
|
||||
@@ -1250,7 +1285,7 @@ where
|
||||
//
|
||||
// For more details, see the doc-comment on `StarImportPlaceholderPredicate`.
|
||||
for export in exported_names(self.db, referenced_module) {
|
||||
let (symbol_id, newly_added) = self.add_symbol(export.clone());
|
||||
let symbol_id = self.add_symbol(export.clone());
|
||||
let node_ref = StarImportDefinitionNodeRef { node, symbol_id };
|
||||
let star_import = StarImportPlaceholderPredicate::new(
|
||||
self.db,
|
||||
@@ -1259,28 +1294,15 @@ where
|
||||
referenced_module,
|
||||
);
|
||||
|
||||
// Fast path for if there were no previous definitions
|
||||
// of the symbol defined through the `*` import:
|
||||
// we can apply the visibility constraint to *only* the added definition,
|
||||
// rather than all definitions
|
||||
if newly_added {
|
||||
self.push_additional_definition(symbol_id, node_ref);
|
||||
self.current_use_def_map_mut()
|
||||
.record_and_negate_star_import_visibility_constraint(
|
||||
star_import,
|
||||
symbol_id,
|
||||
);
|
||||
} else {
|
||||
let pre_definition = self.flow_snapshot();
|
||||
self.push_additional_definition(symbol_id, node_ref);
|
||||
let constraint_id =
|
||||
self.record_visibility_constraint(star_import.into());
|
||||
let post_definition = self.flow_snapshot();
|
||||
self.flow_restore(pre_definition.clone());
|
||||
self.record_negated_visibility_constraint(constraint_id);
|
||||
self.flow_merge(post_definition);
|
||||
self.simplify_visibility_constraints(pre_definition);
|
||||
}
|
||||
let pre_definition =
|
||||
self.current_use_def_map().single_symbol_snapshot(symbol_id);
|
||||
self.push_additional_definition(symbol_id, node_ref);
|
||||
self.current_use_def_map_mut()
|
||||
.record_and_negate_star_import_visibility_constraint(
|
||||
star_import,
|
||||
symbol_id,
|
||||
pre_definition,
|
||||
);
|
||||
}
|
||||
|
||||
continue;
|
||||
@@ -1300,7 +1322,7 @@ where
|
||||
self.has_future_annotations |= alias.name.id == "annotations"
|
||||
&& node.module.as_deref() == Some("__future__");
|
||||
|
||||
let (symbol, _) = self.add_symbol(symbol_name.clone());
|
||||
let symbol = self.add_symbol(symbol_name.clone());
|
||||
|
||||
self.add_definition(
|
||||
symbol,
|
||||
@@ -1312,6 +1334,17 @@ where
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
ast::Stmt::Assert(node) => {
|
||||
self.visit_expr(&node.test);
|
||||
let predicate = self.record_expression_narrowing_constraint(&node.test);
|
||||
self.record_visibility_constraint(predicate);
|
||||
|
||||
if let Some(msg) = &node.msg {
|
||||
self.visit_expr(msg);
|
||||
}
|
||||
}
|
||||
|
||||
ast::Stmt::Assign(node) => {
|
||||
debug_assert_eq!(&self.current_assignments, &[]);
|
||||
|
||||
@@ -1572,54 +1605,76 @@ where
|
||||
return;
|
||||
}
|
||||
|
||||
let after_subject = self.flow_snapshot();
|
||||
let mut vis_constraints = vec![];
|
||||
let mut post_case_snapshots = vec![];
|
||||
for (i, case) in cases.iter().enumerate() {
|
||||
if i != 0 {
|
||||
post_case_snapshots.push(self.flow_snapshot());
|
||||
self.flow_restore(after_subject.clone());
|
||||
}
|
||||
let mut no_case_matched = self.flow_snapshot();
|
||||
|
||||
let has_catchall = cases
|
||||
.last()
|
||||
.is_some_and(|case| case.guard.is_none() && case.pattern.is_wildcard());
|
||||
|
||||
let mut post_case_snapshots = vec![];
|
||||
let mut match_predicate;
|
||||
|
||||
for (i, case) in cases.iter().enumerate() {
|
||||
self.current_match_case = Some(CurrentMatchCase::new(&case.pattern));
|
||||
self.visit_pattern(&case.pattern);
|
||||
self.current_match_case = None;
|
||||
let predicate = self.add_pattern_narrowing_constraint(
|
||||
// Unlike in [Stmt::If], we don't reset [no_case_matched]
// here, because the effect of visiting a pattern is to bind
// symbols, and that doesn't happen unless the pattern
// actually matches.
|
||||
match_predicate = self.add_pattern_narrowing_constraint(
|
||||
subject_expr,
|
||||
&case.pattern,
|
||||
case.guard.as_deref(),
|
||||
);
|
||||
self.record_reachability_constraint(predicate);
|
||||
if let Some(expr) = &case.guard {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
let vis_constraint_id = self.record_reachability_constraint(match_predicate);
|
||||
|
||||
let match_success_guard_failure = case.guard.as_ref().map(|guard| {
|
||||
let guard_expr = self.add_standalone_expression(guard);
|
||||
self.visit_expr(guard);
|
||||
let post_guard_eval = self.flow_snapshot();
|
||||
let predicate = Predicate {
|
||||
node: PredicateNode::Expression(guard_expr),
|
||||
is_positive: true,
|
||||
};
|
||||
self.record_negated_narrowing_constraint(predicate);
|
||||
let match_success_guard_failure = self.flow_snapshot();
|
||||
self.flow_restore(post_guard_eval);
|
||||
self.record_narrowing_constraint(predicate);
|
||||
match_success_guard_failure
|
||||
});
|
||||
|
||||
self.record_visibility_constraint_id(vis_constraint_id);
|
||||
|
||||
self.visit_body(&case.body);
|
||||
for id in &vis_constraints {
|
||||
self.record_negated_visibility_constraint(*id);
|
||||
}
|
||||
let vis_constraint_id = self.record_visibility_constraint(predicate);
|
||||
vis_constraints.push(vis_constraint_id);
|
||||
}
|
||||
|
||||
// If there is no final wildcard match case, pretend there is one. This is similar to how
|
||||
// we add an implicit `else` block in if-elif chains, in case it's not present.
|
||||
if !cases
|
||||
.last()
|
||||
.is_some_and(|case| case.guard.is_none() && case.pattern.is_wildcard())
|
||||
{
|
||||
post_case_snapshots.push(self.flow_snapshot());
|
||||
self.flow_restore(after_subject.clone());
|
||||
|
||||
for id in &vis_constraints {
|
||||
self.record_negated_visibility_constraint(*id);
|
||||
if i != cases.len() - 1 || !has_catchall {
|
||||
// We need to restore the state after each case, but not after the last
|
||||
// one. The last one will just become the state that we merge the other
|
||||
// snapshots into.
|
||||
self.flow_restore(no_case_matched.clone());
|
||||
self.record_negated_narrowing_constraint(match_predicate);
|
||||
if let Some(match_success_guard_failure) = match_success_guard_failure {
|
||||
self.flow_merge(match_success_guard_failure);
|
||||
} else {
|
||||
assert!(case.guard.is_none());
|
||||
}
|
||||
} else {
|
||||
debug_assert!(match_success_guard_failure.is_none());
|
||||
debug_assert!(case.guard.is_none());
|
||||
}
|
||||
|
||||
self.record_negated_visibility_constraint(vis_constraint_id);
|
||||
no_case_matched = self.flow_snapshot();
|
||||
}
|
||||
|
||||
for post_clause_state in post_case_snapshots {
|
||||
self.flow_merge(post_clause_state);
|
||||
}
|
||||
|
||||
self.simplify_visibility_constraints(after_subject);
|
||||
self.simplify_visibility_constraints(no_case_matched);
|
||||
}
|
||||
ast::Stmt::Try(ast::StmtTry {
|
||||
body,
|
||||
@@ -1685,7 +1740,7 @@ where
|
||||
// which is invalid syntax. However, it's still pretty obvious here that the user
|
||||
// *wanted* `e` to be bound, so we should still create a definition here nonetheless.
|
||||
if let Some(symbol_name) = symbol_name {
|
||||
let (symbol, _) = self.add_symbol(symbol_name.id.clone());
|
||||
let symbol = self.add_symbol(symbol_name.id.clone());
|
||||
|
||||
self.add_definition(
|
||||
symbol,
|
||||
@@ -1761,7 +1816,7 @@ where
|
||||
let node_key = NodeKey::from_node(expr);
|
||||
|
||||
match expr {
|
||||
ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
|
||||
ast::Expr::Name(ast::ExprName { id, ctx, .. }) => {
|
||||
let (is_use, is_definition) = match (ctx, self.current_assignment()) {
|
||||
(ast::ExprContext::Store, Some(CurrentAssignment::AugAssign(_))) => {
|
||||
// For augmented assignment, the target expression is also used.
|
||||
@@ -1772,7 +1827,7 @@ where
|
||||
(ast::ExprContext::Del, _) => (false, true),
|
||||
(ast::ExprContext::Invalid, _) => (false, false),
|
||||
};
|
||||
let (symbol, _) = self.add_symbol(id.clone());
|
||||
let symbol = self.add_symbol(id.clone());
|
||||
|
||||
if is_use {
|
||||
self.mark_symbol_used(symbol);
|
||||
@@ -1824,12 +1879,17 @@ where
|
||||
// implemented.
|
||||
self.add_definition(symbol, named);
|
||||
}
|
||||
Some(CurrentAssignment::Comprehension { node, first }) => {
|
||||
Some(CurrentAssignment::Comprehension {
|
||||
unpack,
|
||||
node,
|
||||
first,
|
||||
}) => {
|
||||
self.add_definition(
|
||||
symbol,
|
||||
ComprehensionDefinitionNodeRef {
|
||||
unpack,
|
||||
iterable: &node.iter,
|
||||
target: name_node,
|
||||
target: expr,
|
||||
first,
|
||||
is_async: node.is_async,
|
||||
},
|
||||
@@ -2100,14 +2160,37 @@ where
|
||||
DefinitionKind::WithItem(assignment),
|
||||
);
|
||||
}
|
||||
Some(CurrentAssignment::Comprehension { .. }) => {
|
||||
// TODO:
|
||||
Some(CurrentAssignment::Comprehension {
|
||||
unpack,
|
||||
node,
|
||||
first,
|
||||
}) => {
|
||||
// SAFETY: `iter` and `expr` belong to the `self.module` tree
|
||||
#[allow(unsafe_code)]
|
||||
let assignment = ComprehensionDefinitionKind {
|
||||
target_kind: TargetKind::from(unpack),
|
||||
iterable: unsafe {
|
||||
AstNodeRef::new(self.module.clone(), &node.iter)
|
||||
},
|
||||
target: unsafe { AstNodeRef::new(self.module.clone(), expr) },
|
||||
first,
|
||||
is_async: node.is_async,
|
||||
};
|
||||
// Temporarily move to the scope of the method in which the instance attribute is defined.
|
||||
// SAFETY: `self.scope_stack` is not empty because the targets in comprehensions should always introduce a new scope.
|
||||
let scope = self.scope_stack.pop().expect("The popped scope must be a comprehension, which must have a parent scope");
|
||||
self.register_attribute_assignment(
|
||||
object,
|
||||
attr,
|
||||
DefinitionKind::Comprehension(assignment),
|
||||
);
|
||||
self.scope_stack.push(scope);
|
||||
}
|
||||
Some(CurrentAssignment::AugAssign(_)) => {
|
||||
// TODO:
|
||||
}
|
||||
Some(CurrentAssignment::Named(_)) => {
|
||||
// TODO:
|
||||
// A named expression whose target is an attribute is syntactically prohibited
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
@@ -2148,7 +2231,7 @@ where
|
||||
range: _,
|
||||
}) = pattern
|
||||
{
|
||||
let (symbol, _) = self.add_symbol(name.id().clone());
|
||||
let symbol = self.add_symbol(name.id().clone());
|
||||
let state = self.current_match_case.as_ref().unwrap();
|
||||
self.add_definition(
|
||||
symbol,
|
||||
@@ -2169,7 +2252,7 @@ where
|
||||
rest: Some(name), ..
|
||||
}) = pattern
|
||||
{
|
||||
let (symbol, _) = self.add_symbol(name.id().clone());
|
||||
let symbol = self.add_symbol(name.id().clone());
|
||||
let state = self.current_match_case.as_ref().unwrap();
|
||||
self.add_definition(
|
||||
symbol,
|
||||
@@ -2201,6 +2284,7 @@ enum CurrentAssignment<'a> {
|
||||
Comprehension {
|
||||
node: &'a ast::Comprehension,
|
||||
first: bool,
|
||||
unpack: Option<(UnpackPosition, Unpack<'a>)>,
|
||||
},
|
||||
WithItem {
|
||||
item: &'a ast::WithItem,
|
||||
@@ -2214,11 +2298,9 @@ impl CurrentAssignment<'_> {
|
||||
match self {
|
||||
Self::Assign { unpack, .. }
|
||||
| Self::For { unpack, .. }
|
||||
| Self::WithItem { unpack, .. } => unpack.as_mut().map(|(position, _)| position),
|
||||
Self::AnnAssign(_)
|
||||
| Self::AugAssign(_)
|
||||
| Self::Named(_)
|
||||
| Self::Comprehension { .. } => None,
|
||||
| Self::WithItem { unpack, .. }
|
||||
| Self::Comprehension { unpack, .. } => unpack.as_mut().map(|(position, _)| position),
|
||||
Self::AnnAssign(_) | Self::AugAssign(_) | Self::Named(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2273,13 +2355,17 @@ enum Unpackable<'a> {
|
||||
item: &'a ast::WithItem,
|
||||
is_async: bool,
|
||||
},
|
||||
Comprehension {
|
||||
first: bool,
|
||||
node: &'a ast::Comprehension,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> Unpackable<'a> {
|
||||
const fn kind(&self) -> UnpackKind {
|
||||
match self {
|
||||
Unpackable::Assign(_) => UnpackKind::Assign,
|
||||
Unpackable::For(_) => UnpackKind::Iterable,
|
||||
Unpackable::For(_) | Unpackable::Comprehension { .. } => UnpackKind::Iterable,
|
||||
Unpackable::WithItem { .. } => UnpackKind::ContextManager,
|
||||
}
|
||||
}
|
||||
@@ -2294,6 +2380,11 @@ impl<'a> Unpackable<'a> {
|
||||
is_async: *is_async,
|
||||
unpack,
|
||||
},
|
||||
Unpackable::Comprehension { node, first } => CurrentAssignment::Comprehension {
|
||||
node,
|
||||
first: *first,
|
||||
unpack,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -281,8 +281,9 @@ pub(crate) struct ExceptHandlerDefinitionNodeRef<'a> {
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
|
||||
pub(crate) unpack: Option<(UnpackPosition, Unpack<'a>)>,
|
||||
pub(crate) iterable: &'a ast::Expr,
|
||||
pub(crate) target: &'a ast::ExprName,
|
||||
pub(crate) target: &'a ast::Expr,
|
||||
pub(crate) first: bool,
|
||||
pub(crate) is_async: bool,
|
||||
}
|
||||
@@ -374,11 +375,13 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
is_async,
|
||||
}),
|
||||
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef {
|
||||
unpack,
|
||||
iterable,
|
||||
target,
|
||||
first,
|
||||
is_async,
|
||||
}) => DefinitionKind::Comprehension(ComprehensionDefinitionKind {
|
||||
target_kind: TargetKind::from(unpack),
|
||||
iterable: AstNodeRef::new(parsed.clone(), iterable),
|
||||
target: AstNodeRef::new(parsed, target),
|
||||
first,
|
||||
@@ -474,7 +477,9 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
unpack: _,
|
||||
is_async: _,
|
||||
}) => DefinitionNodeKey(NodeKey::from_node(target)),
|
||||
Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => target.into(),
|
||||
Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => {
|
||||
DefinitionNodeKey(NodeKey::from_node(target))
|
||||
}
|
||||
Self::VariadicPositionalParameter(node) => node.into(),
|
||||
Self::VariadicKeywordParameter(node) => node.into(),
|
||||
Self::Parameter(node) => node.into(),
|
||||
@@ -550,7 +555,7 @@ pub enum DefinitionKind<'db> {
|
||||
AnnotatedAssignment(AnnotatedAssignmentDefinitionKind),
|
||||
AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
|
||||
For(ForStmtDefinitionKind<'db>),
|
||||
Comprehension(ComprehensionDefinitionKind),
|
||||
Comprehension(ComprehensionDefinitionKind<'db>),
|
||||
VariadicPositionalParameter(AstNodeRef<ast::Parameter>),
|
||||
VariadicKeywordParameter(AstNodeRef<ast::Parameter>),
|
||||
Parameter(AstNodeRef<ast::ParameterWithDefault>),
|
||||
@@ -749,19 +754,24 @@ impl MatchPatternDefinitionKind {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ComprehensionDefinitionKind {
|
||||
iterable: AstNodeRef<ast::Expr>,
|
||||
target: AstNodeRef<ast::ExprName>,
|
||||
first: bool,
|
||||
is_async: bool,
|
||||
pub struct ComprehensionDefinitionKind<'db> {
|
||||
pub(super) target_kind: TargetKind<'db>,
|
||||
pub(super) iterable: AstNodeRef<ast::Expr>,
|
||||
pub(super) target: AstNodeRef<ast::Expr>,
|
||||
pub(super) first: bool,
|
||||
pub(super) is_async: bool,
|
||||
}
|
||||
|
||||
impl ComprehensionDefinitionKind {
|
||||
impl<'db> ComprehensionDefinitionKind<'db> {
|
||||
pub(crate) fn iterable(&self) -> &ast::Expr {
|
||||
self.iterable.node()
|
||||
}
|
||||
|
||||
pub(crate) fn target(&self) -> &ast::ExprName {
|
||||
pub(crate) fn target_kind(&self) -> TargetKind<'db> {
|
||||
self.target_kind
|
||||
}
|
||||
|
||||
pub(crate) fn target(&self) -> &ast::Expr {
|
||||
self.target.node()
|
||||
}
|
||||
|
||||
|
||||
@@ -775,7 +775,16 @@ impl<'db> UseDefMapBuilder<'db> {
|
||||
.add_and_constraint(self.scope_start_visibility, constraint);
|
||||
}
|
||||
|
||||
/// This method exists solely as a fast path for handling `*`-import visibility constraints.
|
||||
/// Snapshot the state of a single symbol at the current point in control flow.
|
||||
///
|
||||
/// This is only used for `*`-import visibility constraints, which are handled differently
|
||||
/// to most other visibility constraints. See the doc-comment for
|
||||
/// [`Self::record_and_negate_star_import_visibility_constraint`] for more details.
|
||||
pub(super) fn single_symbol_snapshot(&self, symbol: ScopedSymbolId) -> SymbolState {
|
||||
self.symbol_states[symbol].clone()
|
||||
}
|
||||
|
||||
/// This method exists solely for handling `*`-import visibility constraints.
|
||||
///
|
||||
/// The reason why we add visibility constraints for [`Definition`]s created by `*` imports
|
||||
/// is laid out in the doc-comment for [`StarImportPlaceholderPredicate`]. But treating these
|
||||
@@ -784,12 +793,11 @@ impl<'db> UseDefMapBuilder<'db> {
|
||||
/// dominates. (Although `*` imports are not common generally, they are used in several
|
||||
/// important places by typeshed.)
|
||||
///
|
||||
/// To solve these regressions, it was observed that we could add a fast path for `*`-import
|
||||
/// definitions which added a new symbol to the global scope (as opposed to `*`-import definitions
|
||||
/// that provided redefinitions for *pre-existing* global-scope symbols). The fast path does a
|
||||
/// number of things differently to our normal handling of visibility constraints:
|
||||
/// To solve these regressions, it was observed that we could do significantly less work for
|
||||
/// `*`-import definitions. We do a number of things differently here to our normal handling of
|
||||
/// visibility constraints:
|
||||
///
|
||||
/// - It only applies and negates the visibility constraints to a single symbol, rather than to
|
||||
/// - We only apply and negate the visibility constraints to a single symbol, rather than to
|
||||
/// all symbols. This is possible here because, unlike most definitions, we know in advance that
|
||||
/// exactly one definition occurs inside the "if-true" predicate branch, and we know exactly
|
||||
/// which definition it is.
|
||||
@@ -800,9 +808,9 @@ impl<'db> UseDefMapBuilder<'db> {
|
||||
/// the visibility constraints is only important for symbols that did not have any new
|
||||
/// definitions inside either the "if-predicate-true" branch or the "if-predicate-false" branch.
|
||||
///
|
||||
/// - It avoids multiple expensive calls to [`Self::snapshot`]. This is possible because we know
|
||||
/// the symbol is newly added, so we know the prior state of the symbol was
|
||||
/// [`SymbolState::undefined`].
|
||||
/// - We only snapshot the state for a single symbol prior to the definition, rather than doing
|
||||
/// expensive calls to [`Self::snapshot`]. Again, this is possible because we know
|
||||
/// that only a single definition occurs inside the "if-predicate-true" predicate branch.
|
||||
///
|
||||
/// - Normally we take care to check whether an "if-predicate-true" branch or an
|
||||
/// "if-predicate-false" branch contains a terminal statement: these can affect the visibility
|
||||
@@ -815,6 +823,7 @@ impl<'db> UseDefMapBuilder<'db> {
|
||||
&mut self,
|
||||
star_import: StarImportPlaceholderPredicate<'db>,
|
||||
symbol: ScopedSymbolId,
|
||||
pre_definition_state: SymbolState,
|
||||
) {
|
||||
let predicate_id = self.add_predicate(star_import.into());
|
||||
let visibility_id = self.visibility_constraints.add_atom(predicate_id);
|
||||
@@ -822,10 +831,9 @@ impl<'db> UseDefMapBuilder<'db> {
|
||||
.visibility_constraints
|
||||
.add_not_constraint(visibility_id);
|
||||
|
||||
let mut post_definition_state = std::mem::replace(
|
||||
&mut self.symbol_states[symbol],
|
||||
SymbolState::undefined(self.scope_start_visibility),
|
||||
);
|
||||
let mut post_definition_state =
|
||||
std::mem::replace(&mut self.symbol_states[symbol], pre_definition_state);
|
||||
|
||||
post_definition_state
|
||||
.record_visibility_constraint(&mut self.visibility_constraints, visibility_id);
|
||||
|
||||
|
||||
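A minimal, hedged Python sketch (not from the diff) of why `*`-import definitions are recorded behind visibility constraints in the first place: whether the imported name is actually bound depends on the exporting module, so the binding is only conditionally visible.

```python
import sys
import types

# Hypothetical stand-in for a module whose public names depend on a condition.
exporter = types.ModuleType("exporter")
if sys.maxsize > 2**32:  # some condition the checker cannot resolve statically
    exporter.FLAG = True
sys.modules["exporter"] = exporter

from exporter import *  # noqa: E402,F403

# FLAG is bound only if the condition above held, so the checker records the
# `*`-import definition of FLAG behind a visibility constraint rather than as
# an unconditional binding.
print("FLAG" in globals())
```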
@@ -314,7 +314,7 @@ impl SymbolBindings {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct SymbolState {
|
||||
pub(in crate::semantic_index) struct SymbolState {
|
||||
declarations: SymbolDeclarations,
|
||||
bindings: SymbolBindings,
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -51,6 +51,67 @@ enum UnionElement<'db> {
|
||||
Type(Type<'db>),
|
||||
}
|
||||
|
||||
impl<'db> UnionElement<'db> {
|
||||
/// Try reducing this `UnionElement` given the presence in the same union of `other_type`.
|
||||
///
|
||||
/// If this `UnionElement` is a group of literals, filter the literals present if needed and
|
||||
/// return `ReduceResult::KeepIf` with a boolean value indicating whether the remaining group
|
||||
/// of literals should be kept in the union
|
||||
///
|
||||
/// If this `UnionElement` is some other type, return `ReduceResult::Type` so `UnionBuilder`
|
||||
/// can perform more complex checks on it.
|
||||
fn try_reduce(&mut self, db: &'db dyn Db, other_type: Type<'db>) -> ReduceResult<'db> {
|
||||
// `AlwaysTruthy` and `AlwaysFalsy` are the only types which can be a supertype of only
|
||||
// _some_ literals of the same kind, so we need to walk the full set in this case.
|
||||
let needs_filter = matches!(other_type, Type::AlwaysTruthy | Type::AlwaysFalsy);
|
||||
match self {
|
||||
UnionElement::IntLiterals(literals) => {
|
||||
ReduceResult::KeepIf(if needs_filter {
|
||||
literals.retain(|literal| {
|
||||
!Type::IntLiteral(*literal).is_subtype_of(db, other_type)
|
||||
});
|
||||
!literals.is_empty()
|
||||
} else {
|
||||
// SAFETY: All `UnionElement` literal kinds must always be non-empty
|
||||
!Type::IntLiteral(literals[0]).is_subtype_of(db, other_type)
|
||||
})
|
||||
}
|
||||
UnionElement::StringLiterals(literals) => {
|
||||
ReduceResult::KeepIf(if needs_filter {
|
||||
literals.retain(|literal| {
|
||||
!Type::StringLiteral(*literal).is_subtype_of(db, other_type)
|
||||
});
|
||||
!literals.is_empty()
|
||||
} else {
|
||||
// SAFETY: All `UnionElement` literal kinds must always be non-empty
|
||||
!Type::StringLiteral(literals[0]).is_subtype_of(db, other_type)
|
||||
})
|
||||
}
|
||||
UnionElement::BytesLiterals(literals) => {
|
||||
ReduceResult::KeepIf(if needs_filter {
|
||||
literals.retain(|literal| {
|
||||
!Type::BytesLiteral(*literal).is_subtype_of(db, other_type)
|
||||
});
|
||||
!literals.is_empty()
|
||||
} else {
|
||||
// SAFETY: All `UnionElement` literal kinds must always be non-empty
|
||||
!Type::BytesLiteral(literals[0]).is_subtype_of(db, other_type)
|
||||
})
|
||||
}
|
||||
UnionElement::Type(existing) => ReduceResult::Type(*existing),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum ReduceResult<'db> {
    /// Reduction of this `UnionElement` is complete; keep it in the union if the nested
    /// boolean is true, eliminate it from the union if false.
    KeepIf(bool),
    /// The given `Type` can stand-in for the entire `UnionElement` for further union
    /// simplification checks.
    Type(Type<'db>),
}

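To illustrate the shape of union this is optimizing for, a hedged Python-level example (not from the diff): a union with many literal members of one kind collapses in a single check when a plain supertype is added, while the checker's `AlwaysTruthy`/`AlwaysFalsy` types are the only supertypes that can absorb just part of such a literal group.

```python
from typing import Literal, Union

Code = Literal[0, 200, 301, 404, 500]

def handle(code: Union[Code, int]) -> int:
    # Code | int simplifies to int: every int literal is a subtype of int, so
    # the whole literal group can be dropped after checking one representative.
    return code

# By contrast, a type like the checker's AlwaysTruthy would absorb only the
# non-zero literals (0 is falsy), so each literal has to be filtered individually.
```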
// TODO increase this once we extend `UnionElement` throughout all union/intersection
// representations, so that we can make large unions of literals fast in all operations.
const MAX_UNION_LITERALS: usize = 200;
@@ -197,27 +258,17 @@ impl<'db> UnionBuilder<'db> {
|
||||
let mut to_remove = SmallVec::<[usize; 2]>::new();
|
||||
let ty_negated = ty.negate(self.db);
|
||||
|
||||
for (index, element) in self
|
||||
.elements
|
||||
.iter()
|
||||
.map(|element| {
|
||||
// For literals, the first element in the set can stand in for all the rest,
|
||||
// since they all have the same super-types. SAFETY: a `UnionElement` of
|
||||
// literal kind must always have at least one element in it.
|
||||
match element {
|
||||
UnionElement::IntLiterals(literals) => Type::IntLiteral(literals[0]),
|
||||
UnionElement::StringLiterals(literals) => {
|
||||
Type::StringLiteral(literals[0])
|
||||
for (index, element) in self.elements.iter_mut().enumerate() {
|
||||
let element_type = match element.try_reduce(self.db, ty) {
|
||||
ReduceResult::KeepIf(keep) => {
|
||||
if !keep {
|
||||
to_remove.push(index);
|
||||
}
|
||||
UnionElement::BytesLiterals(literals) => {
|
||||
Type::BytesLiteral(literals[0])
|
||||
}
|
||||
UnionElement::Type(ty) => *ty,
|
||||
continue;
|
||||
}
|
||||
})
|
||||
.enumerate()
|
||||
{
|
||||
if Some(element) == bool_pair {
|
||||
ReduceResult::Type(ty) => ty,
|
||||
};
|
||||
if Some(element_type) == bool_pair {
|
||||
to_add = KnownClass::Bool.to_instance(self.db);
|
||||
to_remove.push(index);
|
||||
// The type we are adding is a BooleanLiteral, which doesn't have any
|
||||
@@ -227,14 +278,14 @@ impl<'db> UnionBuilder<'db> {
|
||||
break;
|
||||
}
|
||||
|
||||
if ty.is_same_gradual_form(element)
|
||||
|| ty.is_subtype_of(self.db, element)
|
||||
|| element.is_object(self.db)
|
||||
if ty.is_gradual_equivalent_to(self.db, element_type)
|
||||
|| ty.is_subtype_of(self.db, element_type)
|
||||
|| element_type.is_object(self.db)
|
||||
{
|
||||
return;
|
||||
} else if element.is_subtype_of(self.db, ty) {
|
||||
} else if element_type.is_subtype_of(self.db, ty) {
|
||||
to_remove.push(index);
|
||||
} else if ty_negated.is_subtype_of(self.db, element) {
|
||||
} else if ty_negated.is_subtype_of(self.db, element_type) {
|
||||
// We add `ty` to the union. We just checked that `~ty` is a subtype of an existing `element`.
|
||||
// This also means that `~ty | ty` is a subtype of `element | ty`, because both elements in the
|
||||
// first union are subtypes of the corresponding elements in the second union. But `~ty | ty` is
|
||||
@@ -509,7 +560,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
for (index, existing_positive) in self.positive.iter().enumerate() {
|
||||
// S & T = S if S <: T
|
||||
if existing_positive.is_subtype_of(db, new_positive)
|
||||
|| existing_positive.is_same_gradual_form(new_positive)
|
||||
|| existing_positive.is_gradual_equivalent_to(db, new_positive)
|
||||
{
|
||||
return;
|
||||
}
|
||||
@@ -605,7 +656,9 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
let mut to_remove = SmallVec::<[usize; 1]>::new();
|
||||
for (index, existing_negative) in self.negative.iter().enumerate() {
|
||||
// ~S & ~T = ~T if S <: T
|
||||
if existing_negative.is_subtype_of(db, new_negative) {
|
||||
if existing_negative.is_subtype_of(db, new_negative)
|
||||
|| existing_negative.is_gradual_equivalent_to(db, new_negative)
|
||||
{
|
||||
to_remove.push(index);
|
||||
}
|
||||
// same rule, reverse order
|
||||
|
||||
@@ -19,7 +19,7 @@ use crate::types::diagnostic::{
|
||||
use crate::types::generics::{Specialization, SpecializationBuilder};
|
||||
use crate::types::signatures::{Parameter, ParameterForm};
|
||||
use crate::types::{
|
||||
todo_type, BoundMethodType, DataclassMetadata, FunctionDecorators, KnownClass, KnownFunction,
|
||||
BoundMethodType, DataclassMetadata, FunctionDecorators, KnownClass, KnownFunction,
|
||||
KnownInstanceType, MethodWrapperKind, PropertyInstanceType, UnionType, WrapperDescriptorKind,
|
||||
};
|
||||
use ruff_db::diagnostic::{Annotation, Severity, Span, SubDiagnostic};
|
||||
@@ -219,7 +219,8 @@ impl<'db> Bindings<'db> {
|
||||
|
||||
match binding_type {
|
||||
Type::MethodWrapper(MethodWrapperKind::FunctionTypeDunderGet(function)) => {
|
||||
if function.has_known_decorator(db, FunctionDecorators::CLASSMETHOD) {
|
||||
let function_literal = function.function_literal(db);
|
||||
if function_literal.has_known_decorator(db, FunctionDecorators::CLASSMETHOD) {
|
||||
match overload.parameter_types() {
|
||||
[_, Some(owner)] => {
|
||||
overload.set_return_type(Type::BoundMethod(BoundMethodType::new(
|
||||
@@ -250,7 +251,9 @@ impl<'db> Bindings<'db> {
|
||||
if let [Some(function_ty @ Type::FunctionLiteral(function)), ..] =
|
||||
overload.parameter_types()
|
||||
{
|
||||
if function.has_known_decorator(db, FunctionDecorators::CLASSMETHOD) {
|
||||
let function_literal = function.function_literal(db);
|
||||
if function_literal.has_known_decorator(db, FunctionDecorators::CLASSMETHOD)
|
||||
{
|
||||
match overload.parameter_types() {
|
||||
[_, _, Some(owner)] => {
|
||||
overload.set_return_type(Type::BoundMethod(
|
||||
@@ -298,7 +301,7 @@ impl<'db> Bindings<'db> {
|
||||
if property.getter(db).is_some_and(|getter| {
|
||||
getter
|
||||
.into_function_literal()
|
||||
.is_some_and(|f| f.name(db) == "__name__")
|
||||
.is_some_and(|f| f.function_literal(db).name(db) == "__name__")
|
||||
}) =>
|
||||
{
|
||||
overload.set_return_type(Type::string_literal(db, type_alias.name(db)));
|
||||
@@ -307,7 +310,7 @@ impl<'db> Bindings<'db> {
|
||||
if property.getter(db).is_some_and(|getter| {
|
||||
getter
|
||||
.into_function_literal()
|
||||
.is_some_and(|f| f.name(db) == "__name__")
|
||||
.is_some_and(|f| f.function_literal(db).name(db) == "__name__")
|
||||
}) =>
|
||||
{
|
||||
overload.set_return_type(Type::string_literal(db, type_var.name(db)));
|
||||
@@ -416,7 +419,12 @@ impl<'db> Bindings<'db> {
|
||||
Type::BoundMethod(bound_method)
|
||||
if bound_method.self_instance(db).is_property_instance() =>
|
||||
{
|
||||
match bound_method.function(db).name(db).as_str() {
|
||||
match bound_method
|
||||
.function(db)
|
||||
.function_literal(db)
|
||||
.name(db)
|
||||
.as_str()
|
||||
{
|
||||
"setter" => {
|
||||
if let [Some(_), Some(setter)] = overload.parameter_types() {
|
||||
let mut ty_property = bound_method.self_instance(db);
|
||||
@@ -456,7 +464,10 @@ impl<'db> Bindings<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
Type::FunctionLiteral(function_type) => match function_type.known(db) {
|
||||
Type::FunctionLiteral(function_type) => match function_type
|
||||
.function_literal(db)
|
||||
.known(db)
|
||||
{
|
||||
Some(KnownFunction::IsEquivalentTo) => {
|
||||
if let [Some(ty_a), Some(ty_b)] = overload.parameter_types() {
|
||||
overload.set_return_type(Type::BooleanLiteral(
|
||||
@@ -535,8 +546,21 @@ impl<'db> Bindings<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
Some(KnownFunction::IsProtocol) => {
|
||||
if let [Some(ty)] = overload.parameter_types() {
|
||||
overload.set_return_type(Type::BooleanLiteral(
|
||||
ty.into_class_literal()
|
||||
.is_some_and(|class| class.is_protocol(db)),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Some(KnownFunction::Overload) => {
|
||||
overload.set_return_type(todo_type!("overload[..] return type"));
|
||||
// TODO: This can be removed once we understand legacy generics because the
|
||||
// typeshed definition for `typing.overload` is an identity function.
|
||||
if let [Some(ty)] = overload.parameter_types() {
|
||||
overload.set_return_type(*ty);
|
||||
}
|
||||
}
|
||||
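The `KnownFunction::Overload` special case reflects the fact, noted in the TODO above, that typeshed defines `typing.overload` as an identity function; a short hedged example of the behavior being modeled (not part of the diff):

```python
from typing import overload

@overload
def parse(x: int) -> int: ...
@overload
def parse(x: str) -> str: ...
def parse(x):
    return x

# typeshed annotates `typing.overload` roughly as `(func: _F) -> _F`, i.e. an
# identity function at the type level, so inferring the decorator call should
# give back the decorated function's own type instead of a placeholder.
```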
|
||||
Some(KnownFunction::GetattrStatic) => {
|
||||
@@ -1162,7 +1186,7 @@ impl<'db> CallableDescription<'db> {
|
||||
match callable_type {
|
||||
Type::FunctionLiteral(function) => Some(CallableDescription {
|
||||
kind: "function",
|
||||
name: function.name(db),
|
||||
name: function.function_literal(db).name(db),
|
||||
}),
|
||||
Type::ClassLiteral(class_type) => Some(CallableDescription {
|
||||
kind: "class",
|
||||
@@ -1170,12 +1194,12 @@ impl<'db> CallableDescription<'db> {
|
||||
}),
|
||||
Type::BoundMethod(bound_method) => Some(CallableDescription {
|
||||
kind: "bound method",
|
||||
name: bound_method.function(db).name(db),
|
||||
name: bound_method.function(db).function_literal(db).name(db),
|
||||
}),
|
||||
Type::MethodWrapper(MethodWrapperKind::FunctionTypeDunderGet(function)) => {
|
||||
Some(CallableDescription {
|
||||
kind: "method wrapper `__get__` of function",
|
||||
name: function.name(db),
|
||||
name: function.function_literal(db).name(db),
|
||||
})
|
||||
}
|
||||
Type::MethodWrapper(MethodWrapperKind::PropertyDunderGet(_)) => {
|
||||
@@ -1300,7 +1324,7 @@ impl<'db> BindingError<'db> {
|
||||
) -> Option<(Span, Span)> {
|
||||
match callable_ty {
|
||||
Type::FunctionLiteral(function) => {
|
||||
let function_scope = function.body_scope(db);
|
||||
let function_scope = function.function_literal(db).body_scope(db);
|
||||
let span = Span::from(function_scope.file(db));
|
||||
let node = function_scope.node(db);
|
||||
if let Some(func_def) = node.as_function() {
|
||||
|
||||
@@ -246,7 +246,7 @@ impl<'db> ClassType<'db> {
|
||||
/// cases rather than simply iterating over the inferred resolution order for the class.
|
||||
///
|
||||
/// [method resolution order]: https://docs.python.org/3/glossary.html#term-method-resolution-order
|
||||
pub(super) fn iter_mro(self, db: &'db dyn Db) -> impl Iterator<Item = ClassBase<'db>> {
|
||||
pub(super) fn iter_mro(self, db: &'db dyn Db) -> MroIterator<'db> {
|
||||
let (class_literal, specialization) = self.class_literal(db);
|
||||
class_literal.iter_mro(db, specialization)
|
||||
}
|
||||
@@ -257,11 +257,107 @@ impl<'db> ClassType<'db> {
|
||||
class_literal.is_final(db)
|
||||
}
|
||||
|
||||
    /// If `self` and `other` are generic aliases of the same generic class, returns their
    /// corresponding specializations.
    fn compatible_specializations(
        self,
        db: &'db dyn Db,
        other: ClassType<'db>,
    ) -> Option<(Specialization<'db>, Specialization<'db>)> {
        match (self, other) {
            (ClassType::Generic(self_generic), ClassType::Generic(other_generic)) => {
                if self_generic.origin(db) == other_generic.origin(db) {
                    Some((
                        self_generic.specialization(db),
                        other_generic.specialization(db),
                    ))
                } else {
                    None
                }
            }
            _ => None,
        }
    }

|
||||
/// Return `true` if `other` is present in this class's MRO.
|
||||
pub(super) fn is_subclass_of(self, db: &'db dyn Db, other: ClassType<'db>) -> bool {
|
||||
// `is_subclass_of` is checking the subtype relation, in which gradual types do not
|
||||
// participate, so we should not return `True` if we find `Any/Unknown` in the MRO.
|
||||
self.iter_mro(db).contains(&ClassBase::Class(other))
|
||||
if self.iter_mro(db).contains(&ClassBase::Class(other)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// `self` is a subclass of `other` if they are both generic aliases of the same generic
|
||||
// class, and their specializations are compatible, taking into account the variance of the
|
||||
// class's typevars.
|
||||
if let Some((self_specialization, other_specialization)) =
|
||||
self.compatible_specializations(db, other)
|
||||
{
|
||||
if self_specialization.is_subtype_of(db, other_specialization) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub(super) fn is_equivalent_to(self, db: &'db dyn Db, other: ClassType<'db>) -> bool {
|
||||
if self == other {
|
||||
return true;
|
||||
}
|
||||
|
||||
// `self` is equivalent to `other` if they are both generic aliases of the same generic
|
||||
// class, and their specializations are compatible, taking into account the variance of the
|
||||
// class's typevars.
|
||||
if let Some((self_specialization, other_specialization)) =
|
||||
self.compatible_specializations(db, other)
|
||||
{
|
||||
if self_specialization.is_equivalent_to(db, other_specialization) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub(super) fn is_assignable_to(self, db: &'db dyn Db, other: ClassType<'db>) -> bool {
|
||||
// `is_subclass_of` is checking the subtype relation, in which gradual types do not
|
||||
// participate, so we should not return `True` if we find `Any/Unknown` in the MRO.
|
||||
if self.is_subclass_of(db, other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// `self` is assignable to `other` if they are both generic aliases of the same generic
|
||||
// class, and their specializations are compatible, taking into account the variance of the
|
||||
// class's typevars.
|
||||
if let Some((self_specialization, other_specialization)) =
|
||||
self.compatible_specializations(db, other)
|
||||
{
|
||||
if self_specialization.is_assignable_to(db, other_specialization) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub(super) fn is_gradual_equivalent_to(self, db: &'db dyn Db, other: ClassType<'db>) -> bool {
|
||||
if self == other {
|
||||
return true;
|
||||
}
|
||||
|
||||
// `self` is equivalent to `other` if they are both generic aliases of the same generic
|
||||
// class, and their specializations are compatible, taking into account the variance of the
|
||||
// class's typevars.
|
||||
if let Some((self_specialization, other_specialization)) =
|
||||
self.compatible_specializations(db, other)
|
||||
{
|
||||
if self_specialization.is_gradual_equivalent_to(db, other_specialization) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
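A hedged Python illustration (not part of the diff) of what `compatible_specializations` enables: two generic aliases are only compared element-wise when they share the same origin class, and, per the TODO comments, their typevars are treated as invariant for now.

```python
from typing import Generic, TypeVar

T = TypeVar("T")

class Box(Generic[T]):
    def __init__(self, item: T) -> None:
        self.item = item

a: Box[int] = Box(1)
b: Box[int] = a       # same origin, equivalent specializations: accepted
c: Box[object] = a    # rejected while typevars are treated as invariant
```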
|
||||
/// Return the metaclass of this class, or `type[Unknown]` if the metaclass cannot be inferred.
|
||||
@@ -486,6 +582,17 @@ impl<'db> ClassLiteralType<'db> {
|
||||
.collect()
|
||||
}
|
||||
|
||||
    /// Determine if this class is a protocol.
    pub(super) fn is_protocol(self, db: &'db dyn Db) -> bool {
        self.explicit_bases(db).iter().any(|base| {
            matches!(
                base,
                Type::KnownInstance(KnownInstanceType::Protocol)
                    | Type::Dynamic(DynamicType::SubscriptedProtocol)
            )
        })
    }
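As a hedged illustration of what `is_protocol` detects (not from the diff): a class counts as a protocol when `Protocol`, bare or subscripted, appears among its explicit bases.

```python
from typing import Protocol, TypeVar

T = TypeVar("T")

class SupportsClose(Protocol):      # bare `Protocol` base: protocol class
    def close(self) -> None: ...

class Reader(Protocol[T]):          # subscripted `Protocol[T]` base: protocol class
    def read(self) -> T: ...

class File(SupportsClose):          # no `Protocol` base: ordinary class
    def close(self) -> None: ...
```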
|
||||
/// Return the types of the decorators on this class
|
||||
#[salsa::tracked(return_ref)]
|
||||
fn decorators(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
|
||||
@@ -549,7 +656,7 @@ impl<'db> ClassLiteralType<'db> {
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
specialization: Option<Specialization<'db>>,
|
||||
) -> impl Iterator<Item = ClassBase<'db>> {
|
||||
) -> MroIterator<'db> {
|
||||
MroIterator::new(db, self, specialization)
|
||||
}
|
||||
|
||||
@@ -750,7 +857,11 @@ impl<'db> ClassLiteralType<'db> {
|
||||
|
||||
for superclass in mro_iter {
|
||||
match superclass {
|
||||
ClassBase::Dynamic(DynamicType::TodoProtocol) => {
|
||||
ClassBase::Dynamic(
|
||||
DynamicType::SubscriptedGeneric | DynamicType::SubscriptedProtocol,
|
||||
)
|
||||
| ClassBase::Generic
|
||||
| ClassBase::Protocol => {
|
||||
// TODO: We currently skip `Protocol` when looking up class members, in order to
|
||||
// avoid creating many dynamic types in our test suite that would otherwise
|
||||
// result from looking up attributes on builtin types like `str`, `list`, `tuple`
|
||||
@@ -769,7 +880,12 @@ impl<'db> ClassLiteralType<'db> {
|
||||
continue;
|
||||
}
|
||||
|
||||
if class.is_known(db, KnownClass::Type) && policy.meta_class_no_type_fallback()
|
||||
// HACK: we should implement some more general logic here that supports arbitrary custom
|
||||
// metaclasses, not just `type` and `ABCMeta`.
|
||||
if matches!(
|
||||
class.known(db),
|
||||
Some(KnownClass::Type | KnownClass::ABCMeta)
|
||||
) && policy.meta_class_no_type_fallback()
|
||||
{
|
||||
continue;
|
||||
}
|
||||
@@ -846,7 +962,9 @@ impl<'db> ClassLiteralType<'db> {
|
||||
Some(_),
|
||||
"__new__" | "__init__",
|
||||
) => Type::FunctionLiteral(
|
||||
function.with_generic_context(db, origin.generic_context(db)),
|
||||
function
|
||||
.function_literal(db)
|
||||
.with_generic_context(db, origin.generic_context(db)),
|
||||
),
|
||||
_ => ty,
|
||||
}
|
||||
@@ -931,7 +1049,7 @@ impl<'db> ClassLiteralType<'db> {
|
||||
|
||||
let init_signature = Signature::new(Parameters::new(parameters), Some(Type::none(db)));
|
||||
|
||||
return Some(Type::Callable(CallableType::new(db, init_signature)));
|
||||
return Some(Type::Callable(CallableType::single(db, init_signature)));
|
||||
} else if matches!(name, "__lt__" | "__le__" | "__gt__" | "__ge__") {
|
||||
if metadata.contains(DataclassMetadata::ORDER) {
|
||||
let signature = Signature::new(
|
||||
@@ -943,7 +1061,7 @@ impl<'db> ClassLiteralType<'db> {
|
||||
Some(KnownClass::Bool.to_instance(db)),
|
||||
);
|
||||
|
||||
return Some(Type::Callable(CallableType::new(db, signature)));
|
||||
return Some(Type::Callable(CallableType::single(db, signature)));
|
||||
}
|
||||
}
|
||||
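The synthesized `__init__` and ordering signatures correspond to standard dataclass behavior; a short hedged example of what is being constructed here (not part of the diff):

```python
from dataclasses import dataclass

@dataclass(order=True)
class Point:
    x: int
    y: int

# The synthesized `__init__` behaves like `(self, x: int, y: int) -> None`,
# and `order=True` additionally synthesizes `__lt__`/`__le__`/`__gt__`/`__ge__`
# comparisons returning `bool`.
assert Point(1, 2) < Point(1, 3)
```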
|
||||
@@ -1062,8 +1180,12 @@ impl<'db> ClassLiteralType<'db> {
|
||||
|
||||
for superclass in self.iter_mro(db, specialization) {
|
||||
match superclass {
|
||||
ClassBase::Dynamic(DynamicType::TodoProtocol) => {
|
||||
// TODO: We currently skip `Protocol` when looking up instance members, in order to
|
||||
ClassBase::Dynamic(
|
||||
DynamicType::SubscriptedProtocol | DynamicType::SubscriptedGeneric,
|
||||
)
|
||||
| ClassBase::Generic
|
||||
| ClassBase::Protocol => {
|
||||
// TODO: We currently skip these when looking up instance members, in order to
|
||||
// avoid creating many dynamic types in our test suite that would otherwise
|
||||
// result from looking up attributes on builtin types like `str`, `list`, `tuple`
|
||||
}
|
||||
@@ -1307,14 +1429,42 @@ impl<'db> ClassLiteralType<'db> {
|
||||
}
|
||||
}
|
||||
}
|
||||
DefinitionKind::Comprehension(_) => {
|
||||
// TODO:
|
||||
DefinitionKind::Comprehension(comprehension) => {
|
||||
match comprehension.target_kind() {
|
||||
TargetKind::Sequence(_, unpack) => {
|
||||
// We found an unpacking assignment like:
|
||||
//
|
||||
// [... for .., self.name, .. in <iterable>]
|
||||
|
||||
let unpacked = infer_unpack_types(db, unpack);
|
||||
let target_ast_id = comprehension
|
||||
.target()
|
||||
.scoped_expression_id(db, unpack.target_scope(db));
|
||||
let inferred_ty = unpacked.expression_type(target_ast_id);
|
||||
|
||||
union_of_inferred_types = union_of_inferred_types.add(inferred_ty);
|
||||
}
|
||||
TargetKind::NameOrAttribute => {
|
||||
// We found an attribute assignment like:
|
||||
//
|
||||
// [... for self.name in <iterable>]
|
||||
|
||||
let iterable_ty = infer_expression_type(
|
||||
db,
|
||||
index.expression(comprehension.iterable()),
|
||||
);
|
||||
// TODO: Potential diagnostics resulting from the iterable are currently not reported.
|
||||
let inferred_ty = iterable_ty.iterate(db);
|
||||
|
||||
union_of_inferred_types = union_of_inferred_types.add(inferred_ty);
|
||||
}
|
||||
}
|
||||
}
|
||||
DefinitionKind::AugmentedAssignment(_) => {
|
||||
// TODO:
|
||||
}
|
||||
DefinitionKind::NamedExpression(_) => {
|
||||
// TODO:
|
||||
// A named expression whose target is an attribute is syntactically prohibited
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
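For the comprehension branch above, a hedged Python example (not from the diff) of the two target shapes being handled: an unpacked tuple target containing `self.<attr>`, and a bare `self.<attr>` target whose type comes from iterating the iterable.

```python
class Config:
    def __init__(self, pairs: list[tuple[str, int]]) -> None:
        # unpacking target: `self.name`/`self.value` take the element types (str, int)
        [None for self.name, self.value in pairs]
        # plain attribute target: `self.flag` takes the iterable's element type (bool)
        [None for self.flag in (True, False)]

c = Config([("retries", 3)])
print(c.name, c.value, c.flag)
```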
@@ -1516,6 +1666,22 @@ impl<'db> InstanceType<'db> {
|
||||
// N.B. The subclass relation is fully static
|
||||
self.class.is_subclass_of(db, other.class)
|
||||
}
|
||||
|
||||
pub(super) fn is_equivalent_to(self, db: &'db dyn Db, other: InstanceType<'db>) -> bool {
|
||||
self.class.is_equivalent_to(db, other.class)
|
||||
}
|
||||
|
||||
pub(super) fn is_assignable_to(self, db: &'db dyn Db, other: InstanceType<'db>) -> bool {
|
||||
self.class.is_assignable_to(db, other.class)
|
||||
}
|
||||
|
||||
pub(super) fn is_gradual_equivalent_to(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
other: InstanceType<'db>,
|
||||
) -> bool {
|
||||
self.class.is_gradual_equivalent_to(db, other.class)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> From<InstanceType<'db>> for Type<'db> {
|
||||
@@ -1559,6 +1725,8 @@ pub(crate) enum KnownClass {
|
||||
Super,
|
||||
// enum
|
||||
Enum,
|
||||
// abc
|
||||
ABCMeta,
|
||||
// Types
|
||||
GenericAlias,
|
||||
ModuleType,
|
||||
@@ -1668,6 +1836,7 @@ impl<'db> KnownClass {
|
||||
| Self::Float
|
||||
| Self::Sized
|
||||
| Self::Enum
|
||||
| Self::ABCMeta
|
||||
// Evaluating `NotImplementedType` in a boolean context was deprecated in Python 3.9
|
||||
// and raises a `TypeError` in Python >=3.14
|
||||
// (see https://docs.python.org/3/library/constants.html#NotImplemented)
|
||||
@@ -1724,6 +1893,7 @@ impl<'db> KnownClass {
|
||||
Self::Sized => "Sized",
|
||||
Self::OrderedDict => "OrderedDict",
|
||||
Self::Enum => "Enum",
|
||||
Self::ABCMeta => "ABCMeta",
|
||||
Self::Super => "super",
|
||||
// For example, `typing.List` is defined as `List = _Alias()` in typeshed
|
||||
Self::StdlibAlias => "_Alias",
|
||||
@@ -1880,6 +2050,7 @@ impl<'db> KnownClass {
|
||||
| Self::Super
|
||||
| Self::Property => KnownModule::Builtins,
|
||||
Self::VersionInfo => KnownModule::Sys,
|
||||
Self::ABCMeta => KnownModule::Abc,
|
||||
Self::Enum => KnownModule::Enum,
|
||||
Self::GenericAlias
|
||||
| Self::ModuleType
|
||||
@@ -1984,6 +2155,7 @@ impl<'db> KnownClass {
|
||||
| Self::TypeVarTuple
|
||||
| Self::Sized
|
||||
| Self::Enum
|
||||
| Self::ABCMeta
|
||||
| Self::Super
|
||||
| Self::NewType => false,
|
||||
}
|
||||
@@ -2043,6 +2215,7 @@ impl<'db> KnownClass {
|
||||
| Self::TypeVarTuple
|
||||
| Self::Sized
|
||||
| Self::Enum
|
||||
| Self::ABCMeta
|
||||
| Self::Super
|
||||
| Self::UnionType
|
||||
| Self::NewType => false,
|
||||
@@ -2104,6 +2277,7 @@ impl<'db> KnownClass {
|
||||
"SupportsIndex" => Self::SupportsIndex,
|
||||
"Sized" => Self::Sized,
|
||||
"Enum" => Self::Enum,
|
||||
"ABCMeta" => Self::ABCMeta,
|
||||
"super" => Self::Super,
|
||||
"_version_info" => Self::VersionInfo,
|
||||
"ellipsis" if Program::get(db).python_version(db) <= PythonVersion::PY39 => {
|
||||
@@ -2159,6 +2333,7 @@ impl<'db> KnownClass {
|
||||
| Self::MethodType
|
||||
| Self::MethodWrapperType
|
||||
| Self::Enum
|
||||
| Self::ABCMeta
|
||||
| Self::Super
|
||||
| Self::NotImplementedType
|
||||
| Self::UnionType
|
||||
@@ -2281,6 +2456,8 @@ pub enum KnownInstanceType<'db> {
|
||||
OrderedDict,
|
||||
/// The symbol `typing.Protocol` (which can also be found as `typing_extensions.Protocol`)
|
||||
Protocol,
|
||||
/// The symbol `typing.Generic` (which can also be found as `typing_extensions.Generic`)
|
||||
Generic,
|
||||
/// The symbol `typing.Type` (which can also be found as `typing_extensions.Type`)
|
||||
Type,
|
||||
/// A single instance of `typing.TypeVar`
|
||||
@@ -2355,6 +2532,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
| Self::ChainMap
|
||||
| Self::OrderedDict
|
||||
| Self::Protocol
|
||||
| Self::Generic
|
||||
| Self::ReadOnly
|
||||
| Self::TypeAliasType(_)
|
||||
| Self::Unknown
|
||||
@@ -2401,6 +2579,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
Self::ChainMap => "typing.ChainMap",
|
||||
Self::OrderedDict => "typing.OrderedDict",
|
||||
Self::Protocol => "typing.Protocol",
|
||||
Self::Generic => "typing.Generic",
|
||||
Self::ReadOnly => "typing.ReadOnly",
|
||||
Self::TypeVar(typevar) => typevar.name(db),
|
||||
Self::TypeAliasType(_) => "typing.TypeAliasType",
|
||||
@@ -2448,7 +2627,8 @@ impl<'db> KnownInstanceType<'db> {
|
||||
Self::Deque => KnownClass::StdlibAlias,
|
||||
Self::ChainMap => KnownClass::StdlibAlias,
|
||||
Self::OrderedDict => KnownClass::StdlibAlias,
|
||||
Self::Protocol => KnownClass::SpecialForm,
|
||||
Self::Protocol => KnownClass::SpecialForm, // actually `_ProtocolMeta` at runtime but this is what typeshed says
|
||||
Self::Generic => KnownClass::SpecialForm, // actually `type` at runtime but this is what typeshed says
|
||||
Self::TypeVar(_) => KnownClass::TypeVar,
|
||||
Self::TypeAliasType(_) => KnownClass::TypeAliasType,
|
||||
Self::TypeOf => KnownClass::SpecialForm,
|
||||
@@ -2492,6 +2672,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
"Counter" => Self::Counter,
|
||||
"ChainMap" => Self::ChainMap,
|
||||
"OrderedDict" => Self::OrderedDict,
|
||||
"Generic" => Self::Generic,
|
||||
"Protocol" => Self::Protocol,
|
||||
"Optional" => Self::Optional,
|
||||
"Union" => Self::Union,
|
||||
@@ -2532,7 +2713,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
///
|
||||
/// Most variants can only exist in one module, which is the same as `self.class().canonical_module()`.
|
||||
/// Some variants could validly be defined in either `typing` or `typing_extensions`, however.
|
||||
fn check_module(self, module: KnownModule) -> bool {
|
||||
pub(super) fn check_module(self, module: KnownModule) -> bool {
|
||||
match self {
|
||||
Self::Any
|
||||
| Self::ClassVar
|
||||
@@ -2545,14 +2726,15 @@ impl<'db> KnownInstanceType<'db> {
|
||||
| Self::Counter
|
||||
| Self::ChainMap
|
||||
| Self::OrderedDict
|
||||
| Self::Protocol
|
||||
| Self::Optional
|
||||
| Self::Union
|
||||
| Self::NoReturn
|
||||
| Self::Tuple
|
||||
| Self::Type
|
||||
| Self::Generic
|
||||
| Self::Callable => module.is_typing(),
|
||||
Self::Annotated
|
||||
| Self::Protocol
|
||||
| Self::Literal
|
||||
| Self::LiteralString
|
||||
| Self::Never
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
use crate::types::{todo_type, ClassType, DynamicType, KnownClass, KnownInstanceType, Type};
|
||||
use crate::types::{
|
||||
todo_type, ClassType, DynamicType, KnownClass, KnownInstanceType, MroIterator, Type,
|
||||
};
|
||||
use crate::Db;
|
||||
use itertools::Either;
|
||||
|
||||
/// Enumeration of the possible kinds of types we allow in class bases.
|
||||
///
|
||||
/// This is much more limited than the [`Type`] enum: all types that would be invalid to have as a
|
||||
/// class base are transformed into [`ClassBase::unknown`]
|
||||
/// class base are transformed into [`ClassBase::unknown()`]
|
||||
///
|
||||
/// Note that a non-specialized generic class _cannot_ be a class base. When we see a
|
||||
/// non-specialized generic class in any type expression (including the list of base classes), we
|
||||
@@ -14,6 +15,13 @@ use itertools::Either;
|
||||
pub enum ClassBase<'db> {
|
||||
Dynamic(DynamicType),
|
||||
Class(ClassType<'db>),
|
||||
/// Although `Protocol` is not a class in typeshed's stubs, it is at runtime,
|
||||
/// and can appear in the MRO of a class.
|
||||
Protocol,
|
||||
/// Bare `Generic` cannot be subclassed directly in user code,
|
||||
/// but nonetheless appears in the MRO of classes that inherit from `Generic[T]`,
|
||||
/// `Protocol[T]`, or bare `Protocol`.
|
||||
Generic,
|
||||
}
|
||||
|
||||
impl<'db> ClassBase<'db> {
|
||||
@@ -25,13 +33,6 @@ impl<'db> ClassBase<'db> {
|
||||
Self::Dynamic(DynamicType::Unknown)
|
||||
}
|
||||
|
||||
pub(crate) const fn is_dynamic(self) -> bool {
|
||||
match self {
|
||||
ClassBase::Dynamic(_) => true,
|
||||
ClassBase::Class(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn display(self, db: &'db dyn Db) -> impl std::fmt::Display + 'db {
|
||||
struct Display<'db> {
|
||||
base: ClassBase<'db>,
|
||||
@@ -48,6 +49,8 @@ impl<'db> ClassBase<'db> {
|
||||
ClassBase::Class(ClassType::Generic(alias)) => {
|
||||
write!(f, "<class '{}'>", alias.display(self.db))
|
||||
}
|
||||
ClassBase::Protocol => f.write_str("typing.Protocol"),
|
||||
ClassBase::Generic => f.write_str("typing.Generic"),
|
||||
}
|
||||
}
|
||||
}
|
||||
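The new `Protocol` and `Generic` variants model the fact that both objects really do appear in runtime MROs; a hedged illustration (not from the diff):

```python
from typing import Protocol, TypeVar

T = TypeVar("T")

class Reader(Protocol[T]):
    def read(self) -> T: ...

# Bare `Generic` is never written as an explicit base in user code, but both
# `Protocol` and `Generic` still show up in the runtime MRO, which is why they
# need their own `ClassBase` variants.
print([c.__name__ for c in Reader.__mro__])
# e.g. ['Reader', 'Protocol', 'Generic', 'object']
```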
@@ -165,7 +168,8 @@ impl<'db> ClassBase<'db> {
|
||||
KnownInstanceType::Callable => {
|
||||
Self::try_from_type(db, todo_type!("Support for Callable as a base class"))
|
||||
}
|
||||
KnownInstanceType::Protocol => Some(ClassBase::Dynamic(DynamicType::TodoProtocol)),
|
||||
KnownInstanceType::Protocol => Some(ClassBase::Protocol),
|
||||
KnownInstanceType::Generic => Some(ClassBase::Generic),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -173,18 +177,21 @@ impl<'db> ClassBase<'db> {
|
||||
pub(super) fn into_class(self) -> Option<ClassType<'db>> {
|
||||
match self {
|
||||
Self::Class(class) => Some(class),
|
||||
Self::Dynamic(_) => None,
|
||||
Self::Dynamic(_) | Self::Generic | Self::Protocol => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterate over the MRO of this base
|
||||
pub(super) fn mro(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
) -> Either<impl Iterator<Item = ClassBase<'db>>, impl Iterator<Item = ClassBase<'db>>> {
|
||||
pub(super) fn mro(self, db: &'db dyn Db) -> impl Iterator<Item = ClassBase<'db>> {
|
||||
match self {
|
||||
ClassBase::Dynamic(_) => Either::Left([self, ClassBase::object(db)].into_iter()),
|
||||
ClassBase::Class(class) => Either::Right(class.iter_mro(db)),
|
||||
ClassBase::Protocol => ClassBaseMroIterator::length_3(db, self, ClassBase::Generic),
|
||||
ClassBase::Dynamic(DynamicType::SubscriptedProtocol) => ClassBaseMroIterator::length_3(
|
||||
db,
|
||||
self,
|
||||
ClassBase::Dynamic(DynamicType::SubscriptedGeneric),
|
||||
),
|
||||
ClassBase::Dynamic(_) | ClassBase::Generic => ClassBaseMroIterator::length_2(db, self),
|
||||
ClassBase::Class(class) => ClassBaseMroIterator::from_class(db, class),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -200,6 +207,8 @@ impl<'db> From<ClassBase<'db>> for Type<'db> {
|
||||
match value {
|
||||
ClassBase::Dynamic(dynamic) => Type::Dynamic(dynamic),
|
||||
ClassBase::Class(class) => class.into(),
|
||||
ClassBase::Protocol => Type::KnownInstance(KnownInstanceType::Protocol),
|
||||
ClassBase::Generic => Type::KnownInstance(KnownInstanceType::Generic),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -209,3 +218,41 @@ impl<'db> From<&ClassBase<'db>> for Type<'db> {
|
||||
Self::from(*value)
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over the MRO of a class base.
enum ClassBaseMroIterator<'db> {
    Length2(core::array::IntoIter<ClassBase<'db>, 2>),
    Length3(core::array::IntoIter<ClassBase<'db>, 3>),
    FromClass(MroIterator<'db>),
}

impl<'db> ClassBaseMroIterator<'db> {
    /// Iterate over an MRO of length 2 that consists of `first_element` and then `object`.
    fn length_2(db: &'db dyn Db, first_element: ClassBase<'db>) -> Self {
        ClassBaseMroIterator::Length2([first_element, ClassBase::object(db)].into_iter())
    }

    /// Iterate over an MRO of length 3 that consists of `first_element`, then `second_element`, then `object`.
    fn length_3(db: &'db dyn Db, element_1: ClassBase<'db>, element_2: ClassBase<'db>) -> Self {
        ClassBaseMroIterator::Length3([element_1, element_2, ClassBase::object(db)].into_iter())
    }

    /// Iterate over the MRO of an arbitrary class. The MRO may be of any length.
    fn from_class(db: &'db dyn Db, class: ClassType<'db>) -> Self {
        ClassBaseMroIterator::FromClass(class.iter_mro(db))
    }
}

impl<'db> Iterator for ClassBaseMroIterator<'db> {
    type Item = ClassBase<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            Self::Length2(iter) => iter.next(),
            Self::Length3(iter) => iter.next(),
            Self::FromClass(iter) => iter.next(),
        }
    }
}

impl std::iter::FusedIterator for ClassBaseMroIterator<'_> {}

|
||||
@@ -169,7 +169,9 @@ impl<'db> InferContext<'db> {
|
||||
|
||||
// Iterate over all functions and test if any is decorated with `@no_type_check`.
|
||||
function_scope_tys.any(|function_ty| {
|
||||
function_ty.has_known_decorator(self.db, FunctionDecorators::NO_TYPE_CHECK)
|
||||
function_ty
|
||||
.function_literal(self.db)
|
||||
.has_known_decorator(self.db, FunctionDecorators::NO_TYPE_CHECK)
|
||||
})
|
||||
}
|
||||
InNoTypeCheck::Yes => true,
|
||||
|
||||
@@ -36,6 +36,7 @@ pub(crate) fn register_lints(registry: &mut LintRegistryBuilder) {
|
||||
registry.register_lint(&INVALID_EXCEPTION_CAUGHT);
|
||||
registry.register_lint(&INVALID_METACLASS);
|
||||
registry.register_lint(&INVALID_PARAMETER_DEFAULT);
|
||||
registry.register_lint(&INVALID_PROTOCOL);
|
||||
registry.register_lint(&INVALID_RAISE);
|
||||
registry.register_lint(&INVALID_SUPER_ARGUMENT);
|
||||
registry.register_lint(&INVALID_TYPE_CHECKING_CONSTANT);
|
||||
@@ -230,6 +231,34 @@ declare_lint! {
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for invalidly defined protocol classes.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// An invalidly defined protocol class may lead to the type checker inferring
|
||||
/// unexpected things. It may also lead to `TypeError`s at runtime.
|
||||
///
|
||||
/// ## Examples
|
||||
/// A `Protocol` class cannot inherit from a non-`Protocol` class;
|
||||
/// this raises a `TypeError` at runtime:
|
||||
///
|
||||
/// ```pycon
|
||||
/// >>> from typing import Protocol
|
||||
/// >>> class Foo(int, Protocol): ...
|
||||
/// ...
|
||||
/// Traceback (most recent call last):
|
||||
/// File "<python-input-1>", line 1, in <module>
|
||||
/// class Foo(int, Protocol): ...
|
||||
/// TypeError: Protocols can only inherit from other protocols, got <class 'int'>
|
||||
/// ```
|
||||
pub(crate) static INVALID_PROTOCOL = {
|
||||
summary: "detects invalid protocol class definitions",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// TODO #14889
|
||||
pub(crate) static INCONSISTENT_MRO = {
|
||||
@@ -1096,7 +1125,7 @@ fn report_invalid_assignment_with_message(
|
||||
Type::FunctionLiteral(function) => {
|
||||
context.report_lint_old(&INVALID_ASSIGNMENT, node, format_args!(
|
||||
"Implicit shadowing of function `{}`; annotate to make it explicit if this is intentional",
|
||||
function.name(context.db())));
|
||||
function.function_literal(context.db()).name(context.db())));
|
||||
}
|
||||
_ => {
|
||||
context.report_lint_old(&INVALID_ASSIGNMENT, node, message);
|
||||
|
||||
@@ -7,16 +7,18 @@ use ruff_python_ast::str::{Quote, TripleQuotes};
|
||||
use ruff_python_literal::escape::AsciiEscape;
|
||||
|
||||
use crate::types::class::{ClassType, GenericAlias, GenericClass};
|
||||
use crate::types::class_base::ClassBase;
|
||||
use crate::types::generics::{GenericContext, Specialization};
|
||||
use crate::types::signatures::{Parameter, Parameters, Signature};
|
||||
use crate::types::{
|
||||
InstanceType, IntersectionType, KnownClass, MethodWrapperKind, StringLiteralType, Type,
|
||||
TypeVarBoundOrConstraints, TypeVarInstance, UnionType, WrapperDescriptorKind,
|
||||
FunctionSignature, FunctionType, InstanceType, IntersectionType, KnownClass, MethodWrapperKind,
|
||||
StringLiteralType, SubclassOfInner, Type, TypeVarBoundOrConstraints, TypeVarInstance,
|
||||
UnionType, WrapperDescriptorKind,
|
||||
};
|
||||
use crate::Db;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use super::CallableType;
|
||||
|
||||
impl<'db> Type<'db> {
|
||||
pub fn display(&self, db: &'db dyn Db) -> DisplayType {
|
||||
DisplayType { ty: self, db }
|
||||
@@ -89,47 +91,76 @@ impl Display for DisplayRepresentation<'_> {
|
||||
Type::SubclassOf(subclass_of_ty) => match subclass_of_ty.subclass_of() {
|
||||
// Only show the bare class name here; ClassBase::display would render this as
|
||||
// type[<class 'Foo'>] instead of type[Foo].
|
||||
ClassBase::Class(class) => write!(f, "type[{}]", class.name(self.db)),
|
||||
ClassBase::Dynamic(dynamic) => write!(f, "type[{dynamic}]"),
|
||||
SubclassOfInner::Class(class) => write!(f, "type[{}]", class.name(self.db)),
|
||||
SubclassOfInner::Dynamic(dynamic) => write!(f, "type[{dynamic}]"),
|
||||
},
|
||||
Type::KnownInstance(known_instance) => f.write_str(known_instance.repr(self.db)),
|
||||
Type::FunctionLiteral(function) => {
|
||||
let signature = function.signature(self.db);
|
||||
|
||||
// TODO: when generic function types are supported, we should add
|
||||
// the generic type parameters to the signature, i.e.
|
||||
// show `def foo[T](x: T) -> T`.
|
||||
|
||||
write!(
|
||||
f,
|
||||
// "def {name}{specialization}{signature}",
|
||||
"def {name}{signature}",
|
||||
name = function.name(self.db),
|
||||
signature = signature.display(self.db)
|
||||
)
|
||||
match signature {
|
||||
FunctionSignature::Single(signature) => {
|
||||
write!(
|
||||
f,
|
||||
// "def {name}{specialization}{signature}",
|
||||
"def {name}{signature}",
|
||||
name = function.function_literal(self.db).name(self.db),
|
||||
signature = signature.display(self.db)
|
||||
)
|
||||
}
|
||||
FunctionSignature::Overloaded(signatures, _) => {
|
||||
// TODO: How to display overloads?
|
||||
f.write_str("Overload[")?;
|
||||
let mut join = f.join(", ");
|
||||
for signature in signatures {
|
||||
join.entry(&signature.display(self.db));
|
||||
}
|
||||
f.write_str("]")
|
||||
}
|
||||
}
|
||||
}
|
||||
Type::Callable(callable) => callable.signature(self.db).display(self.db).fmt(f),
|
||||
Type::Callable(callable) => callable.display(self.db).fmt(f),
|
||||
Type::BoundMethod(bound_method) => {
|
||||
let function = bound_method.function(self.db);
|
||||
|
||||
// TODO: use the specialization from the method. Similar to the comment above
|
||||
// about the function specialization,
|
||||
|
||||
write!(
|
||||
f,
|
||||
"bound method {instance}.{method}{signature}",
|
||||
method = function.name(self.db),
|
||||
instance = bound_method.self_instance(self.db).display(self.db),
|
||||
signature = function.signature(self.db).bind_self().display(self.db)
|
||||
)
|
||||
match function.signature(self.db) {
|
||||
FunctionSignature::Single(signature) => {
|
||||
write!(
|
||||
f,
|
||||
"bound method {instance}.{method}{signature}",
|
||||
method = function.function_literal(self.db).name(self.db),
|
||||
instance = bound_method.self_instance(self.db).display(self.db),
|
||||
signature = signature.bind_self().display(self.db)
|
||||
)
|
||||
}
|
||||
FunctionSignature::Overloaded(signatures, _) => {
|
||||
// TODO: How to display overloads?
|
||||
f.write_str("Overload[")?;
|
||||
let mut join = f.join(", ");
|
||||
for signature in signatures {
|
||||
join.entry(&signature.bind_self().display(self.db));
|
||||
}
|
||||
f.write_str("]")
|
||||
}
|
||||
}
|
||||
}
|
||||
Type::MethodWrapper(MethodWrapperKind::FunctionTypeDunderGet(function)) => {
|
||||
write!(
|
||||
f,
|
||||
"<method-wrapper `__get__` of `{function}{specialization}`>",
|
||||
function = function.name(self.db),
|
||||
specialization = if let Some(specialization) = function.specialization(self.db)
|
||||
{
|
||||
specialization.display_short(self.db).to_string()
|
||||
function = function.function_literal(self.db).name(self.db),
|
||||
specialization = if let FunctionType::Specialized(specialized) = function {
|
||||
specialized
|
||||
.specialization(self.db)
|
||||
.display_short(self.db)
|
||||
.to_string()
|
||||
} else {
|
||||
String::new()
|
||||
},
|
||||
@@ -139,10 +170,12 @@ impl Display for DisplayRepresentation<'_> {
|
||||
write!(
|
||||
f,
|
||||
"<method-wrapper `__call__` of `{function}{specialization}`>",
|
||||
function = function.name(self.db),
|
||||
specialization = if let Some(specialization) = function.specialization(self.db)
|
||||
{
|
||||
specialization.display_short(self.db).to_string()
|
||||
function = function.function_literal(self.db).name(self.db),
|
||||
specialization = if let FunctionType::Specialized(specialized) = function {
|
||||
specialized
|
||||
.specialization(self.db)
|
||||
.display_short(self.db)
|
||||
.to_string()
|
||||
} else {
|
||||
String::new()
|
||||
},
|
||||
@@ -355,8 +388,40 @@ impl Display for DisplaySpecialization<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> CallableType<'db> {
|
||||
pub(crate) fn display(&'db self, db: &'db dyn Db) -> DisplayCallableType<'db> {
|
||||
DisplayCallableType {
|
||||
signatures: self.signatures(db),
|
||||
db,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct DisplayCallableType<'db> {
|
||||
signatures: &'db [Signature<'db>],
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl Display for DisplayCallableType<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
match self.signatures {
|
||||
[signature] => write!(f, "{}", signature.display(self.db)),
|
||||
signatures => {
|
||||
// TODO: How to display overloads?
|
||||
f.write_str("Overload[")?;
|
||||
let mut join = f.join(", ");
|
||||
for signature in signatures {
|
||||
join.entry(&signature.display(self.db));
|
||||
}
|
||||
join.finish()?;
|
||||
f.write_char(']')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Signature<'db> {
|
||||
fn display(&'db self, db: &'db dyn Db) -> DisplaySignature<'db> {
|
||||
pub(crate) fn display(&'db self, db: &'db dyn Db) -> DisplaySignature<'db> {
|
||||
DisplaySignature {
|
||||
parameters: self.parameters(),
|
||||
return_ty: self.return_ty,
|
||||
@@ -365,7 +430,7 @@ impl<'db> Signature<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
struct DisplaySignature<'db> {
|
||||
pub(crate) struct DisplaySignature<'db> {
|
||||
parameters: &'db Parameters<'db>,
|
||||
return_ty: Option<Type<'db>>,
|
||||
db: &'db dyn Db,
|
||||
|
||||
@@ -181,6 +181,118 @@ impl<'db> Specialization<'db> {
|
||||
.find(|(var, _)| **var == typevar)
|
||||
.map(|(_, ty)| *ty)
|
||||
}
|
||||
|
||||
pub(crate) fn is_subtype_of(self, db: &'db dyn Db, other: Specialization<'db>) -> bool {
|
||||
let generic_context = self.generic_context(db);
|
||||
if generic_context != other.generic_context(db) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for ((_typevar, self_type), other_type) in (generic_context.variables(db).into_iter())
|
||||
.zip(self.types(db))
|
||||
.zip(other.types(db))
|
||||
{
|
||||
if matches!(self_type, Type::Dynamic(_)) || matches!(other_type, Type::Dynamic(_)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO: We currently treat all typevars as invariant. Once we track the actual
|
||||
// variance of each typevar, these checks should change:
|
||||
// - covariant: verify that self_type <: other_type
|
||||
// - contravariant: verify that other_type <: self_type
|
||||
// - invariant: verify that self_type == other_type
|
||||
// - bivariant: skip, can't make subtyping false
|
||||
if !self_type.is_equivalent_to(db, *other_type) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub(crate) fn is_equivalent_to(self, db: &'db dyn Db, other: Specialization<'db>) -> bool {
|
||||
let generic_context = self.generic_context(db);
|
||||
if generic_context != other.generic_context(db) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for ((_typevar, self_type), other_type) in (generic_context.variables(db).into_iter())
|
||||
.zip(self.types(db))
|
||||
.zip(other.types(db))
|
||||
{
|
||||
if matches!(self_type, Type::Dynamic(_)) || matches!(other_type, Type::Dynamic(_)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO: We currently treat all typevars as invariant. Once we track the actual
|
||||
// variance of each typevar, these checks should change:
|
||||
// - covariant: verify that self_type == other_type
|
||||
// - contravariant: verify that other_type == self_type
|
||||
// - invariant: verify that self_type == other_type
|
||||
// - bivariant: skip, can't make equivalence false
|
||||
if !self_type.is_equivalent_to(db, *other_type) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub(crate) fn is_assignable_to(self, db: &'db dyn Db, other: Specialization<'db>) -> bool {
|
||||
let generic_context = self.generic_context(db);
|
||||
if generic_context != other.generic_context(db) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for ((_typevar, self_type), other_type) in (generic_context.variables(db).into_iter())
|
||||
.zip(self.types(db))
|
||||
.zip(other.types(db))
|
||||
{
|
||||
if matches!(self_type, Type::Dynamic(_)) || matches!(other_type, Type::Dynamic(_)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// TODO: We currently treat all typevars as invariant. Once we track the actual
|
||||
// variance of each typevar, these checks should change:
|
||||
// - covariant: verify that self_type <: other_type
|
||||
// - contravariant: verify that other_type <: self_type
|
||||
// - invariant: verify that self_type == other_type
|
||||
// - bivariant: skip, can't make assignability false
|
||||
if !self_type.is_gradual_equivalent_to(db, *other_type) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub(crate) fn is_gradual_equivalent_to(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
other: Specialization<'db>,
|
||||
) -> bool {
|
||||
let generic_context = self.generic_context(db);
|
||||
if generic_context != other.generic_context(db) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for ((_typevar, self_type), other_type) in (generic_context.variables(db).into_iter())
|
||||
.zip(self.types(db))
|
||||
.zip(other.types(db))
|
||||
{
|
||||
// TODO: We currently treat all typevars as invariant. Once we track the actual
|
||||
// variance of each typevar, these checks should change:
|
||||
// - covariant: verify that self_type == other_type
|
||||
// - contravariant: verify that other_type == self_type
|
||||
// - invariant: verify that self_type == other_type
|
||||
// - bivariant: skip, can't make equivalence false
|
||||
if !self_type.is_gradual_equivalent_to(db, *other_type) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
/// Performs type inference between parameter annotations and argument types, producing a
|
||||
|
||||
@@ -49,9 +49,9 @@ use crate::module_resolver::resolve_module;
use crate::node_key::NodeKey;
use crate::semantic_index::ast_ids::{HasScopedExpressionId, HasScopedUseId, ScopedExpressionId};
use crate::semantic_index::definition::{
AnnotatedAssignmentDefinitionKind, AssignmentDefinitionKind, Definition, DefinitionKind,
DefinitionNodeKey, ExceptHandlerDefinitionKind, ForStmtDefinitionKind, TargetKind,
WithItemDefinitionKind,
AnnotatedAssignmentDefinitionKind, AssignmentDefinitionKind, ComprehensionDefinitionKind,
Definition, DefinitionKind, DefinitionNodeKey, ExceptHandlerDefinitionKind,
ForStmtDefinitionKind, TargetKind, WithItemDefinitionKind,
};
use crate::semantic_index::expression::{Expression, ExpressionKind};
use crate::semantic_index::symbol::{
@@ -81,33 +81,33 @@ use crate::types::generics::GenericContext;
use crate::types::mro::MroErrorKind;
use crate::types::unpacker::{UnpackResult, Unpacker};
use crate::types::{
todo_type, CallDunderError, CallableSignature, CallableType, Class, ClassLiteralType,
ClassType, DataclassMetadata, DynamicType, FunctionDecorators, FunctionType, GenericAlias,
GenericClass, IntersectionBuilder, IntersectionType, KnownClass, KnownFunction,
KnownInstanceType, MemberLookupPolicy, MetaclassCandidate, NonGenericClass, Parameter,
ParameterForm, Parameters, Signature, Signatures, SliceLiteralType, StringLiteralType,
SubclassOfType, Symbol, SymbolAndQualifiers, Truthiness, TupleType, Type, TypeAliasType,
TypeAndQualifiers, TypeArrayDisplay, TypeQualifiers, TypeVarBoundOrConstraints,
binding_type, todo_type, CallDunderError, CallableSignature, CallableType, Class,
ClassLiteralType, ClassType, DataclassMetadata, DynamicType, FunctionDecorators,
FunctionLiteral, GenericAlias, GenericClass, IntersectionBuilder, IntersectionType, KnownClass,
KnownFunction, KnownInstanceType, MemberLookupPolicy, MetaclassCandidate, NonGenericClass,
Parameter, ParameterForm, Parameters, Signature, Signatures, SliceLiteralType,
StringLiteralType, SubclassOfType, Symbol, SymbolAndQualifiers, Truthiness, TupleType, Type,
TypeAliasType, TypeAndQualifiers, TypeArrayDisplay, TypeQualifiers, TypeVarBoundOrConstraints,
TypeVarInstance, UnionBuilder, UnionType,
};
use crate::unpack::{Unpack, UnpackPosition};
use crate::util::subscript::{PyIndex, PySlice};
use crate::Db;

use super::class_base::ClassBase;
use super::context::{InNoTypeCheck, InferContext};
use super::diagnostic::{
report_index_out_of_bounds, report_invalid_exception_caught, report_invalid_exception_cause,
report_invalid_exception_raised, report_invalid_type_checking_constant,
report_non_subscriptable, report_possibly_unresolved_reference, report_slice_step_size_zero,
report_unresolved_reference, INVALID_METACLASS, REDUNDANT_CAST, STATIC_ASSERT_ERROR,
SUBCLASS_OF_FINAL_CLASS, TYPE_ASSERTION_FAILURE,
report_unresolved_reference, INVALID_METACLASS, INVALID_PROTOCOL, REDUNDANT_CAST,
STATIC_ASSERT_ERROR, SUBCLASS_OF_FINAL_CLASS, TYPE_ASSERTION_FAILURE,
};
use super::slots::check_class_slots;
use super::string_annotation::{
parse_string_annotation, BYTE_STRING_TYPE_ANNOTATION, FSTRING_TYPE_ANNOTATION,
};
use super::{BoundSuperError, BoundSuperType};
use super::subclass_of::SubclassOfInner;
use super::{BoundSuperError, BoundSuperType, ClassBase};

/// Infer all types for a [`ScopeId`], including all definitions and expressions in that scope.
/// Use when checking a scope, or needing to provide a type for an arbitrary expression in the
@@ -306,7 +306,7 @@ pub(super) fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> U
let _span =
tracing::trace_span!("infer_unpack_types", range=?unpack.range(db), ?file).entered();

let mut unpacker = Unpacker::new(db, unpack.scope(db));
let mut unpacker = Unpacker::new(db, unpack.target_scope(db), unpack.value_scope(db));
unpacker.unpack(unpack.target(db), unpack.value(db));
unpacker.finish()
}
@@ -585,8 +585,8 @@ impl<'db> TypeInferenceBuilder<'db> {

/// Are we currently inferring types in file with deferred types?
/// This is true for stub files and files with `__future__.annotations`
fn are_all_types_deferred(&self) -> bool {
self.index.has_future_annotations() || self.file().is_stub(self.db().upcast())
fn defer_annotations(&self) -> bool {
self.index.has_future_annotations() || self.in_stub()
}

/// Are we currently inferring deferred types?
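
The two situations in which the renamed `defer_annotations()` helper returns `true` can be illustrated with a tiny Python module (an illustrative sketch, not taken from this diff); stub (`.pyi`) files behave the same way without the import:

    # example.py -- with the __future__ import, annotation evaluation is deferred,
    # so `Node` may be referenced in annotations before the class is defined.
    from __future__ import annotations

    def first_child(node: Node) -> Node | None:
        return node.children[0] if node.children else None

    class Node:
        children: list[Node]
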
@@ -763,24 +763,55 @@ impl<'db> TypeInferenceBuilder<'db> {
continue;
}

// (2) Check for classes that inherit from `@final` classes
let is_protocol = class.is_protocol(self.db());

// (2) Iterate through the class's explicit bases to check for various possible errors:
// - Check for inheritance from plain `Generic`,
// - Check for inheritance from a `@final` classes
// - If the class is a protocol class: check for inheritance from a non-protocol class
for (i, base_class) in class.explicit_bases(self.db()).iter().enumerate() {
// dynamic/unknown bases are never `@final`
let Some(base_class) = base_class.into_class_literal() else {
continue;
let base_class = match base_class {
Type::KnownInstance(KnownInstanceType::Generic) => {
// Unsubscripted `Generic` can appear in the MRO of many classes,
// but it is never valid as an explicit base class in user code.
self.context.report_lint_old(
&INVALID_BASE,
&class_node.bases()[i],
format_args!("Cannot inherit from plain `Generic`"),
);
continue;
}
Type::ClassLiteral(class) => class,
// dynamic/unknown bases are never `@final`
_ => continue,
};
if !base_class.is_final(self.db()) {
continue;

if is_protocol
&& !(base_class.is_protocol(self.db())
|| base_class.is_known(self.db(), KnownClass::Object))
{
self.context.report_lint_old(
&INVALID_PROTOCOL,
&class_node.bases()[i],
format_args!(
"Protocol class `{}` cannot inherit from non-protocol class `{}`",
class.name(self.db()),
base_class.name(self.db()),
),
);
}

if base_class.is_final(self.db()) {
self.context.report_lint_old(
&SUBCLASS_OF_FINAL_CLASS,
&class_node.bases()[i],
format_args!(
"Class `{}` cannot inherit from final class `{}`",
class.name(self.db()),
base_class.name(self.db()),
),
);
}
self.context.report_lint_old(
&SUBCLASS_OF_FINAL_CLASS,
&class_node.bases()[i],
format_args!(
"Class `{}` cannot inherit from final class `{}`",
class.name(self.db()),
base_class.name(self.db()),
),
);
}

// (3) Check that the class's MRO is resolvable
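
The diagnostics emitted by the loop above can be triggered by Python code along these lines (an illustrative sketch, not taken from the diff or its test suite):

    from typing import Generic, Protocol, final

    class A(Generic): ...                 # error: cannot inherit from plain `Generic`

    class NotAProtocol: ...

    class P(Protocol, NotAProtocol): ...  # error: protocol class with a non-protocol base

    @final
    class Sealed: ...

    class Sub(Sealed): ...                # error: cannot inherit from a `@final` class
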
@@ -933,13 +964,7 @@ impl<'db> TypeInferenceBuilder<'db> {
self.infer_named_expression_definition(named_expression.node(), definition);
}
DefinitionKind::Comprehension(comprehension) => {
self.infer_comprehension_definition(
comprehension.iterable(),
comprehension.target(),
comprehension.is_first(),
comprehension.is_async(),
definition,
);
self.infer_comprehension_definition(comprehension, definition);
}
DefinitionKind::VariadicPositionalParameter(parameter) => {
self.infer_variadic_positional_parameter_definition(parameter, definition);
@@ -1217,7 +1242,7 @@ impl<'db> TypeInferenceBuilder<'db> {

/// Returns `true` if the current scope is the function body scope of a method of a protocol
/// (that is, a class which directly inherits `typing.Protocol`.)
fn in_class_that_inherits_protocol_directly(&self) -> bool {
fn in_protocol_class(&self) -> bool {
let current_scope_id = self.scope().file_scope_id(self.db());
let current_scope = self.index.scope(current_scope_id);
let Some(parent_scope_id) = current_scope.parent() else {
@@ -1245,13 +1270,13 @@ impl<'db> TypeInferenceBuilder<'db> {
return false;
};

// TODO move this to `Class` once we add proper `Protocol` support
node_ref.bases().iter().any(|base| {
matches!(
self.file_expression_type(base),
Type::KnownInstance(KnownInstanceType::Protocol)
)
})
let class_definition = self.index.expect_single_definition(node_ref.node());

let Type::ClassLiteral(class) = binding_type(self.db(), class_definition) else {
return false;
};

class.is_protocol(self.db())
}

/// Returns `true` if the current scope is the function body scope of a function overload (that
@@ -1315,7 +1340,7 @@ impl<'db> TypeInferenceBuilder<'db> {

if (self.in_stub()
|| self.in_function_overload_or_abstractmethod()
|| self.in_class_that_inherits_protocol_directly())
|| self.in_protocol_class())
&& self.return_types_and_ranges.is_empty()
&& is_stub_suite(&function.body)
{
@@ -1467,7 +1492,7 @@ impl<'db> TypeInferenceBuilder<'db> {
// If there are type params, parameters and returns are evaluated in that scope, that is, in
// `infer_function_type_params`, rather than here.
if type_params.is_none() {
if self.are_all_types_deferred() {
if self.defer_annotations() {
self.types.deferred.insert(definition);
} else {
self.infer_optional_annotation_expression(
@@ -1478,10 +1503,6 @@ impl<'db> TypeInferenceBuilder<'db> {
}
}

let generic_context = type_params.as_ref().map(|type_params| {
GenericContext::from_type_params(self.db(), self.index, type_params)
});

let function_kind =
KnownFunction::try_from_definition_and_name(self.db(), definition, name);

@@ -1490,16 +1511,19 @@ impl<'db> TypeInferenceBuilder<'db> {
.node_scope(NodeWithScopeRef::Function(function))
.to_scope_id(self.db(), self.file());

let specialization = None;
let type_params_scope = type_params.as_ref().map(|_| {
self.index
.node_scope(NodeWithScopeRef::FunctionTypeParameters(function))
.to_scope_id(self.db(), self.file())
});

let mut inferred_ty = Type::FunctionLiteral(FunctionType::new(
let mut inferred_ty = Type::from(FunctionLiteral::new(
self.db(),
&name.id,
function_kind,
body_scope,
type_params_scope,
function_decorators,
generic_context,
specialization,
));

for (decorator_ty, decorator_node) in decorator_types_and_nodes.iter().rev() {
@@ -1618,7 +1642,7 @@ impl<'db> TypeInferenceBuilder<'db> {
}
} else if (self.in_stub()
|| self.in_function_overload_or_abstractmethod()
|| self.in_class_that_inherits_protocol_directly())
|| self.in_protocol_class())
&& default
.as_ref()
.is_some_and(|d| d.is_ellipsis_literal_expr())
@@ -1791,9 +1815,7 @@ impl<'db> TypeInferenceBuilder<'db> {
// TODO: Only defer the references that are actually string literals, instead of
// deferring the entire class definition if a string literal occurs anywhere in the
// base class list.
if self.are_all_types_deferred()
|| class_node.bases().iter().any(contains_string_literal)
{
if self.in_stub() || class_node.bases().iter().any(contains_string_literal) {
self.types.deferred.insert(definition);
} else {
for base in class_node.bases() {
@@ -1926,11 +1948,13 @@ impl<'db> TypeInferenceBuilder<'db> {
for item in items {
let target = item.optional_vars.as_deref();
if let Some(target) = target {
self.infer_target(target, &item.context_expr, |db, ctx_manager_ty| {
self.infer_target(target, &item.context_expr, |builder, context_expr| {
// TODO: `infer_with_statement_definition` reports a diagnostic if `ctx_manager_ty` isn't a context manager
// but only if the target is a name. We should report a diagnostic here if the target isn't a name:
// `with not_context_manager as a.x: ...
ctx_manager_ty.enter(db)
builder
.infer_standalone_expression(context_expr)
.enter(builder.db())
});
} else {
// Call into the context expression inference to validate that it evaluates
@@ -2336,7 +2360,9 @@ impl<'db> TypeInferenceBuilder<'db> {
} = assignment;

for target in targets {
self.infer_target(target, value, |_, ty| ty);
self.infer_target(target, value, |builder, value_expr| {
builder.infer_standalone_expression(value_expr)
});
}
}

@@ -2346,23 +2372,16 @@ impl<'db> TypeInferenceBuilder<'db> {
/// targets (unpacking). If `target` is an attribute expression, we check that the assignment
/// is valid. For 'target's that are definitions, this check happens elsewhere.
///
/// The `to_assigned_ty` function is used to convert the inferred type of the `value` expression
/// to the type that is eventually assigned to the `target`.
///
/// # Panics
///
/// If the `value` is not a standalone expression.
fn infer_target<F>(&mut self, target: &ast::Expr, value: &ast::Expr, to_assigned_ty: F)
/// The `infer_value_expr` function is used to infer the type of the `value` expression which
/// are not `Name` expressions. The returned type is the one that is eventually assigned to the
/// `target`.
fn infer_target<F>(&mut self, target: &ast::Expr, value: &ast::Expr, infer_value_expr: F)
where
F: Fn(&'db dyn Db, Type<'db>) -> Type<'db>,
F: Fn(&mut TypeInferenceBuilder<'db>, &ast::Expr) -> Type<'db>,
{
let assigned_ty = match target {
ast::Expr::Name(_) => None,
_ => {
let value_ty = self.infer_standalone_expression(value);

Some(to_assigned_ty(self.db(), value_ty))
}
_ => Some(infer_value_expr(self, value)),
};
self.infer_target_impl(target, assigned_ty);
}
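
As the updated doc comment describes, `infer_target` covers assignment targets that are not simple names. A short Python illustration (hypothetical example code, not from the diff) of the target shapes involved:

    x = make()                     # name target: handled as a definition elsewhere
    obj.attr = make()              # attribute target: assignment validity is checked here
    a, b = make_pair()             # unpacking target
    for obj.attr in iterable: ...  # for-loop target that is not a name
    with cm() as obj.attr: ...     # with-statement target that is not a name
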
@@ -2919,7 +2938,7 @@ impl<'db> TypeInferenceBuilder<'db> {

let mut declared_ty = self.infer_annotation_expression(
annotation,
DeferredExpressionState::from(self.are_all_types_deferred()),
DeferredExpressionState::from(self.defer_annotations()),
);

if target
@@ -3115,11 +3134,13 @@ impl<'db> TypeInferenceBuilder<'db> {
is_async: _,
} = for_statement;

self.infer_target(target, iter, |db, iter_ty| {
self.infer_target(target, iter, |builder, iter_expr| {
// TODO: `infer_for_statement_definition` reports a diagnostic if `iter_ty` isn't iterable
// but only if the target is a name. We should report a diagnostic here if the target isn't a name:
// `for a.x in not_iterable: ...
iter_ty.iterate(db)
builder
.infer_standalone_expression(iter_expr)
.iterate(builder.db())
});

self.infer_body(body);
@@ -3296,7 +3317,7 @@ impl<'db> TypeInferenceBuilder<'db> {
msg,
} = assert;

let test_ty = self.infer_expression(test);
let test_ty = self.infer_standalone_expression(test);

if let Err(err) = test_ty.try_bool(self.db()) {
err.report_diagnostic(&self.context, &**test);
@@ -3948,15 +3969,17 @@ impl<'db> TypeInferenceBuilder<'db> {
is_async: _,
} = comprehension;

if !is_first {
self.infer_standalone_expression(iter);
}
// TODO more complex assignment targets
if let ast::Expr::Name(name) = target {
self.infer_definition(name);
} else {
self.infer_expression(target);
}
self.infer_target(target, iter, |builder, iter_expr| {
// TODO: `infer_comprehension_definition` reports a diagnostic if `iter_ty` isn't iterable
// but only if the target is a name. We should report a diagnostic here if the target isn't a name:
// `[... for a.x in not_iterable]
if is_first {
infer_same_file_expression_type(builder.db(), builder.index.expression(iter_expr))
} else {
builder.infer_standalone_expression(iter_expr)
}
.iterate(builder.db())
});
for expr in ifs {
self.infer_expression(expr);
}
@@ -3964,12 +3987,12 @@ impl<'db> TypeInferenceBuilder<'db> {

fn infer_comprehension_definition(
&mut self,
iterable: &ast::Expr,
target: &ast::ExprName,
is_first: bool,
is_async: bool,
comprehension: &ComprehensionDefinitionKind<'db>,
definition: Definition<'db>,
) {
let iterable = comprehension.iterable();
let target = comprehension.target();

let expression = self.index.expression(iterable);
let result = infer_expression_types(self.db(), expression);

@@ -3979,7 +4002,7 @@ impl<'db> TypeInferenceBuilder<'db> {
// (2) We must *not* call `self.extend()` on the result of the type inference,
// because `ScopedExpressionId`s are only meaningful within their own scope, so
// we'd add types for random wrong expressions in the current scope
let iterable_type = if is_first {
let iterable_type = if comprehension.is_first() {
let lookup_scope = self
.index
.parent_scope_id(self.scope().file_scope_id(self.db()))
@@ -3991,14 +4014,26 @@ impl<'db> TypeInferenceBuilder<'db> {
result.expression_type(iterable.scoped_expression_id(self.db(), self.scope()))
};

let target_type = if is_async {
let target_type = if comprehension.is_async() {
// TODO: async iterables/iterators! -- Alex
todo_type!("async iterables/iterators")
} else {
iterable_type.try_iterate(self.db()).unwrap_or_else(|err| {
err.report_diagnostic(&self.context, iterable_type, iterable.into());
err.fallback_element_type(self.db())
})
match comprehension.target_kind() {
TargetKind::Sequence(unpack_position, unpack) => {
let unpacked = infer_unpack_types(self.db(), unpack);
if unpack_position == UnpackPosition::First {
self.context.extend(unpacked.diagnostics());
}
let target_ast_id = target.scoped_expression_id(self.db(), self.scope());
unpacked.expression_type(target_ast_id)
}
TargetKind::NameOrAttribute => {
iterable_type.try_iterate(self.db()).unwrap_or_else(|err| {
err.report_diagnostic(&self.context, iterable_type, iterable.into());
err.fallback_element_type(self.db())
})
}
}
};

self.types.expressions.insert(
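
A minimal Python illustration (not from the diff) of the two comprehension target kinds distinguished above -- a plain name target versus a sequence target that goes through the unpacker:

    pairs = [(1, "a"), (2, "b")]
    names = [name for _, name in pairs]   # sequence target: each element is unpacked
    items = [pair for pair in pairs]      # name target: the element type is used directly
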
@@ -4133,7 +4168,7 @@ impl<'db> TypeInferenceBuilder<'db> {
// TODO: Useful inference of a lambda's return type will require a different approach,
// which does the inference of the body expression based on arguments at each call site,
// rather than eagerly computing a return type without knowing the argument types.
Type::Callable(CallableType::new(
Type::Callable(CallableType::single(
self.db(),
Signature::new(parameters, Some(Type::unknown())),
))
@@ -4738,10 +4773,10 @@ impl<'db> TypeInferenceBuilder<'db> {
}
Type::SubclassOf(subclass_of @ SubclassOfType { .. }) => {
match subclass_of.subclass_of() {
ClassBase::Class(class) => {
SubclassOfInner::Class(class) => {
!class.instance_member(db, attr).symbol.is_unbound()
}
ClassBase::Dynamic(_) => unreachable!(
SubclassOfInner::Dynamic(_) => unreachable!(
"Attribute lookup on a dynamic `SubclassOf` type should always return a bound symbol"
),
}
@@ -4983,8 +5018,10 @@ impl<'db> TypeInferenceBuilder<'db> {
| (_, unknown @ Type::Dynamic(DynamicType::Unknown), _) => Some(unknown),
(todo @ Type::Dynamic(DynamicType::Todo(_)), _, _)
| (_, todo @ Type::Dynamic(DynamicType::Todo(_)), _) => Some(todo),
(todo @ Type::Dynamic(DynamicType::TodoProtocol), _, _)
| (_, todo @ Type::Dynamic(DynamicType::TodoProtocol), _) => Some(todo),
(todo @ Type::Dynamic(DynamicType::SubscriptedProtocol), _, _)
| (_, todo @ Type::Dynamic(DynamicType::SubscriptedProtocol), _) => Some(todo),
(todo @ Type::Dynamic(DynamicType::SubscriptedGeneric), _, _)
| (_, todo @ Type::Dynamic(DynamicType::SubscriptedGeneric), _) => Some(todo),
(Type::Never, _, _) | (_, Type::Never, _) => Some(Type::Never),

(Type::IntLiteral(n), Type::IntLiteral(m), ast::Operator::Add) => Some(
@@ -6046,12 +6083,7 @@ impl<'db> TypeInferenceBuilder<'db> {
// special cases, too.
let value_ty = self.infer_expression(value);
if let Type::ClassLiteral(ClassLiteralType::Generic(generic_class)) = value_ty {
return self.infer_explicit_class_specialization(
subscript,
value_ty,
generic_class,
slice,
);
return self.infer_explicit_class_specialization(subscript, value_ty, generic_class);
}

let slice_ty = self.infer_expression(slice);
@@ -6063,8 +6095,8 @@ impl<'db> TypeInferenceBuilder<'db> {
subscript: &ast::ExprSubscript,
value_ty: Type<'db>,
generic_class: GenericClass<'db>,
slice_node: &ast::Expr,
) -> Type<'db> {
let slice_node = subscript.slice.as_ref();
let mut call_argument_types = match slice_node {
ast::Expr::Tuple(tuple) => CallArgumentTypes::positional(
tuple.elts.iter().map(|elt| self.infer_type_expression(elt)),
@@ -6231,7 +6263,10 @@ impl<'db> TypeInferenceBuilder<'db> {
Type::IntLiteral(i64::from(bool)),
),
(Type::KnownInstance(KnownInstanceType::Protocol), _) => {
Type::Dynamic(DynamicType::TodoProtocol)
Type::Dynamic(DynamicType::SubscriptedProtocol)
}
(Type::KnownInstance(KnownInstanceType::Generic), _) => {
Type::Dynamic(DynamicType::SubscriptedGeneric)
}
(Type::KnownInstance(known_instance), _)
if known_instance.class().is_special_form() =>
@@ -6338,12 +6373,19 @@ impl<'db> TypeInferenceBuilder<'db> {
}
}

report_non_subscriptable(
&self.context,
value_node.into(),
value_ty,
"__class_getitem__",
);
// TODO: properly handle old-style generics; get rid of this temporary hack
if !value_ty.into_class_literal().is_some_and(|class| {
class
.iter_mro(self.db(), None)
.contains(&ClassBase::Dynamic(DynamicType::SubscriptedGeneric))
}) {
report_non_subscriptable(
&self.context,
value_node.into(),
value_ty,
"__class_getitem__",
);
}
} else {
report_non_subscriptable(
&self.context,
@@ -7185,9 +7227,24 @@ impl<'db> TypeInferenceBuilder<'db> {
self.infer_type_expression(slice);
value_ty
}
_ => {
Type::ClassLiteral(ClassLiteralType::Generic(generic_class)) => {
let specialized_class =
self.infer_explicit_class_specialization(subscript, value_ty, generic_class);
specialized_class
.in_type_expression(self.db())
.unwrap_or(Type::unknown())
}
Type::ClassLiteral(ClassLiteralType::NonGeneric(_)) => {
// TODO: Once we know that e.g. `list` is generic, emit a diagnostic if you try to
// specialize a non-generic class.
self.infer_type_expression(slice);
todo_type!("generics")
todo_type!("specialized non-generic class")
}
_ => {
// TODO: Emit a diagnostic once we've implemented all valid subscript type
// expressions.
self.infer_type_expression(slice);
todo_type!("unknown type subscript")
}
}
}
@@ -7305,7 +7362,7 @@ impl<'db> TypeInferenceBuilder<'db> {
let callable_type = if let (Some(parameters), Some(return_type), true) =
(parameters, return_type, correct_argument_number)
{
CallableType::new(db, Signature::new(parameters, Some(return_type)))
CallableType::single(db, Signature::new(parameters, Some(return_type)))
} else {
CallableType::unknown(db)
};
@@ -7386,8 +7443,22 @@ impl<'db> TypeInferenceBuilder<'db> {
let argument_type = self.infer_expression(arguments_slice);
let signatures = argument_type.signatures(db);

// TODO overloads
let Some(signature) = signatures.iter().flatten().next() else {
// SAFETY: This is enforced by the constructor methods on `Signatures` even in
// the case of a non-callable union.
let callable_signature = signatures
.iter()
.next()
.expect("`Signatures` should have at least one `CallableSignature`");

let mut signature_iter = callable_signature.iter().map(|signature| {
if argument_type.is_bound_method() {
signature.bind_self()
} else {
signature.clone()
}
});

let Some(signature) = signature_iter.next() else {
self.context.report_lint_old(
&INVALID_TYPE_FORM,
arguments_slice,
@@ -7400,13 +7471,10 @@ impl<'db> TypeInferenceBuilder<'db> {
return Type::unknown();
};

let revealed_signature = if argument_type.is_bound_method() {
signature.bind_self()
} else {
signature.clone()
};

Type::Callable(CallableType::new(db, revealed_signature))
Type::Callable(CallableType::from_overloads(
db,
std::iter::once(signature).chain(signature_iter),
))
}
},

@@ -7489,7 +7557,11 @@ impl<'db> TypeInferenceBuilder<'db> {
}
KnownInstanceType::Protocol => {
self.infer_type_expression(arguments_slice);
Type::Dynamic(DynamicType::TodoProtocol)
Type::Dynamic(DynamicType::SubscriptedProtocol)
}
KnownInstanceType::Generic => {
self.infer_type_expression(arguments_slice);
Type::Dynamic(DynamicType::SubscriptedGeneric)
}
KnownInstanceType::NoReturn
| KnownInstanceType::Never

@@ -50,7 +50,13 @@ pub(crate) fn infer_narrowing_constraint<'db>(
all_negative_narrowing_constraints_for_expression(db, expression)
}
}
PredicateNode::Pattern(pattern) => all_narrowing_constraints_for_pattern(db, pattern),
PredicateNode::Pattern(pattern) => {
if predicate.is_positive {
all_narrowing_constraints_for_pattern(db, pattern)
} else {
all_negative_narrowing_constraints_for_pattern(db, pattern)
}
}
PredicateNode::StarImportPlaceholder(_) => return None,
};
if let Some(constraints) = constraints {
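
A small Python sketch (illustrative only) of the negative pattern narrowing enabled by this hunk -- when a `case` pattern does not match, later code can rely on the negated constraint:

    def describe(subject: int | None) -> str:
        match subject:
            case None:
                return "nothing"
            case _:
                # `subject` no longer includes `None` here; the negative
                # constraint from the failed `case None` pattern applies.
                return str(subject + 1)
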
@@ -95,6 +101,15 @@ fn all_negative_narrowing_constraints_for_expression<'db>(
NarrowingConstraintsBuilder::new(db, PredicateNode::Expression(expression), false).finish()
}

#[allow(clippy::ref_option)]
#[salsa::tracked(return_ref)]
fn all_negative_narrowing_constraints_for_pattern<'db>(
db: &'db dyn Db,
pattern: PatternPredicate<'db>,
) -> Option<NarrowingConstraints<'db>> {
NarrowingConstraintsBuilder::new(db, PredicateNode::Pattern(pattern), false).finish()
}

#[allow(clippy::ref_option)]
fn constraints_for_expression_cycle_recover<'db>(
_db: &'db dyn Db,
@@ -217,6 +232,23 @@ fn merge_constraints_or<'db>(
}
}

fn negate_if<'db>(constraints: &mut NarrowingConstraints<'db>, db: &'db dyn Db, yes: bool) {
for (_symbol, ty) in constraints.iter_mut() {
*ty = ty.negate_if(db, yes);
}
}

fn expr_name(expr: &ast::Expr) -> Option<&ast::name::Name> {
match expr {
ast::Expr::Named(ast::ExprNamed { target, .. }) => match target.as_ref() {
ast::Expr::Name(ast::ExprName { id, .. }) => Some(id),
_ => None,
},
ast::Expr::Name(ast::ExprName { id, .. }) => Some(id),
_ => None,
}
}

struct NarrowingConstraintsBuilder<'db> {
db: &'db dyn Db,
predicate: PredicateNode<'db>,
@@ -237,7 +269,9 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
PredicateNode::Expression(expression) => {
self.evaluate_expression_predicate(expression, self.is_positive)
}
PredicateNode::Pattern(pattern) => self.evaluate_pattern_predicate(pattern),
PredicateNode::Pattern(pattern) => {
self.evaluate_pattern_predicate(pattern, self.is_positive)
}
PredicateNode::StarImportPlaceholder(_) => return None,
};
if let Some(mut constraints) = constraints {
@@ -275,7 +309,8 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
self.evaluate_expression_node_predicate(&unary_op.operand, expression, !is_positive)
}
ast::Expr::BoolOp(bool_op) => self.evaluate_bool_op(bool_op, expression, is_positive),
_ => None, // TODO other test expression kinds
ast::Expr::Named(expr_named) => self.evaluate_expr_named(expr_named, is_positive),
_ => None,
}
}

@@ -300,10 +335,14 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
fn evaluate_pattern_predicate(
&mut self,
pattern: PatternPredicate<'db>,
is_positive: bool,
) -> Option<NarrowingConstraints<'db>> {
let subject = pattern.subject(self.db);

self.evaluate_pattern_predicate_kind(pattern.kind(self.db), subject)
.map(|mut constraints| {
negate_if(&mut constraints, self.db, !is_positive);
constraints
})
}

fn symbols(&self) -> Arc<SymbolTable> {
@@ -343,6 +382,18 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
NarrowingConstraints::from_iter([(symbol, ty)])
}

fn evaluate_expr_named(
&mut self,
expr_named: &ast::ExprNamed,
is_positive: bool,
) -> Option<NarrowingConstraints<'db>> {
if let ast::Expr::Name(expr_name) = expr_named.target.as_ref() {
Some(self.evaluate_expr_name(expr_name, is_positive))
} else {
None
}
}

fn evaluate_expr_in(&mut self, lhs_ty: Type<'db>, rhs_ty: Type<'db>) -> Option<Type<'db>> {
if lhs_ty.is_single_valued(self.db) || lhs_ty.is_union_of_single_valued(self.db) {
match rhs_ty {
@@ -365,6 +416,44 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
}
}

fn evaluate_expr_compare_op(
&mut self,
lhs_ty: Type<'db>,
rhs_ty: Type<'db>,
op: ast::CmpOp,
) -> Option<Type<'db>> {
match op {
ast::CmpOp::IsNot => {
if rhs_ty.is_singleton(self.db) {
let ty = IntersectionBuilder::new(self.db)
.add_negative(rhs_ty)
.build();
Some(ty)
} else {
// Non-singletons cannot be safely narrowed using `is not`
None
}
}
ast::CmpOp::Is => Some(rhs_ty),
ast::CmpOp::NotEq => {
if rhs_ty.is_single_valued(self.db) {
let ty = IntersectionBuilder::new(self.db)
.add_negative(rhs_ty)
.build();
Some(ty)
} else {
None
}
}
ast::CmpOp::Eq if lhs_ty.is_literal_string() => Some(rhs_ty),
ast::CmpOp::In => self.evaluate_expr_in(lhs_ty, rhs_ty),
ast::CmpOp::NotIn => self
.evaluate_expr_in(lhs_ty, rhs_ty)
.map(|ty| ty.negate(self.db)),
_ => None,
}
}

fn evaluate_expr_compare(
&mut self,
expr_compare: &ast::ExprCompare,
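
The comparison-based narrowing rules collected in `evaluate_expr_compare_op`, together with the new walrus-expression support, correspond to Python code like this hypothetical sketch (not from the diff):

    def handle(value: int | None) -> None:
        if (result := value) is not None:   # a walrus target can now be narrowed
            print(result + 1)               # `result` is narrowed to `int`
        if value != None:                   # `!=` against a single-valued type also narrows
            print(value + 1)
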
@@ -372,7 +461,10 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
is_positive: bool,
) -> Option<NarrowingConstraints<'db>> {
fn is_narrowing_target_candidate(expr: &ast::Expr) -> bool {
matches!(expr, ast::Expr::Name(_) | ast::Expr::Call(_))
matches!(
expr,
ast::Expr::Name(_) | ast::Expr::Call(_) | ast::Expr::Named(_)
)
}

let ast::ExprCompare {
@@ -416,50 +508,13 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
last_rhs_ty = Some(rhs_ty);

match left {
ast::Expr::Name(ast::ExprName {
range: _,
id,
ctx: _,
}) => {
let symbol = self.expect_expr_name_symbol(id);
ast::Expr::Name(_) | ast::Expr::Named(_) => {
if let Some(id) = expr_name(left) {
let symbol = self.expect_expr_name_symbol(id);
let op = if is_positive { *op } else { op.negate() };

match if is_positive { *op } else { op.negate() } {
ast::CmpOp::IsNot => {
if rhs_ty.is_singleton(self.db) {
let ty = IntersectionBuilder::new(self.db)
.add_negative(rhs_ty)
.build();
constraints.insert(symbol, ty);
} else {
// Non-singletons cannot be safely narrowed using `is not`
}
}
ast::CmpOp::Is => {
constraints.insert(symbol, rhs_ty);
}
ast::CmpOp::NotEq => {
if rhs_ty.is_single_valued(self.db) {
let ty = IntersectionBuilder::new(self.db)
.add_negative(rhs_ty)
.build();
constraints.insert(symbol, ty);
}
}
ast::CmpOp::Eq if lhs_ty.is_literal_string() => {
constraints.insert(symbol, rhs_ty);
}
ast::CmpOp::In => {
if let Some(ty) = self.evaluate_expr_in(lhs_ty, rhs_ty) {
constraints.insert(symbol, ty);
}
}
ast::CmpOp::NotIn => {
if let Some(ty) = self.evaluate_expr_in(lhs_ty, rhs_ty) {
constraints.insert(symbol, ty.negate(self.db));
}
}
_ => {
// TODO other comparison types
if let Some(ty) = self.evaluate_expr_compare_op(lhs_ty, rhs_ty, op) {
constraints.insert(symbol, ty);
}
}
}
@@ -483,8 +538,12 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
}
};

let [ast::Expr::Name(ast::ExprName { id, .. })] = &**args else {
continue;
let id = match &**args {
[first] => match expr_name(first) {
Some(id) => id,
None => continue,
},
_ => continue,
};

let is_valid_constraint = if is_positive {
@@ -535,10 +594,12 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
Type::FunctionLiteral(function_type) if expr_call.arguments.keywords.is_empty() => {
let function = function_type.known(self.db)?.into_constraint_function()?;

let [ast::Expr::Name(ast::ExprName { id, .. }), class_info] =
&*expr_call.arguments.args
else {
return None;
let (id, class_info) = match &*expr_call.arguments.args {
[first, class_info] => match expr_name(first) {
Some(id) => (id, class_info),
None => return None,
},
_ => return None,
};

let symbol = self.expect_expr_name_symbol(id);

@@ -188,7 +188,7 @@ impl Ty {

create_bound_method(db, function, builtins_class)
}
Ty::Callable { params, returns } => Type::Callable(CallableType::new(
Ty::Callable { params, returns } => Type::Callable(CallableType::single(
db,
Signature::new(
params.into_parameters(db),

@@ -250,16 +250,6 @@ impl<'db> Signature<'db> {
}
}

/// Return a todo signature: (*args: Todo, **kwargs: Todo) -> Todo
#[allow(unused_variables)] // 'reason' only unused in debug builds
pub(crate) fn todo(reason: &'static str) -> Self {
Signature {
generic_context: None,
parameters: Parameters::todo(),
return_ty: Some(todo_type!(reason)),
}
}

/// Return a typed signature from a function definition.
pub(super) fn from_function(
db: &'db dyn Db,

@@ -286,15 +276,31 @@ impl<'db> Signature<'db> {
}
}

pub(crate) fn normalized(&self, db: &'db dyn Db) -> Self {
Self {
generic_context: self.generic_context,
parameters: self
.parameters
.iter()
.map(|param| param.normalized(db))
.collect(),
return_ty: self.return_ty.map(|return_ty| return_ty.normalized(db)),
}
}

pub(crate) fn apply_specialization(
&mut self,
db: &'db dyn Db,
specialization: Specialization<'db>,
) {
self.parameters.apply_specialization(db, specialization);
self.return_ty = self
.return_ty
.map(|ty| ty.apply_specialization(db, specialization));
self.return_ty
.as_mut()
.map(|ty| *ty = ty.apply_specialization(db, specialization));
}

pub(crate) fn set_generic_context(&mut self, generic_context: GenericContext<'db>) {
self.generic_context = Some(generic_context);
}

/// Return the parameters in this signature.
@@ -1163,9 +1169,9 @@ impl<'db> Parameter<'db> {
}

fn apply_specialization(&mut self, db: &'db dyn Db, specialization: Specialization<'db>) {
self.annotated_type = self
.annotated_type
.map(|ty| ty.apply_specialization(db, specialization));
self.annotated_type
.as_mut()
.map(|ty| *ty = ty.apply_specialization(db, specialization));
self.kind.apply_specialization(db, specialization);
}

@@ -1361,7 +1367,9 @@ impl<'db> ParameterKind<'db> {
Self::PositionalOnly { default_type, .. }
| Self::PositionalOrKeyword { default_type, .. }
| Self::KeywordOnly { default_type, .. } => {
*default_type = default_type.map(|ty| ty.apply_specialization(db, specialization));
default_type
.as_mut()
.map(|ty| *ty = ty.apply_specialization(db, specialization));
}
Self::Variadic { .. } | Self::KeywordVariadic { .. } => {}
}
@@ -1380,16 +1388,20 @@ mod tests {
use super::*;
use crate::db::tests::{setup_db, TestDb};
use crate::symbol::global_symbol;
use crate::types::{FunctionType, KnownClass};
use crate::types::{FunctionLiteral, FunctionSignature, FunctionType, KnownClass};
use ruff_db::system::DbWithWritableSystem as _;

#[track_caller]
fn get_function_f<'db>(db: &'db TestDb, file: &'static str) -> FunctionType<'db> {
fn get_function_f<'db>(db: &'db TestDb, file: &'static str) -> FunctionLiteral<'db> {
let module = ruff_db::files::system_path_to_file(db, file).unwrap();
global_symbol(db, module, "f")
let function = global_symbol(db, module, "f")
.symbol
.expect_type()
.expect_function_literal()
.expect_function_literal();
let FunctionType::FunctionLiteral(literal) = function else {
panic!("function should be a function literal");
};
literal
}

#[track_caller]
@@ -1627,6 +1639,6 @@ mod tests {
let expected_sig = func.internal_signature(&db);

// With no decorators, internal and external signature are the same
assert_eq!(func.signature(&db), &expected_sig);
assert_eq!(func.signature(&db), FunctionSignature::Single(expected_sig));
}
}

@@ -1,12 +1,12 @@
use crate::symbol::SymbolAndQualifiers;

use super::{ClassBase, Db, KnownClass, MemberLookupPolicy, Type};
use super::{ClassType, Db, DynamicType, KnownClass, MemberLookupPolicy, Type};

/// A type that represents `type[C]`, i.e. the class object `C` and class objects that are subclasses of `C`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
pub struct SubclassOfType<'db> {
// Keep this field private, so that the only way of constructing the struct is through the `from` method.
subclass_of: ClassBase<'db>,
subclass_of: SubclassOfInner<'db>,
}

impl<'db> SubclassOfType<'db> {
@@ -21,11 +21,11 @@ impl<'db> SubclassOfType<'db> {
///
/// The eager normalization here means that we do not need to worry elsewhere about distinguishing
/// between `@final` classes and other classes when dealing with [`Type::SubclassOf`] variants.
pub(crate) fn from(db: &'db dyn Db, subclass_of: impl Into<ClassBase<'db>>) -> Type<'db> {
pub(crate) fn from(db: &'db dyn Db, subclass_of: impl Into<SubclassOfInner<'db>>) -> Type<'db> {
let subclass_of = subclass_of.into();
match subclass_of {
ClassBase::Dynamic(_) => Type::SubclassOf(Self { subclass_of }),
ClassBase::Class(class) => {
SubclassOfInner::Dynamic(_) => Type::SubclassOf(Self { subclass_of }),
SubclassOfInner::Class(class) => {
if class.is_final(db) {
Type::from(class)
} else if class.is_object(db) {
@@ -40,19 +40,19 @@ impl<'db> SubclassOfType<'db> {
/// Return a [`Type`] instance representing the type `type[Unknown]`.
pub(crate) const fn subclass_of_unknown() -> Type<'db> {
Type::SubclassOf(SubclassOfType {
subclass_of: ClassBase::unknown(),
subclass_of: SubclassOfInner::unknown(),
})
}

/// Return a [`Type`] instance representing the type `type[Any]`.
pub(crate) const fn subclass_of_any() -> Type<'db> {
Type::SubclassOf(SubclassOfType {
subclass_of: ClassBase::any(),
subclass_of: SubclassOfInner::Dynamic(DynamicType::Any),
})
}

/// Return the inner [`ClassBase`] value wrapped by this `SubclassOfType`.
pub(crate) const fn subclass_of(self) -> ClassBase<'db> {
/// Return the inner [`SubclassOfInner`] value wrapped by this `SubclassOfType`.
pub(crate) const fn subclass_of(self) -> SubclassOfInner<'db> {
self.subclass_of
}

@@ -77,17 +77,17 @@ impl<'db> SubclassOfType<'db> {

/// Return `true` if `self` is a subtype of `other`.
///
/// This can only return `true` if `self.subclass_of` is a [`ClassBase::Class`] variant;
/// This can only return `true` if `self.subclass_of` is a [`SubclassOfInner::Class`] variant;
/// only fully static types participate in subtyping.
pub(crate) fn is_subtype_of(self, db: &'db dyn Db, other: SubclassOfType<'db>) -> bool {
match (self.subclass_of, other.subclass_of) {
// Non-fully-static types do not participate in subtyping
(ClassBase::Dynamic(_), _) | (_, ClassBase::Dynamic(_)) => false,
(SubclassOfInner::Dynamic(_), _) | (_, SubclassOfInner::Dynamic(_)) => false,

// For example, `type[bool]` describes all possible runtime subclasses of the class `bool`,
// and `type[int]` describes all possible runtime subclasses of the class `int`.
// The first set is a subset of the second set, because `bool` is itself a subclass of `int`.
(ClassBase::Class(self_class), ClassBase::Class(other_class)) => {
(SubclassOfInner::Class(self_class), SubclassOfInner::Class(other_class)) => {
// N.B. The subclass relation is fully static
self_class.is_subclass_of(db, other_class)
}
@@ -96,8 +96,73 @@ impl<'db> SubclassOfType<'db> {

pub(crate) fn to_instance(self) -> Type<'db> {
match self.subclass_of {
ClassBase::Class(class) => Type::instance(class),
ClassBase::Dynamic(dynamic_type) => Type::Dynamic(dynamic_type),
SubclassOfInner::Class(class) => Type::instance(class),
SubclassOfInner::Dynamic(dynamic_type) => Type::Dynamic(dynamic_type),
}
}
}

/// An enumeration of the different kinds of `type[]` types that a [`SubclassOfType`] can represent:
///
/// 1. A "subclass of a class": `type[C]` for any class object `C`
/// 2. A "subclass of a dynamic type": `type[Any]`, `type[Unknown]` and `type[@Todo]`
///
/// In the long term, we may want to implement <https://github.com/astral-sh/ruff/issues/15381>.
/// Doing this would allow us to get rid of this enum,
/// since `type[Any]` would be represented as `type & Any`
/// rather than using the [`Type::SubclassOf`] variant at all;
/// [`SubclassOfType`] would then be a simple wrapper around [`ClassType`].
///
/// Note that this enum is similar to the [`super::ClassBase`] enum,
/// but does not include the `ClassBase::Protocol` and `ClassBase::Generic` variants
/// (`type[Protocol]` and `type[Generic]` are not valid types).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
pub(crate) enum SubclassOfInner<'db> {
Class(ClassType<'db>),
Dynamic(DynamicType),
}

impl<'db> SubclassOfInner<'db> {
pub(crate) const fn unknown() -> Self {
Self::Dynamic(DynamicType::Unknown)
}

pub(crate) const fn is_dynamic(self) -> bool {
matches!(self, Self::Dynamic(_))
}

pub(crate) const fn into_class(self) -> Option<ClassType<'db>> {
match self {
Self::Class(class) => Some(class),
Self::Dynamic(_) => None,
}
}

pub(crate) fn try_from_type(db: &'db dyn Db, ty: Type<'db>) -> Option<Self> {
match ty {
Type::Dynamic(dynamic) => Some(Self::Dynamic(dynamic)),
Type::ClassLiteral(literal) => Some(if literal.is_known(db, KnownClass::Any) {
Self::Dynamic(DynamicType::Any)
} else {
Self::Class(literal.default_specialization(db))
}),
Type::GenericAlias(generic) => Some(Self::Class(ClassType::Generic(generic))),
_ => None,
}
}
}

impl<'db> From<ClassType<'db>> for SubclassOfInner<'db> {
fn from(value: ClassType<'db>) -> Self {
SubclassOfInner::Class(value)
}
}

impl<'db> From<SubclassOfInner<'db>> for Type<'db> {
fn from(value: SubclassOfInner<'db>) -> Self {
match value {
SubclassOfInner::Dynamic(dynamic) => Type::Dynamic(dynamic),
SubclassOfInner::Class(class) => class.into(),
}
}
}

@@ -3,8 +3,8 @@ use std::cmp::Ordering;
use crate::db::Db;

use super::{
class_base::ClassBase, DynamicType, InstanceType, KnownInstanceType, SuperOwnerKind, TodoType,
Type,
class_base::ClassBase, subclass_of::SubclassOfInner, DynamicType, InstanceType,
KnownInstanceType, SuperOwnerKind, TodoType, Type,
};

/// Return an [`Ordering`] that describes the canonical order in which two types should appear
@@ -109,10 +109,10 @@ pub(super) fn union_or_intersection_elements_ordering<'db>(

(Type::SubclassOf(left), Type::SubclassOf(right)) => {
match (left.subclass_of(), right.subclass_of()) {
(ClassBase::Class(left), ClassBase::Class(right)) => left.cmp(&right),
(ClassBase::Class(_), _) => Ordering::Less,
(_, ClassBase::Class(_)) => Ordering::Greater,
(ClassBase::Dynamic(left), ClassBase::Dynamic(right)) => {
(SubclassOfInner::Class(left), SubclassOfInner::Class(right)) => left.cmp(&right),
(SubclassOfInner::Class(_), _) => Ordering::Less,
(_, SubclassOfInner::Class(_)) => Ordering::Greater,
(SubclassOfInner::Dynamic(left), SubclassOfInner::Dynamic(right)) => {
dynamic_elements_ordering(left, right)
}
}
@@ -143,6 +143,10 @@ pub(super) fn union_or_intersection_elements_ordering<'db>(
(ClassBase::Class(left), ClassBase::Class(right)) => left.cmp(right),
(ClassBase::Class(_), _) => Ordering::Less,
(_, ClassBase::Class(_)) => Ordering::Greater,
(ClassBase::Protocol, _) => Ordering::Less,
(_, ClassBase::Protocol) => Ordering::Greater,
(ClassBase::Generic, _) => Ordering::Less,
(_, ClassBase::Generic) => Ordering::Greater,
(ClassBase::Dynamic(left), ClassBase::Dynamic(right)) => {
dynamic_elements_ordering(*left, *right)
}
@@ -230,6 +234,9 @@ pub(super) fn union_or_intersection_elements_ordering<'db>(
(KnownInstanceType::OrderedDict, _) => Ordering::Less,
(_, KnownInstanceType::OrderedDict) => Ordering::Greater,

(KnownInstanceType::Generic, _) => Ordering::Less,
(_, KnownInstanceType::Generic) => Ordering::Greater,

(KnownInstanceType::Protocol, _) => Ordering::Less,
(_, KnownInstanceType::Protocol) => Ordering::Greater,

@@ -364,7 +371,10 @@ fn dynamic_elements_ordering(left: DynamicType, right: DynamicType) -> Ordering
#[cfg(not(debug_assertions))]
(DynamicType::Todo(TodoType), DynamicType::Todo(TodoType)) => Ordering::Equal,

(DynamicType::TodoProtocol, _) => Ordering::Less,
(_, DynamicType::TodoProtocol) => Ordering::Greater,
(DynamicType::SubscriptedGeneric, _) => Ordering::Less,
(_, DynamicType::SubscriptedGeneric) => Ordering::Greater,

(DynamicType::SubscriptedProtocol, _) => Ordering::Less,
(_, DynamicType::SubscriptedProtocol) => Ordering::Greater,
}
}
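
Relating the `SubclassOfInner` split introduced in the `subclass_of.rs` hunk above back to Python: `type[C]` wraps a concrete class, while `type[Any]` or `type[Unknown]` wraps a dynamic type, as in this illustrative snippet (not from the diff):

    from typing import Any

    def instantiate(concrete: type[int], anything: type[Any]) -> None:
        reveal_type(concrete())   # `int`: the SubclassOfInner::Class case
        reveal_type(anything())   # `Any`: the SubclassOfInner::Dynamic case
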