Compare commits
22 Commits
0.9.0
...
micha/dont
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fcde25773d | ||
|
|
c39ca8fe6d | ||
|
|
2d82445794 | ||
|
|
398f2e8b0c | ||
|
|
232fbc1300 | ||
|
|
c82932e580 | ||
|
|
12f86f39a4 | ||
|
|
2b28d566a4 | ||
|
|
adca7bd95c | ||
|
|
6b98a26452 | ||
|
|
c87463842a | ||
|
|
c364b586f9 | ||
|
|
73d424ee5e | ||
|
|
6e9ff445fd | ||
|
|
f2c3ddc5ea | ||
|
|
b861551b6a | ||
|
|
443bf38565 | ||
|
|
23ad319b55 | ||
|
|
3d9433ca66 | ||
|
|
baf068361a | ||
|
|
b33cf5baba | ||
|
|
b0905c4b04 |
24
CHANGELOG.md
24
CHANGELOG.md
@@ -1,5 +1,29 @@
|
||||
# Changelog
|
||||
|
||||
## 0.9.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`pycodestyle`\] Run `too-many-newlines-at-end-of-file` on each cell in notebooks (`W391`) ([#15308](https://github.com/astral-sh/ruff/pull/15308))
|
||||
- \[`ruff`\] Omit diagnostic for shadowed private function parameters in `used-dummy-variable` (`RUF052`) ([#15376](https://github.com/astral-sh/ruff/pull/15376))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-bugbear`\] Improve `assert-raises-exception` message (`B017`) ([#15389](https://github.com/astral-sh/ruff/pull/15389))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Preserve trailing end-of line comments for the last string literal in implicitly concatenated strings ([#15378](https://github.com/astral-sh/ruff/pull/15378))
|
||||
|
||||
### Server
|
||||
|
||||
- Fix a bug where the server and client notebooks were out of sync after reordering cells ([#15398](https://github.com/astral-sh/ruff/pull/15398))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-pie`\] Correctly remove wrapping parentheses (`PIE800`) ([#15394](https://github.com/astral-sh/ruff/pull/15394))
|
||||
- \[`pyupgrade`\] Handle comments and multiline expressions correctly (`UP037`) ([#15337](https://github.com/astral-sh/ruff/pull/15337))
|
||||
|
||||
## 0.9.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.9.0) for a migration guide and overview of the changes!
|
||||
|
||||
6
Cargo.lock
generated
6
Cargo.lock
generated
@@ -2497,7 +2497,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2716,7 +2716,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.2",
|
||||
@@ -3033,7 +3033,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
|
||||
@@ -211,6 +211,9 @@ redundant_clone = "warn"
|
||||
debug_assert_with_mut_call = "warn"
|
||||
unused_peekable = "warn"
|
||||
|
||||
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
|
||||
large_stack_arrays = "allow"
|
||||
|
||||
[profile.release]
|
||||
# Note that we set these explicitly, and these values
|
||||
# were chosen based on a trade-off between compile times
|
||||
|
||||
@@ -149,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.9.0/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.9.0/install.ps1 | iex"
|
||||
curl -LsSf https://astral.sh/ruff/0.9.1/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.9.1/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -183,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.9.0
|
||||
rev: v0.9.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
|
||||
@@ -0,0 +1,142 @@
|
||||
# `assert_type`
|
||||
|
||||
## Basic
|
||||
|
||||
```py
|
||||
from typing_extensions import assert_type
|
||||
|
||||
def _(x: int):
|
||||
assert_type(x, int) # fine
|
||||
assert_type(x, str) # error: [type-assertion-failure]
|
||||
```
|
||||
|
||||
## Narrowing
|
||||
|
||||
The asserted type is checked against the inferred type, not the declared type.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
```py
|
||||
from typing_extensions import assert_type
|
||||
|
||||
def _(x: int | str):
|
||||
if isinstance(x, int):
|
||||
reveal_type(x) # revealed: int
|
||||
assert_type(x, int) # fine
|
||||
```
|
||||
|
||||
## Equivalence
|
||||
|
||||
The actual type must match the asserted type precisely.
|
||||
|
||||
```py
|
||||
from typing import Any, Type, Union
|
||||
from typing_extensions import assert_type
|
||||
|
||||
# Subtype does not count
|
||||
def _(x: bool):
|
||||
assert_type(x, int) # error: [type-assertion-failure]
|
||||
|
||||
def _(a: type[int], b: type[Any]):
|
||||
assert_type(a, type[Any]) # error: [type-assertion-failure]
|
||||
assert_type(b, type[int]) # error: [type-assertion-failure]
|
||||
|
||||
# The expression constructing the type is not taken into account
|
||||
def _(a: type[int]):
|
||||
assert_type(a, Type[int]) # fine
|
||||
```
|
||||
|
||||
## Gradual types
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
from typing_extensions import Literal, assert_type
|
||||
|
||||
from knot_extensions import Unknown
|
||||
|
||||
# Any and Unknown are considered equivalent
|
||||
def _(a: Unknown, b: Any):
|
||||
reveal_type(a) # revealed: Unknown
|
||||
assert_type(a, Any) # fine
|
||||
|
||||
reveal_type(b) # revealed: Any
|
||||
assert_type(b, Unknown) # fine
|
||||
|
||||
def _(a: type[Unknown], b: type[Any]):
|
||||
reveal_type(a) # revealed: type[Unknown]
|
||||
assert_type(a, type[Any]) # fine
|
||||
|
||||
reveal_type(b) # revealed: type[Any]
|
||||
assert_type(b, type[Unknown]) # fine
|
||||
```
|
||||
|
||||
## Tuples
|
||||
|
||||
Tuple types with the same elements are the same.
|
||||
|
||||
```py
|
||||
from typing_extensions import assert_type
|
||||
|
||||
from knot_extensions import Unknown
|
||||
|
||||
def _(a: tuple[int, str, bytes]):
|
||||
assert_type(a, tuple[int, str, bytes]) # fine
|
||||
|
||||
assert_type(a, tuple[int, str]) # error: [type-assertion-failure]
|
||||
assert_type(a, tuple[int, str, bytes, None]) # error: [type-assertion-failure]
|
||||
assert_type(a, tuple[int, bytes, str]) # error: [type-assertion-failure]
|
||||
|
||||
def _(a: tuple[Any, ...], b: tuple[Unknown, ...]):
|
||||
assert_type(a, tuple[Any, ...]) # fine
|
||||
assert_type(a, tuple[Unknown, ...]) # fine
|
||||
|
||||
assert_type(b, tuple[Unknown, ...]) # fine
|
||||
assert_type(b, tuple[Any, ...]) # fine
|
||||
```
|
||||
|
||||
## Unions
|
||||
|
||||
Unions with the same elements are the same, regardless of order.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.10"
|
||||
```
|
||||
|
||||
```py
|
||||
from typing_extensions import assert_type
|
||||
|
||||
def _(a: str | int):
|
||||
assert_type(a, str | int) # fine
|
||||
|
||||
# TODO: Order-independent union handling in type equivalence
|
||||
assert_type(a, int | str) # error: [type-assertion-failure]
|
||||
```
|
||||
|
||||
## Intersections
|
||||
|
||||
Intersections are the same when their positive and negative parts are respectively the same,
|
||||
regardless of order.
|
||||
|
||||
```py
|
||||
from typing_extensions import assert_type
|
||||
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class A: ...
|
||||
class B: ...
|
||||
class C: ...
|
||||
class D: ...
|
||||
|
||||
def _(a: A):
|
||||
if isinstance(a, B) and not isinstance(a, C) and not isinstance(a, D):
|
||||
reveal_type(a) # revealed: A & B & ~C & ~D
|
||||
|
||||
assert_type(a, Intersection[A, B, Not[C], Not[D]]) # fine
|
||||
|
||||
# TODO: Order-independent intersection handling in type equivalence
|
||||
assert_type(a, Intersection[B, A, Not[D], Not[C]]) # error: [type-assertion-failure]
|
||||
```
|
||||
@@ -0,0 +1,748 @@
|
||||
# Intersection types
|
||||
|
||||
## Introduction
|
||||
|
||||
This test suite covers certain properties of intersection types and makes sure that we can apply
|
||||
various simplification strategies. We use `Intersection` (`&`) and `Not` (`~`) to construct
|
||||
intersection types (note that we display negative contributions at the end; the order does not
|
||||
matter):
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
|
||||
def _(
|
||||
i1: Intersection[P, Q],
|
||||
i2: Intersection[P, Not[Q]],
|
||||
i3: Intersection[Not[P], Q],
|
||||
i4: Intersection[Not[P], Not[Q]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: P & Q
|
||||
reveal_type(i2) # revealed: P & ~Q
|
||||
reveal_type(i3) # revealed: Q & ~P
|
||||
reveal_type(i4) # revealed: ~P & ~Q
|
||||
```
|
||||
|
||||
## Notation
|
||||
|
||||
Throughout this document, we use the following types as representatives for certain equivalence
|
||||
classes.
|
||||
|
||||
### Non-disjoint types
|
||||
|
||||
We use `P`, `Q`, `R`, … to denote types that are non-disjoint:
|
||||
|
||||
```py
|
||||
from knot_extensions import static_assert, is_disjoint_from
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
class R: ...
|
||||
|
||||
static_assert(not is_disjoint_from(P, Q))
|
||||
static_assert(not is_disjoint_from(P, R))
|
||||
static_assert(not is_disjoint_from(Q, R))
|
||||
```
|
||||
|
||||
Although `P` is not a subtype of `Q` and `Q` is not a subtype of `P`, the two types are not disjoint
|
||||
because it would be possible to create a class `S` that inherits from both `P` and `Q` using
|
||||
multiple inheritance. An instance of `S` would be a member of the `P` type _and_ the `Q` type.
|
||||
|
||||
### Disjoint types
|
||||
|
||||
We use `Literal[1]`, `Literal[2]`, … as examples of pairwise-disjoint types, and `int` as a joint
|
||||
supertype of these:
|
||||
|
||||
```py
|
||||
from knot_extensions import static_assert, is_disjoint_from, is_subtype_of
|
||||
from typing import Literal
|
||||
|
||||
static_assert(is_disjoint_from(Literal[1], Literal[2]))
|
||||
static_assert(is_disjoint_from(Literal[1], Literal[3]))
|
||||
static_assert(is_disjoint_from(Literal[2], Literal[3]))
|
||||
|
||||
static_assert(is_subtype_of(Literal[1], int))
|
||||
static_assert(is_subtype_of(Literal[2], int))
|
||||
static_assert(is_subtype_of(Literal[3], int))
|
||||
```
|
||||
|
||||
### Subtypes
|
||||
|
||||
Finally, we use `A <: B <: C` and `A <: B1`, `A <: B2` to denote hierarchies of (proper) subtypes:
|
||||
|
||||
```py
|
||||
from knot_extensions import static_assert, is_subtype_of, is_disjoint_from
|
||||
|
||||
class A: ...
|
||||
class B(A): ...
|
||||
class C(B): ...
|
||||
|
||||
static_assert(is_subtype_of(B, A))
|
||||
static_assert(is_subtype_of(C, B))
|
||||
static_assert(is_subtype_of(C, A))
|
||||
|
||||
static_assert(not is_subtype_of(A, B))
|
||||
static_assert(not is_subtype_of(B, C))
|
||||
static_assert(not is_subtype_of(A, C))
|
||||
|
||||
class B1(A): ...
|
||||
class B2(A): ...
|
||||
|
||||
static_assert(is_subtype_of(B1, A))
|
||||
static_assert(is_subtype_of(B2, A))
|
||||
|
||||
static_assert(not is_subtype_of(A, B1))
|
||||
static_assert(not is_subtype_of(A, B2))
|
||||
|
||||
static_assert(not is_subtype_of(B1, B2))
|
||||
static_assert(not is_subtype_of(B2, B1))
|
||||
```
|
||||
|
||||
## Structural properties
|
||||
|
||||
This section covers structural properties of intersection types and documents some decisions on how
|
||||
to represent mixtures of intersections and unions.
|
||||
|
||||
### Single-element intersections
|
||||
|
||||
If we have an intersection with a single element, we can simplify to that element. Similarly, we
|
||||
show an intersection with a single negative contribution as just the negation of that element.
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class P: ...
|
||||
|
||||
def _(
|
||||
i1: Intersection[P],
|
||||
i2: Intersection[Not[P]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: P
|
||||
reveal_type(i2) # revealed: ~P
|
||||
```
|
||||
|
||||
### Flattening of nested intersections
|
||||
|
||||
We eagerly flatten nested intersection types.
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
class R: ...
|
||||
class S: ...
|
||||
|
||||
def positive_contributions(
|
||||
i1: Intersection[P, Intersection[Q, R]],
|
||||
i2: Intersection[Intersection[P, Q], R],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: P & Q & R
|
||||
reveal_type(i2) # revealed: P & Q & R
|
||||
|
||||
def negative_contributions(
|
||||
i1: Intersection[Not[P], Intersection[Not[Q], Not[R]]],
|
||||
i2: Intersection[Intersection[Not[P], Not[Q]], Not[R]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: ~P & ~Q & ~R
|
||||
reveal_type(i2) # revealed: ~P & ~Q & ~R
|
||||
|
||||
def mixed(
|
||||
i1: Intersection[P, Intersection[Not[Q], R]],
|
||||
i2: Intersection[Intersection[P, Not[Q]], R],
|
||||
i3: Intersection[Not[P], Intersection[Q, Not[R]]],
|
||||
i4: Intersection[Intersection[Q, Not[R]], Not[P]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: P & R & ~Q
|
||||
reveal_type(i2) # revealed: P & R & ~Q
|
||||
reveal_type(i3) # revealed: Q & ~P & ~R
|
||||
reveal_type(i4) # revealed: Q & ~R & ~P
|
||||
|
||||
def multiple(
|
||||
i1: Intersection[Intersection[P, Q], Intersection[R, S]],
|
||||
):
|
||||
reveal_type(i1) # revealed: P & Q & R & S
|
||||
|
||||
def nested(
|
||||
i1: Intersection[Intersection[Intersection[P, Q], R], S],
|
||||
i2: Intersection[P, Intersection[Q, Intersection[R, S]]],
|
||||
):
|
||||
reveal_type(i1) # revealed: P & Q & R & S
|
||||
reveal_type(i2) # revealed: P & Q & R & S
|
||||
```
|
||||
|
||||
### Union of intersections
|
||||
|
||||
We always normalize our representation to a _union of intersections_, so when we add a _union to an
|
||||
intersection_, we distribute the union over the respective elements:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
class R: ...
|
||||
class S: ...
|
||||
|
||||
def _(
|
||||
i1: Intersection[P, Q | R | S],
|
||||
i2: Intersection[P | Q | R, S],
|
||||
i3: Intersection[P | Q, R | S],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: P & Q | P & R | P & S
|
||||
reveal_type(i2) # revealed: P & S | Q & S | R & S
|
||||
reveal_type(i3) # revealed: P & R | Q & R | P & S | Q & S
|
||||
|
||||
def simplifications_for_same_elements(
|
||||
i1: Intersection[P, Q | P],
|
||||
i2: Intersection[Q, P | Q],
|
||||
i3: Intersection[P | Q, Q | R],
|
||||
i4: Intersection[P | Q, P | Q],
|
||||
i5: Intersection[P | Q, Q | P],
|
||||
) -> None:
|
||||
# P & (Q | P)
|
||||
# = P & Q | P & P
|
||||
# = P & Q | P
|
||||
# = P
|
||||
# (because P is a supertype of P & Q)
|
||||
reveal_type(i1) # revealed: P
|
||||
# similar here:
|
||||
reveal_type(i2) # revealed: Q
|
||||
|
||||
# (P | Q) & (Q | R)
|
||||
# = P & Q | P & R | Q & Q | Q & R
|
||||
# = P & Q | P & R | Q | Q & R
|
||||
# = Q | P & R
|
||||
# (again, because Q is a supertype of P & Q and of Q & R)
|
||||
reveal_type(i3) # revealed: Q | P & R
|
||||
|
||||
# (P | Q) & (P | Q)
|
||||
# = P & P | P & Q | Q & P | Q & Q
|
||||
# = P | P & Q | Q
|
||||
# = P | Q
|
||||
reveal_type(i4) # revealed: P | Q
|
||||
```
|
||||
|
||||
### Negation distributes over union
|
||||
|
||||
Distribution also applies to a negation operation. This is a manifestation of one of
|
||||
[De Morgan's laws], namely `~(P | Q) = ~P & ~Q`:
|
||||
|
||||
```py
|
||||
from knot_extensions import Not
|
||||
from typing import Literal
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
class R: ...
|
||||
|
||||
def _(i1: Not[P | Q], i2: Not[P | Q | R]) -> None:
|
||||
reveal_type(i1) # revealed: ~P & ~Q
|
||||
reveal_type(i2) # revealed: ~P & ~Q & ~R
|
||||
|
||||
def example_literals(i: Not[Literal[1, 2]]) -> None:
|
||||
reveal_type(i) # revealed: ~Literal[1] & ~Literal[2]
|
||||
```
|
||||
|
||||
### Negation of intersections
|
||||
|
||||
The other of [De Morgan's laws], `~(P & Q) = ~P | ~Q`, also holds:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
class R: ...
|
||||
|
||||
def _(
|
||||
i1: Not[Intersection[P, Q]],
|
||||
i2: Not[Intersection[P, Q, R]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: ~P | ~Q
|
||||
reveal_type(i2) # revealed: ~P | ~Q | ~R
|
||||
```
|
||||
|
||||
### `Never` is dual to `object`
|
||||
|
||||
`Never` represents the empty set of values, while `object` represents the set of all values, so
|
||||
`~Never` is equivalent to `object`, and `~object` is equivalent to `Never`. This is a manifestation
|
||||
of the [complement laws] of set theory.
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
from typing_extensions import Never
|
||||
|
||||
def _(
|
||||
not_never: Not[Never],
|
||||
not_object: Not[object],
|
||||
) -> None:
|
||||
reveal_type(not_never) # revealed: object
|
||||
reveal_type(not_object) # revealed: Never
|
||||
```
|
||||
|
||||
### Intersection of a type and its negation
|
||||
|
||||
Continuing with more [complement laws], if we see both `P` and `~P` in an intersection, we can
|
||||
simplify to `Never`, even in the presence of other types:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
from typing import Any
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
|
||||
def _(
|
||||
i1: Intersection[P, Not[P]],
|
||||
i2: Intersection[Not[P], P],
|
||||
i3: Intersection[P, Q, Not[P]],
|
||||
i4: Intersection[Not[P], Q, P],
|
||||
i5: Intersection[P, Any, Not[P]],
|
||||
i6: Intersection[Not[P], Any, P],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Never
|
||||
reveal_type(i2) # revealed: Never
|
||||
reveal_type(i3) # revealed: Never
|
||||
reveal_type(i4) # revealed: Never
|
||||
reveal_type(i5) # revealed: Never
|
||||
reveal_type(i6) # revealed: Never
|
||||
```
|
||||
|
||||
### Union of a type and its negation
|
||||
|
||||
Similarly, if we have both `P` and `~P` in a _union_, we can simplify that to `object`.
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
|
||||
def _(
|
||||
i1: P | Not[P],
|
||||
i2: Not[P] | P,
|
||||
i3: P | Q | Not[P],
|
||||
i4: Not[P] | Q | P,
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: object
|
||||
reveal_type(i2) # revealed: object
|
||||
reveal_type(i3) # revealed: object
|
||||
reveal_type(i4) # revealed: object
|
||||
```
|
||||
|
||||
### Negation is an involution
|
||||
|
||||
The final of the [complement laws] states that negating twice is equivalent to not negating at all:
|
||||
|
||||
```py
|
||||
from knot_extensions import Not
|
||||
|
||||
class P: ...
|
||||
|
||||
def _(
|
||||
i1: Not[P],
|
||||
i2: Not[Not[P]],
|
||||
i3: Not[Not[Not[P]]],
|
||||
i4: Not[Not[Not[Not[P]]]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: ~P
|
||||
reveal_type(i2) # revealed: P
|
||||
reveal_type(i3) # revealed: ~P
|
||||
reveal_type(i4) # revealed: P
|
||||
```
|
||||
|
||||
## Simplification strategies
|
||||
|
||||
In this section, we present various simplification strategies that go beyond the structure of the
|
||||
representation.
|
||||
|
||||
### `Never` in intersections
|
||||
|
||||
If we intersect with `Never`, we can simplify the whole intersection to `Never`, even if there are
|
||||
dynamic types involved:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
from typing_extensions import Never, Any
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
|
||||
def _(
|
||||
i1: Intersection[P, Never],
|
||||
i2: Intersection[Never, P],
|
||||
i3: Intersection[Any, Never],
|
||||
i4: Intersection[Never, Not[Any]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Never
|
||||
reveal_type(i2) # revealed: Never
|
||||
reveal_type(i3) # revealed: Never
|
||||
reveal_type(i4) # revealed: Never
|
||||
```
|
||||
|
||||
### Simplifications using disjointness
|
||||
|
||||
#### Positive contributions
|
||||
|
||||
If we intersect disjoint types, we can simplify to `Never`, even in the presence of other types:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
from typing import Literal, Any
|
||||
|
||||
class P: ...
|
||||
|
||||
def _(
|
||||
i01: Intersection[Literal[1], Literal[2]],
|
||||
i02: Intersection[Literal[2], Literal[1]],
|
||||
i03: Intersection[Literal[1], Literal[2], P],
|
||||
i04: Intersection[Literal[1], P, Literal[2]],
|
||||
i05: Intersection[P, Literal[1], Literal[2]],
|
||||
i06: Intersection[Literal[1], Literal[2], Any],
|
||||
i07: Intersection[Literal[1], Any, Literal[2]],
|
||||
i08: Intersection[Any, Literal[1], Literal[2]],
|
||||
) -> None:
|
||||
reveal_type(i01) # revealed: Never
|
||||
reveal_type(i02) # revealed: Never
|
||||
reveal_type(i03) # revealed: Never
|
||||
reveal_type(i04) # revealed: Never
|
||||
reveal_type(i05) # revealed: Never
|
||||
reveal_type(i06) # revealed: Never
|
||||
reveal_type(i07) # revealed: Never
|
||||
reveal_type(i08) # revealed: Never
|
||||
|
||||
# `bool` is final and cannot be subclassed, so `type[bool]` is equivalent to `Literal[bool]`, which
|
||||
# is disjoint from `type[str]`:
|
||||
def example_type_bool_type_str(
|
||||
i: Intersection[type[bool], type[str]],
|
||||
) -> None:
|
||||
reveal_type(i) # revealed: Never
|
||||
```
|
||||
|
||||
#### Positive and negative contributions
|
||||
|
||||
If we intersect a type `X` with the negation `~Y` of a disjoint type `Y`, we can remove the negative
|
||||
contribution `~Y`, as `~Y` must fully contain the positive contribution `X` as a subtype:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
from typing import Literal
|
||||
|
||||
def _(
|
||||
i1: Intersection[Literal[1], Not[Literal[2]]],
|
||||
i2: Intersection[Not[Literal[2]], Literal[1]],
|
||||
i3: Intersection[Literal[1], Not[Literal[2]], int],
|
||||
i4: Intersection[Literal[1], int, Not[Literal[2]]],
|
||||
i5: Intersection[int, Literal[1], Not[Literal[2]]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Literal[1]
|
||||
reveal_type(i2) # revealed: Literal[1]
|
||||
reveal_type(i3) # revealed: Literal[1]
|
||||
reveal_type(i4) # revealed: Literal[1]
|
||||
reveal_type(i5) # revealed: Literal[1]
|
||||
|
||||
# None is disjoint from int, so this simplification applies here
|
||||
def example_none(
|
||||
i1: Intersection[int, Not[None]],
|
||||
i2: Intersection[Not[None], int],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: int
|
||||
reveal_type(i2) # revealed: int
|
||||
```
|
||||
|
||||
### Simplifications using subtype relationships
|
||||
|
||||
#### Positive type and positive subtype
|
||||
|
||||
Subtypes are contained within their supertypes, so we can simplify intersections by removing
|
||||
superfluous supertypes:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
from typing import Any
|
||||
|
||||
class A: ...
|
||||
class B(A): ...
|
||||
class C(B): ...
|
||||
class Unrelated: ...
|
||||
|
||||
def _(
|
||||
i01: Intersection[A, B],
|
||||
i02: Intersection[B, A],
|
||||
i03: Intersection[A, C],
|
||||
i04: Intersection[C, A],
|
||||
i05: Intersection[B, C],
|
||||
i06: Intersection[C, B],
|
||||
i07: Intersection[A, B, C],
|
||||
i08: Intersection[C, B, A],
|
||||
i09: Intersection[B, C, A],
|
||||
i10: Intersection[A, B, Unrelated],
|
||||
i11: Intersection[B, A, Unrelated],
|
||||
i12: Intersection[B, Unrelated, A],
|
||||
i13: Intersection[A, Unrelated, B],
|
||||
i14: Intersection[Unrelated, A, B],
|
||||
i15: Intersection[Unrelated, B, A],
|
||||
i16: Intersection[A, B, Any],
|
||||
i17: Intersection[B, A, Any],
|
||||
i18: Intersection[B, Any, A],
|
||||
i19: Intersection[A, Any, B],
|
||||
i20: Intersection[Any, A, B],
|
||||
i21: Intersection[Any, B, A],
|
||||
) -> None:
|
||||
reveal_type(i01) # revealed: B
|
||||
reveal_type(i02) # revealed: B
|
||||
reveal_type(i03) # revealed: C
|
||||
reveal_type(i04) # revealed: C
|
||||
reveal_type(i05) # revealed: C
|
||||
reveal_type(i06) # revealed: C
|
||||
reveal_type(i07) # revealed: C
|
||||
reveal_type(i08) # revealed: C
|
||||
reveal_type(i09) # revealed: C
|
||||
reveal_type(i10) # revealed: B & Unrelated
|
||||
reveal_type(i11) # revealed: B & Unrelated
|
||||
reveal_type(i12) # revealed: B & Unrelated
|
||||
reveal_type(i13) # revealed: Unrelated & B
|
||||
reveal_type(i14) # revealed: Unrelated & B
|
||||
reveal_type(i15) # revealed: Unrelated & B
|
||||
reveal_type(i16) # revealed: B & Any
|
||||
reveal_type(i17) # revealed: B & Any
|
||||
reveal_type(i18) # revealed: B & Any
|
||||
reveal_type(i19) # revealed: Any & B
|
||||
reveal_type(i20) # revealed: Any & B
|
||||
reveal_type(i21) # revealed: Any & B
|
||||
```
|
||||
|
||||
#### Negative type and negative subtype
|
||||
|
||||
For negative contributions, this property is reversed. Here we can remove superfluous _subtypes_:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
from typing import Any
|
||||
|
||||
class A: ...
|
||||
class B(A): ...
|
||||
class C(B): ...
|
||||
class Unrelated: ...
|
||||
|
||||
def _(
|
||||
i01: Intersection[Not[B], Not[A]],
|
||||
i02: Intersection[Not[A], Not[B]],
|
||||
i03: Intersection[Not[A], Not[C]],
|
||||
i04: Intersection[Not[C], Not[A]],
|
||||
i05: Intersection[Not[B], Not[C]],
|
||||
i06: Intersection[Not[C], Not[B]],
|
||||
i07: Intersection[Not[A], Not[B], Not[C]],
|
||||
i08: Intersection[Not[C], Not[B], Not[A]],
|
||||
i09: Intersection[Not[B], Not[C], Not[A]],
|
||||
i10: Intersection[Not[B], Not[A], Unrelated],
|
||||
i11: Intersection[Not[A], Not[B], Unrelated],
|
||||
i12: Intersection[Not[A], Unrelated, Not[B]],
|
||||
i13: Intersection[Not[B], Unrelated, Not[A]],
|
||||
i14: Intersection[Unrelated, Not[A], Not[B]],
|
||||
i15: Intersection[Unrelated, Not[B], Not[A]],
|
||||
i16: Intersection[Not[B], Not[A], Any],
|
||||
i17: Intersection[Not[A], Not[B], Any],
|
||||
i18: Intersection[Not[A], Any, Not[B]],
|
||||
i19: Intersection[Not[B], Any, Not[A]],
|
||||
i20: Intersection[Any, Not[A], Not[B]],
|
||||
i21: Intersection[Any, Not[B], Not[A]],
|
||||
) -> None:
|
||||
reveal_type(i01) # revealed: ~A
|
||||
reveal_type(i02) # revealed: ~A
|
||||
reveal_type(i03) # revealed: ~A
|
||||
reveal_type(i04) # revealed: ~A
|
||||
reveal_type(i05) # revealed: ~B
|
||||
reveal_type(i06) # revealed: ~B
|
||||
reveal_type(i07) # revealed: ~A
|
||||
reveal_type(i08) # revealed: ~A
|
||||
reveal_type(i09) # revealed: ~A
|
||||
reveal_type(i10) # revealed: Unrelated & ~A
|
||||
reveal_type(i11) # revealed: Unrelated & ~A
|
||||
reveal_type(i12) # revealed: Unrelated & ~A
|
||||
reveal_type(i13) # revealed: Unrelated & ~A
|
||||
reveal_type(i14) # revealed: Unrelated & ~A
|
||||
reveal_type(i15) # revealed: Unrelated & ~A
|
||||
reveal_type(i16) # revealed: Any & ~A
|
||||
reveal_type(i17) # revealed: Any & ~A
|
||||
reveal_type(i18) # revealed: Any & ~A
|
||||
reveal_type(i19) # revealed: Any & ~A
|
||||
reveal_type(i20) # revealed: Any & ~A
|
||||
reveal_type(i21) # revealed: Any & ~A
|
||||
```
|
||||
|
||||
#### Negative type and multiple negative subtypes
|
||||
|
||||
If there are multiple negative subtypes, all of them can be removed:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class A: ...
|
||||
class B1(A): ...
|
||||
class B2(A): ...
|
||||
|
||||
def _(
|
||||
i1: Intersection[Not[A], Not[B1], Not[B2]],
|
||||
i2: Intersection[Not[A], Not[B2], Not[B1]],
|
||||
i3: Intersection[Not[B1], Not[A], Not[B2]],
|
||||
i4: Intersection[Not[B1], Not[B2], Not[A]],
|
||||
i5: Intersection[Not[B2], Not[A], Not[B1]],
|
||||
i6: Intersection[Not[B2], Not[B1], Not[A]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: ~A
|
||||
reveal_type(i2) # revealed: ~A
|
||||
reveal_type(i3) # revealed: ~A
|
||||
reveal_type(i4) # revealed: ~A
|
||||
reveal_type(i5) # revealed: ~A
|
||||
reveal_type(i6) # revealed: ~A
|
||||
```
|
||||
|
||||
#### Negative type and positive subtype
|
||||
|
||||
When `A` is a supertype of `B`, its negation `~A` is disjoint from `B`, so we can simplify the
|
||||
intersection to `Never`:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
from typing import Any
|
||||
|
||||
class A: ...
|
||||
class B(A): ...
|
||||
class C(B): ...
|
||||
class Unrelated: ...
|
||||
|
||||
def _(
|
||||
i1: Intersection[Not[A], B],
|
||||
i2: Intersection[B, Not[A]],
|
||||
i3: Intersection[Not[A], C],
|
||||
i4: Intersection[C, Not[A]],
|
||||
i5: Intersection[Unrelated, Not[A], B],
|
||||
i6: Intersection[B, Not[A], Not[Unrelated]],
|
||||
i7: Intersection[Any, Not[A], B],
|
||||
i8: Intersection[B, Not[A], Not[Any]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Never
|
||||
reveal_type(i2) # revealed: Never
|
||||
reveal_type(i3) # revealed: Never
|
||||
reveal_type(i4) # revealed: Never
|
||||
reveal_type(i5) # revealed: Never
|
||||
reveal_type(i6) # revealed: Never
|
||||
reveal_type(i7) # revealed: Never
|
||||
reveal_type(i8) # revealed: Never
|
||||
```
|
||||
|
||||
## Non fully-static types
|
||||
|
||||
### Negation of dynamic types
|
||||
|
||||
`Any` represents the dynamic type, an unknown set of runtime values. The negation of that, `~Any`,
|
||||
is still an unknown set of runtime values, so `~Any` is equivalent to `Any`. We therefore eagerly
|
||||
simplify `~Any` to `Any` in intersections. The same applies to `Unknown`.
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not, Unknown
|
||||
from typing_extensions import Any, Never
|
||||
|
||||
class P: ...
|
||||
|
||||
def any(
|
||||
i1: Not[Any],
|
||||
i2: Intersection[P, Not[Any]],
|
||||
i3: Intersection[Never, Not[Any]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Any
|
||||
reveal_type(i2) # revealed: P & Any
|
||||
reveal_type(i3) # revealed: Never
|
||||
|
||||
def unknown(
|
||||
i1: Not[Unknown],
|
||||
i2: Intersection[P, Not[Unknown]],
|
||||
i3: Intersection[Never, Not[Unknown]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Unknown
|
||||
reveal_type(i2) # revealed: P & Unknown
|
||||
reveal_type(i3) # revealed: Never
|
||||
```
|
||||
|
||||
### Collapsing of multiple `Any`/`Unknown` contributions
|
||||
|
||||
The intersection of an unknown set of runtime values with (another) unknown set of runtime values is
|
||||
still an unknown set of runtime values:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not, Unknown
|
||||
from typing_extensions import Any
|
||||
|
||||
class P: ...
|
||||
|
||||
def any(
|
||||
i1: Intersection[Any, Any],
|
||||
i2: Intersection[P, Any, Any],
|
||||
i3: Intersection[Any, P, Any],
|
||||
i4: Intersection[Any, Any, P],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Any
|
||||
reveal_type(i2) # revealed: P & Any
|
||||
reveal_type(i3) # revealed: Any & P
|
||||
reveal_type(i4) # revealed: Any & P
|
||||
|
||||
def unknown(
|
||||
i1: Intersection[Unknown, Unknown],
|
||||
i2: Intersection[P, Unknown, Unknown],
|
||||
i3: Intersection[Unknown, P, Unknown],
|
||||
i4: Intersection[Unknown, Unknown, P],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Unknown
|
||||
reveal_type(i2) # revealed: P & Unknown
|
||||
reveal_type(i3) # revealed: Unknown & P
|
||||
reveal_type(i4) # revealed: Unknown & P
|
||||
```
|
||||
|
||||
### No self-cancellation
|
||||
|
||||
Dynamic types do not cancel each other out. Intersecting an unknown set of values with the negation
|
||||
of another unknown set of values is not necessarily empty, so we keep the positive contribution:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not, Unknown
|
||||
|
||||
def any(
|
||||
i1: Intersection[Any, Not[Any]],
|
||||
i2: Intersection[Not[Any], Any],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Any
|
||||
reveal_type(i2) # revealed: Any
|
||||
|
||||
def unknown(
|
||||
i1: Intersection[Unknown, Not[Unknown]],
|
||||
i2: Intersection[Not[Unknown], Unknown],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Unknown
|
||||
reveal_type(i2) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Mixed dynamic types
|
||||
|
||||
We currently do not simplify mixed dynamic types, but might consider doing so in the future:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not, Unknown
|
||||
|
||||
def mixed(
|
||||
i1: Intersection[Any, Unknown],
|
||||
i2: Intersection[Any, Not[Unknown]],
|
||||
i3: Intersection[Not[Any], Unknown],
|
||||
i4: Intersection[Not[Any], Not[Unknown]],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: Any & Unknown
|
||||
reveal_type(i2) # revealed: Any & Unknown
|
||||
reveal_type(i3) # revealed: Any & Unknown
|
||||
reveal_type(i4) # revealed: Any & Unknown
|
||||
```
|
||||
|
||||
[complement laws]: https://en.wikipedia.org/wiki/Complement_(set_theory)
|
||||
[de morgan's laws]: https://en.wikipedia.org/wiki/De_Morgan%27s_laws
|
||||
@@ -21,22 +21,22 @@ else:
|
||||
if x and not x:
|
||||
reveal_type(x) # revealed: Never
|
||||
else:
|
||||
reveal_type(x) # revealed: Literal[0, "", b"", -1, "foo", b"bar"] | bool | None | tuple[()]
|
||||
reveal_type(x) # revealed: Literal[0, -1, "", "foo", b"", b"bar"] | bool | None | tuple[()]
|
||||
|
||||
if not (x and not x):
|
||||
reveal_type(x) # revealed: Literal[0, "", b"", -1, "foo", b"bar"] | bool | None | tuple[()]
|
||||
reveal_type(x) # revealed: Literal[0, -1, "", "foo", b"", b"bar"] | bool | None | tuple[()]
|
||||
else:
|
||||
reveal_type(x) # revealed: Never
|
||||
|
||||
if x or not x:
|
||||
reveal_type(x) # revealed: Literal[-1, "foo", b"bar", 0, "", b""] | bool | None | tuple[()]
|
||||
reveal_type(x) # revealed: Literal[0, -1, "", "foo", b"", b"bar"] | bool | None | tuple[()]
|
||||
else:
|
||||
reveal_type(x) # revealed: Never
|
||||
|
||||
if not (x or not x):
|
||||
reveal_type(x) # revealed: Never
|
||||
else:
|
||||
reveal_type(x) # revealed: Literal[-1, "foo", b"bar", 0, "", b""] | bool | None | tuple[()]
|
||||
reveal_type(x) # revealed: Literal[0, -1, "", "foo", b"", b"bar"] | bool | None | tuple[()]
|
||||
|
||||
if (isinstance(x, int) or isinstance(x, str)) and x:
|
||||
reveal_type(x) # revealed: Literal[-1, True, "foo"]
|
||||
@@ -87,10 +87,10 @@ def f(x: A | B):
|
||||
if x and not x:
|
||||
reveal_type(x) # revealed: A & ~AlwaysFalsy & ~AlwaysTruthy | B & ~AlwaysFalsy & ~AlwaysTruthy
|
||||
else:
|
||||
reveal_type(x) # revealed: A & ~AlwaysTruthy | B & ~AlwaysTruthy | A & ~AlwaysFalsy | B & ~AlwaysFalsy
|
||||
reveal_type(x) # revealed: A | B
|
||||
|
||||
if x or not x:
|
||||
reveal_type(x) # revealed: A & ~AlwaysFalsy | B & ~AlwaysFalsy | A & ~AlwaysTruthy | B & ~AlwaysTruthy
|
||||
reveal_type(x) # revealed: A | B
|
||||
else:
|
||||
reveal_type(x) # revealed: A & ~AlwaysTruthy & ~AlwaysFalsy | B & ~AlwaysTruthy & ~AlwaysFalsy
|
||||
```
|
||||
@@ -214,10 +214,9 @@ if x and not x:
|
||||
reveal_type(y) # revealed: A & ~AlwaysFalsy & ~AlwaysTruthy
|
||||
else:
|
||||
y = x
|
||||
reveal_type(y) # revealed: A & ~AlwaysTruthy | A & ~AlwaysFalsy
|
||||
reveal_type(y) # revealed: A
|
||||
|
||||
# TODO: It should be A. We should improve UnionBuilder or IntersectionBuilder. (issue #15023)
|
||||
reveal_type(y) # revealed: A & ~AlwaysTruthy | A & ~AlwaysFalsy
|
||||
reveal_type(y) # revealed: A
|
||||
```
|
||||
|
||||
## Truthiness of classes
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
# Tuples containing `Never`
|
||||
|
||||
A heterogeneous `tuple[…]` type that contains `Never` as a type argument simplifies to `Never`. One
|
||||
way to think about this is the following: in order to construct a tuple, you need to have an object
|
||||
of every element type. But since there is no object of type `Never`, you cannot construct the tuple.
|
||||
Such a tuple type is therefore uninhabited and equivalent to `Never`.
|
||||
|
||||
In the language of algebraic data types, a tuple type is a product type and `Never` acts like the
|
||||
zero element in multiplication, similar to how a Cartesian product with the empty set is the empty
|
||||
set.
|
||||
|
||||
```py
|
||||
from knot_extensions import static_assert, is_equivalent_to
|
||||
from typing_extensions import Never, NoReturn
|
||||
|
||||
static_assert(is_equivalent_to(Never, tuple[Never]))
|
||||
static_assert(is_equivalent_to(Never, tuple[Never, int]))
|
||||
static_assert(is_equivalent_to(Never, tuple[int, Never]))
|
||||
static_assert(is_equivalent_to(Never, tuple[int, Never, str]))
|
||||
static_assert(is_equivalent_to(Never, tuple[int, tuple[str, Never]]))
|
||||
static_assert(is_equivalent_to(Never, tuple[tuple[str, Never], int]))
|
||||
|
||||
# The empty tuple is *not* equivalent to Never!
|
||||
static_assert(not is_equivalent_to(Never, tuple[()]))
|
||||
|
||||
# NoReturn is just a different spelling of Never, so the same is true for NoReturn
|
||||
static_assert(is_equivalent_to(NoReturn, tuple[NoReturn]))
|
||||
static_assert(is_equivalent_to(NoReturn, tuple[NoReturn, int]))
|
||||
static_assert(is_equivalent_to(NoReturn, tuple[int, NoReturn]))
|
||||
static_assert(is_equivalent_to(NoReturn, tuple[int, NoReturn, str]))
|
||||
static_assert(is_equivalent_to(NoReturn, tuple[int, tuple[str, NoReturn]]))
|
||||
static_assert(is_equivalent_to(NoReturn, tuple[tuple[str, NoReturn], int]))
|
||||
```
|
||||
143
crates/red_knot_python_semantic/resources/mdtest/union_types.md
Normal file
143
crates/red_knot_python_semantic/resources/mdtest/union_types.md
Normal file
@@ -0,0 +1,143 @@
|
||||
# Union types
|
||||
|
||||
This test suite covers certain basic properties and simplification strategies for union types.
|
||||
|
||||
## Basic unions
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def _(u1: int | str, u2: Literal[0] | Literal[1]) -> None:
|
||||
reveal_type(u1) # revealed: int | str
|
||||
reveal_type(u2) # revealed: Literal[0, 1]
|
||||
```
|
||||
|
||||
## Duplicate elements are collapsed
|
||||
|
||||
```py
|
||||
def _(u1: int | int | str, u2: int | str | int) -> None:
|
||||
reveal_type(u1) # revealed: int | str
|
||||
reveal_type(u2) # revealed: int | str
|
||||
```
|
||||
|
||||
## `Never` is removed
|
||||
|
||||
`Never` is an empty set, a type with no inhabitants. Its presence in a union is always redundant,
|
||||
and so we eagerly simplify it away. `NoReturn` is equivalent to `Never`.
|
||||
|
||||
```py
|
||||
from typing_extensions import Never, NoReturn
|
||||
|
||||
def never(u1: int | Never, u2: int | Never | str) -> None:
|
||||
reveal_type(u1) # revealed: int
|
||||
reveal_type(u2) # revealed: int | str
|
||||
|
||||
def noreturn(u1: int | NoReturn, u2: int | NoReturn | str) -> None:
|
||||
reveal_type(u1) # revealed: int
|
||||
reveal_type(u2) # revealed: int | str
|
||||
```
|
||||
|
||||
## Flattening of nested unions
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def _(
|
||||
u1: (int | str) | bytes,
|
||||
u2: int | (str | bytes),
|
||||
u3: int | (str | (bytes | complex)),
|
||||
) -> None:
|
||||
reveal_type(u1) # revealed: int | str | bytes
|
||||
reveal_type(u2) # revealed: int | str | bytes
|
||||
reveal_type(u3) # revealed: int | str | bytes | complex
|
||||
```
|
||||
|
||||
## Simplification using subtyping
|
||||
|
||||
The type `S | T` can be simplified to `T` if `S` is a subtype of `T`:
|
||||
|
||||
```py
|
||||
from typing_extensions import Literal, LiteralString
|
||||
|
||||
def _(
|
||||
u1: str | LiteralString, u2: LiteralString | str, u3: Literal["a"] | str | LiteralString, u4: str | bytes | LiteralString
|
||||
) -> None:
|
||||
reveal_type(u1) # revealed: str
|
||||
reveal_type(u2) # revealed: str
|
||||
reveal_type(u3) # revealed: str
|
||||
reveal_type(u4) # revealed: str | bytes
|
||||
```
|
||||
|
||||
## Boolean literals
|
||||
|
||||
The union `Literal[True] | Literal[False]` is exactly equivalent to `bool`:
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def _(
|
||||
u1: Literal[True, False],
|
||||
u2: bool | Literal[True],
|
||||
u3: Literal[True] | bool,
|
||||
u4: Literal[True] | Literal[True, 17],
|
||||
u5: Literal[True, False, True, 17],
|
||||
) -> None:
|
||||
reveal_type(u1) # revealed: bool
|
||||
reveal_type(u2) # revealed: bool
|
||||
reveal_type(u3) # revealed: bool
|
||||
reveal_type(u4) # revealed: Literal[True, 17]
|
||||
reveal_type(u5) # revealed: bool | Literal[17]
|
||||
```
|
||||
|
||||
## Do not erase `Unknown`
|
||||
|
||||
```py
|
||||
from knot_extensions import Unknown
|
||||
|
||||
def _(u1: Unknown | str, u2: str | Unknown) -> None:
|
||||
reveal_type(u1) # revealed: Unknown | str
|
||||
reveal_type(u2) # revealed: str | Unknown
|
||||
```
|
||||
|
||||
## Collapse multiple `Unknown`s
|
||||
|
||||
Since `Unknown` is a gradual type, it is not a subtype of anything, but multiple `Unknown`s in a
|
||||
union are still redundant:
|
||||
|
||||
```py
|
||||
from knot_extensions import Unknown
|
||||
|
||||
def _(u1: Unknown | Unknown | str, u2: Unknown | str | Unknown, u3: str | Unknown | Unknown) -> None:
|
||||
reveal_type(u1) # revealed: Unknown | str
|
||||
reveal_type(u2) # revealed: Unknown | str
|
||||
reveal_type(u3) # revealed: str | Unknown
|
||||
```
|
||||
|
||||
## Subsume multiple elements
|
||||
|
||||
Simplifications still apply when `Unknown` is present.
|
||||
|
||||
```py
|
||||
from knot_extensions import Unknown
|
||||
|
||||
def _(u1: str | Unknown | int | object):
|
||||
reveal_type(u1) # revealed: Unknown | object
|
||||
```
|
||||
|
||||
## Union of intersections
|
||||
|
||||
We can simplify unions of intersections:
|
||||
|
||||
```py
|
||||
from knot_extensions import Intersection, Not
|
||||
|
||||
class P: ...
|
||||
class Q: ...
|
||||
|
||||
def _(
|
||||
i1: Intersection[P, Q] | Intersection[P, Q],
|
||||
i2: Intersection[P, Q] | Intersection[Q, P],
|
||||
) -> None:
|
||||
reveal_type(i1) # revealed: P & Q
|
||||
reveal_type(i2) # revealed: P & Q
|
||||
```
|
||||
@@ -310,7 +310,7 @@ impl SymbolState {
|
||||
visibility_constraints: VisibilityConstraintPerBinding::default(),
|
||||
},
|
||||
declarations: SymbolDeclarations {
|
||||
live_declarations: self.declarations.live_declarations.clone(),
|
||||
live_declarations: Declarations::default(),
|
||||
visibility_constraints: VisibilityConstraintPerDeclaration::default(),
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use std::hash::Hash;
|
||||
use std::iter;
|
||||
|
||||
use context::InferContext;
|
||||
use diagnostic::{report_not_iterable, report_not_iterable_possibly_unbound};
|
||||
@@ -475,20 +476,24 @@ impl std::fmt::Display for TodoType {
|
||||
#[cfg(debug_assertions)]
|
||||
macro_rules! todo_type {
|
||||
() => {
|
||||
Type::Todo(crate::types::TodoType::FileAndLine(file!(), line!()))
|
||||
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo(
|
||||
$crate::types::TodoType::FileAndLine(file!(), line!()),
|
||||
))
|
||||
};
|
||||
($message:literal) => {
|
||||
Type::Todo(crate::types::TodoType::Message($message))
|
||||
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo(
|
||||
$crate::types::TodoType::Message($message),
|
||||
))
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
macro_rules! todo_type {
|
||||
() => {
|
||||
Type::Todo(crate::types::TodoType)
|
||||
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo(crate::types::TodoType))
|
||||
};
|
||||
($message:literal) => {
|
||||
Type::Todo(crate::types::TodoType)
|
||||
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo(crate::types::TodoType))
|
||||
};
|
||||
}
|
||||
|
||||
@@ -498,21 +503,7 @@ pub(crate) use todo_type;
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, salsa::Update)]
|
||||
pub enum Type<'db> {
|
||||
/// The dynamic type: a statically unknown set of values
|
||||
Any,
|
||||
/// Unknown type (either no annotation, or some kind of type error).
|
||||
/// Equivalent to Any, or possibly to object in strict mode
|
||||
Unknown,
|
||||
/// Temporary type for symbols that can't be inferred yet because of missing implementations.
|
||||
/// Behaves equivalently to `Any`.
|
||||
///
|
||||
/// This variant should eventually be removed once red-knot is spec-compliant.
|
||||
///
|
||||
/// General rule: `Todo` should only propagate when the presence of the input `Todo` caused the
|
||||
/// output to be unknown. An output should only be `Todo` if fixing all `Todo` inputs to be not
|
||||
/// `Todo` would change the output type.
|
||||
///
|
||||
/// This variant should be created with the `todo_type!` macro.
|
||||
Todo(TodoType),
|
||||
Dynamic(DynamicType),
|
||||
/// The empty set of values
|
||||
Never,
|
||||
/// A specific function object
|
||||
@@ -556,8 +547,16 @@ pub enum Type<'db> {
|
||||
}
|
||||
|
||||
impl<'db> Type<'db> {
|
||||
pub const fn any() -> Self {
|
||||
Self::Dynamic(DynamicType::Any)
|
||||
}
|
||||
|
||||
pub const fn unknown() -> Self {
|
||||
Self::Dynamic(DynamicType::Unknown)
|
||||
}
|
||||
|
||||
pub const fn is_unknown(&self) -> bool {
|
||||
matches!(self, Type::Unknown)
|
||||
matches!(self, Type::Dynamic(DynamicType::Unknown))
|
||||
}
|
||||
|
||||
pub const fn is_never(&self) -> bool {
|
||||
@@ -565,7 +564,7 @@ impl<'db> Type<'db> {
|
||||
}
|
||||
|
||||
pub const fn is_todo(&self) -> bool {
|
||||
matches!(self, Type::Todo(_))
|
||||
matches!(self, Type::Dynamic(DynamicType::Todo(_)))
|
||||
}
|
||||
|
||||
pub const fn class_literal(class: Class<'db>) -> Self {
|
||||
@@ -757,8 +756,7 @@ impl<'db> Type<'db> {
|
||||
|
||||
match (self, target) {
|
||||
// We should have handled these immediately above.
|
||||
(Type::Any | Type::Unknown | Type::Todo(_), _)
|
||||
| (_, Type::Any | Type::Unknown | Type::Todo(_)) => {
|
||||
(Type::Dynamic(_), _) | (_, Type::Dynamic(_)) => {
|
||||
unreachable!("Non-fully-static types do not participate in subtyping!")
|
||||
}
|
||||
|
||||
@@ -975,8 +973,8 @@ impl<'db> Type<'db> {
|
||||
(Type::Never, _) => true,
|
||||
|
||||
// The dynamic type is assignable-to and assignable-from any type.
|
||||
(Type::Unknown | Type::Any | Type::Todo(_), _) => true,
|
||||
(_, Type::Unknown | Type::Any | Type::Todo(_)) => true,
|
||||
(Type::Dynamic(_), _) => true,
|
||||
(_, Type::Dynamic(_)) => true,
|
||||
|
||||
// All types are assignable to `object`.
|
||||
// TODO this special case might be removable once the below cases are comprehensive
|
||||
@@ -1085,12 +1083,100 @@ impl<'db> Type<'db> {
|
||||
pub(crate) fn is_same_gradual_form(self, other: Type<'db>) -> bool {
|
||||
matches!(
|
||||
(self, other),
|
||||
(Type::Unknown, Type::Unknown)
|
||||
| (Type::Any, Type::Any)
|
||||
| (Type::Todo(_), Type::Todo(_))
|
||||
(
|
||||
Type::Dynamic(DynamicType::Any),
|
||||
Type::Dynamic(DynamicType::Any)
|
||||
) | (
|
||||
Type::Dynamic(DynamicType::Unknown),
|
||||
Type::Dynamic(DynamicType::Unknown)
|
||||
) | (
|
||||
Type::Dynamic(DynamicType::Todo(_)),
|
||||
Type::Dynamic(DynamicType::Todo(_))
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns true if this type and `other` are gradual equivalent.
|
||||
///
|
||||
/// > Two gradual types `A` and `B` are equivalent
|
||||
/// > (that is, the same gradual type, not merely consistent with one another)
|
||||
/// > if and only if all materializations of `A` are also materializations of `B`,
|
||||
/// > and all materializations of `B` are also materializations of `A`.
|
||||
/// >
|
||||
/// > — [Summary of type relations]
|
||||
///
|
||||
/// This powers the `assert_type()` directive.
|
||||
///
|
||||
/// [Summary of type relations]: https://typing.readthedocs.io/en/latest/spec/concepts.html#summary-of-type-relations
|
||||
pub(crate) fn is_gradual_equivalent_to(self, db: &'db dyn Db, other: Type<'db>) -> bool {
|
||||
let equivalent =
|
||||
|(first, second): (&Type<'db>, &Type<'db>)| first.is_gradual_equivalent_to(db, *second);
|
||||
|
||||
match (self, other) {
|
||||
(_, _) if self == other => true,
|
||||
|
||||
(Type::Dynamic(_), Type::Dynamic(_)) => true,
|
||||
|
||||
(Type::Instance(instance), Type::SubclassOf(subclass))
|
||||
| (Type::SubclassOf(subclass), Type::Instance(instance)) => {
|
||||
let Some(base_class) = subclass.subclass_of().into_class() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
instance.class.is_known(db, KnownClass::Type)
|
||||
&& base_class.is_known(db, KnownClass::Object)
|
||||
}
|
||||
|
||||
(Type::SubclassOf(first), Type::SubclassOf(second)) => {
|
||||
match (first.subclass_of(), second.subclass_of()) {
|
||||
(first, second) if first == second => true,
|
||||
(ClassBase::Dynamic(_), ClassBase::Dynamic(_)) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
(Type::Tuple(first), Type::Tuple(second)) => {
|
||||
let first_elements = first.elements(db);
|
||||
let second_elements = second.elements(db);
|
||||
|
||||
first_elements.len() == second_elements.len()
|
||||
&& iter::zip(first_elements, second_elements).all(equivalent)
|
||||
}
|
||||
|
||||
// TODO: Handle equivalent unions with items in different order
|
||||
(Type::Union(first), Type::Union(second)) => {
|
||||
let first_elements = first.elements(db);
|
||||
let second_elements = second.elements(db);
|
||||
|
||||
if first_elements.len() != second_elements.len() {
|
||||
return false;
|
||||
}
|
||||
|
||||
iter::zip(first_elements, second_elements).all(equivalent)
|
||||
}
|
||||
|
||||
// TODO: Handle equivalent intersections with items in different order
|
||||
(Type::Intersection(first), Type::Intersection(second)) => {
|
||||
let first_positive = first.positive(db);
|
||||
let first_negative = first.negative(db);
|
||||
|
||||
let second_positive = second.positive(db);
|
||||
let second_negative = second.negative(db);
|
||||
|
||||
if first_positive.len() != second_positive.len()
|
||||
|| first_negative.len() != second_negative.len()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
iter::zip(first_positive, second_positive).all(equivalent)
|
||||
&& iter::zip(first_negative, second_negative).all(equivalent)
|
||||
}
|
||||
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return true if this type and `other` have no common elements.
|
||||
///
|
||||
/// Note: This function aims to have no false positives, but might return
|
||||
@@ -1099,9 +1185,7 @@ impl<'db> Type<'db> {
|
||||
match (self, other) {
|
||||
(Type::Never, _) | (_, Type::Never) => true,
|
||||
|
||||
(Type::Any, _) | (_, Type::Any) => false,
|
||||
(Type::Unknown, _) | (_, Type::Unknown) => false,
|
||||
(Type::Todo(_), _) | (_, Type::Todo(_)) => false,
|
||||
(Type::Dynamic(_), _) | (_, Type::Dynamic(_)) => false,
|
||||
|
||||
(Type::Union(union), other) | (other, Type::Union(union)) => union
|
||||
.elements(db)
|
||||
@@ -1181,7 +1265,7 @@ impl<'db> Type<'db> {
|
||||
Type::ClassLiteral(ClassLiteralType { class: class_b }),
|
||||
Type::SubclassOf(subclass_of_ty),
|
||||
) => match subclass_of_ty.subclass_of() {
|
||||
ClassBase::Any | ClassBase::Todo(_) | ClassBase::Unknown => false,
|
||||
ClassBase::Dynamic(_) => false,
|
||||
ClassBase::Class(class_a) => !class_b.is_subclass_of(db, class_a),
|
||||
},
|
||||
|
||||
@@ -1377,7 +1461,7 @@ impl<'db> Type<'db> {
|
||||
/// Returns true if the type does not contain any gradual forms (as a sub-part).
|
||||
pub(crate) fn is_fully_static(self, db: &'db dyn Db) -> bool {
|
||||
match self {
|
||||
Type::Any | Type::Unknown | Type::Todo(_) => false,
|
||||
Type::Dynamic(_) => false,
|
||||
Type::Never
|
||||
| Type::FunctionLiteral(..)
|
||||
| Type::ModuleLiteral(..)
|
||||
@@ -1440,10 +1524,8 @@ impl<'db> Type<'db> {
|
||||
/// for more complicated types that are actually singletons.
|
||||
pub(crate) fn is_singleton(self, db: &'db dyn Db) -> bool {
|
||||
match self {
|
||||
Type::Any
|
||||
Type::Dynamic(_)
|
||||
| Type::Never
|
||||
| Type::Unknown
|
||||
| Type::Todo(_)
|
||||
| Type::IntLiteral(..)
|
||||
| Type::StringLiteral(..)
|
||||
| Type::BytesLiteral(..)
|
||||
@@ -1553,10 +1635,8 @@ impl<'db> Type<'db> {
|
||||
None => false,
|
||||
},
|
||||
|
||||
Type::Any
|
||||
Type::Dynamic(_)
|
||||
| Type::Never
|
||||
| Type::Unknown
|
||||
| Type::Todo(_)
|
||||
| Type::Union(..)
|
||||
| Type::Intersection(..)
|
||||
| Type::LiteralString
|
||||
@@ -1577,7 +1657,7 @@ impl<'db> Type<'db> {
|
||||
}
|
||||
|
||||
match self {
|
||||
Type::Any | Type::Unknown | Type::Todo(_) => self.into(),
|
||||
Type::Dynamic(_) => self.into(),
|
||||
|
||||
Type::Never => todo_type!("attribute lookup on Never").into(),
|
||||
|
||||
@@ -1702,7 +1782,7 @@ impl<'db> Type<'db> {
|
||||
/// when `bool(x)` is called on an object `x`.
|
||||
pub(crate) fn bool(&self, db: &'db dyn Db) -> Truthiness {
|
||||
match self {
|
||||
Type::Any | Type::Todo(_) | Type::Never | Type::Unknown => Truthiness::Ambiguous,
|
||||
Type::Dynamic(_) | Type::Never => Truthiness::Ambiguous,
|
||||
Type::FunctionLiteral(_) => Truthiness::AlwaysTrue,
|
||||
Type::ModuleLiteral(_) => Truthiness::AlwaysTrue,
|
||||
Type::ClassLiteral(ClassLiteralType { class }) => {
|
||||
@@ -1836,7 +1916,7 @@ impl<'db> Type<'db> {
|
||||
let mut binding = bind_call(db, arguments, function_type.signature(db), Some(self));
|
||||
match function_type.known(db) {
|
||||
Some(KnownFunction::RevealType) => {
|
||||
let revealed_ty = binding.one_parameter_ty().unwrap_or(Type::Unknown);
|
||||
let revealed_ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
|
||||
CallOutcome::revealed(binding, revealed_ty)
|
||||
}
|
||||
Some(KnownFunction::StaticAssert) => {
|
||||
@@ -1872,7 +1952,7 @@ impl<'db> Type<'db> {
|
||||
Some(KnownFunction::IsEquivalentTo) => {
|
||||
let (ty_a, ty_b) = binding
|
||||
.two_parameter_tys()
|
||||
.unwrap_or((Type::Unknown, Type::Unknown));
|
||||
.unwrap_or((Type::unknown(), Type::unknown()));
|
||||
binding
|
||||
.set_return_ty(Type::BooleanLiteral(ty_a.is_equivalent_to(db, ty_b)));
|
||||
CallOutcome::callable(binding)
|
||||
@@ -1880,14 +1960,14 @@ impl<'db> Type<'db> {
|
||||
Some(KnownFunction::IsSubtypeOf) => {
|
||||
let (ty_a, ty_b) = binding
|
||||
.two_parameter_tys()
|
||||
.unwrap_or((Type::Unknown, Type::Unknown));
|
||||
.unwrap_or((Type::unknown(), Type::unknown()));
|
||||
binding.set_return_ty(Type::BooleanLiteral(ty_a.is_subtype_of(db, ty_b)));
|
||||
CallOutcome::callable(binding)
|
||||
}
|
||||
Some(KnownFunction::IsAssignableTo) => {
|
||||
let (ty_a, ty_b) = binding
|
||||
.two_parameter_tys()
|
||||
.unwrap_or((Type::Unknown, Type::Unknown));
|
||||
.unwrap_or((Type::unknown(), Type::unknown()));
|
||||
binding
|
||||
.set_return_ty(Type::BooleanLiteral(ty_a.is_assignable_to(db, ty_b)));
|
||||
CallOutcome::callable(binding)
|
||||
@@ -1895,23 +1975,23 @@ impl<'db> Type<'db> {
|
||||
Some(KnownFunction::IsDisjointFrom) => {
|
||||
let (ty_a, ty_b) = binding
|
||||
.two_parameter_tys()
|
||||
.unwrap_or((Type::Unknown, Type::Unknown));
|
||||
.unwrap_or((Type::unknown(), Type::unknown()));
|
||||
binding
|
||||
.set_return_ty(Type::BooleanLiteral(ty_a.is_disjoint_from(db, ty_b)));
|
||||
CallOutcome::callable(binding)
|
||||
}
|
||||
Some(KnownFunction::IsFullyStatic) => {
|
||||
let ty = binding.one_parameter_ty().unwrap_or(Type::Unknown);
|
||||
let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
|
||||
binding.set_return_ty(Type::BooleanLiteral(ty.is_fully_static(db)));
|
||||
CallOutcome::callable(binding)
|
||||
}
|
||||
Some(KnownFunction::IsSingleton) => {
|
||||
let ty = binding.one_parameter_ty().unwrap_or(Type::Unknown);
|
||||
let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
|
||||
binding.set_return_ty(Type::BooleanLiteral(ty.is_singleton(db)));
|
||||
CallOutcome::callable(binding)
|
||||
}
|
||||
Some(KnownFunction::IsSingleValued) => {
|
||||
let ty = binding.one_parameter_ty().unwrap_or(Type::Unknown);
|
||||
let ty = binding.one_parameter_ty().unwrap_or(Type::unknown());
|
||||
binding.set_return_ty(Type::BooleanLiteral(ty.is_single_valued(db)));
|
||||
CallOutcome::callable(binding)
|
||||
}
|
||||
@@ -1926,6 +2006,14 @@ impl<'db> Type<'db> {
|
||||
CallOutcome::callable(binding)
|
||||
}
|
||||
|
||||
Some(KnownFunction::AssertType) => {
|
||||
let Some((_, asserted_ty)) = binding.two_parameter_tys() else {
|
||||
return CallOutcome::callable(binding);
|
||||
};
|
||||
|
||||
CallOutcome::asserted(binding, asserted_ty)
|
||||
}
|
||||
|
||||
_ => CallOutcome::callable(binding),
|
||||
}
|
||||
}
|
||||
@@ -1973,9 +2061,7 @@ impl<'db> Type<'db> {
|
||||
}
|
||||
|
||||
// Dynamic types are callable, and the return type is the same dynamic type
|
||||
Type::Any | Type::Todo(_) | Type::Unknown => {
|
||||
CallOutcome::callable(CallBinding::from_return_ty(self))
|
||||
}
|
||||
Type::Dynamic(_) => CallOutcome::callable(CallBinding::from_return_ty(self)),
|
||||
|
||||
Type::Union(union) => CallOutcome::union(
|
||||
self,
|
||||
@@ -2083,16 +2169,12 @@ impl<'db> Type<'db> {
|
||||
#[must_use]
|
||||
pub fn to_instance(&self, db: &'db dyn Db) -> Type<'db> {
|
||||
match self {
|
||||
Type::Any => Type::Any,
|
||||
todo @ Type::Todo(_) => *todo,
|
||||
Type::Unknown => Type::Unknown,
|
||||
Type::Dynamic(_) => *self,
|
||||
Type::Never => Type::Never,
|
||||
Type::ClassLiteral(ClassLiteralType { class }) => Type::instance(*class),
|
||||
Type::SubclassOf(subclass_of_ty) => match subclass_of_ty.subclass_of() {
|
||||
ClassBase::Class(class) => Type::instance(class),
|
||||
ClassBase::Any => Type::Any,
|
||||
ClassBase::Unknown => Type::Unknown,
|
||||
ClassBase::Todo(todo) => Type::Todo(todo),
|
||||
ClassBase::Dynamic(dynamic) => Type::Dynamic(dynamic),
|
||||
},
|
||||
Type::Union(union) => union.map(db, |element| element.to_instance(db)),
|
||||
Type::Intersection(_) => todo_type!("Type::Intersection.to_instance()"),
|
||||
@@ -2110,7 +2192,7 @@ impl<'db> Type<'db> {
|
||||
| Type::Tuple(_)
|
||||
| Type::LiteralString
|
||||
| Type::AlwaysTruthy
|
||||
| Type::AlwaysFalsy => Type::Unknown,
|
||||
| Type::AlwaysFalsy => Type::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2177,7 +2259,7 @@ impl<'db> Type<'db> {
|
||||
})
|
||||
}
|
||||
}
|
||||
Type::Unknown => Ok(Type::Unknown),
|
||||
Type::Dynamic(_) => Ok(*self),
|
||||
// TODO map this to a new `Type::TypeVar` variant
|
||||
Type::KnownInstance(KnownInstanceType::TypeVar(_)) => Ok(*self),
|
||||
Type::KnownInstance(KnownInstanceType::TypeAliasType(alias)) => Ok(alias.value_ty(db)),
|
||||
@@ -2185,18 +2267,17 @@ impl<'db> Type<'db> {
|
||||
Ok(Type::Never)
|
||||
}
|
||||
Type::KnownInstance(KnownInstanceType::LiteralString) => Ok(Type::LiteralString),
|
||||
Type::KnownInstance(KnownInstanceType::Any) => Ok(Type::Any),
|
||||
Type::KnownInstance(KnownInstanceType::Any) => Ok(Type::any()),
|
||||
// TODO: Should emit a diagnostic
|
||||
Type::KnownInstance(KnownInstanceType::Annotated) => Err(InvalidTypeExpressionError {
|
||||
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::BareAnnotated],
|
||||
fallback_type: Type::Unknown,
|
||||
fallback_type: Type::unknown(),
|
||||
}),
|
||||
Type::KnownInstance(KnownInstanceType::Literal) => Err(InvalidTypeExpressionError {
|
||||
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::BareLiteral],
|
||||
fallback_type: Type::Unknown,
|
||||
fallback_type: Type::unknown(),
|
||||
}),
|
||||
Type::KnownInstance(KnownInstanceType::Unknown) => Ok(Type::Unknown),
|
||||
Type::Todo(_) => Ok(*self),
|
||||
Type::KnownInstance(KnownInstanceType::Unknown) => Ok(Type::unknown()),
|
||||
_ => Ok(todo_type!(
|
||||
"Unsupported or invalid type in a type expression"
|
||||
)),
|
||||
@@ -2260,16 +2341,15 @@ impl<'db> Type<'db> {
|
||||
Type::Tuple(_) => KnownClass::Tuple.to_class_literal(db),
|
||||
Type::ClassLiteral(ClassLiteralType { class }) => class.metaclass(db),
|
||||
Type::SubclassOf(subclass_of_ty) => match subclass_of_ty.subclass_of() {
|
||||
ClassBase::Any | ClassBase::Unknown | ClassBase::Todo(_) => *self,
|
||||
ClassBase::Dynamic(_) => *self,
|
||||
ClassBase::Class(class) => SubclassOfType::from(
|
||||
db,
|
||||
ClassBase::try_from_ty(db, class.metaclass(db)).unwrap_or(ClassBase::Unknown),
|
||||
ClassBase::try_from_ty(db, class.metaclass(db)).unwrap_or(ClassBase::unknown()),
|
||||
),
|
||||
},
|
||||
|
||||
Type::StringLiteral(_) | Type::LiteralString => KnownClass::Str.to_class_literal(db),
|
||||
Type::Any => SubclassOfType::subclass_of_any(),
|
||||
Type::Unknown => SubclassOfType::subclass_of_unknown(),
|
||||
Type::Dynamic(dynamic) => SubclassOfType::from(db, ClassBase::Dynamic(*dynamic)),
|
||||
// TODO intersections
|
||||
Type::Intersection(_) => SubclassOfType::from(
|
||||
db,
|
||||
@@ -2277,7 +2357,6 @@ impl<'db> Type<'db> {
|
||||
.expect("Type::Todo should be a valid ClassBase"),
|
||||
),
|
||||
Type::AlwaysTruthy | Type::AlwaysFalsy => KnownClass::Type.to_instance(db),
|
||||
Type::Todo(todo) => SubclassOfType::from(db, ClassBase::Todo(*todo)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2338,6 +2417,36 @@ impl<'db> From<&Type<'db>> for Symbol<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
|
||||
pub enum DynamicType {
|
||||
// An explicitly annotated `typing.Any`
|
||||
Any,
|
||||
// An unannotated value, or a dynamic type resulting from an error
|
||||
Unknown,
|
||||
/// Temporary type for symbols that can't be inferred yet because of missing implementations.
|
||||
///
|
||||
/// This variant should eventually be removed once red-knot is spec-compliant.
|
||||
///
|
||||
/// General rule: `Todo` should only propagate when the presence of the input `Todo` caused the
|
||||
/// output to be unknown. An output should only be `Todo` if fixing all `Todo` inputs to be not
|
||||
/// `Todo` would change the output type.
|
||||
///
|
||||
/// This variant should be created with the `todo_type!` macro.
|
||||
Todo(TodoType),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for DynamicType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
DynamicType::Any => f.write_str("Any"),
|
||||
DynamicType::Unknown => f.write_str("Unknown"),
|
||||
// `DynamicType::Todo`'s display should be explicit that is not a valid display of
|
||||
// any other type
|
||||
DynamicType::Todo(todo) => write!(f, "@Todo{todo}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Error struct providing information on type(s) that were deemed to be invalid
|
||||
/// in a type expression context, and the type we should therefore fallback to
|
||||
/// for the problematic type expression.
|
||||
@@ -2480,7 +2589,7 @@ impl<'db> KnownClass {
|
||||
pub fn to_class_literal(self, db: &'db dyn Db) -> Type<'db> {
|
||||
known_module_symbol(db, self.canonical_module(db), self.as_str())
|
||||
.ignore_possibly_unbound()
|
||||
.unwrap_or(Type::Unknown)
|
||||
.unwrap_or(Type::unknown())
|
||||
}
|
||||
|
||||
pub fn to_subclass_of(self, db: &'db dyn Db) -> Type<'db> {
|
||||
@@ -3090,7 +3199,7 @@ impl<'db> IterationOutcome<'db> {
|
||||
Self::Iterable { element_ty } => element_ty,
|
||||
Self::NotIterable { not_iterable_ty } => {
|
||||
report_not_iterable(context, iterable_node, not_iterable_ty);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
Self::PossiblyUnboundDunderIter {
|
||||
iterable_ty,
|
||||
@@ -3242,6 +3351,9 @@ pub enum KnownFunction {
|
||||
/// [`typing(_extensions).no_type_check`](https://typing.readthedocs.io/en/latest/spec/directives.html#no-type-check)
|
||||
NoTypeCheck,
|
||||
|
||||
/// `typing(_extensions).assert_type`
|
||||
AssertType,
|
||||
|
||||
/// `knot_extensions.static_assert`
|
||||
StaticAssert,
|
||||
/// `knot_extensions.is_equivalent_to`
|
||||
@@ -3264,18 +3376,7 @@ impl KnownFunction {
|
||||
pub fn constraint_function(self) -> Option<KnownConstraintFunction> {
|
||||
match self {
|
||||
Self::ConstraintFunction(f) => Some(f),
|
||||
Self::RevealType
|
||||
| Self::Len
|
||||
| Self::Final
|
||||
| Self::NoTypeCheck
|
||||
| Self::StaticAssert
|
||||
| Self::IsEquivalentTo
|
||||
| Self::IsSubtypeOf
|
||||
| Self::IsAssignableTo
|
||||
| Self::IsDisjointFrom
|
||||
| Self::IsFullyStatic
|
||||
| Self::IsSingleton
|
||||
| Self::IsSingleValued => None,
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3297,6 +3398,7 @@ impl KnownFunction {
|
||||
"no_type_check" if definition.is_typing_definition(db) => {
|
||||
Some(KnownFunction::NoTypeCheck)
|
||||
}
|
||||
"assert_type" if definition.is_typing_definition(db) => Some(KnownFunction::AssertType),
|
||||
"static_assert" if definition.is_knot_extensions_definition(db) => {
|
||||
Some(KnownFunction::StaticAssert)
|
||||
}
|
||||
@@ -3326,23 +3428,90 @@ impl KnownFunction {
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether or not a particular function takes type expression as arguments, i.e. should
|
||||
/// the argument of a call like `f(int)` be interpreted as the type int (true) or as the
|
||||
/// type of the expression `int`, i.e. `Literal[int]` (false).
|
||||
const fn takes_type_expression_arguments(self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
KnownFunction::IsEquivalentTo
|
||||
| KnownFunction::IsSubtypeOf
|
||||
| KnownFunction::IsAssignableTo
|
||||
| KnownFunction::IsDisjointFrom
|
||||
| KnownFunction::IsFullyStatic
|
||||
| KnownFunction::IsSingleton
|
||||
| KnownFunction::IsSingleValued
|
||||
)
|
||||
/// Return the [`ParameterExpectations`] for this function.
|
||||
const fn parameter_expectations(self) -> ParameterExpectations {
|
||||
match self {
|
||||
Self::IsFullyStatic | Self::IsSingleton | Self::IsSingleValued => {
|
||||
ParameterExpectations::SingleTypeExpression
|
||||
}
|
||||
|
||||
Self::IsEquivalentTo
|
||||
| Self::IsSubtypeOf
|
||||
| Self::IsAssignableTo
|
||||
| Self::IsDisjointFrom => ParameterExpectations::TwoTypeExpressions,
|
||||
|
||||
Self::AssertType => ParameterExpectations::ValueExpressionAndTypeExpression,
|
||||
|
||||
Self::ConstraintFunction(_)
|
||||
| Self::Len
|
||||
| Self::Final
|
||||
| Self::NoTypeCheck
|
||||
| Self::RevealType
|
||||
| Self::StaticAssert => ParameterExpectations::AllValueExpressions,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Describes whether the parameters in a function expect value expressions or type expressions.
|
||||
///
|
||||
/// Whether a specific parameter in the function expects a type expression can be queried
|
||||
/// using [`ParameterExpectations::expectation_at_index`].
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Default)]
|
||||
enum ParameterExpectations {
|
||||
/// All parameters in the function expect value expressions
|
||||
#[default]
|
||||
AllValueExpressions,
|
||||
/// The first parameter in the function expects a type expression
|
||||
SingleTypeExpression,
|
||||
/// The first two parameters in the function expect type expressions
|
||||
TwoTypeExpressions,
|
||||
/// The first parameter in the function expects a value expression,
|
||||
/// and the second expects a type expression
|
||||
ValueExpressionAndTypeExpression,
|
||||
}
|
||||
|
||||
impl ParameterExpectations {
|
||||
/// Query whether the parameter at `parameter_index` expects a value expression or a type expression
|
||||
fn expectation_at_index(self, parameter_index: usize) -> ParameterExpectation {
|
||||
match self {
|
||||
Self::AllValueExpressions => ParameterExpectation::ValueExpression,
|
||||
Self::SingleTypeExpression => {
|
||||
if parameter_index == 0 {
|
||||
ParameterExpectation::TypeExpression
|
||||
} else {
|
||||
ParameterExpectation::ValueExpression
|
||||
}
|
||||
}
|
||||
Self::TwoTypeExpressions => {
|
||||
if parameter_index < 2 {
|
||||
ParameterExpectation::TypeExpression
|
||||
} else {
|
||||
ParameterExpectation::ValueExpression
|
||||
}
|
||||
}
|
||||
Self::ValueExpressionAndTypeExpression => {
|
||||
if parameter_index == 1 {
|
||||
ParameterExpectation::TypeExpression
|
||||
} else {
|
||||
ParameterExpectation::ValueExpression
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether a single parameter in a given function expects a value expression or a [type expression]
|
||||
///
|
||||
/// [type expression]: https://typing.readthedocs.io/en/latest/spec/annotations.html#type-and-annotation-expressions
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Default)]
|
||||
enum ParameterExpectation {
|
||||
/// The parameter expects a value expression
|
||||
#[default]
|
||||
ValueExpression,
|
||||
/// The parameter expects a type expression
|
||||
TypeExpression,
|
||||
}
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct ModuleLiteralType<'db> {
|
||||
/// The file in which this module was imported.
|
||||
@@ -3650,7 +3819,7 @@ impl<'db> Class<'db> {
|
||||
kind: MetaclassErrorKind::PartlyNotCallable(called_ty),
|
||||
})
|
||||
} else {
|
||||
Ok(return_ty.unwrap_or(Type::Unknown))
|
||||
Ok(return_ty.unwrap_or(Type::unknown()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3662,7 +3831,8 @@ impl<'db> Class<'db> {
|
||||
// does not accept the right arguments
|
||||
CallOutcome::Callable { binding }
|
||||
| CallOutcome::RevealType { binding, .. }
|
||||
| CallOutcome::StaticAssertionError { binding, .. } => Ok(binding.return_ty()),
|
||||
| CallOutcome::StaticAssertionError { binding, .. }
|
||||
| CallOutcome::AssertType { binding, .. } => Ok(binding.return_ty()),
|
||||
};
|
||||
|
||||
return return_ty_result.map(|ty| ty.to_meta_type(db));
|
||||
@@ -3716,9 +3886,7 @@ impl<'db> Class<'db> {
|
||||
match superclass {
|
||||
// TODO we may instead want to record the fact that we encountered dynamic, and intersect it with
|
||||
// the type found on the next "real" class.
|
||||
ClassBase::Any | ClassBase::Unknown | ClassBase::Todo(_) => {
|
||||
return Type::from(superclass).member(db, name)
|
||||
}
|
||||
ClassBase::Dynamic(_) => return Type::from(superclass).member(db, name),
|
||||
ClassBase::Class(class) => {
|
||||
let member = class.own_class_member(db, name);
|
||||
if !member.is_unbound() {
|
||||
@@ -4059,9 +4227,9 @@ pub(crate) mod tests {
|
||||
pub(crate) fn into_type(self, db: &TestDb) -> Type<'_> {
|
||||
match self {
|
||||
Ty::Never => Type::Never,
|
||||
Ty::Unknown => Type::Unknown,
|
||||
Ty::Unknown => Type::unknown(),
|
||||
Ty::None => Type::none(db),
|
||||
Ty::Any => Type::Any,
|
||||
Ty::Any => Type::any(),
|
||||
Ty::Todo => todo_type!("Ty::Todo"),
|
||||
Ty::IntLiteral(n) => Type::IntLiteral(n),
|
||||
Ty::StringLiteral(s) => Type::string_literal(db, s),
|
||||
@@ -4127,14 +4295,6 @@ pub(crate) mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[test_case(Ty::Tuple(vec![Ty::Never]))]
|
||||
#[test_case(Ty::Tuple(vec![Ty::BuiltinInstance("str"), Ty::Never, Ty::BuiltinInstance("int")]))]
|
||||
#[test_case(Ty::Tuple(vec![Ty::Tuple(vec![Ty::Never])]))]
|
||||
fn tuple_containing_never_simplifies_to_never(ty: Ty) {
|
||||
let db = setup_db();
|
||||
assert_eq!(ty.into_type(&db), Type::Never);
|
||||
}
|
||||
|
||||
#[test_case(Ty::BuiltinInstance("str"), Ty::BuiltinInstance("object"))]
|
||||
#[test_case(Ty::BuiltinInstance("int"), Ty::BuiltinInstance("object"))]
|
||||
#[test_case(Ty::BuiltinInstance("bool"), Ty::BuiltinInstance("object"))]
|
||||
@@ -4627,6 +4787,82 @@ pub(crate) mod tests {
|
||||
assert!(!from.into_type(&db).is_fully_static(&db));
|
||||
}
|
||||
|
||||
#[test_case(Ty::Todo, Ty::Todo)]
|
||||
#[test_case(Ty::Any, Ty::Any)]
|
||||
#[test_case(Ty::Unknown, Ty::Unknown)]
|
||||
#[test_case(Ty::Any, Ty::Unknown)]
|
||||
#[test_case(Ty::Todo, Ty::Unknown)]
|
||||
#[test_case(Ty::Todo, Ty::Any)]
|
||||
#[test_case(Ty::Never, Ty::Never)]
|
||||
#[test_case(Ty::AlwaysTruthy, Ty::AlwaysTruthy)]
|
||||
#[test_case(Ty::AlwaysFalsy, Ty::AlwaysFalsy)]
|
||||
#[test_case(Ty::LiteralString, Ty::LiteralString)]
|
||||
#[test_case(Ty::BooleanLiteral(true), Ty::BooleanLiteral(true))]
|
||||
#[test_case(Ty::BooleanLiteral(false), Ty::BooleanLiteral(false))]
|
||||
#[test_case(Ty::SliceLiteral(0, 1, 2), Ty::SliceLiteral(0, 1, 2))]
|
||||
#[test_case(Ty::BuiltinClassLiteral("str"), Ty::BuiltinClassLiteral("str"))]
|
||||
#[test_case(Ty::BuiltinInstance("type"), Ty::SubclassOfBuiltinClass("object"))]
|
||||
// TODO: Compare unions/intersections with different orders
|
||||
// #[test_case(
|
||||
// Ty::Union(vec![Ty::BuiltinInstance("str"), Ty::BuiltinInstance("int")]),
|
||||
// Ty::Union(vec![Ty::BuiltinInstance("int"), Ty::BuiltinInstance("str")])
|
||||
// )]
|
||||
// #[test_case(
|
||||
// Ty::Intersection {
|
||||
// pos: vec![Ty::BuiltinInstance("str"), Ty::BuiltinInstance("int")],
|
||||
// neg: vec![Ty::BuiltinInstance("bytes"), Ty::None]
|
||||
// },
|
||||
// Ty::Intersection {
|
||||
// pos: vec![Ty::BuiltinInstance("int"), Ty::BuiltinInstance("str")],
|
||||
// neg: vec![Ty::None, Ty::BuiltinInstance("bytes")]
|
||||
// }
|
||||
// )]
|
||||
// #[test_case(
|
||||
// Ty::Intersection {
|
||||
// pos: vec![Ty::Union(vec![Ty::BuiltinInstance("str"), Ty::BuiltinInstance("int")])],
|
||||
// neg: vec![Ty::SubclassOfAny]
|
||||
// },
|
||||
// Ty::Intersection {
|
||||
// pos: vec![Ty::Union(vec![Ty::BuiltinInstance("int"), Ty::BuiltinInstance("str")])],
|
||||
// neg: vec![Ty::SubclassOfUnknown]
|
||||
// }
|
||||
// )]
|
||||
fn is_gradual_equivalent_to(a: Ty, b: Ty) {
|
||||
let db = setup_db();
|
||||
let a = a.into_type(&db);
|
||||
let b = b.into_type(&db);
|
||||
|
||||
assert!(a.is_gradual_equivalent_to(&db, b));
|
||||
assert!(b.is_gradual_equivalent_to(&db, a));
|
||||
}
|
||||
|
||||
#[test_case(Ty::BuiltinInstance("type"), Ty::SubclassOfAny)]
|
||||
#[test_case(Ty::SubclassOfBuiltinClass("object"), Ty::SubclassOfAny)]
|
||||
#[test_case(
|
||||
Ty::Union(vec![Ty::BuiltinInstance("str"), Ty::BuiltinInstance("int")]),
|
||||
Ty::Union(vec![Ty::BuiltinInstance("int"), Ty::BuiltinInstance("str"), Ty::BuiltinInstance("bytes")])
|
||||
)]
|
||||
#[test_case(
|
||||
Ty::Union(vec![Ty::BuiltinInstance("str"), Ty::BuiltinInstance("int"), Ty::BuiltinInstance("bytes")]),
|
||||
Ty::Union(vec![Ty::BuiltinInstance("int"), Ty::BuiltinInstance("str"), Ty::BuiltinInstance("dict")])
|
||||
)]
|
||||
#[test_case(
|
||||
Ty::Tuple(vec![Ty::BuiltinInstance("str"), Ty::BuiltinInstance("int")]),
|
||||
Ty::Tuple(vec![Ty::BuiltinInstance("str"), Ty::BuiltinInstance("int"), Ty::BuiltinInstance("bytes")])
|
||||
)]
|
||||
#[test_case(
|
||||
Ty::Tuple(vec![Ty::BuiltinInstance("str"), Ty::BuiltinInstance("int")]),
|
||||
Ty::Tuple(vec![Ty::BuiltinInstance("int"), Ty::BuiltinInstance("str")])
|
||||
)]
|
||||
fn is_not_gradual_equivalent_to(a: Ty, b: Ty) {
|
||||
let db = setup_db();
|
||||
let a = a.into_type(&db);
|
||||
let b = b.into_type(&db);
|
||||
|
||||
assert!(!a.is_gradual_equivalent_to(&db, b));
|
||||
assert!(!b.is_gradual_equivalent_to(&db, a));
|
||||
}
|
||||
|
||||
#[test_case(Ty::IntLiteral(1); "is_int_literal_truthy")]
|
||||
#[test_case(Ty::IntLiteral(-1))]
|
||||
#[test_case(Ty::StringLiteral("foo"))]
|
||||
|
||||
@@ -65,6 +65,8 @@ impl<'db> UnionBuilder<'db> {
|
||||
|
||||
let mut to_add = ty;
|
||||
let mut to_remove = SmallVec::<[usize; 2]>::new();
|
||||
let ty_negated = ty.negate(self.db);
|
||||
|
||||
for (index, element) in self.elements.iter().enumerate() {
|
||||
if Some(*element) == bool_pair {
|
||||
to_add = KnownClass::Bool.to_instance(self.db);
|
||||
@@ -80,6 +82,17 @@ impl<'db> UnionBuilder<'db> {
|
||||
return self;
|
||||
} else if element.is_subtype_of(self.db, ty) {
|
||||
to_remove.push(index);
|
||||
} else if ty_negated.is_subtype_of(self.db, *element) {
|
||||
// We add `ty` to the union. We just checked that `~ty` is a subtype of an existing `element`.
|
||||
// This also means that `~ty | ty` is a subtype of `element | ty`, because both elements in the
|
||||
// first union are subtypes of the corresponding elements in the second union. But `~ty | ty` is
|
||||
// just `object`. Since `object` is a subtype of `element | ty`, we can only conclude that
|
||||
// `element | ty` must be `object` (object has no other supertypes). This means we can simplify
|
||||
// the whole union to just `object`, since all other potential elements would also be subtypes of
|
||||
// `object`.
|
||||
self.elements.clear();
|
||||
self.elements.push(KnownClass::Object.to_instance(self.db));
|
||||
return self;
|
||||
}
|
||||
}
|
||||
match to_remove[..] {
|
||||
@@ -321,7 +334,15 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
self.add_positive(db, *neg);
|
||||
}
|
||||
}
|
||||
ty @ (Type::Any | Type::Unknown | Type::Todo(_)) => {
|
||||
Type::Never => {
|
||||
// Adding ~Never to an intersection is a no-op.
|
||||
}
|
||||
Type::Instance(instance) if instance.class.is_known(db, KnownClass::Object) => {
|
||||
// Adding ~object to an intersection results in Never.
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
}
|
||||
ty @ Type::Dynamic(_) => {
|
||||
// Adding any of these types to the negative side of an intersection
|
||||
// is equivalent to adding it to the positive side. We do this to
|
||||
// simplify the representation.
|
||||
@@ -386,18 +407,34 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{IntersectionBuilder, IntersectionType, Type, UnionType};
|
||||
use super::{IntersectionBuilder, Type, UnionBuilder, UnionType};
|
||||
|
||||
use crate::db::tests::{setup_db, TestDb};
|
||||
use crate::types::{global_symbol, todo_type, KnownClass, Truthiness, UnionBuilder};
|
||||
use crate::db::tests::setup_db;
|
||||
use crate::types::{KnownClass, Truthiness};
|
||||
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use test_case::test_case;
|
||||
|
||||
#[test]
|
||||
fn build_union() {
|
||||
fn build_union_no_elements() {
|
||||
let db = setup_db();
|
||||
|
||||
let empty_union = UnionBuilder::new(&db).build();
|
||||
assert_eq!(empty_union, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_single_element() {
|
||||
let db = setup_db();
|
||||
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let union = UnionType::from_elements(&db, [t0]);
|
||||
assert_eq!(union, t0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_two_elements() {
|
||||
let db = setup_db();
|
||||
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let union = UnionType::from_elements(&db, [t0, t1]).expect_union();
|
||||
@@ -405,605 +442,12 @@ mod tests {
|
||||
assert_eq!(union.elements(&db), &[t0, t1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_single() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let ty = UnionType::from_elements(&db, [t0]);
|
||||
assert_eq!(ty, t0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_empty() {
|
||||
let db = setup_db();
|
||||
let ty = UnionBuilder::new(&db).build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_never() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let ty = UnionType::from_elements(&db, [t0, Type::Never]);
|
||||
assert_eq!(ty, t0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_bool() {
|
||||
let db = setup_db();
|
||||
let bool_instance_ty = KnownClass::Bool.to_instance(&db);
|
||||
|
||||
let t0 = Type::BooleanLiteral(true);
|
||||
let t1 = Type::BooleanLiteral(true);
|
||||
let t2 = Type::BooleanLiteral(false);
|
||||
let t3 = Type::IntLiteral(17);
|
||||
|
||||
let union = UnionType::from_elements(&db, [t0, t1, t3]).expect_union();
|
||||
assert_eq!(union.elements(&db), &[t0, t3]);
|
||||
|
||||
let union = UnionType::from_elements(&db, [t0, t1, t2, t3]).expect_union();
|
||||
assert_eq!(union.elements(&db), &[bool_instance_ty, t3]);
|
||||
|
||||
let result_ty = UnionType::from_elements(&db, [bool_instance_ty, t0]);
|
||||
assert_eq!(result_ty, bool_instance_ty);
|
||||
|
||||
let result_ty = UnionType::from_elements(&db, [t0, bool_instance_ty]);
|
||||
assert_eq!(result_ty, bool_instance_ty);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_flatten() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let t2 = Type::IntLiteral(2);
|
||||
let u1 = UnionType::from_elements(&db, [t0, t1]);
|
||||
let union = UnionType::from_elements(&db, [u1, t2]).expect_union();
|
||||
|
||||
assert_eq!(union.elements(&db), &[t0, t1, t2]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_simplify_subtype() {
|
||||
let db = setup_db();
|
||||
let t0 = KnownClass::Str.to_instance(&db);
|
||||
let t1 = Type::LiteralString;
|
||||
let u0 = UnionType::from_elements(&db, [t0, t1]);
|
||||
let u1 = UnionType::from_elements(&db, [t1, t0]);
|
||||
|
||||
assert_eq!(u0, t0);
|
||||
assert_eq!(u1, t0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_no_simplify_unknown() {
|
||||
let db = setup_db();
|
||||
let t0 = KnownClass::Str.to_instance(&db);
|
||||
let t1 = Type::Unknown;
|
||||
let u0 = UnionType::from_elements(&db, [t0, t1]);
|
||||
let u1 = UnionType::from_elements(&db, [t1, t0]);
|
||||
|
||||
assert_eq!(u0.expect_union().elements(&db), &[t0, t1]);
|
||||
assert_eq!(u1.expect_union().elements(&db), &[t1, t0]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_simplify_multiple_unknown() {
|
||||
let db = setup_db();
|
||||
let t0 = KnownClass::Str.to_instance(&db);
|
||||
let t1 = Type::Unknown;
|
||||
|
||||
let u = UnionType::from_elements(&db, [t0, t1, t1]);
|
||||
|
||||
assert_eq!(u.expect_union().elements(&db), &[t0, t1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_subsume_multiple() {
|
||||
let db = setup_db();
|
||||
let str_ty = KnownClass::Str.to_instance(&db);
|
||||
let int_ty = KnownClass::Int.to_instance(&db);
|
||||
let object_ty = KnownClass::Object.to_instance(&db);
|
||||
let unknown_ty = Type::Unknown;
|
||||
|
||||
let u0 = UnionType::from_elements(&db, [str_ty, unknown_ty, int_ty, object_ty]);
|
||||
|
||||
assert_eq!(u0.expect_union().elements(&db), &[unknown_ty, object_ty]);
|
||||
}
|
||||
|
||||
impl<'db> IntersectionType<'db> {
|
||||
fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
|
||||
self.positive(db).into_iter().copied().collect()
|
||||
}
|
||||
|
||||
fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
|
||||
self.negative(db).into_iter().copied().collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let ta = Type::Any;
|
||||
let intersection = IntersectionBuilder::new(&db)
|
||||
.add_positive(ta)
|
||||
.add_negative(t0)
|
||||
.build()
|
||||
.expect_intersection();
|
||||
|
||||
assert_eq!(intersection.pos_vec(&db), &[ta]);
|
||||
assert_eq!(intersection.neg_vec(&db), &[t0]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_empty_intersection_equals_object() {
|
||||
let db = setup_db();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db).build();
|
||||
|
||||
assert_eq!(ty, KnownClass::Object.to_instance(&db));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_flatten_positive() {
|
||||
let db = setup_db();
|
||||
let ta = Type::Any;
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let t2 = Type::IntLiteral(2);
|
||||
let i0 = IntersectionBuilder::new(&db)
|
||||
.add_positive(ta)
|
||||
.add_negative(t1)
|
||||
.build();
|
||||
let intersection = IntersectionBuilder::new(&db)
|
||||
.add_positive(t2)
|
||||
.add_positive(i0)
|
||||
.build()
|
||||
.expect_intersection();
|
||||
|
||||
assert_eq!(intersection.pos_vec(&db), &[t2, ta]);
|
||||
assert_eq!(intersection.neg_vec(&db), &[]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_flatten_negative() {
|
||||
let db = setup_db();
|
||||
let ta = Type::Any;
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let t2 = KnownClass::Int.to_instance(&db);
|
||||
// i0 = Any & ~Literal[1]
|
||||
let i0 = IntersectionBuilder::new(&db)
|
||||
.add_positive(ta)
|
||||
.add_negative(t1)
|
||||
.build();
|
||||
// ta_not_i0 = int & ~(Any & ~Literal[1])
|
||||
// -> int & (~Any | Literal[1])
|
||||
// (~Any is equivalent to Any)
|
||||
// -> (int & Any) | (int & Literal[1])
|
||||
// -> (int & Any) | Literal[1]
|
||||
let ta_not_i0 = IntersectionBuilder::new(&db)
|
||||
.add_positive(t2)
|
||||
.add_negative(i0)
|
||||
.build();
|
||||
|
||||
assert_eq!(ta_not_i0.display(&db).to_string(), "int & Any | Literal[1]");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_negative_any() {
|
||||
let db = setup_db();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Any)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Any);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Never)
|
||||
.add_negative(Type::Any)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_multiple_unknown() {
|
||||
let db = setup_db();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Unknown)
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::IntLiteral(0))
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(
|
||||
ty,
|
||||
IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::IntLiteral(0))
|
||||
.build()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersection_distributes_over_union() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let ta = Type::Any;
|
||||
let u0 = UnionType::from_elements(&db, [t0, t1]);
|
||||
|
||||
let union = IntersectionBuilder::new(&db)
|
||||
.add_positive(ta)
|
||||
.add_positive(u0)
|
||||
.build()
|
||||
.expect_union();
|
||||
let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements(&db)[..] else {
|
||||
panic!("expected a union of two intersections");
|
||||
};
|
||||
assert_eq!(i0.pos_vec(&db), &[ta, t0]);
|
||||
assert_eq!(i1.pos_vec(&db), &[ta, t1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersection_negation_distributes_over_union() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/module.py",
|
||||
r#"
|
||||
class A: ...
|
||||
class B: ...
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
let module = ruff_db::files::system_path_to_file(&db, "/src/module.py").unwrap();
|
||||
|
||||
let a = global_symbol(&db, module, "A")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
let b = global_symbol(&db, module, "B")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
|
||||
// intersection: A & B
|
||||
let intersection = IntersectionBuilder::new(&db)
|
||||
.add_positive(a)
|
||||
.add_positive(b)
|
||||
.build()
|
||||
.expect_intersection();
|
||||
assert_eq!(intersection.pos_vec(&db), &[a, b]);
|
||||
assert_eq!(intersection.neg_vec(&db), &[]);
|
||||
|
||||
// ~intersection => ~A | ~B
|
||||
let negated_intersection = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Intersection(intersection))
|
||||
.build()
|
||||
.expect_union();
|
||||
|
||||
// should have as elements ~A and ~B
|
||||
let not_a = a.negate(&db);
|
||||
let not_b = b.negate(&db);
|
||||
assert_eq!(negated_intersection.elements(&db), &[not_a, not_b]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mixed_intersection_negation_distributes_over_union() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/module.py",
|
||||
r#"
|
||||
class A: ...
|
||||
class B: ...
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
let module = ruff_db::files::system_path_to_file(&db, "/src/module.py").unwrap();
|
||||
|
||||
let a = global_symbol(&db, module, "A")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
let b = global_symbol(&db, module, "B")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
let int = KnownClass::Int.to_instance(&db);
|
||||
|
||||
// a_not_b: A & ~B
|
||||
let a_not_b = IntersectionBuilder::new(&db)
|
||||
.add_positive(a)
|
||||
.add_negative(b)
|
||||
.build()
|
||||
.expect_intersection();
|
||||
assert_eq!(a_not_b.pos_vec(&db), &[a]);
|
||||
assert_eq!(a_not_b.neg_vec(&db), &[b]);
|
||||
|
||||
// let's build
|
||||
// int & ~(A & ~B)
|
||||
// = int & ~(A & ~B)
|
||||
// = int & (~A | B)
|
||||
// = (int & ~A) | (int & B)
|
||||
let t = IntersectionBuilder::new(&db)
|
||||
.add_positive(int)
|
||||
.add_negative(Type::Intersection(a_not_b))
|
||||
.build();
|
||||
assert_eq!(t.display(&db).to_string(), "int & ~A | int & B");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_self_negation() {
|
||||
let db = setup_db();
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::none(&db))
|
||||
.add_negative(Type::none(&db))
|
||||
.build();
|
||||
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_negative_never() {
|
||||
let db = setup_db();
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::none(&db))
|
||||
.add_negative(Type::Never)
|
||||
.build();
|
||||
|
||||
assert_eq!(ty, Type::none(&db));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_positive_never() {
|
||||
let db = setup_db();
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::none(&db))
|
||||
.add_positive(Type::Never)
|
||||
.build();
|
||||
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_negative_none() {
|
||||
let db = setup_db();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::none(&db))
|
||||
.add_positive(Type::IntLiteral(1))
|
||||
.build();
|
||||
assert_eq!(ty, Type::IntLiteral(1));
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::IntLiteral(1))
|
||||
.add_negative(Type::none(&db))
|
||||
.build();
|
||||
assert_eq!(ty, Type::IntLiteral(1));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_negative_union_de_morgan() {
|
||||
let db = setup_db();
|
||||
|
||||
let union = UnionBuilder::new(&db)
|
||||
.add(Type::IntLiteral(1))
|
||||
.add(Type::IntLiteral(2))
|
||||
.build();
|
||||
assert_eq!(union.display(&db).to_string(), "Literal[1, 2]");
|
||||
|
||||
let ty = IntersectionBuilder::new(&db).add_negative(union).build();
|
||||
|
||||
let expected = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::IntLiteral(1))
|
||||
.add_negative(Type::IntLiteral(2))
|
||||
.build();
|
||||
|
||||
assert_eq!(ty.display(&db).to_string(), "~Literal[1] & ~Literal[2]");
|
||||
assert_eq!(ty, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_positive_type_and_positive_subtype() {
|
||||
let db = setup_db();
|
||||
|
||||
let t = KnownClass::Str.to_instance(&db);
|
||||
let s = Type::LiteralString;
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(t)
|
||||
.add_positive(s)
|
||||
.build();
|
||||
assert_eq!(ty, s);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_positive(t)
|
||||
.build();
|
||||
assert_eq!(ty, s);
|
||||
|
||||
let literal = Type::string_literal(&db, "a");
|
||||
let expected = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_negative(literal)
|
||||
.build();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(t)
|
||||
.add_negative(literal)
|
||||
.add_positive(s)
|
||||
.build();
|
||||
assert_eq!(ty, expected);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_negative(literal)
|
||||
.add_positive(t)
|
||||
.build();
|
||||
assert_eq!(ty, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_negative_type_and_negative_subtype() {
|
||||
let db = setup_db();
|
||||
|
||||
let t = KnownClass::Str.to_instance(&db);
|
||||
let s = Type::LiteralString;
|
||||
|
||||
let expected = IntersectionBuilder::new(&db).add_negative(t).build();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(t)
|
||||
.add_negative(s)
|
||||
.build();
|
||||
assert_eq!(ty, expected);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(s)
|
||||
.add_negative(t)
|
||||
.build();
|
||||
assert_eq!(ty, expected);
|
||||
|
||||
let object = KnownClass::Object.to_instance(&db);
|
||||
let expected = IntersectionBuilder::new(&db)
|
||||
.add_negative(t)
|
||||
.add_positive(object)
|
||||
.build();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(t)
|
||||
.add_positive(object)
|
||||
.add_negative(s)
|
||||
.build();
|
||||
assert_eq!(ty, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_negative_type_and_multiple_negative_subtypes() {
|
||||
let db = setup_db();
|
||||
|
||||
let s1 = Type::IntLiteral(1);
|
||||
let s2 = Type::IntLiteral(2);
|
||||
let t = KnownClass::Int.to_instance(&db);
|
||||
|
||||
let expected = IntersectionBuilder::new(&db).add_negative(t).build();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(s1)
|
||||
.add_negative(s2)
|
||||
.add_negative(t)
|
||||
.build();
|
||||
assert_eq!(ty, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_negative_type_and_positive_subtype() {
|
||||
let db = setup_db();
|
||||
|
||||
let t = KnownClass::Str.to_instance(&db);
|
||||
let s = Type::LiteralString;
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(t)
|
||||
.add_positive(s)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_negative(t)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
|
||||
// This should also work in the presence of additional contributions:
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(KnownClass::Object.to_instance(&db))
|
||||
.add_negative(t)
|
||||
.add_positive(s)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_negative(Type::string_literal(&db, "a"))
|
||||
.add_negative(t)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_disjoint_positive_types() {
|
||||
let db = setup_db();
|
||||
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let t2 = Type::none(&db);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(t1)
|
||||
.add_positive(t2)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
|
||||
// If there are any negative contributions, they should
|
||||
// be removed too.
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(KnownClass::Str.to_instance(&db))
|
||||
.add_negative(Type::LiteralString)
|
||||
.add_positive(t2)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_disjoint_positive_and_negative_types() {
|
||||
let db = setup_db();
|
||||
|
||||
let t_p = KnownClass::Int.to_instance(&db);
|
||||
let t_n = Type::string_literal(&db, "t_n");
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(t_p)
|
||||
.add_negative(t_n)
|
||||
.build();
|
||||
assert_eq!(ty, t_p);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(t_n)
|
||||
.add_positive(t_p)
|
||||
.build();
|
||||
assert_eq!(ty, t_p);
|
||||
|
||||
let int_literal = Type::IntLiteral(1);
|
||||
let expected = IntersectionBuilder::new(&db)
|
||||
.add_positive(t_p)
|
||||
.add_negative(int_literal)
|
||||
.build();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(t_p)
|
||||
.add_negative(int_literal)
|
||||
.add_negative(t_n)
|
||||
.build();
|
||||
assert_eq!(ty, expected);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(t_n)
|
||||
.add_negative(int_literal)
|
||||
.add_positive(t_p)
|
||||
.build();
|
||||
assert_eq!(ty, expected);
|
||||
let intersection = IntersectionBuilder::new(&db).build();
|
||||
assert_eq!(intersection, KnownClass::Object.to_instance(&db));
|
||||
}
|
||||
|
||||
#[test_case(Type::BooleanLiteral(true))]
|
||||
@@ -1048,85 +492,4 @@ mod tests {
|
||||
.build();
|
||||
assert_eq!(ty, Type::BooleanLiteral(!bool_value));
|
||||
}
|
||||
|
||||
#[test_case(Type::Any)]
|
||||
#[test_case(Type::Unknown)]
|
||||
#[test_case(todo_type!())]
|
||||
fn build_intersection_t_and_negative_t_does_not_simplify(ty: Type) {
|
||||
let db = setup_db();
|
||||
|
||||
let result = IntersectionBuilder::new(&db)
|
||||
.add_positive(ty)
|
||||
.add_negative(ty)
|
||||
.build();
|
||||
assert_eq!(result, ty);
|
||||
|
||||
let result = IntersectionBuilder::new(&db)
|
||||
.add_negative(ty)
|
||||
.add_positive(ty)
|
||||
.build();
|
||||
assert_eq!(result, ty);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_of_two_unions_simplify() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/module.py",
|
||||
"
|
||||
class A: ...
|
||||
class B: ...
|
||||
a = A()
|
||||
b = B()
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let file = system_path_to_file(&db, "src/module.py").expect("file to exist");
|
||||
|
||||
let a = global_symbol(&db, file, "a").expect_type();
|
||||
let b = global_symbol(&db, file, "b").expect_type();
|
||||
let union = UnionBuilder::new(&db).add(a).add(b).build();
|
||||
assert_eq!(union.display(&db).to_string(), "A | B");
|
||||
let reversed_union = UnionBuilder::new(&db).add(b).add(a).build();
|
||||
assert_eq!(reversed_union.display(&db).to_string(), "B | A");
|
||||
let intersection = IntersectionBuilder::new(&db)
|
||||
.add_positive(union)
|
||||
.add_positive(reversed_union)
|
||||
.build();
|
||||
assert_eq!(intersection.display(&db).to_string(), "B | A");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_of_two_intersections_simplify() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/module.py",
|
||||
"
|
||||
class A: ...
|
||||
class B: ...
|
||||
a = A()
|
||||
b = B()
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let file = system_path_to_file(&db, "src/module.py").expect("file to exist");
|
||||
|
||||
let a = global_symbol(&db, file, "a").expect_type();
|
||||
let b = global_symbol(&db, file, "b").expect_type();
|
||||
let intersection = IntersectionBuilder::new(&db)
|
||||
.add_positive(a)
|
||||
.add_positive(b)
|
||||
.build();
|
||||
let reversed_intersection = IntersectionBuilder::new(&db)
|
||||
.add_positive(b)
|
||||
.add_positive(a)
|
||||
.build();
|
||||
let union = UnionBuilder::new(&db)
|
||||
.add(intersection)
|
||||
.add(reversed_intersection)
|
||||
.build();
|
||||
assert_eq!(union.display(&db).to_string(), "A & B");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use super::context::InferContext;
|
||||
use super::diagnostic::CALL_NON_CALLABLE;
|
||||
use super::diagnostic::{CALL_NON_CALLABLE, TYPE_ASSERTION_FAILURE};
|
||||
use super::{Severity, Signature, Type, TypeArrayDisplay, UnionBuilder};
|
||||
use crate::types::diagnostic::STATIC_ASSERT_ERROR;
|
||||
use crate::Db;
|
||||
@@ -44,6 +44,10 @@ pub(super) enum CallOutcome<'db> {
|
||||
binding: CallBinding<'db>,
|
||||
error_kind: StaticAssertionErrorKind<'db>,
|
||||
},
|
||||
AssertType {
|
||||
binding: CallBinding<'db>,
|
||||
asserted_ty: Type<'db>,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'db> CallOutcome<'db> {
|
||||
@@ -76,6 +80,14 @@ impl<'db> CallOutcome<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new `CallOutcome::AssertType` with given revealed and return types.
|
||||
pub(super) fn asserted(binding: CallBinding<'db>, asserted_ty: Type<'db>) -> CallOutcome<'db> {
|
||||
CallOutcome::AssertType {
|
||||
binding,
|
||||
asserted_ty,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the return type of the call, or `None` if not callable.
|
||||
pub(super) fn return_ty(&self, db: &'db dyn Db) -> Option<Type<'db>> {
|
||||
match self {
|
||||
@@ -97,12 +109,16 @@ impl<'db> CallOutcome<'db> {
|
||||
match (acc, ty) {
|
||||
(None, None) => None,
|
||||
(None, Some(ty)) => Some(UnionBuilder::new(db).add(ty)),
|
||||
(Some(builder), ty) => Some(builder.add(ty.unwrap_or(Type::Unknown))),
|
||||
(Some(builder), ty) => Some(builder.add(ty.unwrap_or(Type::unknown()))),
|
||||
}
|
||||
})
|
||||
.map(UnionBuilder::build),
|
||||
Self::PossiblyUnboundDunderCall { call_outcome, .. } => call_outcome.return_ty(db),
|
||||
Self::StaticAssertionError { .. } => Some(Type::none(db)),
|
||||
Self::AssertType {
|
||||
binding,
|
||||
asserted_ty: _,
|
||||
} => Some(binding.return_ty()),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -206,7 +222,7 @@ impl<'db> CallOutcome<'db> {
|
||||
}
|
||||
Self::NotCallable { not_callable_ty } => Err(NotCallableError::Type {
|
||||
not_callable_ty: *not_callable_ty,
|
||||
return_ty: Type::Unknown,
|
||||
return_ty: Type::unknown(),
|
||||
}),
|
||||
Self::PossiblyUnboundDunderCall {
|
||||
called_ty,
|
||||
@@ -215,7 +231,7 @@ impl<'db> CallOutcome<'db> {
|
||||
callable_ty: *called_ty,
|
||||
return_ty: call_outcome
|
||||
.return_ty(context.db())
|
||||
.unwrap_or(Type::Unknown),
|
||||
.unwrap_or(Type::unknown()),
|
||||
}),
|
||||
Self::Union {
|
||||
outcomes,
|
||||
@@ -228,7 +244,7 @@ impl<'db> CallOutcome<'db> {
|
||||
let return_ty = match outcome {
|
||||
Self::NotCallable { not_callable_ty } => {
|
||||
not_callable.push(*not_callable_ty);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
Self::RevealType {
|
||||
binding,
|
||||
@@ -307,7 +323,29 @@ impl<'db> CallOutcome<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Type::Unknown)
|
||||
Ok(Type::unknown())
|
||||
}
|
||||
CallOutcome::AssertType {
|
||||
binding,
|
||||
asserted_ty,
|
||||
} => {
|
||||
let [actual_ty, _asserted] = binding.parameter_tys() else {
|
||||
return Ok(binding.return_ty());
|
||||
};
|
||||
|
||||
if !actual_ty.is_gradual_equivalent_to(context.db(), *asserted_ty) {
|
||||
context.report_lint(
|
||||
&TYPE_ASSERTION_FAILURE,
|
||||
node,
|
||||
format_args!(
|
||||
"Actual type `{}` is not the same as asserted type `{}`",
|
||||
actual_ty.display(context.db()),
|
||||
asserted_ty.display(context.db()),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(binding.return_ty())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -125,10 +125,10 @@ pub(crate) fn bind_call<'db>(
|
||||
|
||||
CallBinding {
|
||||
callable_ty,
|
||||
return_ty: signature.return_ty.unwrap_or(Type::Unknown),
|
||||
return_ty: signature.return_ty.unwrap_or(Type::unknown()),
|
||||
parameter_tys: parameter_tys
|
||||
.into_iter()
|
||||
.map(|opt_ty| opt_ty.unwrap_or(Type::Unknown))
|
||||
.map(|opt_ty| opt_ty.unwrap_or(Type::unknown()))
|
||||
.collect(),
|
||||
errors,
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::types::{
|
||||
todo_type, Class, ClassLiteralType, KnownClass, KnownInstanceType, TodoType, Type,
|
||||
todo_type, Class, ClassLiteralType, DynamicType, KnownClass, KnownInstanceType, Type,
|
||||
};
|
||||
use crate::Db;
|
||||
use itertools::Either;
|
||||
@@ -8,19 +8,25 @@ use itertools::Either;
|
||||
///
|
||||
/// This is much more limited than the [`Type`] enum:
|
||||
/// all types that would be invalid to have as a class base are
|
||||
/// transformed into [`ClassBase::Unknown`]
|
||||
/// transformed into [`ClassBase::unknown`]
|
||||
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, salsa::Update)]
|
||||
pub enum ClassBase<'db> {
|
||||
Any,
|
||||
Unknown,
|
||||
Todo(TodoType),
|
||||
Dynamic(DynamicType),
|
||||
Class(Class<'db>),
|
||||
}
|
||||
|
||||
impl<'db> ClassBase<'db> {
|
||||
pub const fn any() -> Self {
|
||||
Self::Dynamic(DynamicType::Any)
|
||||
}
|
||||
|
||||
pub const fn unknown() -> Self {
|
||||
Self::Dynamic(DynamicType::Unknown)
|
||||
}
|
||||
|
||||
pub const fn is_dynamic(self) -> bool {
|
||||
match self {
|
||||
ClassBase::Any | ClassBase::Unknown | ClassBase::Todo(_) => true,
|
||||
ClassBase::Dynamic(_) => true,
|
||||
ClassBase::Class(_) => false,
|
||||
}
|
||||
}
|
||||
@@ -34,9 +40,7 @@ impl<'db> ClassBase<'db> {
|
||||
impl std::fmt::Display for Display<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self.base {
|
||||
ClassBase::Any => f.write_str("Any"),
|
||||
ClassBase::Todo(todo) => todo.fmt(f),
|
||||
ClassBase::Unknown => f.write_str("Unknown"),
|
||||
ClassBase::Dynamic(dynamic) => dynamic.fmt(f),
|
||||
ClassBase::Class(class) => write!(f, "<class '{}'>", class.name(self.db)),
|
||||
}
|
||||
}
|
||||
@@ -50,7 +54,7 @@ impl<'db> ClassBase<'db> {
|
||||
KnownClass::Object
|
||||
.to_class_literal(db)
|
||||
.into_class_literal()
|
||||
.map_or(Self::Unknown, |ClassLiteralType { class }| {
|
||||
.map_or(Self::unknown(), |ClassLiteralType { class }| {
|
||||
Self::Class(class)
|
||||
})
|
||||
}
|
||||
@@ -60,9 +64,7 @@ impl<'db> ClassBase<'db> {
|
||||
/// Return `None` if `ty` is not an acceptable type for a class base.
|
||||
pub(super) fn try_from_ty(db: &'db dyn Db, ty: Type<'db>) -> Option<Self> {
|
||||
match ty {
|
||||
Type::Any => Some(Self::Any),
|
||||
Type::Unknown => Some(Self::Unknown),
|
||||
Type::Todo(todo) => Some(Self::Todo(todo)),
|
||||
Type::Dynamic(dynamic) => Some(Self::Dynamic(dynamic)),
|
||||
Type::ClassLiteral(ClassLiteralType { class }) => Some(Self::Class(class)),
|
||||
Type::Union(_) => None, // TODO -- forces consideration of multiple possible MROs?
|
||||
Type::Intersection(_) => None, // TODO -- probably incorrect?
|
||||
@@ -104,8 +106,8 @@ impl<'db> ClassBase<'db> {
|
||||
| KnownInstanceType::Not
|
||||
| KnownInstanceType::Intersection
|
||||
| KnownInstanceType::TypeOf => None,
|
||||
KnownInstanceType::Unknown => Some(Self::Unknown),
|
||||
KnownInstanceType::Any => Some(Self::Any),
|
||||
KnownInstanceType::Unknown => Some(Self::unknown()),
|
||||
KnownInstanceType::Any => Some(Self::any()),
|
||||
// TODO: Classes inheriting from `typing.Type` et al. also have `Generic` in their MRO
|
||||
KnownInstanceType::Dict => {
|
||||
Self::try_from_ty(db, KnownClass::Dict.to_class_literal(db))
|
||||
@@ -150,7 +152,7 @@ impl<'db> ClassBase<'db> {
|
||||
pub(super) fn into_class(self) -> Option<Class<'db>> {
|
||||
match self {
|
||||
Self::Class(class) => Some(class),
|
||||
_ => None,
|
||||
Self::Dynamic(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -160,13 +162,7 @@ impl<'db> ClassBase<'db> {
|
||||
db: &'db dyn Db,
|
||||
) -> Either<impl Iterator<Item = ClassBase<'db>>, impl Iterator<Item = ClassBase<'db>>> {
|
||||
match self {
|
||||
ClassBase::Any => Either::Left([ClassBase::Any, ClassBase::object(db)].into_iter()),
|
||||
ClassBase::Unknown => {
|
||||
Either::Left([ClassBase::Unknown, ClassBase::object(db)].into_iter())
|
||||
}
|
||||
ClassBase::Todo(todo) => {
|
||||
Either::Left([ClassBase::Todo(todo), ClassBase::object(db)].into_iter())
|
||||
}
|
||||
ClassBase::Dynamic(_) => Either::Left([self, ClassBase::object(db)].into_iter()),
|
||||
ClassBase::Class(class) => Either::Right(class.iter_mro(db)),
|
||||
}
|
||||
}
|
||||
@@ -181,9 +177,7 @@ impl<'db> From<Class<'db>> for ClassBase<'db> {
|
||||
impl<'db> From<ClassBase<'db>> for Type<'db> {
|
||||
fn from(value: ClassBase<'db>) -> Self {
|
||||
match value {
|
||||
ClassBase::Any => Type::Any,
|
||||
ClassBase::Todo(todo) => Type::Todo(todo),
|
||||
ClassBase::Unknown => Type::Unknown,
|
||||
ClassBase::Dynamic(dynamic) => Type::Dynamic(dynamic),
|
||||
ClassBase::Class(class) => Type::class_literal(class),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,6 +49,7 @@ pub(crate) fn register_lints(registry: &mut LintRegistryBuilder) {
|
||||
registry.register_lint(&POSSIBLY_UNBOUND_IMPORT);
|
||||
registry.register_lint(&POSSIBLY_UNRESOLVED_REFERENCE);
|
||||
registry.register_lint(&SUBCLASS_OF_FINAL_CLASS);
|
||||
registry.register_lint(&TYPE_ASSERTION_FAILURE);
|
||||
registry.register_lint(&TOO_MANY_POSITIONAL_ARGUMENTS);
|
||||
registry.register_lint(&UNDEFINED_REVEAL);
|
||||
registry.register_lint(&UNKNOWN_ARGUMENT);
|
||||
@@ -575,6 +576,28 @@ declare_lint! {
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for `assert_type()` calls where the actual type
|
||||
/// is not the same as the asserted type.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `assert_type()` allows confirming the inferred type of a certain value.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```python
|
||||
/// def _(x: int):
|
||||
/// assert_type(x, int) # fine
|
||||
/// assert_type(x, str) # error: Actual type does not match asserted type
|
||||
/// ```
|
||||
pub(crate) static TYPE_ASSERTION_FAILURE = {
|
||||
summary: "detects failed type assertions",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for calls that pass more positional arguments than the callable can accept.
|
||||
|
||||
@@ -65,9 +65,8 @@ struct DisplayRepresentation<'db> {
|
||||
impl Display for DisplayRepresentation<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
match self.ty {
|
||||
Type::Any => f.write_str("Any"),
|
||||
Type::Dynamic(dynamic) => dynamic.fmt(f),
|
||||
Type::Never => f.write_str("Never"),
|
||||
Type::Unknown => f.write_str("Unknown"),
|
||||
Type::Instance(InstanceType { class }) => {
|
||||
let representation = match class.known(self.db) {
|
||||
Some(KnownClass::NoneType) => "None",
|
||||
@@ -76,9 +75,6 @@ impl Display for DisplayRepresentation<'_> {
|
||||
};
|
||||
f.write_str(representation)
|
||||
}
|
||||
// `[Type::Todo]`'s display should be explicit that is not a valid display of
|
||||
// any other type
|
||||
Type::Todo(todo) => write!(f, "@Todo{todo}"),
|
||||
Type::ModuleLiteral(module) => {
|
||||
write!(f, "<module '{}'>", module.module(self.db).name())
|
||||
}
|
||||
@@ -88,9 +84,7 @@ impl Display for DisplayRepresentation<'_> {
|
||||
// Only show the bare class name here; ClassBase::display would render this as
|
||||
// type[<class 'Foo'>] instead of type[Foo].
|
||||
ClassBase::Class(class) => write!(f, "type[{}]", class.name(self.db)),
|
||||
base @ (ClassBase::Any | ClassBase::Todo(_) | ClassBase::Unknown) => {
|
||||
write!(f, "type[{}]", base.display(self.db))
|
||||
}
|
||||
ClassBase::Dynamic(dynamic) => write!(f, "type[{dynamic}]"),
|
||||
},
|
||||
Type::KnownInstance(known_instance) => f.write_str(known_instance.repr(self.db)),
|
||||
Type::FunctionLiteral(function) => f.write_str(function.name(self.db)),
|
||||
|
||||
@@ -62,11 +62,11 @@ use crate::types::mro::MroErrorKind;
|
||||
use crate::types::unpacker::{UnpackResult, Unpacker};
|
||||
use crate::types::{
|
||||
bindings_ty, builtins_symbol, declarations_ty, global_symbol, symbol, todo_type,
|
||||
typing_extensions_symbol, Boundness, CallDunderResult, Class, ClassLiteralType, FunctionType,
|
||||
InstanceType, IntersectionBuilder, IntersectionType, IterationOutcome, KnownClass,
|
||||
KnownFunction, KnownInstanceType, MetaclassCandidate, MetaclassErrorKind, SliceLiteralType,
|
||||
SubclassOfType, Symbol, Truthiness, TupleType, Type, TypeAliasType, TypeArrayDisplay,
|
||||
TypeVarBoundOrConstraints, TypeVarInstance, UnionBuilder, UnionType,
|
||||
typing_extensions_symbol, Boundness, CallDunderResult, Class, ClassLiteralType, DynamicType,
|
||||
FunctionType, InstanceType, IntersectionBuilder, IntersectionType, IterationOutcome,
|
||||
KnownClass, KnownFunction, KnownInstanceType, MetaclassCandidate, MetaclassErrorKind,
|
||||
SliceLiteralType, SubclassOfType, Symbol, Truthiness, TupleType, Type, TypeAliasType,
|
||||
TypeArrayDisplay, TypeVarBoundOrConstraints, TypeVarInstance, UnionBuilder, UnionType,
|
||||
};
|
||||
use crate::unpack::Unpack;
|
||||
use crate::util::subscript::{PyIndex, PySlice};
|
||||
@@ -83,6 +83,7 @@ use super::slots::check_class_slots;
|
||||
use super::string_annotation::{
|
||||
parse_string_annotation, BYTE_STRING_TYPE_ANNOTATION, FSTRING_TYPE_ANNOTATION,
|
||||
};
|
||||
use super::{ParameterExpectation, ParameterExpectations};
|
||||
|
||||
/// Infer all types for a [`ScopeId`], including all definitions and expressions in that scope.
|
||||
/// Use when checking a scope, or needing to provide a type for an arbitrary expression in the
|
||||
@@ -101,7 +102,7 @@ pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Ty
|
||||
TypeInferenceBuilder::new(db, InferenceRegion::Scope(scope), index).finish()
|
||||
}
|
||||
|
||||
/// Cycle recovery for [`infer_definition_types()`]: for now, just [`Type::Unknown`]
|
||||
/// Cycle recovery for [`infer_definition_types()`]: for now, just [`Type::unknown`]
|
||||
/// TODO fixpoint iteration
|
||||
fn infer_definition_types_cycle_recovery<'db>(
|
||||
db: &'db dyn Db,
|
||||
@@ -112,10 +113,10 @@ fn infer_definition_types_cycle_recovery<'db>(
|
||||
let mut inference = TypeInference::empty(input.scope(db));
|
||||
let category = input.category(db);
|
||||
if category.is_declaration() {
|
||||
inference.declarations.insert(input, Type::Unknown);
|
||||
inference.declarations.insert(input, Type::unknown());
|
||||
}
|
||||
if category.is_binding() {
|
||||
inference.bindings.insert(input, Type::Unknown);
|
||||
inference.bindings.insert(input, Type::unknown());
|
||||
}
|
||||
// TODO we don't fill in expression types for the cycle-participant definitions, which can
|
||||
// later cause a panic when looking up an expression type.
|
||||
@@ -310,6 +311,18 @@ enum IntersectionOn {
|
||||
Right,
|
||||
}
|
||||
|
||||
/// A helper to track if we already know that declared and inferred types are the same.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
enum DeclaredAndInferredType<'db> {
|
||||
/// We know that both the declared and inferred types are the same.
|
||||
AreTheSame(Type<'db>),
|
||||
/// Declared and inferred types might be different, we need to check assignability.
|
||||
MightBeDifferent {
|
||||
declared_ty: Type<'db>,
|
||||
inferred_ty: Type<'db>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Builder to infer all types in a region.
|
||||
///
|
||||
/// A builder is used by creating it with [`new()`](TypeInferenceBuilder::new), and then calling
|
||||
@@ -842,7 +855,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let declarations = use_def.declarations_at_binding(binding);
|
||||
let mut bound_ty = ty;
|
||||
let declared_ty = declarations_ty(self.db(), declarations)
|
||||
.map(|s| s.ignore_possibly_unbound().unwrap_or(Type::Unknown))
|
||||
.map(|s| s.ignore_possibly_unbound().unwrap_or(Type::unknown()))
|
||||
.unwrap_or_else(|(ty, conflicting)| {
|
||||
// TODO point out the conflicting declarations in the diagnostic?
|
||||
let symbol_table = self.index.symbol_table(binding.file_scope(self.db()));
|
||||
@@ -886,7 +899,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
inferred_ty.display(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
};
|
||||
self.types.declarations.insert(declaration, ty);
|
||||
}
|
||||
@@ -895,20 +908,28 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
&mut self,
|
||||
node: AnyNodeRef,
|
||||
definition: Definition<'db>,
|
||||
declared_ty: Type<'db>,
|
||||
inferred_ty: Type<'db>,
|
||||
declared_and_inferred_ty: &DeclaredAndInferredType<'db>,
|
||||
) {
|
||||
debug_assert!(definition.is_binding(self.db()));
|
||||
debug_assert!(definition.is_declaration(self.db()));
|
||||
let inferred_ty = if inferred_ty.is_assignable_to(self.db(), declared_ty) {
|
||||
inferred_ty
|
||||
} else {
|
||||
report_invalid_assignment(&self.context, node, declared_ty, inferred_ty);
|
||||
// if the assignment is invalid, fall back to assuming the annotation is correct
|
||||
declared_ty
|
||||
|
||||
let (declared_ty, inferred_ty) = match declared_and_inferred_ty {
|
||||
DeclaredAndInferredType::AreTheSame(ty) => (ty, ty),
|
||||
DeclaredAndInferredType::MightBeDifferent {
|
||||
declared_ty,
|
||||
inferred_ty,
|
||||
} => {
|
||||
if inferred_ty.is_assignable_to(self.db(), *declared_ty) {
|
||||
(declared_ty, inferred_ty)
|
||||
} else {
|
||||
report_invalid_assignment(&self.context, node, *declared_ty, *inferred_ty);
|
||||
// if the assignment is invalid, fall back to assuming the annotation is correct
|
||||
(declared_ty, declared_ty)
|
||||
}
|
||||
}
|
||||
};
|
||||
self.types.declarations.insert(definition, declared_ty);
|
||||
self.types.bindings.insert(definition, inferred_ty);
|
||||
self.types.declarations.insert(definition, *declared_ty);
|
||||
self.types.bindings.insert(definition, *inferred_ty);
|
||||
}
|
||||
|
||||
fn add_unknown_declaration_with_binding(
|
||||
@@ -916,7 +937,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
node: AnyNodeRef,
|
||||
definition: Definition<'db>,
|
||||
) {
|
||||
self.add_declaration_with_binding(node, definition, Type::Unknown, Type::Unknown);
|
||||
self.add_declaration_with_binding(
|
||||
node,
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(Type::unknown()),
|
||||
);
|
||||
}
|
||||
|
||||
fn infer_module(&mut self, module: &ast::ModModule) {
|
||||
@@ -932,7 +957,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_type_parameters(type_params);
|
||||
|
||||
if let Some(arguments) = class.arguments.as_deref() {
|
||||
self.infer_arguments(arguments, false);
|
||||
self.infer_arguments(arguments, ParameterExpectations::default());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1097,7 +1122,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
decorator_tys.into_boxed_slice(),
|
||||
));
|
||||
|
||||
self.add_declaration_with_binding(function.into(), definition, function_ty, function_ty);
|
||||
self.add_declaration_with_binding(
|
||||
function.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(function_ty),
|
||||
);
|
||||
}
|
||||
|
||||
fn infer_parameters(&mut self, parameters: &ast::Parameters) {
|
||||
@@ -1188,15 +1217,18 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.map(|default| self.file_expression_ty(default));
|
||||
if let Some(annotation) = parameter.annotation.as_ref() {
|
||||
let declared_ty = self.file_expression_ty(annotation);
|
||||
let inferred_ty = if let Some(default_ty) = default_ty {
|
||||
let declared_and_inferred_ty = if let Some(default_ty) = default_ty {
|
||||
if default_ty.is_assignable_to(self.db(), declared_ty) {
|
||||
UnionType::from_elements(self.db(), [declared_ty, default_ty])
|
||||
DeclaredAndInferredType::MightBeDifferent {
|
||||
declared_ty,
|
||||
inferred_ty: UnionType::from_elements(self.db(), [declared_ty, default_ty]),
|
||||
}
|
||||
} else if self.in_stub()
|
||||
&& default
|
||||
.as_ref()
|
||||
.is_some_and(|d| d.is_ellipsis_literal_expr())
|
||||
{
|
||||
declared_ty
|
||||
DeclaredAndInferredType::AreTheSame(declared_ty)
|
||||
} else {
|
||||
self.context.report_lint(
|
||||
&INVALID_PARAMETER_DEFAULT,
|
||||
@@ -1205,22 +1237,21 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
"Default value of type `{}` is not assignable to annotated parameter type `{}`",
|
||||
default_ty.display(self.db()), declared_ty.display(self.db())),
|
||||
);
|
||||
declared_ty
|
||||
DeclaredAndInferredType::AreTheSame(declared_ty)
|
||||
}
|
||||
} else {
|
||||
declared_ty
|
||||
DeclaredAndInferredType::AreTheSame(declared_ty)
|
||||
};
|
||||
self.add_declaration_with_binding(
|
||||
parameter.into(),
|
||||
definition,
|
||||
declared_ty,
|
||||
inferred_ty,
|
||||
&declared_and_inferred_ty,
|
||||
);
|
||||
} else {
|
||||
let ty = if let Some(default_ty) = default_ty {
|
||||
UnionType::from_elements(self.db(), [Type::Unknown, default_ty])
|
||||
UnionType::from_elements(self.db(), [Type::unknown(), default_ty])
|
||||
} else {
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
};
|
||||
self.add_binding(parameter.into(), definition, ty);
|
||||
}
|
||||
@@ -1240,7 +1271,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let _annotated_ty = self.file_expression_ty(annotation);
|
||||
// TODO `tuple[annotated_ty, ...]`
|
||||
let ty = KnownClass::Tuple.to_instance(self.db());
|
||||
self.add_declaration_with_binding(parameter.into(), definition, ty, ty);
|
||||
self.add_declaration_with_binding(
|
||||
parameter.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(ty),
|
||||
);
|
||||
} else {
|
||||
self.add_binding(
|
||||
parameter.into(),
|
||||
@@ -1265,7 +1300,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let _annotated_ty = self.file_expression_ty(annotation);
|
||||
// TODO `dict[str, annotated_ty]`
|
||||
let ty = KnownClass::Dict.to_instance(self.db());
|
||||
self.add_declaration_with_binding(parameter.into(), definition, ty, ty);
|
||||
self.add_declaration_with_binding(
|
||||
parameter.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(ty),
|
||||
);
|
||||
} else {
|
||||
self.add_binding(
|
||||
parameter.into(),
|
||||
@@ -1308,7 +1347,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let class = Class::new(self.db(), &name.id, body_scope, maybe_known_class);
|
||||
let class_ty = Type::class_literal(class);
|
||||
|
||||
self.add_declaration_with_binding(class_node.into(), definition, class_ty, class_ty);
|
||||
self.add_declaration_with_binding(
|
||||
class_node.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(class_ty),
|
||||
);
|
||||
|
||||
// if there are type parameters, then the keywords and bases are within that scope
|
||||
// and we don't need to run inference here
|
||||
@@ -1365,8 +1408,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.add_declaration_with_binding(
|
||||
type_alias.into(),
|
||||
definition,
|
||||
type_alias_ty,
|
||||
type_alias_ty,
|
||||
&DeclaredAndInferredType::AreTheSame(type_alias_ty),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1487,7 +1529,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
/// Infers the type of a context expression (`with expr`) and returns the target's type
|
||||
///
|
||||
/// Returns [`Type::Unknown`] if the context expression doesn't implement the context manager protocol.
|
||||
/// Returns [`Type::unknown`] if the context expression doesn't implement the context manager protocol.
|
||||
///
|
||||
/// ## Terminology
|
||||
/// See [PEP343](https://peps.python.org/pep-0343/#standard-terminology).
|
||||
@@ -1518,7 +1560,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
context_expression_ty.display(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
(Symbol::Unbound, _) => {
|
||||
self.context.report_lint(
|
||||
@@ -1529,7 +1571,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
context_expression_ty.display(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
(Symbol::Type(enter_ty, enter_boundness), exit) => {
|
||||
if enter_boundness == Boundness::PossiblyUnbound {
|
||||
@@ -1622,7 +1664,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
// If there is no handled exception, it's invalid syntax;
|
||||
// a diagnostic will have already been emitted
|
||||
let node_ty = node.map_or(Type::Unknown, |ty| self.infer_expression(ty));
|
||||
let node_ty = node.map_or(Type::unknown(), |ty| self.infer_expression(ty));
|
||||
|
||||
// If it's an `except*` handler, this won't actually be the type of the bound symbol;
|
||||
// it will actually be the type of the generic parameters to `BaseExceptionGroup` or `ExceptionGroup`.
|
||||
@@ -1637,7 +1679,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
if let Some(node) = node {
|
||||
report_invalid_exception_caught(&self.context, node, element);
|
||||
}
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -1652,7 +1694,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
if let Some(node) = node {
|
||||
report_invalid_exception_caught(&self.context, node, node_ty);
|
||||
}
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1718,7 +1760,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
bound_or_constraint,
|
||||
default_ty,
|
||||
)));
|
||||
self.add_declaration_with_binding(node.into(), definition, ty, ty);
|
||||
self.add_declaration_with_binding(
|
||||
node.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(ty),
|
||||
);
|
||||
}
|
||||
|
||||
fn infer_paramspec_definition(
|
||||
@@ -1733,7 +1779,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
} = node;
|
||||
self.infer_optional_expression(default.as_deref());
|
||||
let pep_695_todo = todo_type!("PEP-695 ParamSpec definition types");
|
||||
self.add_declaration_with_binding(node.into(), definition, pep_695_todo, pep_695_todo);
|
||||
self.add_declaration_with_binding(
|
||||
node.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(pep_695_todo),
|
||||
);
|
||||
}
|
||||
|
||||
fn infer_typevartuple_definition(
|
||||
@@ -1748,7 +1798,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
} = node;
|
||||
self.infer_optional_expression(default.as_deref());
|
||||
let pep_695_todo = todo_type!("PEP-695 TypeVarTuple definition types");
|
||||
self.add_declaration_with_binding(node.into(), definition, pep_695_todo, pep_695_todo);
|
||||
self.add_declaration_with_binding(
|
||||
node.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(pep_695_todo),
|
||||
);
|
||||
}
|
||||
|
||||
fn infer_match_statement(&mut self, match_statement: &ast::StmtMatch) {
|
||||
@@ -1953,11 +2007,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
}
|
||||
|
||||
let name_ast_id = name.scoped_expression_id(self.db(), self.scope());
|
||||
unpacked.get(name_ast_id).unwrap_or(Type::Unknown)
|
||||
unpacked.get(name_ast_id).unwrap_or(Type::unknown())
|
||||
}
|
||||
TargetKind::Name => {
|
||||
if self.in_stub() && value.is_ellipsis_literal_expr() {
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
} else {
|
||||
value_ty
|
||||
}
|
||||
@@ -2006,13 +2060,13 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
simple: _,
|
||||
} = assignment;
|
||||
|
||||
let mut annotation_ty = self.infer_annotation_expression(
|
||||
let mut declared_ty = self.infer_annotation_expression(
|
||||
annotation,
|
||||
DeferredExpressionState::from(self.are_all_types_deferred()),
|
||||
);
|
||||
|
||||
// Handle various singletons.
|
||||
if let Type::Instance(InstanceType { class }) = annotation_ty {
|
||||
if let Type::Instance(InstanceType { class }) = declared_ty {
|
||||
if class.is_known(self.db(), KnownClass::SpecialForm) {
|
||||
if let Some(name_expr) = target.as_name_expr() {
|
||||
if let Some(known_instance) = KnownInstanceType::try_from_file_and_name(
|
||||
@@ -2020,27 +2074,29 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.file(),
|
||||
&name_expr.id,
|
||||
) {
|
||||
annotation_ty = Type::KnownInstance(known_instance);
|
||||
declared_ty = Type::KnownInstance(known_instance);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(value) = value.as_deref() {
|
||||
let value_ty = self.infer_expression(value);
|
||||
let value_ty = if self.in_stub() && value.is_ellipsis_literal_expr() {
|
||||
annotation_ty
|
||||
let inferred_ty = self.infer_expression(value);
|
||||
let inferred_ty = if self.in_stub() && value.is_ellipsis_literal_expr() {
|
||||
declared_ty
|
||||
} else {
|
||||
value_ty
|
||||
inferred_ty
|
||||
};
|
||||
self.add_declaration_with_binding(
|
||||
assignment.into(),
|
||||
definition,
|
||||
annotation_ty,
|
||||
value_ty,
|
||||
&DeclaredAndInferredType::MightBeDifferent {
|
||||
declared_ty,
|
||||
inferred_ty,
|
||||
},
|
||||
);
|
||||
} else {
|
||||
self.add_declaration(assignment.into(), definition, annotation_ty);
|
||||
self.add_declaration(assignment.into(), definition, declared_ty);
|
||||
}
|
||||
|
||||
self.infer_expression(target);
|
||||
@@ -2113,7 +2169,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
right_ty.display(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
});
|
||||
|
||||
UnionType::from_elements(
|
||||
@@ -2142,7 +2198,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
right_ty.display(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -2219,7 +2275,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.context.extend(unpacked);
|
||||
}
|
||||
let name_ast_id = name.scoped_expression_id(self.db(), self.scope());
|
||||
unpacked.get(name_ast_id).unwrap_or(Type::Unknown)
|
||||
unpacked.get(name_ast_id).unwrap_or(Type::unknown())
|
||||
}
|
||||
TargetKind::Name => iterable_ty
|
||||
.iterate(self.db())
|
||||
@@ -2294,7 +2350,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
full_module_ty
|
||||
};
|
||||
|
||||
self.add_declaration_with_binding(alias.into(), definition, binding_ty, binding_ty);
|
||||
self.add_declaration_with_binding(
|
||||
alias.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(binding_ty),
|
||||
);
|
||||
}
|
||||
|
||||
fn infer_import_from_statement(&mut self, import: &ast::StmtImportFrom) {
|
||||
@@ -2470,7 +2530,11 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
format_args!("Member `{name}` of module `{module_name}` is possibly unbound",),
|
||||
);
|
||||
}
|
||||
self.add_declaration_with_binding(alias.into(), definition, ty, ty);
|
||||
self.add_declaration_with_binding(
|
||||
alias.into(),
|
||||
definition,
|
||||
&DeclaredAndInferredType::AreTheSame(ty),
|
||||
);
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -2496,8 +2560,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.add_declaration_with_binding(
|
||||
alias.into(),
|
||||
definition,
|
||||
submodule_ty,
|
||||
submodule_ty,
|
||||
&DeclaredAndInferredType::AreTheSame(submodule_ty),
|
||||
);
|
||||
return;
|
||||
}
|
||||
@@ -2539,17 +2602,17 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
fn infer_arguments<'a>(
|
||||
&mut self,
|
||||
arguments: &'a ast::Arguments,
|
||||
infer_as_type_expressions: bool,
|
||||
parameter_expectations: ParameterExpectations,
|
||||
) -> CallArguments<'a, 'db> {
|
||||
let infer_argument_type = if infer_as_type_expressions {
|
||||
Self::infer_type_expression
|
||||
} else {
|
||||
Self::infer_expression
|
||||
};
|
||||
|
||||
arguments
|
||||
.arguments_source_order()
|
||||
.map(|arg_or_keyword| {
|
||||
.enumerate()
|
||||
.map(|(index, arg_or_keyword)| {
|
||||
let infer_argument_type = match parameter_expectations.expectation_at_index(index) {
|
||||
ParameterExpectation::TypeExpression => Self::infer_type_expression,
|
||||
ParameterExpectation::ValueExpression => Self::infer_expression,
|
||||
};
|
||||
|
||||
match arg_or_keyword {
|
||||
ast::ArgOrKeyword::Arg(arg) => match arg {
|
||||
ast::Expr::Starred(ast::ExprStarred {
|
||||
@@ -2672,7 +2735,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
ast::Number::Float(_) => KnownClass::Float.to_instance(self.db()),
|
||||
ast::Number::Complex { .. } => builtins_symbol(self.db(), "complex")
|
||||
.ignore_possibly_unbound()
|
||||
.unwrap_or(Type::Unknown)
|
||||
.unwrap_or(Type::unknown())
|
||||
.to_instance(self.db()),
|
||||
}
|
||||
}
|
||||
@@ -2759,7 +2822,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
) -> Type<'db> {
|
||||
builtins_symbol(self.db(), "Ellipsis")
|
||||
.ignore_possibly_unbound()
|
||||
.unwrap_or(Type::Unknown)
|
||||
.unwrap_or(Type::unknown())
|
||||
}
|
||||
|
||||
fn infer_tuple_expression(&mut self, tuple: &ast::ExprTuple) -> Type<'db> {
|
||||
@@ -3016,7 +3079,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
// For syntactically invalid targets, we still need to run type inference:
|
||||
self.infer_expression(&named.target);
|
||||
self.infer_expression(&named.value);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3092,12 +3155,13 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
let function_type = self.infer_expression(func);
|
||||
|
||||
let infer_arguments_as_type_expressions = function_type
|
||||
let parameter_expectations = function_type
|
||||
.into_function_literal()
|
||||
.and_then(|f| f.known(self.db()))
|
||||
.is_some_and(KnownFunction::takes_type_expression_arguments);
|
||||
.map(KnownFunction::parameter_expectations)
|
||||
.unwrap_or_default();
|
||||
|
||||
let call_arguments = self.infer_arguments(arguments, infer_arguments_as_type_expressions);
|
||||
let call_arguments = self.infer_arguments(arguments, parameter_expectations);
|
||||
function_type
|
||||
.call(self.db(), &call_arguments)
|
||||
.unwrap_with_diagnostic(&self.context, call_expression.into())
|
||||
@@ -3267,7 +3331,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
}
|
||||
Symbol::Unbound => {
|
||||
report_unresolved_reference(&self.context, name);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
Symbol::Type(_, Boundness::Bound) => unreachable!("Handled above"),
|
||||
},
|
||||
@@ -3279,7 +3343,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
match name.ctx {
|
||||
ExprContext::Load => self.infer_name_load(name),
|
||||
ExprContext::Store | ExprContext::Del => Type::Never,
|
||||
ExprContext::Invalid => Type::Unknown,
|
||||
ExprContext::Invalid => Type::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3319,7 +3383,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
attr.id
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3340,7 +3404,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
}
|
||||
ExprContext::Invalid => {
|
||||
self.infer_expression(value);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3355,10 +3419,8 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let operand_type = self.infer_expression(operand);
|
||||
|
||||
match (op, operand_type) {
|
||||
(_, Type::Any) => Type::Any,
|
||||
(_, Type::Todo(_)) => operand_type,
|
||||
(_, Type::Dynamic(_)) => operand_type,
|
||||
(_, Type::Never) => Type::Never,
|
||||
(_, Type::Unknown) => Type::Unknown,
|
||||
|
||||
(ast::UnaryOp::UAdd, Type::IntLiteral(value)) => Type::IntLiteral(value),
|
||||
(ast::UnaryOp::USub, Type::IntLiteral(value)) => Type::IntLiteral(-value),
|
||||
@@ -3428,7 +3490,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
),
|
||||
);
|
||||
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3468,7 +3530,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
right_ty.display(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -3479,12 +3541,15 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
op: ast::Operator,
|
||||
) -> Option<Type<'db>> {
|
||||
match (left_ty, right_ty, op) {
|
||||
// When interacting with Todo, Any and Unknown should propagate (as if we fix this
|
||||
// `Todo` in the future, the result would then become Any or Unknown, respectively.)
|
||||
(Type::Any, _, _) | (_, Type::Any, _) => Some(Type::Any),
|
||||
(todo @ Type::Todo(_), _, _) | (_, todo @ Type::Todo(_), _) => Some(todo),
|
||||
// Non-todo Anys take precedence over Todos (as if we fix this `Todo` in the future,
|
||||
// the result would then become Any or Unknown, respectively).
|
||||
(any @ Type::Dynamic(DynamicType::Any), _, _)
|
||||
| (_, any @ Type::Dynamic(DynamicType::Any), _) => Some(any),
|
||||
(unknown @ Type::Dynamic(DynamicType::Unknown), _, _)
|
||||
| (_, unknown @ Type::Dynamic(DynamicType::Unknown), _) => Some(unknown),
|
||||
(todo @ Type::Dynamic(DynamicType::Todo(_)), _, _)
|
||||
| (_, todo @ Type::Dynamic(DynamicType::Todo(_)), _) => Some(todo),
|
||||
(Type::Never, _, _) | (_, Type::Never, _) => Some(Type::Never),
|
||||
(Type::Unknown, _, _) | (_, Type::Unknown, _) => Some(Type::Unknown),
|
||||
|
||||
(Type::IntLiteral(n), Type::IntLiteral(m), ast::Operator::Add) => Some(
|
||||
n.checked_add(m)
|
||||
@@ -3827,7 +3892,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
| ast::CmpOp::Is
|
||||
| ast::CmpOp::IsNot => KnownClass::Bool.to_instance(self.db()),
|
||||
// Other operators can return arbitrary types
|
||||
_ => Type::Unknown,
|
||||
_ => Type::unknown(),
|
||||
}
|
||||
})
|
||||
}),
|
||||
@@ -4151,7 +4216,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
).expect("infer_binary_type_comparison should never return None for `CmpOp::Eq`");
|
||||
|
||||
match eq_result {
|
||||
todo @ Type::Todo(_) => return Ok(todo),
|
||||
todo @ Type::Dynamic(DynamicType::Todo(_)) => return Ok(todo),
|
||||
ty => match ty.bool(self.db()) {
|
||||
Truthiness::AlwaysTrue => eq_count += 1,
|
||||
Truthiness::AlwaysFalse => not_eq_count += 1,
|
||||
@@ -4176,7 +4241,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
);
|
||||
|
||||
Ok(match eq_result {
|
||||
todo @ Type::Todo(_) => todo,
|
||||
todo @ Type::Dynamic(DynamicType::Todo(_)) => todo,
|
||||
ty => match ty.bool(self.db()) {
|
||||
Truthiness::AlwaysFalse => Type::BooleanLiteral(op.is_is_not()),
|
||||
_ => KnownClass::Bool.to_instance(self.db()),
|
||||
@@ -4258,7 +4323,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
match pairwise_eq_result {
|
||||
// If propagation is required, return the result as is
|
||||
todo @ Type::Todo(_) => return Ok(todo),
|
||||
todo @ Type::Dynamic(DynamicType::Todo(_)) => return Ok(todo),
|
||||
ty => match ty.bool(self.db()) {
|
||||
// - AlwaysTrue : Continue to the next pair for lexicographic comparison
|
||||
Truthiness::AlwaysTrue => continue,
|
||||
@@ -4357,7 +4422,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
elements.len(),
|
||||
int,
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
})
|
||||
}
|
||||
// Ex) Given `("a", 1, Null)[0:2]`, return `("a", 1)`
|
||||
@@ -4369,7 +4434,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
TupleType::from_elements(self.db(), new_elements)
|
||||
} else {
|
||||
report_slice_step_size_zero(&self.context, value_node.into());
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
// Ex) Given `"value"[1]`, return `"a"`
|
||||
@@ -4390,7 +4455,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
literal_value.chars().count(),
|
||||
int,
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
})
|
||||
}
|
||||
// Ex) Given `"value"[1:3]`, return `"al"`
|
||||
@@ -4404,7 +4469,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
Type::string_literal(self.db(), &literal)
|
||||
} else {
|
||||
report_slice_step_size_zero(&self.context, value_node.into());
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
};
|
||||
result
|
||||
}
|
||||
@@ -4426,7 +4491,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
literal_value.len(),
|
||||
int,
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
})
|
||||
}
|
||||
// Ex) Given `b"value"[1:3]`, return `b"al"`
|
||||
@@ -4439,7 +4504,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
Type::bytes_literal(self.db(), &new_bytes)
|
||||
} else {
|
||||
report_slice_step_size_zero(&self.context, value_node.into());
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
// Ex) Given `"value"[True]`, return `"a"`
|
||||
@@ -4555,7 +4620,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
);
|
||||
}
|
||||
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4670,7 +4735,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
bytes.into(),
|
||||
format_args!("Type expressions cannot use bytes literal"),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
|
||||
ast::Expr::FString(fstring) => {
|
||||
@@ -4680,7 +4745,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
format_args!("Type expressions cannot use f-strings"),
|
||||
);
|
||||
self.infer_fstring_expression(fstring);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
|
||||
// All other annotation expressions are (possibly) valid type expressions, so handle
|
||||
@@ -4703,7 +4768,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
DeferredExpressionState::InStringAnnotation,
|
||||
)
|
||||
}
|
||||
None => Type::Unknown,
|
||||
None => Type::unknown(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4751,7 +4816,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.infer_name_expression(name)
|
||||
.in_type_expression(self.db())
|
||||
.unwrap_or_else(|error| error.into_fallback_type(&self.context, expression)),
|
||||
ast::ExprContext::Invalid => Type::Unknown,
|
||||
ast::ExprContext::Invalid => Type::unknown(),
|
||||
ast::ExprContext::Store | ast::ExprContext::Del => todo_type!(),
|
||||
},
|
||||
|
||||
@@ -4760,7 +4825,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.infer_attribute_expression(attribute_expression)
|
||||
.in_type_expression(self.db())
|
||||
.unwrap_or_else(|error| error.into_fallback_type(&self.context, expression)),
|
||||
ast::ExprContext::Invalid => Type::Unknown,
|
||||
ast::ExprContext::Invalid => Type::unknown(),
|
||||
ast::ExprContext::Store | ast::ExprContext::Del => todo_type!(),
|
||||
},
|
||||
|
||||
@@ -4813,7 +4878,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
// anything else is an invalid annotation:
|
||||
_ => {
|
||||
self.infer_binary_expression(binary);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4826,90 +4891,90 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
// Avoid inferring the types of invalid type expressions that have been parsed from a
|
||||
// string annotation, as they are not present in the semantic index.
|
||||
_ if self.deferred_state.in_string_annotation() => Type::Unknown,
|
||||
_ if self.deferred_state.in_string_annotation() => Type::unknown(),
|
||||
|
||||
// Forms which are invalid in the context of annotation expressions: we infer their
|
||||
// nested expressions as normal expressions, but the type of the top-level expression is
|
||||
// always `Type::Unknown` in these cases.
|
||||
// always `Type::unknown` in these cases.
|
||||
ast::Expr::BoolOp(bool_op) => {
|
||||
self.infer_boolean_expression(bool_op);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Named(named) => {
|
||||
self.infer_named_expression(named);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::UnaryOp(unary) => {
|
||||
self.infer_unary_expression(unary);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Lambda(lambda_expression) => {
|
||||
self.infer_lambda_expression(lambda_expression);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::If(if_expression) => {
|
||||
self.infer_if_expression(if_expression);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Dict(dict) => {
|
||||
self.infer_dict_expression(dict);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Set(set) => {
|
||||
self.infer_set_expression(set);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::ListComp(listcomp) => {
|
||||
self.infer_list_comprehension_expression(listcomp);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::SetComp(setcomp) => {
|
||||
self.infer_set_comprehension_expression(setcomp);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::DictComp(dictcomp) => {
|
||||
self.infer_dict_comprehension_expression(dictcomp);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Generator(generator) => {
|
||||
self.infer_generator_expression(generator);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Await(await_expression) => {
|
||||
self.infer_await_expression(await_expression);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Yield(yield_expression) => {
|
||||
self.infer_yield_expression(yield_expression);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::YieldFrom(yield_from) => {
|
||||
self.infer_yield_from_expression(yield_from);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Compare(compare) => {
|
||||
self.infer_compare_expression(compare);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Call(call_expr) => {
|
||||
self.infer_call_expression(call_expr);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::FString(fstring) => {
|
||||
self.infer_fstring_expression(fstring);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::List(list) => {
|
||||
self.infer_list_expression(list);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Tuple(tuple) => {
|
||||
self.infer_tuple_expression(tuple);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Slice(slice) => {
|
||||
self.infer_slice_expression(slice);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::IpyEscapeCommand(_) => todo!("Implement Ipy escape command support"),
|
||||
}
|
||||
@@ -4925,7 +4990,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
DeferredExpressionState::InStringAnnotation,
|
||||
)
|
||||
}
|
||||
None => Type::Unknown,
|
||||
None => Type::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5001,6 +5066,9 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
Type::KnownInstance(KnownInstanceType::Any) => {
|
||||
SubclassOfType::subclass_of_any()
|
||||
}
|
||||
Type::KnownInstance(KnownInstanceType::Unknown) => {
|
||||
SubclassOfType::subclass_of_unknown()
|
||||
}
|
||||
_ => todo_type!("unsupported type[X] special form"),
|
||||
}
|
||||
}
|
||||
@@ -5023,7 +5091,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
slice.into(),
|
||||
format_args!("type[...] must have exactly one type argument"),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
ast::Expr::Subscript(ast::ExprSubscript {
|
||||
value,
|
||||
@@ -5076,7 +5144,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
Type::KnownInstance(known_instance) => {
|
||||
self.infer_parameterized_known_instance_type_expression(subscript, known_instance)
|
||||
}
|
||||
Type::Todo(_) => {
|
||||
Type::Dynamic(DynamicType::Todo(_)) => {
|
||||
self.infer_type_expression(slice);
|
||||
value_ty
|
||||
}
|
||||
@@ -5125,7 +5193,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
let [type_expr, metadata @ ..] = &arguments[..] else {
|
||||
self.infer_type_expression(arguments_slice);
|
||||
return Type::Unknown;
|
||||
return Type::unknown();
|
||||
};
|
||||
|
||||
for element in metadata {
|
||||
@@ -5150,7 +5218,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
),
|
||||
);
|
||||
}
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5193,7 +5261,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
known_instance.repr(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
_ => {
|
||||
let argument_type = self.infer_type_expression(arguments_slice);
|
||||
@@ -5222,7 +5290,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
known_instance.repr(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
_ => {
|
||||
// NB: This calls `infer_expression` instead of `infer_type_expression`.
|
||||
@@ -5314,7 +5382,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
known_instance.repr(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
KnownInstanceType::TypingSelf
|
||||
| KnownInstanceType::TypeAlias
|
||||
@@ -5327,7 +5395,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
known_instance.repr(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
KnownInstanceType::LiteralString => {
|
||||
self.context.report_lint(
|
||||
@@ -5338,7 +5406,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
known_instance.repr(self.db())
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
Type::unknown()
|
||||
}
|
||||
KnownInstanceType::Type => self.infer_subclass_of_type_expression(arguments_slice),
|
||||
KnownInstanceType::Tuple => self.infer_tuple_type_expression(arguments_slice),
|
||||
@@ -5361,7 +5429,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.store_expression_type(parameters, ty);
|
||||
ty
|
||||
} else {
|
||||
self.store_expression_type(parameters, Type::Unknown);
|
||||
self.store_expression_type(parameters, Type::unknown());
|
||||
|
||||
return Err(vec![parameters]);
|
||||
}
|
||||
@@ -5388,7 +5456,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
union_type
|
||||
} else {
|
||||
self.store_expression_type(parameters, Type::Unknown);
|
||||
self.store_expression_type(parameters, Type::unknown());
|
||||
|
||||
return Err(errors);
|
||||
}
|
||||
@@ -5408,7 +5476,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
value_ty
|
||||
.member(self.db(), &attr.id)
|
||||
.ignore_possibly_unbound()
|
||||
.unwrap_or(Type::Unknown)
|
||||
.unwrap_or(Type::unknown())
|
||||
}
|
||||
// for negative and positive numbers
|
||||
ast::Expr::UnaryOp(ref u)
|
||||
@@ -5715,7 +5783,7 @@ fn perform_membership_test_comparison<'db>(
|
||||
|
||||
compare_result_opt
|
||||
.map(|ty| {
|
||||
if matches!(ty, Type::Todo(_)) {
|
||||
if matches!(ty, Type::Dynamic(DynamicType::Todo(_))) {
|
||||
return ty;
|
||||
}
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ impl<'db> Mro<'db> {
|
||||
pub(super) fn from_error(db: &'db dyn Db, class: Class<'db>) -> Self {
|
||||
Self::from([
|
||||
ClassBase::Class(class),
|
||||
ClassBase::Unknown,
|
||||
ClassBase::unknown(),
|
||||
ClassBase::object(db),
|
||||
])
|
||||
}
|
||||
|
||||
@@ -22,9 +22,7 @@ impl<'db> SubclassOfType<'db> {
|
||||
pub(crate) fn from(db: &'db dyn Db, subclass_of: impl Into<ClassBase<'db>>) -> Type<'db> {
|
||||
let subclass_of = subclass_of.into();
|
||||
match subclass_of {
|
||||
ClassBase::Any | ClassBase::Unknown | ClassBase::Todo(_) => {
|
||||
Type::SubclassOf(Self { subclass_of })
|
||||
}
|
||||
ClassBase::Dynamic(_) => Type::SubclassOf(Self { subclass_of }),
|
||||
ClassBase::Class(class) => {
|
||||
if class.is_final(db) {
|
||||
Type::ClassLiteral(ClassLiteralType { class })
|
||||
@@ -40,14 +38,14 @@ impl<'db> SubclassOfType<'db> {
|
||||
/// Return a [`Type`] instance representing the type `type[Unknown]`.
|
||||
pub(crate) const fn subclass_of_unknown() -> Type<'db> {
|
||||
Type::SubclassOf(SubclassOfType {
|
||||
subclass_of: ClassBase::Unknown,
|
||||
subclass_of: ClassBase::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Return a [`Type`] instance representing the type `type[Any]`.
|
||||
pub(crate) const fn subclass_of_any() -> Type<'db> {
|
||||
Type::SubclassOf(SubclassOfType {
|
||||
subclass_of: ClassBase::Any,
|
||||
subclass_of: ClassBase::any(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -77,8 +75,7 @@ impl<'db> SubclassOfType<'db> {
|
||||
pub(crate) fn is_subtype_of(self, db: &'db dyn Db, other: SubclassOfType<'db>) -> bool {
|
||||
match (self.subclass_of, other.subclass_of) {
|
||||
// Non-fully-static types do not participate in subtyping
|
||||
(ClassBase::Any | ClassBase::Unknown | ClassBase::Todo(_), _)
|
||||
| (_, ClassBase::Any | ClassBase::Unknown | ClassBase::Todo(_)) => false,
|
||||
(ClassBase::Dynamic(_), _) | (_, ClassBase::Dynamic(_)) => false,
|
||||
|
||||
// For example, `type[bool]` describes all possible runtime subclasses of the class `bool`,
|
||||
// and `type[int]` describes all possible runtime subclasses of the class `int`.
|
||||
|
||||
@@ -52,7 +52,7 @@ impl<'db> Unpacker<'db> {
|
||||
.node_ref(self.db())
|
||||
.is_ellipsis_literal_expr()
|
||||
{
|
||||
value_ty = Type::Unknown;
|
||||
value_ty = Type::unknown();
|
||||
}
|
||||
if value.is_iterable() {
|
||||
// If the value is an iterable, then the type that needs to be unpacked is the iterator
|
||||
@@ -164,7 +164,7 @@ impl<'db> Unpacker<'db> {
|
||||
for (index, element) in elts.iter().enumerate() {
|
||||
// SAFETY: `target_types` is initialized with the same length as `elts`.
|
||||
let element_ty = match target_types[index].as_slice() {
|
||||
[] => Type::Unknown,
|
||||
[] => Type::unknown(),
|
||||
types => UnionType::from_elements(self.db(), types),
|
||||
};
|
||||
self.unpack_inner(element, element_ty);
|
||||
@@ -241,7 +241,7 @@ impl<'db> Unpacker<'db> {
|
||||
|
||||
// Subtract 1 to insert the starred expression type at the correct
|
||||
// index.
|
||||
element_types.resize(targets.len() - 1, Type::Unknown);
|
||||
element_types.resize(targets.len() - 1, Type::unknown());
|
||||
// TODO: This should be `list[Unknown]`
|
||||
element_types.insert(starred_index, todo_type!("starred unpacking"));
|
||||
|
||||
|
||||
@@ -74,7 +74,7 @@ impl Index {
|
||||
DocumentKey::NotebookCell(url)
|
||||
} else if Path::new(url.path())
|
||||
.extension()
|
||||
.map_or(false, |ext| ext.eq_ignore_ascii_case("ipynb"))
|
||||
.is_some_and(|ext| ext.eq_ignore_ascii_case("ipynb"))
|
||||
{
|
||||
DocumentKey::Notebook(url)
|
||||
} else {
|
||||
|
||||
@@ -285,7 +285,7 @@ impl Workspace {
|
||||
open_files.contains(&file)
|
||||
} else if let Some(system_path) = file.path(db).as_system_path() {
|
||||
self.package(db, system_path)
|
||||
.map_or(false, |package| package.contains_file(db, file))
|
||||
.is_some_and(|package| package.contains_file(db, file))
|
||||
} else {
|
||||
file.path(db).is_system_virtual_path()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
publish = true
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -959,7 +959,7 @@ A `--config` flag must either be a path to a `.toml` configuration file
|
||||
// We want to display the most helpful error to the user as possible.
|
||||
if Path::new(value)
|
||||
.extension()
|
||||
.map_or(false, |ext| ext.eq_ignore_ascii_case("toml"))
|
||||
.is_some_and(|ext| ext.eq_ignore_ascii_case("toml"))
|
||||
{
|
||||
if !value.contains('=') {
|
||||
tip.push_str(&format!(
|
||||
|
||||
@@ -87,13 +87,13 @@ pub trait System: Debug {
|
||||
/// Returns `true` if `path` exists and is a directory.
|
||||
fn is_directory(&self, path: &SystemPath) -> bool {
|
||||
self.path_metadata(path)
|
||||
.map_or(false, |metadata| metadata.file_type.is_directory())
|
||||
.is_ok_and(|metadata| metadata.file_type.is_directory())
|
||||
}
|
||||
|
||||
/// Returns `true` if `path` exists and is a file.
|
||||
fn is_file(&self, path: &SystemPath) -> bool {
|
||||
self.path_metadata(path)
|
||||
.map_or(false, |metadata| metadata.file_type.is_file())
|
||||
.is_ok_and(|metadata| metadata.file_type.is_file())
|
||||
}
|
||||
|
||||
/// Returns the current working directory
|
||||
|
||||
@@ -497,12 +497,7 @@ impl<'a> Printer<'a> {
|
||||
dest: self.state.buffer.text_len(),
|
||||
};
|
||||
|
||||
if self
|
||||
.state
|
||||
.source_markers
|
||||
.last()
|
||||
.map_or(true, |last| last != &marker)
|
||||
{
|
||||
if self.state.source_markers.last() != Some(&marker) {
|
||||
self.state.source_markers.push(marker);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -18,6 +18,59 @@ foo({**foo, **{"bar": True}}) # PIE800
|
||||
|
||||
{**foo, **buzz, **{bar: 10}} # PIE800
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/15366
|
||||
{
|
||||
"data": [],
|
||||
**({"count": 1 if include_count else {}}),
|
||||
}
|
||||
|
||||
{
|
||||
"data": [],
|
||||
**( # Comment
|
||||
{ # Comment
|
||||
"count": 1 if include_count else {}}),
|
||||
}
|
||||
|
||||
{
|
||||
"data": [],
|
||||
**(
|
||||
{
|
||||
"count": (a := 1),}),
|
||||
}
|
||||
|
||||
{
|
||||
"data": [],
|
||||
**(
|
||||
{
|
||||
"count": (a := 1)
|
||||
}
|
||||
)
|
||||
,
|
||||
}
|
||||
|
||||
{
|
||||
"data": [],
|
||||
**(
|
||||
{
|
||||
"count": (a := 1), # Comment
|
||||
} # Comment
|
||||
) # Comment
|
||||
,
|
||||
}
|
||||
|
||||
({
|
||||
"data": [],
|
||||
**( # Comment
|
||||
( # Comment
|
||||
{ # Comment
|
||||
"count": (a := 1), # Comment
|
||||
} # Comment
|
||||
)
|
||||
) # Comment
|
||||
,
|
||||
})
|
||||
|
||||
|
||||
{**foo, "bar": True } # OK
|
||||
|
||||
{"foo": 1, "buzz": {"bar": 1}} # OK
|
||||
|
||||
92
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391.ipynb
vendored
Normal file
92
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391.ipynb
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"True\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"True"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# just a comment in this cell"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# a comment and some newlines\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"1 + 1\n",
|
||||
"# a comment\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"1+1\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
53
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP037_2.pyi
vendored
Normal file
53
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP037_2.pyi
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
# https://github.com/astral-sh/ruff/issues/7102
|
||||
|
||||
def f(a: Foo['SingleLine # Comment']): ...
|
||||
|
||||
|
||||
def f(a: Foo['''Bar[
|
||||
Multi |
|
||||
Line]''']): ...
|
||||
|
||||
|
||||
def f(a: Foo['''Bar[
|
||||
Multi |
|
||||
Line # Comment
|
||||
]''']): ...
|
||||
|
||||
|
||||
def f(a: Foo['''Bar[
|
||||
Multi |
|
||||
Line] # Comment''']): ...
|
||||
|
||||
|
||||
def f(a: Foo['''
|
||||
Bar[
|
||||
Multi |
|
||||
Line] # Comment''']): ...
|
||||
|
||||
|
||||
def f(a: '''list[int]
|
||||
''' = []): ...
|
||||
|
||||
|
||||
a: '''\\
|
||||
list[int]''' = [42]
|
||||
|
||||
|
||||
# TODO: These are valid too. String annotations are assumed to be enclosed in parentheses.
|
||||
# https://github.com/astral-sh/ruff/issues/9467
|
||||
|
||||
def f(a: '''
|
||||
list[int]
|
||||
''' = []): ...
|
||||
|
||||
|
||||
def f(a: Foo['''
|
||||
Bar
|
||||
[
|
||||
Multi |
|
||||
Line
|
||||
] # Comment''']): ...
|
||||
|
||||
|
||||
a: '''list
|
||||
[int]''' = [42]
|
||||
@@ -145,3 +145,19 @@ def special_calls():
|
||||
_NotADynamicClass = type("_NotADynamicClass")
|
||||
|
||||
print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
|
||||
# Do not emit diagnostic if parameter is private
|
||||
# even if it is later shadowed in the body of the function
|
||||
# see https://github.com/astral-sh/ruff/issues/14968
|
||||
class Node:
|
||||
|
||||
connected: list[Node]
|
||||
|
||||
def recurse(self, *, _seen: set[Node] | None = None):
|
||||
if _seen is None:
|
||||
_seen = set()
|
||||
elif self in _seen:
|
||||
return
|
||||
_seen.add(self)
|
||||
for other in self.connected:
|
||||
other.recurse(_seen=_seen)
|
||||
|
||||
@@ -24,7 +24,7 @@ pub(crate) fn bindings(checker: &mut Checker) {
|
||||
return;
|
||||
}
|
||||
|
||||
for binding in &*checker.semantic.bindings {
|
||||
for (binding_id, binding) in checker.semantic.bindings.iter_enumerated() {
|
||||
if checker.enabled(Rule::UnusedVariable) {
|
||||
if binding.kind.is_bound_exception()
|
||||
&& binding.is_unused()
|
||||
@@ -90,7 +90,8 @@ pub(crate) fn bindings(checker: &mut Checker) {
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::UsedDummyVariable) {
|
||||
if let Some(diagnostic) = ruff::rules::used_dummy_variable(checker, binding) {
|
||||
if let Some(diagnostic) = ruff::rules::used_dummy_variable(checker, binding, binding_id)
|
||||
{
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -183,7 +183,11 @@ pub(crate) fn check_tokens(
|
||||
}
|
||||
|
||||
if settings.rules.enabled(Rule::TooManyNewlinesAtEndOfFile) {
|
||||
pycodestyle::rules::too_many_newlines_at_end_of_file(&mut diagnostics, tokens);
|
||||
pycodestyle::rules::too_many_newlines_at_end_of_file(
|
||||
&mut diagnostics,
|
||||
tokens,
|
||||
cell_offsets,
|
||||
);
|
||||
}
|
||||
|
||||
diagnostics.retain(|diagnostic| settings.rules.enabled(diagnostic.kind.rule()));
|
||||
|
||||
@@ -540,7 +540,7 @@ fn is_docstring_section(
|
||||
// The return value of the function.
|
||||
// """
|
||||
// ```
|
||||
if previous_line.map_or(false, |line| line.trim().is_empty()) {
|
||||
if previous_line.is_some_and(|line| line.trim().is_empty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ pub(crate) fn fix_file(
|
||||
diagnostic
|
||||
.fix
|
||||
.as_ref()
|
||||
.map_or(false, |fix| fix.applies(required_applicability))
|
||||
.is_some_and(|fix| fix.applies(required_applicability))
|
||||
})
|
||||
.peekable();
|
||||
|
||||
|
||||
@@ -78,11 +78,7 @@ impl<'a> Directive<'a> {
|
||||
comment_start = text[..comment_start].trim_end().len();
|
||||
|
||||
// The next character has to be the `#` character.
|
||||
if text[..comment_start]
|
||||
.chars()
|
||||
.last()
|
||||
.map_or(true, |c| c != '#')
|
||||
{
|
||||
if !text[..comment_start].ends_with('#') {
|
||||
continue;
|
||||
}
|
||||
comment_start -= '#'.len_utf8();
|
||||
|
||||
@@ -30,33 +30,14 @@ use crate::checkers::ast::Checker;
|
||||
/// ```
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct AssertRaisesException {
|
||||
assertion: AssertionKind,
|
||||
exception: ExceptionKind,
|
||||
}
|
||||
|
||||
impl Violation for AssertRaisesException {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let AssertRaisesException {
|
||||
assertion,
|
||||
exception,
|
||||
} = self;
|
||||
format!("`{assertion}({exception})` should be considered evil")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
enum AssertionKind {
|
||||
AssertRaises,
|
||||
PytestRaises,
|
||||
}
|
||||
|
||||
impl fmt::Display for AssertionKind {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
AssertionKind::AssertRaises => fmt.write_str("assertRaises"),
|
||||
AssertionKind::PytestRaises => fmt.write_str("pytest.raises"),
|
||||
}
|
||||
let AssertRaisesException { exception } = self;
|
||||
format!("Do not assert blind exception: `{exception}`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -107,24 +88,19 @@ pub(crate) fn assert_raises_exception(checker: &mut Checker, items: &[WithItem])
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
let assertion = if matches!(func.as_ref(), Expr::Attribute(ast::ExprAttribute { attr, .. }) if attr == "assertRaises")
|
||||
if !(matches!(func.as_ref(), Expr::Attribute(ast::ExprAttribute { attr, .. }) if attr == "assertRaises")
|
||||
|| semantic
|
||||
.resolve_qualified_name(func)
|
||||
.is_some_and(|qualified_name| {
|
||||
matches!(qualified_name.segments(), ["pytest", "raises"])
|
||||
})
|
||||
&& arguments.find_keyword("match").is_none())
|
||||
{
|
||||
AssertionKind::AssertRaises
|
||||
} else if semantic
|
||||
.resolve_qualified_name(func)
|
||||
.is_some_and(|qualified_name| matches!(qualified_name.segments(), ["pytest", "raises"]))
|
||||
&& arguments.find_keyword("match").is_none()
|
||||
{
|
||||
AssertionKind::PytestRaises
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
AssertRaisesException {
|
||||
assertion,
|
||||
exception,
|
||||
},
|
||||
AssertRaisesException { exception },
|
||||
item.range(),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
source: crates/ruff_linter/src/rules/flake8_bugbear/mod.rs
|
||||
snapshot_kind: text
|
||||
---
|
||||
B017.py:23:14: B017 `assertRaises(Exception)` should be considered evil
|
||||
B017.py:23:14: B017 Do not assert blind exception: `Exception`
|
||||
|
|
||||
21 | class Foobar(unittest.TestCase):
|
||||
22 | def evil_raises(self) -> None:
|
||||
@@ -11,7 +11,7 @@ B017.py:23:14: B017 `assertRaises(Exception)` should be considered evil
|
||||
24 | raise Exception("Evil I say!")
|
||||
|
|
||||
|
||||
B017.py:27:14: B017 `assertRaises(BaseException)` should be considered evil
|
||||
B017.py:27:14: B017 Do not assert blind exception: `BaseException`
|
||||
|
|
||||
26 | def also_evil_raises(self) -> None:
|
||||
27 | with self.assertRaises(BaseException):
|
||||
@@ -19,7 +19,7 @@ B017.py:27:14: B017 `assertRaises(BaseException)` should be considered evil
|
||||
28 | raise Exception("Evil I say!")
|
||||
|
|
||||
|
||||
B017.py:45:10: B017 `pytest.raises(Exception)` should be considered evil
|
||||
B017.py:45:10: B017 Do not assert blind exception: `Exception`
|
||||
|
|
||||
44 | def test_pytest_raises():
|
||||
45 | with pytest.raises(Exception):
|
||||
@@ -27,7 +27,7 @@ B017.py:45:10: B017 `pytest.raises(Exception)` should be considered evil
|
||||
46 | raise ValueError("Hello")
|
||||
|
|
||||
|
||||
B017.py:48:10: B017 `pytest.raises(Exception)` should be considered evil
|
||||
B017.py:48:10: B017 Do not assert blind exception: `Exception`
|
||||
|
|
||||
46 | raise ValueError("Hello")
|
||||
47 |
|
||||
@@ -36,7 +36,7 @@ B017.py:48:10: B017 `pytest.raises(Exception)` should be considered evil
|
||||
49 | raise ValueError("Hello")
|
||||
|
|
||||
|
||||
B017.py:57:36: B017 `pytest.raises(Exception)` should be considered evil
|
||||
B017.py:57:36: B017 Do not assert blind exception: `Exception`
|
||||
|
|
||||
55 | raise ValueError("This is also fine")
|
||||
56 |
|
||||
|
||||
@@ -77,22 +77,54 @@ fn unnecessary_spread_fix(
|
||||
if let Some(last) = dict.iter_values().last() {
|
||||
// Ex) `**{a: 1, b: 2}`
|
||||
let mut edits = vec![];
|
||||
let mut open_parens: u32 = 0;
|
||||
|
||||
for tok in SimpleTokenizer::starts_at(doublestar.end(), locator.contents()).skip_trivia() {
|
||||
match tok.kind() {
|
||||
SimpleTokenKind::LParen => {
|
||||
edits.push(Edit::range_deletion(tok.range()));
|
||||
open_parens += 1;
|
||||
}
|
||||
SimpleTokenKind::LBrace => {
|
||||
edits.push(Edit::range_deletion(tok.range()));
|
||||
break;
|
||||
}
|
||||
_ => {
|
||||
// Unexpected token, bail
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut found_r_curly = false;
|
||||
for tok in SimpleTokenizer::starts_at(last.end(), locator.contents()).skip_trivia() {
|
||||
if found_r_curly && open_parens == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
match tok.kind() {
|
||||
SimpleTokenKind::Comma => {
|
||||
edits.push(Edit::range_deletion(tok.range()));
|
||||
}
|
||||
SimpleTokenKind::RParen => {
|
||||
if found_r_curly {
|
||||
edits.push(Edit::range_deletion(tok.range()));
|
||||
open_parens -= 1;
|
||||
}
|
||||
}
|
||||
SimpleTokenKind::RBrace => {
|
||||
edits.push(Edit::range_deletion(tok.range()));
|
||||
break;
|
||||
found_r_curly = true;
|
||||
}
|
||||
_ => {
|
||||
// Unexpected token, bail
|
||||
return None;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Some(Fix::safe_edits(
|
||||
// Delete the first `**{`
|
||||
Edit::deletion(doublestar.start(), dict.start() + TextSize::from(1)),
|
||||
// Delete the trailing `}`
|
||||
Edit::range_deletion(doublestar.range()),
|
||||
edits,
|
||||
))
|
||||
} else {
|
||||
|
||||
@@ -118,7 +118,7 @@ PIE800.py:19:19: PIE800 [*] Unnecessary spread `**`
|
||||
19 | {**foo, **buzz, **{bar: 10}} # PIE800
|
||||
| ^^^^^^^^^ PIE800
|
||||
20 |
|
||||
21 | {**foo, "bar": True } # OK
|
||||
21 | # https://github.com/astral-sh/ruff/issues/15366
|
||||
|
|
||||
= help: Remove unnecessary dict
|
||||
|
||||
@@ -129,5 +129,175 @@ PIE800.py:19:19: PIE800 [*] Unnecessary spread `**`
|
||||
19 |-{**foo, **buzz, **{bar: 10}} # PIE800
|
||||
19 |+{**foo, **buzz, bar: 10} # PIE800
|
||||
20 20 |
|
||||
21 21 | {**foo, "bar": True } # OK
|
||||
22 22 |
|
||||
21 21 | # https://github.com/astral-sh/ruff/issues/15366
|
||||
22 22 | {
|
||||
|
||||
PIE800.py:24:8: PIE800 [*] Unnecessary spread `**`
|
||||
|
|
||||
22 | {
|
||||
23 | "data": [],
|
||||
24 | **({"count": 1 if include_count else {}}),
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PIE800
|
||||
25 | }
|
||||
|
|
||||
= help: Remove unnecessary dict
|
||||
|
||||
ℹ Safe fix
|
||||
21 21 | # https://github.com/astral-sh/ruff/issues/15366
|
||||
22 22 | {
|
||||
23 23 | "data": [],
|
||||
24 |- **({"count": 1 if include_count else {}}),
|
||||
24 |+ "count": 1 if include_count else {},
|
||||
25 25 | }
|
||||
26 26 |
|
||||
27 27 | {
|
||||
|
||||
PIE800.py:30:9: PIE800 [*] Unnecessary spread `**`
|
||||
|
|
||||
28 | "data": [],
|
||||
29 | **( # Comment
|
||||
30 | { # Comment
|
||||
| _________^
|
||||
31 | | "count": 1 if include_count else {}}),
|
||||
| |________________________________________________^ PIE800
|
||||
32 | }
|
||||
|
|
||||
= help: Remove unnecessary dict
|
||||
|
||||
ℹ Safe fix
|
||||
26 26 |
|
||||
27 27 | {
|
||||
28 28 | "data": [],
|
||||
29 |- **( # Comment
|
||||
30 |- { # Comment
|
||||
31 |- "count": 1 if include_count else {}}),
|
||||
29 |+ # Comment
|
||||
30 |+ # Comment
|
||||
31 |+ "count": 1 if include_count else {},
|
||||
32 32 | }
|
||||
33 33 |
|
||||
34 34 | {
|
||||
|
||||
PIE800.py:37:9: PIE800 [*] Unnecessary spread `**`
|
||||
|
|
||||
35 | "data": [],
|
||||
36 | **(
|
||||
37 | {
|
||||
| _________^
|
||||
38 | | "count": (a := 1),}),
|
||||
| |_______________________________^ PIE800
|
||||
39 | }
|
||||
|
|
||||
= help: Remove unnecessary dict
|
||||
|
||||
ℹ Safe fix
|
||||
33 33 |
|
||||
34 34 | {
|
||||
35 35 | "data": [],
|
||||
36 |- **(
|
||||
37 |- {
|
||||
38 |- "count": (a := 1),}),
|
||||
36 |+
|
||||
37 |+
|
||||
38 |+ "count": (a := 1),
|
||||
39 39 | }
|
||||
40 40 |
|
||||
41 41 | {
|
||||
|
||||
PIE800.py:44:9: PIE800 [*] Unnecessary spread `**`
|
||||
|
|
||||
42 | "data": [],
|
||||
43 | **(
|
||||
44 | {
|
||||
| _________^
|
||||
45 | | "count": (a := 1)
|
||||
46 | | }
|
||||
| |_____________^ PIE800
|
||||
47 | )
|
||||
48 | ,
|
||||
|
|
||||
= help: Remove unnecessary dict
|
||||
|
||||
ℹ Safe fix
|
||||
40 40 |
|
||||
41 41 | {
|
||||
42 42 | "data": [],
|
||||
43 |- **(
|
||||
44 |- {
|
||||
43 |+
|
||||
44 |+
|
||||
45 45 | "count": (a := 1)
|
||||
46 |- }
|
||||
47 |- )
|
||||
46 |+
|
||||
47 |+
|
||||
48 48 | ,
|
||||
49 49 | }
|
||||
50 50 |
|
||||
|
||||
PIE800.py:54:9: PIE800 [*] Unnecessary spread `**`
|
||||
|
|
||||
52 | "data": [],
|
||||
53 | **(
|
||||
54 | {
|
||||
| _________^
|
||||
55 | | "count": (a := 1), # Comment
|
||||
56 | | } # Comment
|
||||
| |_____________^ PIE800
|
||||
57 | ) # Comment
|
||||
58 | ,
|
||||
|
|
||||
= help: Remove unnecessary dict
|
||||
|
||||
ℹ Safe fix
|
||||
50 50 |
|
||||
51 51 | {
|
||||
52 52 | "data": [],
|
||||
53 |- **(
|
||||
54 |- {
|
||||
55 |- "count": (a := 1), # Comment
|
||||
56 |- } # Comment
|
||||
57 |- ) # Comment
|
||||
53 |+
|
||||
54 |+
|
||||
55 |+ "count": (a := 1) # Comment
|
||||
56 |+ # Comment
|
||||
57 |+ # Comment
|
||||
58 58 | ,
|
||||
59 59 | }
|
||||
60 60 |
|
||||
|
||||
PIE800.py:65:1: PIE800 [*] Unnecessary spread `**`
|
||||
|
|
||||
63 | **( # Comment
|
||||
64 | ( # Comment
|
||||
65 | / { # Comment
|
||||
66 | | "count": (a := 1), # Comment
|
||||
67 | | } # Comment
|
||||
| |_________^ PIE800
|
||||
68 | )
|
||||
69 | ) # Comment
|
||||
|
|
||||
= help: Remove unnecessary dict
|
||||
|
||||
ℹ Safe fix
|
||||
60 60 |
|
||||
61 61 | ({
|
||||
62 62 | "data": [],
|
||||
63 |- **( # Comment
|
||||
64 |- ( # Comment
|
||||
65 |-{ # Comment
|
||||
66 |- "count": (a := 1), # Comment
|
||||
67 |- } # Comment
|
||||
68 |- )
|
||||
69 |- ) # Comment
|
||||
63 |+ # Comment
|
||||
64 |+ # Comment
|
||||
65 |+ # Comment
|
||||
66 |+ "count": (a := 1) # Comment
|
||||
67 |+ # Comment
|
||||
68 |+
|
||||
69 |+ # Comment
|
||||
70 70 | ,
|
||||
71 71 | })
|
||||
72 72 |
|
||||
|
||||
@@ -79,6 +79,7 @@ mod tests {
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391_2.py"))]
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391_3.py"))]
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391_4.py"))]
|
||||
#[test_case(Rule::TooManyNewlinesAtEndOfFile, Path::new("W391.ipynb"))]
|
||||
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"preview__{}_{}",
|
||||
|
||||
@@ -1,11 +1,18 @@
|
||||
use std::iter::Peekable;
|
||||
|
||||
use itertools::Itertools;
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, ViolationMetadata};
|
||||
use ruff_python_parser::{TokenKind, Tokens};
|
||||
use ruff_notebook::CellOffsets;
|
||||
use ruff_python_parser::{Token, TokenKind, Tokens};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for files with multiple trailing blank lines.
|
||||
///
|
||||
/// In the case of notebooks, this check is applied to
|
||||
/// each cell separately.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Trailing blank lines in a file are superfluous.
|
||||
///
|
||||
@@ -23,17 +30,19 @@ use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct TooManyNewlinesAtEndOfFile {
|
||||
num_trailing_newlines: u32,
|
||||
in_notebook: bool,
|
||||
}
|
||||
|
||||
impl AlwaysFixableViolation for TooManyNewlinesAtEndOfFile {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
let domain = if self.in_notebook { "cell" } else { "file" };
|
||||
// We expect a single trailing newline; so two trailing newlines is one too many, three
|
||||
// trailing newlines is two too many, etc.
|
||||
if self.num_trailing_newlines > 2 {
|
||||
"Too many newlines at end of file".to_string()
|
||||
format!("Too many newlines at end of {domain}")
|
||||
} else {
|
||||
"Extra newline at end of file".to_string()
|
||||
format!("Extra newline at end of {domain}")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,22 +57,68 @@ impl AlwaysFixableViolation for TooManyNewlinesAtEndOfFile {
|
||||
}
|
||||
|
||||
/// W391
|
||||
pub(crate) fn too_many_newlines_at_end_of_file(diagnostics: &mut Vec<Diagnostic>, tokens: &Tokens) {
|
||||
let mut num_trailing_newlines = 0u32;
|
||||
let mut start: Option<TextSize> = None;
|
||||
let mut end: Option<TextSize> = None;
|
||||
pub(crate) fn too_many_newlines_at_end_of_file(
|
||||
diagnostics: &mut Vec<Diagnostic>,
|
||||
tokens: &Tokens,
|
||||
cell_offsets: Option<&CellOffsets>,
|
||||
) {
|
||||
let mut tokens_iter = tokens.iter().rev().peekable();
|
||||
|
||||
// Count the number of trailing newlines.
|
||||
for token in tokens.iter().rev() {
|
||||
match token.kind() {
|
||||
TokenKind::NonLogicalNewline | TokenKind::Newline => {
|
||||
if num_trailing_newlines == 0 {
|
||||
end = Some(token.end());
|
||||
if let Some(cell_offsets) = cell_offsets {
|
||||
diagnostics.extend(notebook_newline_diagnostics(tokens_iter, cell_offsets));
|
||||
} else if let Some(diagnostic) = newline_diagnostic(&mut tokens_iter, false) {
|
||||
diagnostics.push(diagnostic);
|
||||
};
|
||||
}
|
||||
|
||||
/// Collects trailing newline diagnostics for each cell
|
||||
fn notebook_newline_diagnostics<'a>(
|
||||
mut tokens_iter: Peekable<impl Iterator<Item = &'a Token>>,
|
||||
cell_offsets: &CellOffsets,
|
||||
) -> Vec<Diagnostic> {
|
||||
let mut results = Vec::new();
|
||||
let offset_iter = cell_offsets.iter().rev();
|
||||
|
||||
// NB: When interpreting the below, recall that the iterators
|
||||
// have been reversed.
|
||||
for &offset in offset_iter {
|
||||
// Advance to offset
|
||||
tokens_iter
|
||||
.peeking_take_while(|tok| tok.end() >= offset)
|
||||
.for_each(drop);
|
||||
|
||||
let Some(diagnostic) = newline_diagnostic(&mut tokens_iter, true) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
results.push(diagnostic);
|
||||
}
|
||||
results
|
||||
}
|
||||
|
||||
/// Possible diagnostic, with fix, for too many newlines in cell or source file
|
||||
fn newline_diagnostic<'a>(
|
||||
tokens_iter: &mut Peekable<impl Iterator<Item = &'a Token>>,
|
||||
in_notebook: bool,
|
||||
) -> Option<Diagnostic> {
|
||||
let mut num_trailing_newlines: u32 = 0;
|
||||
let mut newline_range_start: Option<TextSize> = None;
|
||||
let mut newline_range_end: Option<TextSize> = None;
|
||||
|
||||
while let Some(next_token) = tokens_iter.peek() {
|
||||
match next_token.kind() {
|
||||
TokenKind::Newline | TokenKind::NonLogicalNewline => {
|
||||
if newline_range_end.is_none() {
|
||||
newline_range_end = Some(next_token.end());
|
||||
}
|
||||
start = Some(token.end());
|
||||
newline_range_start = Some(next_token.end());
|
||||
|
||||
tokens_iter.next();
|
||||
num_trailing_newlines += 1;
|
||||
}
|
||||
TokenKind::Dedent => continue,
|
||||
TokenKind::Dedent => {
|
||||
tokens_iter.next();
|
||||
}
|
||||
_ => {
|
||||
break;
|
||||
}
|
||||
@@ -71,19 +126,23 @@ pub(crate) fn too_many_newlines_at_end_of_file(diagnostics: &mut Vec<Diagnostic>
|
||||
}
|
||||
|
||||
if num_trailing_newlines == 0 || num_trailing_newlines == 1 {
|
||||
return;
|
||||
}
|
||||
|
||||
let range = match (start, end) {
|
||||
(Some(start), Some(end)) => TextRange::new(start, end),
|
||||
_ => return,
|
||||
return None;
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
TooManyNewlinesAtEndOfFile {
|
||||
num_trailing_newlines,
|
||||
},
|
||||
range,
|
||||
);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
|
||||
diagnostics.push(diagnostic);
|
||||
|
||||
let (start, end) = (match (newline_range_start, newline_range_end) {
|
||||
(Some(s), Some(e)) => Some((s, e)),
|
||||
_ => None,
|
||||
})?;
|
||||
|
||||
let diagnostic_range = TextRange::new(start, end);
|
||||
Some(
|
||||
Diagnostic::new(
|
||||
TooManyNewlinesAtEndOfFile {
|
||||
num_trailing_newlines,
|
||||
in_notebook,
|
||||
},
|
||||
diagnostic_range,
|
||||
)
|
||||
.with_fix(Fix::safe_edit(Edit::range_deletion(diagnostic_range))),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -0,0 +1,74 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
W391.ipynb:5:1: W391 [*] Too many newlines at end of cell
|
||||
|
|
||||
3 | # just a comment in this cell
|
||||
4 | # a comment and some newlines
|
||||
5 | /
|
||||
6 | |
|
||||
7 | |
|
||||
8 | |
|
||||
| |_^ W391
|
||||
9 | 1 + 1
|
||||
10 | # a comment
|
||||
|
|
||||
= help: Remove trailing newlines
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 | # just a comment in this cell
|
||||
4 4 | # a comment and some newlines
|
||||
5 5 |
|
||||
6 |-
|
||||
7 |-
|
||||
8 |-
|
||||
9 6 | 1 + 1
|
||||
10 7 | # a comment
|
||||
11 8 |
|
||||
|
||||
W391.ipynb:11:1: W391 [*] Too many newlines at end of cell
|
||||
|
|
||||
9 | 1 + 1
|
||||
10 | # a comment
|
||||
11 | /
|
||||
12 | |
|
||||
13 | |
|
||||
14 | |
|
||||
15 | |
|
||||
| |_^ W391
|
||||
16 | 1+1
|
||||
|
|
||||
= help: Remove trailing newlines
|
||||
|
||||
ℹ Safe fix
|
||||
9 9 | 1 + 1
|
||||
10 10 | # a comment
|
||||
11 11 |
|
||||
12 |-
|
||||
13 |-
|
||||
14 |-
|
||||
15 |-
|
||||
16 12 | 1+1
|
||||
17 13 |
|
||||
18 14 |
|
||||
|
||||
W391.ipynb:17:1: W391 [*] Too many newlines at end of cell
|
||||
|
|
||||
16 | 1+1
|
||||
17 | /
|
||||
18 | |
|
||||
19 | |
|
||||
20 | |
|
||||
21 | |
|
||||
| |_^ W391
|
||||
|
|
||||
= help: Remove trailing newlines
|
||||
|
||||
ℹ Safe fix
|
||||
15 15 |
|
||||
16 16 | 1+1
|
||||
17 17 |
|
||||
18 |-
|
||||
19 |-
|
||||
20 |-
|
||||
21 |-
|
||||
@@ -58,6 +58,7 @@ mod tests {
|
||||
#[test_case(Rule::PrintfStringFormatting, Path::new("UP031_1.py"))]
|
||||
#[test_case(Rule::QuotedAnnotation, Path::new("UP037_0.py"))]
|
||||
#[test_case(Rule::QuotedAnnotation, Path::new("UP037_1.py"))]
|
||||
#[test_case(Rule::QuotedAnnotation, Path::new("UP037_2.pyi"))]
|
||||
#[test_case(Rule::RedundantOpenModes, Path::new("UP015.py"))]
|
||||
#[test_case(Rule::RedundantOpenModes, Path::new("UP015_1.py"))]
|
||||
#[test_case(Rule::ReplaceStdoutStderr, Path::new("UP022.py"))]
|
||||
|
||||
@@ -493,7 +493,7 @@ pub(crate) fn f_strings(checker: &mut Checker, call: &ast::ExprCall, summary: &F
|
||||
checker
|
||||
.semantic()
|
||||
.resolve_qualified_name(call.func.as_ref())
|
||||
.map_or(false, |qualified_name| {
|
||||
.is_some_and(|qualified_name| {
|
||||
matches!(
|
||||
qualified_name.segments(),
|
||||
["django", "utils", "translation", "gettext" | "gettext_lazy"]
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
use ruff_text_size::TextRange;
|
||||
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, ViolationMetadata};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, ViolationMetadata};
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_python_trivia::{SimpleToken, SimpleTokenKind, SimpleTokenizer};
|
||||
use ruff_source_file::LineRanges;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for the presence of unnecessary quotes in type annotations.
|
||||
@@ -79,10 +82,38 @@ impl AlwaysFixableViolation for QuotedAnnotation {
|
||||
|
||||
/// UP037
|
||||
pub(crate) fn quoted_annotation(checker: &mut Checker, annotation: &str, range: TextRange) {
|
||||
let mut diagnostic = Diagnostic::new(QuotedAnnotation, range);
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
annotation.to_string(),
|
||||
range,
|
||||
)));
|
||||
checker.diagnostics.push(diagnostic);
|
||||
let diagnostic = Diagnostic::new(QuotedAnnotation, range);
|
||||
|
||||
let placeholder_range = TextRange::up_to(annotation.text_len());
|
||||
let spans_multiple_lines = annotation.contains_line_break(placeholder_range);
|
||||
|
||||
let tokenizer = SimpleTokenizer::new(annotation, placeholder_range);
|
||||
let last_token_is_comment = matches!(
|
||||
tokenizer.last(),
|
||||
Some(SimpleToken {
|
||||
kind: SimpleTokenKind::Comment,
|
||||
..
|
||||
})
|
||||
);
|
||||
|
||||
let new_content = match (spans_multiple_lines, last_token_is_comment) {
|
||||
(_, false) if in_parameter_annotation(range.start(), checker.semantic()) => {
|
||||
annotation.to_string()
|
||||
}
|
||||
(false, false) => annotation.to_string(),
|
||||
(true, false) => format!("({annotation})"),
|
||||
(_, true) => format!("({annotation}\n)"),
|
||||
};
|
||||
let edit = Edit::range_replacement(new_content, range);
|
||||
let fix = Fix::safe_edit(edit);
|
||||
|
||||
checker.diagnostics.push(diagnostic.with_fix(fix));
|
||||
}
|
||||
|
||||
fn in_parameter_annotation(offset: TextSize, semantic: &SemanticModel) -> bool {
|
||||
let Stmt::FunctionDef(stmt) = semantic.current_statement() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
stmt.parameters.range.contains(offset)
|
||||
}
|
||||
|
||||
@@ -145,7 +145,7 @@ pub(crate) fn super_call_with_parameters(checker: &mut Checker, call: &ast::Expr
|
||||
.resolve_qualified_name(func)
|
||||
.is_some_and(|name| name.segments() == ["dataclasses", "dataclass"])
|
||||
{
|
||||
arguments.find_keyword("slots").map_or(false, |keyword| {
|
||||
arguments.find_keyword("slots").is_some_and(|keyword| {
|
||||
matches!(
|
||||
keyword.value,
|
||||
Expr::BooleanLiteral(ast::ExprBooleanLiteral { value: true, .. })
|
||||
|
||||
@@ -117,7 +117,7 @@ fn in_subscript_index(expr: &ExprSubscript, semantic: &SemanticModel) -> bool {
|
||||
}
|
||||
|
||||
// E.g., `Generic[DType, Unpack[int]]`.
|
||||
if parent.slice.as_tuple_expr().map_or(false, |slice| {
|
||||
if parent.slice.as_tuple_expr().is_some_and(|slice| {
|
||||
slice
|
||||
.elts
|
||||
.iter()
|
||||
@@ -144,5 +144,5 @@ fn in_vararg(expr: &ExprSubscript, semantic: &SemanticModel) -> bool {
|
||||
.as_ref()
|
||||
.and_then(|vararg| vararg.annotation.as_ref())
|
||||
.and_then(|annotation| annotation.as_subscript_expr())
|
||||
.map_or(false, |annotation| annotation == expr)
|
||||
== Some(expr)
|
||||
}
|
||||
|
||||
@@ -0,0 +1,162 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyupgrade/mod.rs
|
||||
snapshot_kind: text
|
||||
---
|
||||
UP037_2.pyi:3:14: UP037 [*] Remove quotes from type annotation
|
||||
|
|
||||
1 | # https://github.com/astral-sh/ruff/issues/7102
|
||||
2 |
|
||||
3 | def f(a: Foo['SingleLine # Comment']): ...
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ UP037
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | # https://github.com/astral-sh/ruff/issues/7102
|
||||
2 2 |
|
||||
3 |-def f(a: Foo['SingleLine # Comment']): ...
|
||||
3 |+def f(a: Foo[(SingleLine # Comment
|
||||
4 |+)]): ...
|
||||
4 5 |
|
||||
5 6 |
|
||||
6 7 | def f(a: Foo['''Bar[
|
||||
|
||||
UP037_2.pyi:6:14: UP037 [*] Remove quotes from type annotation
|
||||
|
|
||||
6 | def f(a: Foo['''Bar[
|
||||
| ______________^
|
||||
7 | | Multi |
|
||||
8 | | Line]''']): ...
|
||||
| |____________^ UP037
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 | def f(a: Foo['SingleLine # Comment']): ...
|
||||
4 4 |
|
||||
5 5 |
|
||||
6 |-def f(a: Foo['''Bar[
|
||||
6 |+def f(a: Foo[Bar[
|
||||
7 7 | Multi |
|
||||
8 |- Line]''']): ...
|
||||
8 |+ Line]]): ...
|
||||
9 9 |
|
||||
10 10 |
|
||||
11 11 | def f(a: Foo['''Bar[
|
||||
|
||||
UP037_2.pyi:11:14: UP037 [*] Remove quotes from type annotation
|
||||
|
|
||||
11 | def f(a: Foo['''Bar[
|
||||
| ______________^
|
||||
12 | | Multi |
|
||||
13 | | Line # Comment
|
||||
14 | | ]''']): ...
|
||||
| |____^ UP037
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
8 8 | Line]''']): ...
|
||||
9 9 |
|
||||
10 10 |
|
||||
11 |-def f(a: Foo['''Bar[
|
||||
11 |+def f(a: Foo[Bar[
|
||||
12 12 | Multi |
|
||||
13 13 | Line # Comment
|
||||
14 |-]''']): ...
|
||||
14 |+]]): ...
|
||||
15 15 |
|
||||
16 16 |
|
||||
17 17 | def f(a: Foo['''Bar[
|
||||
|
||||
UP037_2.pyi:17:14: UP037 [*] Remove quotes from type annotation
|
||||
|
|
||||
17 | def f(a: Foo['''Bar[
|
||||
| ______________^
|
||||
18 | | Multi |
|
||||
19 | | Line] # Comment''']): ...
|
||||
| |_______________________^ UP037
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
14 14 | ]''']): ...
|
||||
15 15 |
|
||||
16 16 |
|
||||
17 |-def f(a: Foo['''Bar[
|
||||
17 |+def f(a: Foo[(Bar[
|
||||
18 18 | Multi |
|
||||
19 |- Line] # Comment''']): ...
|
||||
19 |+ Line] # Comment
|
||||
20 |+)]): ...
|
||||
20 21 |
|
||||
21 22 |
|
||||
22 23 | def f(a: Foo['''
|
||||
|
||||
UP037_2.pyi:22:14: UP037 [*] Remove quotes from type annotation
|
||||
|
|
||||
22 | def f(a: Foo['''
|
||||
| ______________^
|
||||
23 | | Bar[
|
||||
24 | | Multi |
|
||||
25 | | Line] # Comment''']): ...
|
||||
| |_______________________^ UP037
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
19 19 | Line] # Comment''']): ...
|
||||
20 20 |
|
||||
21 21 |
|
||||
22 |-def f(a: Foo['''
|
||||
22 |+def f(a: Foo[(
|
||||
23 23 | Bar[
|
||||
24 24 | Multi |
|
||||
25 |- Line] # Comment''']): ...
|
||||
25 |+ Line] # Comment
|
||||
26 |+)]): ...
|
||||
26 27 |
|
||||
27 28 |
|
||||
28 29 | def f(a: '''list[int]
|
||||
|
||||
UP037_2.pyi:28:10: UP037 [*] Remove quotes from type annotation
|
||||
|
|
||||
28 | def f(a: '''list[int]
|
||||
| __________^
|
||||
29 | | ''' = []): ...
|
||||
| |_______^ UP037
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
25 25 | Line] # Comment''']): ...
|
||||
26 26 |
|
||||
27 27 |
|
||||
28 |-def f(a: '''list[int]
|
||||
29 |- ''' = []): ...
|
||||
28 |+def f(a: list[int]
|
||||
29 |+ = []): ...
|
||||
30 30 |
|
||||
31 31 |
|
||||
32 32 | a: '''\\
|
||||
|
||||
UP037_2.pyi:32:4: UP037 [*] Remove quotes from type annotation
|
||||
|
|
||||
32 | a: '''\\
|
||||
| ____^
|
||||
33 | | list[int]''' = [42]
|
||||
| |____________^ UP037
|
||||
|
|
||||
= help: Remove quotes
|
||||
|
||||
ℹ Safe fix
|
||||
29 29 | ''' = []): ...
|
||||
30 30 |
|
||||
31 31 |
|
||||
32 |-a: '''\\
|
||||
33 |-list[int]''' = [42]
|
||||
32 |+a: (\
|
||||
33 |+list[int]) = [42]
|
||||
34 34 |
|
||||
35 35 |
|
||||
36 36 | # TODO: These are valid too. String annotations are assumed to be enclosed in parentheses.
|
||||
@@ -144,7 +144,7 @@ pub(crate) fn print_empty_string(checker: &mut Checker, call: &ast::ExprCall) {
|
||||
let empty_separator = call
|
||||
.arguments
|
||||
.find_keyword("sep")
|
||||
.map_or(false, |keyword| is_empty_string(&keyword.value));
|
||||
.is_some_and(|keyword| is_empty_string(&keyword.value));
|
||||
if !empty_separator {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, ViolationMetadata};
|
||||
use ruff_python_ast::helpers::is_dunder;
|
||||
use ruff_python_semantic::{Binding, ScopeId};
|
||||
use ruff_python_semantic::{Binding, BindingId, ScopeId};
|
||||
use ruff_python_stdlib::{
|
||||
builtins::is_python_builtin, identifiers::is_identifier, keyword::is_keyword,
|
||||
};
|
||||
@@ -97,7 +97,11 @@ impl Violation for UsedDummyVariable {
|
||||
}
|
||||
|
||||
/// RUF052
|
||||
pub(crate) fn used_dummy_variable(checker: &Checker, binding: &Binding) -> Option<Diagnostic> {
|
||||
pub(crate) fn used_dummy_variable(
|
||||
checker: &Checker,
|
||||
binding: &Binding,
|
||||
binding_id: BindingId,
|
||||
) -> Option<Diagnostic> {
|
||||
let name = binding.name(checker.source());
|
||||
|
||||
// Ignore `_` and dunder variables
|
||||
@@ -141,6 +145,21 @@ pub(crate) fn used_dummy_variable(checker: &Checker, binding: &Binding) -> Optio
|
||||
if !scope.kind.is_function() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Recall from above that we do not wish to flag "private"
|
||||
// function parameters. The previous early exit ensured
|
||||
// that the binding in hand was not a function parameter.
|
||||
// We now check that, in the body of our function, we are
|
||||
// not looking at a shadowing of a private parameter.
|
||||
//
|
||||
// (Technically this also covers the case in the previous early exit,
|
||||
// but it is more expensive so we keep both.)
|
||||
if scope
|
||||
.shadowed_bindings(binding_id)
|
||||
.any(|shadow_id| semantic.binding(shadow_id).kind.is_argument())
|
||||
{
|
||||
return None;
|
||||
}
|
||||
if !checker.settings.dummy_variable_rgx.is_match(name) {
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -175,6 +175,9 @@ RUF052.py:138:5: RUF052 [*] Local dummy variable `_P` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:139:5: RUF052 [*] Local dummy variable `_T` is accessed
|
||||
|
|
||||
@@ -201,6 +204,9 @@ RUF052.py:139:5: RUF052 [*] Local dummy variable `_T` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:140:5: RUF052 [*] Local dummy variable `_NT` is accessed
|
||||
|
|
||||
@@ -227,6 +233,9 @@ RUF052.py:140:5: RUF052 [*] Local dummy variable `_NT` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:141:5: RUF052 [*] Local dummy variable `_E` is accessed
|
||||
|
|
||||
@@ -252,6 +261,9 @@ RUF052.py:141:5: RUF052 [*] Local dummy variable `_E` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:142:5: RUF052 [*] Local dummy variable `_NT2` is accessed
|
||||
|
|
||||
@@ -276,6 +288,9 @@ RUF052.py:142:5: RUF052 [*] Local dummy variable `_NT2` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, _E, NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:143:5: RUF052 [*] Local dummy variable `_NT3` is accessed
|
||||
|
|
||||
@@ -299,6 +314,9 @@ RUF052.py:143:5: RUF052 [*] Local dummy variable `_NT3` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, _E, _NT2, NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:144:5: RUF052 [*] Local dummy variable `_DynamicClass` is accessed
|
||||
|
|
||||
@@ -320,6 +338,9 @@ RUF052.py:144:5: RUF052 [*] Local dummy variable `_DynamicClass` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, _E, _NT2, _NT3, DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:145:5: RUF052 [*] Local dummy variable `_NotADynamicClass` is accessed
|
||||
|
|
||||
@@ -341,3 +362,6 @@ RUF052.py:145:5: RUF052 [*] Local dummy variable `_NotADynamicClass` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
@@ -175,6 +175,9 @@ RUF052.py:138:5: RUF052 [*] Local dummy variable `_P` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:139:5: RUF052 [*] Local dummy variable `_T` is accessed
|
||||
|
|
||||
@@ -201,6 +204,9 @@ RUF052.py:139:5: RUF052 [*] Local dummy variable `_T` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:140:5: RUF052 [*] Local dummy variable `_NT` is accessed
|
||||
|
|
||||
@@ -227,6 +233,9 @@ RUF052.py:140:5: RUF052 [*] Local dummy variable `_NT` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:141:5: RUF052 [*] Local dummy variable `_E` is accessed
|
||||
|
|
||||
@@ -252,6 +261,9 @@ RUF052.py:141:5: RUF052 [*] Local dummy variable `_E` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:142:5: RUF052 [*] Local dummy variable `_NT2` is accessed
|
||||
|
|
||||
@@ -276,6 +288,9 @@ RUF052.py:142:5: RUF052 [*] Local dummy variable `_NT2` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, _E, NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:143:5: RUF052 [*] Local dummy variable `_NT3` is accessed
|
||||
|
|
||||
@@ -299,6 +314,9 @@ RUF052.py:143:5: RUF052 [*] Local dummy variable `_NT3` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, _E, _NT2, NT3, _DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:144:5: RUF052 [*] Local dummy variable `_DynamicClass` is accessed
|
||||
|
|
||||
@@ -320,6 +338,9 @@ RUF052.py:144:5: RUF052 [*] Local dummy variable `_DynamicClass` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, _E, _NT2, _NT3, DynamicClass, _NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
RUF052.py:145:5: RUF052 [*] Local dummy variable `_NotADynamicClass` is accessed
|
||||
|
|
||||
@@ -341,3 +362,6 @@ RUF052.py:145:5: RUF052 [*] Local dummy variable `_NotADynamicClass` is accessed
|
||||
146 146 |
|
||||
147 |- print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, _NotADynamicClass)
|
||||
147 |+ print(_T, _P, _NT, _E, _NT2, _NT3, _DynamicClass, NotADynamicClass)
|
||||
148 148 |
|
||||
149 149 | # Do not emit diagnostic if parameter is private
|
||||
150 150 | # even if it is later shadowed in the body of the function
|
||||
|
||||
@@ -407,13 +407,13 @@ pub enum StringLike<'a> {
|
||||
FString(&'a ast::ExprFString),
|
||||
}
|
||||
|
||||
impl StringLike<'_> {
|
||||
impl<'a> StringLike<'a> {
|
||||
pub const fn is_fstring(self) -> bool {
|
||||
matches!(self, Self::FString(_))
|
||||
}
|
||||
|
||||
/// Returns an iterator over the [`StringLikePart`] contained in this string-like expression.
|
||||
pub fn parts(&self) -> StringLikePartIter<'_> {
|
||||
pub fn parts(&self) -> StringLikePartIter<'a> {
|
||||
match self {
|
||||
StringLike::String(expr) => StringLikePartIter::String(expr.value.iter()),
|
||||
StringLike::Bytes(expr) => StringLikePartIter::Bytes(expr.value.iter()),
|
||||
@@ -429,6 +429,14 @@ impl StringLike<'_> {
|
||||
Self::FString(ExprFString { value, .. }) => value.is_implicit_concatenated(),
|
||||
}
|
||||
}
|
||||
|
||||
pub const fn as_expression_ref(self) -> ExpressionRef<'a> {
|
||||
match self {
|
||||
StringLike::String(expr) => ExpressionRef::StringLiteral(expr),
|
||||
StringLike::Bytes(expr) => ExpressionRef::BytesLiteral(expr),
|
||||
StringLike::FString(expr) => ExpressionRef::FString(expr),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::ExprStringLiteral> for StringLike<'a> {
|
||||
@@ -488,6 +496,19 @@ impl<'a> TryFrom<&'a Expr> for StringLike<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> TryFrom<AnyNodeRef<'a>> for StringLike<'a> {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: AnyNodeRef<'a>) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
AnyNodeRef::ExprStringLiteral(value) => Ok(Self::String(value)),
|
||||
AnyNodeRef::ExprBytesLiteral(value) => Ok(Self::Bytes(value)),
|
||||
AnyNodeRef::ExprFString(value) => Ok(Self::FString(value)),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Ranged for StringLike<'_> {
|
||||
fn range(&self) -> TextRange {
|
||||
match self {
|
||||
@@ -561,6 +582,12 @@ impl<'a> From<&'a ast::FString> for StringLikePart<'a> {
|
||||
|
||||
impl<'a> From<&StringLikePart<'a>> for AnyNodeRef<'a> {
|
||||
fn from(value: &StringLikePart<'a>) -> Self {
|
||||
AnyNodeRef::from(*value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<StringLikePart<'a>> for AnyNodeRef<'a> {
|
||||
fn from(value: StringLikePart<'a>) -> Self {
|
||||
match value {
|
||||
StringLikePart::String(part) => AnyNodeRef::StringLiteral(part),
|
||||
StringLikePart::Bytes(part) => AnyNodeRef::BytesLiteral(part),
|
||||
|
||||
@@ -1732,7 +1732,7 @@ impl StringLiteralValue {
|
||||
pub fn is_unicode(&self) -> bool {
|
||||
self.iter()
|
||||
.next()
|
||||
.map_or(false, |part| part.flags.prefix().is_unicode())
|
||||
.is_some_and(|part| part.flags.prefix().is_unicode())
|
||||
}
|
||||
|
||||
/// Returns a slice of all the [`StringLiteral`] parts contained in this value.
|
||||
|
||||
@@ -291,3 +291,87 @@ aaaaa[aaaaaaaaaaa] = (
|
||||
f"testeeeeeeeeeeeeeeeeeeeeeeeee{a
|
||||
=}" "moreeeeeeeeeeeeeeeeeetest" # comment
|
||||
)
|
||||
|
||||
|
||||
# Trailing last-part comments
|
||||
|
||||
a = (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a: Literal[str] = (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a += (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a = (
|
||||
r"a"
|
||||
r"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a = (
|
||||
"a"
|
||||
"b"
|
||||
) # belongs to the assignment
|
||||
|
||||
a = (((
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)))
|
||||
|
||||
a = (((
|
||||
"a"
|
||||
"b"
|
||||
) # belongs to the f-string expression
|
||||
))
|
||||
|
||||
a = (
|
||||
"a" "b" # belongs to the f-string expression
|
||||
)
|
||||
|
||||
a = (
|
||||
"a" "b"
|
||||
# belongs to the f-string expression
|
||||
)
|
||||
|
||||
# There's no "right" answer if some parts are on the same line while others are on separate lines.
|
||||
# This is likely a comment for one of the last two parts but could also just be a comment for the entire f-string expression.
|
||||
# Because there's no right answer, follow what we do elsewhere and associate the comment with the outer-most node which
|
||||
# is the f-string expression.
|
||||
a = (
|
||||
"a"
|
||||
"b" "ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" # belongs to the f-string expression
|
||||
)
|
||||
|
||||
logger.error(
|
||||
f"Failed to run task {task} for job"
|
||||
f"with id {str(job.id)}" # type: ignore[union-attr]
|
||||
)
|
||||
|
||||
a = (10 +
|
||||
"Exception in {call_back_name} "
|
||||
f"'{msg}'" # belongs to binary operation
|
||||
)
|
||||
|
||||
a = 10 + (
|
||||
"Exception in {call_back_name} "
|
||||
f"'{msg}'" # belongs to f-string
|
||||
)
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{self._device.temperature.group_address_state}_"
|
||||
f"{self._device.target_temperature.group_address_state}_"
|
||||
f"{self._device.target_temperature.group_address}_"
|
||||
f"{self._device._setpoint_shift.group_address}" # noqa: SLF001
|
||||
)
|
||||
|
||||
return (
|
||||
f"Exception in {call_back_name} when handling msg on "
|
||||
f"'{msg.topic}': '{msg.payload}'" # type: ignore[str-bytes-safe]
|
||||
)
|
||||
@@ -85,7 +85,7 @@ pub(crate) struct FormatLeadingAlternateBranchComments<'a> {
|
||||
|
||||
impl Format<PyFormatContext<'_>> for FormatLeadingAlternateBranchComments<'_> {
|
||||
fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> {
|
||||
if self.last_node.map_or(false, |preceding| {
|
||||
if self.last_node.is_some_and(|preceding| {
|
||||
should_insert_blank_line_after_class_in_stub_file(preceding, None, f.context())
|
||||
}) {
|
||||
write!(f, [empty_line(), leading_comments(self.comments)])?;
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
use std::cmp::Ordering;
|
||||
|
||||
use ast::helpers::comment_indentation_after;
|
||||
use ruff_python_ast::whitespace::indentation;
|
||||
use ruff_python_ast::{
|
||||
self as ast, AnyNodeRef, Comprehension, Expr, ModModule, Parameter, Parameters,
|
||||
self as ast, AnyNodeRef, Comprehension, Expr, ModModule, Parameter, Parameters, StringLike,
|
||||
};
|
||||
use ruff_python_trivia::{
|
||||
find_only_token_in_range, first_non_trivia_token, indentation_at_offset, BackwardsTokenizer,
|
||||
@@ -11,9 +9,11 @@ use ruff_python_trivia::{
|
||||
};
|
||||
use ruff_source_file::LineRanges;
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange};
|
||||
use std::cmp::Ordering;
|
||||
|
||||
use crate::comments::visitor::{CommentPlacement, DecoratedComment};
|
||||
use crate::expression::expr_slice::{assign_comment_in_slice, ExprSliceCommentSection};
|
||||
use crate::expression::parentheses::is_expression_parenthesized;
|
||||
use crate::other::parameters::{
|
||||
assign_argument_separator_comment_placement, find_parameter_separators,
|
||||
};
|
||||
@@ -355,6 +355,41 @@ fn handle_enclosed_comment<'a>(
|
||||
AnyNodeRef::ExprGenerator(generator) if generator.parenthesized => {
|
||||
handle_bracketed_end_of_line_comment(comment, source)
|
||||
}
|
||||
AnyNodeRef::StmtReturn(_) => {
|
||||
handle_trailing_implicit_concatenated_string_comment(comment, comment_ranges, source)
|
||||
}
|
||||
AnyNodeRef::StmtAssign(assignment)
|
||||
if comment.preceding_node().is_some_and(|preceding| {
|
||||
preceding.ptr_eq(AnyNodeRef::from(&*assignment.value))
|
||||
}) =>
|
||||
{
|
||||
handle_trailing_implicit_concatenated_string_comment(comment, comment_ranges, source)
|
||||
}
|
||||
AnyNodeRef::StmtAnnAssign(assignment)
|
||||
if comment.preceding_node().is_some_and(|preceding| {
|
||||
assignment
|
||||
.value
|
||||
.as_deref()
|
||||
.is_some_and(|value| preceding.ptr_eq(value.into()))
|
||||
}) =>
|
||||
{
|
||||
handle_trailing_implicit_concatenated_string_comment(comment, comment_ranges, source)
|
||||
}
|
||||
AnyNodeRef::StmtAugAssign(assignment)
|
||||
if comment.preceding_node().is_some_and(|preceding| {
|
||||
preceding.ptr_eq(AnyNodeRef::from(&*assignment.value))
|
||||
}) =>
|
||||
{
|
||||
handle_trailing_implicit_concatenated_string_comment(comment, comment_ranges, source)
|
||||
}
|
||||
AnyNodeRef::StmtTypeAlias(assignment)
|
||||
if comment.preceding_node().is_some_and(|preceding| {
|
||||
preceding.ptr_eq(AnyNodeRef::from(&*assignment.value))
|
||||
}) =>
|
||||
{
|
||||
handle_trailing_implicit_concatenated_string_comment(comment, comment_ranges, source)
|
||||
}
|
||||
|
||||
_ => CommentPlacement::Default(comment),
|
||||
}
|
||||
}
|
||||
@@ -2086,6 +2121,75 @@ fn handle_comprehension_comment<'a>(
|
||||
CommentPlacement::Default(comment)
|
||||
}
|
||||
|
||||
/// Handle end-of-line comments for parenthesized implicitly concatenated strings when used in
|
||||
/// a `FormatStatementLastExpression` context:
|
||||
///
|
||||
/// ```python
|
||||
/// a = (
|
||||
/// "a"
|
||||
/// "b"
|
||||
/// "c" # comment
|
||||
/// )
|
||||
/// ```
|
||||
///
|
||||
/// `# comment` is a trailing comment of the last part and not a trailing comment of the entire f-string.
|
||||
/// Associating the comment with the last part is important or the assignment formatting might move
|
||||
/// the comment at the end of the assignment, making it impossible to suppress an error for the last part.
|
||||
///
|
||||
/// On the other hand, `# comment` is a trailing end-of-line f-string comment for:
|
||||
///
|
||||
/// ```python
|
||||
/// a = (
|
||||
/// "a" "b" "c" # comment
|
||||
/// )
|
||||
///
|
||||
/// a = (
|
||||
/// "a"
|
||||
/// "b"
|
||||
/// "c"
|
||||
/// ) # comment
|
||||
/// ```
|
||||
///
|
||||
/// Associating the comment with the f-string is desired in those cases because it allows
|
||||
/// joining the string literals into a single string literal if it fits on the line.
|
||||
fn handle_trailing_implicit_concatenated_string_comment<'a>(
|
||||
comment: DecoratedComment<'a>,
|
||||
comment_ranges: &CommentRanges,
|
||||
source: &str,
|
||||
) -> CommentPlacement<'a> {
|
||||
if !comment.line_position().is_end_of_line() {
|
||||
return CommentPlacement::Default(comment);
|
||||
}
|
||||
|
||||
let Some(string_like) = comment
|
||||
.preceding_node()
|
||||
.and_then(|preceding| StringLike::try_from(preceding).ok())
|
||||
else {
|
||||
return CommentPlacement::Default(comment);
|
||||
};
|
||||
|
||||
let mut parts = string_like.parts();
|
||||
|
||||
let (Some(last), Some(second_last)) = (parts.next_back(), parts.next_back()) else {
|
||||
return CommentPlacement::Default(comment);
|
||||
};
|
||||
|
||||
if source.contains_line_break(TextRange::new(second_last.end(), last.start()))
|
||||
&& is_expression_parenthesized(string_like.as_expression_ref(), comment_ranges, source)
|
||||
{
|
||||
let range = TextRange::new(last.end(), comment.start());
|
||||
|
||||
if !SimpleTokenizer::new(source, range)
|
||||
.skip_trivia()
|
||||
.any(|token| token.kind() == SimpleTokenKind::RParen)
|
||||
{
|
||||
return CommentPlacement::trailing(AnyNodeRef::from(last), comment);
|
||||
}
|
||||
}
|
||||
|
||||
CommentPlacement::Default(comment)
|
||||
}
|
||||
|
||||
/// Returns `true` if the parameters are parenthesized (as in a function definition), or `false` if
|
||||
/// not (as in a lambda).
|
||||
fn are_parameters_parenthesized(parameters: &Parameters, contents: &str) -> bool {
|
||||
|
||||
@@ -99,7 +99,7 @@ impl Format<PyFormatContext<'_>> for FormatImplicitConcatenatedStringExpanded<'_
|
||||
StringLikePart::FString(part) => part.format().fmt(f),
|
||||
});
|
||||
|
||||
let part_comments = comments.leading_dangling_trailing(&part);
|
||||
let part_comments = comments.leading_dangling_trailing(part);
|
||||
joiner.entry(&format_args![
|
||||
leading_comments(part_comments.leading),
|
||||
format_part,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
source: crates/ruff_python_formatter/tests/fixtures.rs
|
||||
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/fstring.py
|
||||
snapshot_kind: text
|
||||
---
|
||||
## Input
|
||||
```python
|
||||
|
||||
@@ -297,7 +297,90 @@ aaaaa[aaaaaaaaaaa] = (
|
||||
f"testeeeeeeeeeeeeeeeeeeeeeeeee{a
|
||||
=}" "moreeeeeeeeeeeeeeeeeetest" # comment
|
||||
)
|
||||
```
|
||||
|
||||
|
||||
# Trailing last-part comments
|
||||
|
||||
a = (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a: Literal[str] = (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a += (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a = (
|
||||
r"a"
|
||||
r"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a = (
|
||||
"a"
|
||||
"b"
|
||||
) # belongs to the assignment
|
||||
|
||||
a = (((
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)))
|
||||
|
||||
a = (((
|
||||
"a"
|
||||
"b"
|
||||
) # belongs to the f-string expression
|
||||
))
|
||||
|
||||
a = (
|
||||
"a" "b" # belongs to the f-string expression
|
||||
)
|
||||
|
||||
a = (
|
||||
"a" "b"
|
||||
# belongs to the f-string expression
|
||||
)
|
||||
|
||||
# There's no "right" answer if some parts are on the same line while others are on separate lines.
|
||||
# This is likely a comment for one of the last two parts but could also just be a comment for the entire f-string expression.
|
||||
# Because there's no right answer, follow what we do elsewhere and associate the comment with the outer-most node which
|
||||
# is the f-string expression.
|
||||
a = (
|
||||
"a"
|
||||
"b" "ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" # belongs to the f-string expression
|
||||
)
|
||||
|
||||
logger.error(
|
||||
f"Failed to run task {task} for job"
|
||||
f"with id {str(job.id)}" # type: ignore[union-attr]
|
||||
)
|
||||
|
||||
a = (10 +
|
||||
"Exception in {call_back_name} "
|
||||
f"'{msg}'" # belongs to binary operation
|
||||
)
|
||||
|
||||
a = 10 + (
|
||||
"Exception in {call_back_name} "
|
||||
f"'{msg}'" # belongs to f-string
|
||||
)
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{self._device.temperature.group_address_state}_"
|
||||
f"{self._device.target_temperature.group_address_state}_"
|
||||
f"{self._device.target_temperature.group_address}_"
|
||||
f"{self._device._setpoint_shift.group_address}" # noqa: SLF001
|
||||
)
|
||||
|
||||
return (
|
||||
f"Exception in {call_back_name} when handling msg on "
|
||||
f"'{msg.topic}': '{msg.payload}'" # type: ignore[str-bytes-safe]
|
||||
)```
|
||||
|
||||
## Output
|
||||
```python
|
||||
@@ -619,4 +702,77 @@ aaaaa[aaaaaaaaaaa] = (
|
||||
=}"
|
||||
"moreeeeeeeeeeeeeeeeeetest" # comment
|
||||
)
|
||||
|
||||
|
||||
# Trailing last-part comments
|
||||
|
||||
a = (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a: Literal[str] = (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a += (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a = (
|
||||
r"a"
|
||||
r"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a = "ab" # belongs to the assignment
|
||||
|
||||
a = (
|
||||
"a"
|
||||
"b" # belongs to `b`
|
||||
)
|
||||
|
||||
a = "ab" # belongs to the f-string expression
|
||||
|
||||
a = "ab" # belongs to the f-string expression
|
||||
|
||||
a = (
|
||||
"ab"
|
||||
# belongs to the f-string expression
|
||||
)
|
||||
|
||||
# There's no "right" answer if some parts are on the same line while others are on separate lines.
|
||||
# This is likely a comment for one of the last two parts but could also just be a comment for the entire f-string expression.
|
||||
# Because there's no right answer, follow what we do elsewhere and associate the comment with the outer-most node which
|
||||
# is the f-string expression.
|
||||
a = (
|
||||
"a"
|
||||
"b"
|
||||
"ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
|
||||
) # belongs to the f-string expression
|
||||
|
||||
logger.error(
|
||||
f"Failed to run task {task} for jobwith id {str(job.id)}" # type: ignore[union-attr]
|
||||
)
|
||||
|
||||
a = (
|
||||
10 + f"Exception in {{call_back_name}} '{msg}'" # belongs to binary operation
|
||||
)
|
||||
|
||||
a = 10 + (
|
||||
f"Exception in {{call_back_name}} '{msg}'" # belongs to f-string
|
||||
)
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{self._device.temperature.group_address_state}_"
|
||||
f"{self._device.target_temperature.group_address_state}_"
|
||||
f"{self._device.target_temperature.group_address}_"
|
||||
f"{self._device._setpoint_shift.group_address}" # noqa: SLF001
|
||||
)
|
||||
|
||||
return (
|
||||
f"Exception in {call_back_name} when handling msg on "
|
||||
f"'{msg.topic}': '{msg.payload}'" # type: ignore[str-bytes-safe]
|
||||
)
|
||||
```
|
||||
|
||||
@@ -1002,24 +1002,21 @@ impl<'a> SemanticModel<'a> {
|
||||
let value_name = UnqualifiedName::from_expr(value)?;
|
||||
let (_, tail) = value_name.segments().split_first()?;
|
||||
|
||||
let resolved: QualifiedName = if qualified_name
|
||||
.segments()
|
||||
.first()
|
||||
.map_or(false, |segment| *segment == ".")
|
||||
{
|
||||
from_relative_import(
|
||||
self.module.qualified_name()?,
|
||||
qualified_name.segments(),
|
||||
tail,
|
||||
)?
|
||||
} else {
|
||||
qualified_name
|
||||
.segments()
|
||||
.iter()
|
||||
.chain(tail)
|
||||
.copied()
|
||||
.collect()
|
||||
};
|
||||
let resolved: QualifiedName =
|
||||
if qualified_name.segments().first().copied() == Some(".") {
|
||||
from_relative_import(
|
||||
self.module.qualified_name()?,
|
||||
qualified_name.segments(),
|
||||
tail,
|
||||
)?
|
||||
} else {
|
||||
qualified_name
|
||||
.segments()
|
||||
.iter()
|
||||
.chain(tail)
|
||||
.copied()
|
||||
.collect()
|
||||
};
|
||||
Some(resolved)
|
||||
}
|
||||
BindingKind::Builtin => {
|
||||
|
||||
@@ -136,17 +136,15 @@ impl NotebookDocument {
|
||||
// provide the actual contents of the cells, so we'll initialize them with empty
|
||||
// contents.
|
||||
for cell in structure.array.cells.into_iter().flatten().rev() {
|
||||
if let Some(text_document) = deleted_cells.remove(&cell.document) {
|
||||
let version = text_document.version();
|
||||
self.cells.push(NotebookCell::new(
|
||||
cell,
|
||||
text_document.into_contents(),
|
||||
version,
|
||||
));
|
||||
} else {
|
||||
self.cells
|
||||
.insert(start, NotebookCell::new(cell, String::new(), 0));
|
||||
}
|
||||
let (content, version) =
|
||||
if let Some(text_document) = deleted_cells.remove(&cell.document) {
|
||||
let version = text_document.version();
|
||||
(text_document.into_contents(), version)
|
||||
} else {
|
||||
(String::new(), 0)
|
||||
};
|
||||
self.cells
|
||||
.insert(start, NotebookCell::new(cell, content, version));
|
||||
}
|
||||
|
||||
// Third, register the new cells in the index and update existing ones that came
|
||||
@@ -243,3 +241,115 @@ impl NotebookCell {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::NotebookDocument;
|
||||
|
||||
enum TestCellContent {
|
||||
#[allow(dead_code)]
|
||||
Markup(String),
|
||||
Code(String),
|
||||
}
|
||||
|
||||
fn create_test_url(index: usize) -> lsp_types::Url {
|
||||
lsp_types::Url::parse(&format!("cell:/test.ipynb#{index}")).unwrap()
|
||||
}
|
||||
|
||||
fn create_test_notebook(test_cells: Vec<TestCellContent>) -> NotebookDocument {
|
||||
let mut cells = Vec::with_capacity(test_cells.len());
|
||||
let mut cell_documents = Vec::with_capacity(test_cells.len());
|
||||
|
||||
for (index, test_cell) in test_cells.into_iter().enumerate() {
|
||||
let url = create_test_url(index);
|
||||
match test_cell {
|
||||
TestCellContent::Markup(content) => {
|
||||
cells.push(lsp_types::NotebookCell {
|
||||
kind: lsp_types::NotebookCellKind::Markup,
|
||||
document: url.clone(),
|
||||
metadata: None,
|
||||
execution_summary: None,
|
||||
});
|
||||
cell_documents.push(lsp_types::TextDocumentItem {
|
||||
uri: url,
|
||||
language_id: "markdown".to_owned(),
|
||||
version: 0,
|
||||
text: content,
|
||||
});
|
||||
}
|
||||
TestCellContent::Code(content) => {
|
||||
cells.push(lsp_types::NotebookCell {
|
||||
kind: lsp_types::NotebookCellKind::Code,
|
||||
document: url.clone(),
|
||||
metadata: None,
|
||||
execution_summary: None,
|
||||
});
|
||||
cell_documents.push(lsp_types::TextDocumentItem {
|
||||
uri: url,
|
||||
language_id: "python".to_owned(),
|
||||
version: 0,
|
||||
text: content,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
NotebookDocument::new(0, cells, serde_json::Map::default(), cell_documents).unwrap()
|
||||
}
|
||||
|
||||
/// This test case checks that for a notebook with three code cells, when the client sends a
|
||||
/// change request to swap the first two cells, the notebook document is updated correctly.
|
||||
///
|
||||
/// The swap operation as a change request is represented as deleting the first two cells and
|
||||
/// adding them back in the reverse order.
|
||||
#[test]
|
||||
fn swap_cells() {
|
||||
let mut notebook = create_test_notebook(vec![
|
||||
TestCellContent::Code("cell = 0".to_owned()),
|
||||
TestCellContent::Code("cell = 1".to_owned()),
|
||||
TestCellContent::Code("cell = 2".to_owned()),
|
||||
]);
|
||||
|
||||
notebook
|
||||
.update(
|
||||
Some(lsp_types::NotebookDocumentCellChange {
|
||||
structure: Some(lsp_types::NotebookDocumentCellChangeStructure {
|
||||
array: lsp_types::NotebookCellArrayChange {
|
||||
start: 0,
|
||||
delete_count: 2,
|
||||
cells: Some(vec![
|
||||
lsp_types::NotebookCell {
|
||||
kind: lsp_types::NotebookCellKind::Code,
|
||||
document: create_test_url(1),
|
||||
metadata: None,
|
||||
execution_summary: None,
|
||||
},
|
||||
lsp_types::NotebookCell {
|
||||
kind: lsp_types::NotebookCellKind::Code,
|
||||
document: create_test_url(0),
|
||||
metadata: None,
|
||||
execution_summary: None,
|
||||
},
|
||||
]),
|
||||
},
|
||||
did_open: None,
|
||||
did_close: None,
|
||||
}),
|
||||
data: None,
|
||||
text_content: None,
|
||||
}),
|
||||
None,
|
||||
1,
|
||||
crate::PositionEncoding::default(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
notebook.make_ruff_notebook().source_code(),
|
||||
"cell = 1
|
||||
cell = 0
|
||||
cell = 2
|
||||
"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -125,7 +125,7 @@ impl Index {
|
||||
DocumentKey::NotebookCell(url)
|
||||
} else if Path::new(url.path())
|
||||
.extension()
|
||||
.map_or(false, |ext| ext.eq_ignore_ascii_case("ipynb"))
|
||||
.is_some_and(|ext| ext.eq_ignore_ascii_case("ipynb"))
|
||||
{
|
||||
DocumentKey::Notebook(url)
|
||||
} else {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_wasm"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -427,9 +427,9 @@ or f-string literal that contains the [configured quote style]:
|
||||
```python
|
||||
# format.quote-style = "double"
|
||||
|
||||
f'{"""nested " """}'`
|
||||
f'{"""nested " """}'
|
||||
# This f-string cannot be formatted as follows when targeting Python < 3.12
|
||||
f"{'''nested " '''}``
|
||||
f"{'''nested " '''}"
|
||||
```
|
||||
|
||||
For all target Python versions, when a [self-documenting f-string] contains an expression between
|
||||
|
||||
@@ -80,7 +80,7 @@ You can add the following configuration to `.gitlab-ci.yml` to run a `ruff forma
|
||||
stage: build
|
||||
interruptible: true
|
||||
image:
|
||||
name: ghcr.io/astral-sh/ruff:0.9.0-alpine
|
||||
name: ghcr.io/astral-sh/ruff:0.9.1-alpine
|
||||
before_script:
|
||||
- cd $CI_PROJECT_DIR
|
||||
- ruff --version
|
||||
@@ -106,7 +106,7 @@ Ruff can be used as a [pre-commit](https://pre-commit.com) hook via [`ruff-pre-c
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.9.0
|
||||
rev: v0.9.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
@@ -119,7 +119,7 @@ To enable lint fixes, add the `--fix` argument to the lint hook:
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.9.0
|
||||
rev: v0.9.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
@@ -128,20 +128,20 @@ To enable lint fixes, add the `--fix` argument to the lint hook:
|
||||
- id: ruff-format
|
||||
```
|
||||
|
||||
To run the hooks over Jupyter Notebooks too, add `jupyter` to the list of allowed filetypes:
|
||||
To avoid running on Jupyter Notebooks, remove `jupyter` from the list of allowed filetypes:
|
||||
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.9.0
|
||||
rev: v0.9.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
types_or: [ python, pyi, jupyter ]
|
||||
types_or: [ python, pyi ]
|
||||
args: [ --fix ]
|
||||
# Run the formatter.
|
||||
- id: ruff-format
|
||||
types_or: [ python, pyi, jupyter ]
|
||||
types_or: [ python, pyi ]
|
||||
```
|
||||
|
||||
When running with `--fix`, Ruff's lint hook should be placed _before_ Ruff's formatter hook, and
|
||||
|
||||
@@ -5,6 +5,3 @@ mkdocs-material @ git+ssh://git@github.com/astral-sh/mkdocs-material-insiders.gi
|
||||
mkdocs-redirects==1.2.2
|
||||
mdformat==0.7.21
|
||||
mdformat-mkdocs==4.1.1
|
||||
# Using a commit from pygments main branch so we get
|
||||
# https://github.com/pygments/pygments/pull/2773 before it's been released
|
||||
pygments @ git+https://github.com/pygments/pygments.git@67b460fdde6d9a00342b5102b37b3a8399f0e8ef
|
||||
|
||||
@@ -5,6 +5,3 @@ mkdocs-material==9.5.38
|
||||
mkdocs-redirects==1.2.2
|
||||
mdformat==0.7.21
|
||||
mdformat-mkdocs==4.1.1
|
||||
# Using a commit from pygments main branch so we get
|
||||
# https://github.com/pygments/pygments/pull/2773 before it's been released
|
||||
pygments @ git+https://github.com/pygments/pygments.git@67b460fdde6d9a00342b5102b37b3a8399f0e8ef
|
||||
|
||||
@@ -364,7 +364,7 @@ This tutorial has focused on Ruff's command-line interface, but Ruff can also be
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.9.0
|
||||
rev: v0.9.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "maturin"
|
||||
|
||||
[project]
|
||||
name = "ruff"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
||||
readme = "README.md"
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
[toolchain]
|
||||
channel = "1.83"
|
||||
channel = "1.84"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "scripts"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
description = ""
|
||||
authors = ["Charles Marsh <charlie.r.marsh@gmail.com>"]
|
||||
|
||||
|
||||
Reference in New Issue
Block a user