Compare commits

...

32 Commits

Author SHA1 Message Date
Douglas Creager
a59fae85cc here 2025-12-03 16:38:04 -05:00
Douglas Creager
705e4725ad generic_context should work for callables too 2025-12-03 16:38:04 -05:00
Douglas Creager
3d73506e05 make PartialSpec an enum 2025-12-03 16:38:04 -05:00
Douglas Creager
af67d7307a debug 2025-12-03 16:38:04 -05:00
Douglas Creager
1e33d25d1c fix test 2025-12-03 16:38:01 -05:00
Douglas Creager
b90cdfc2f7 generic 2025-12-03 16:36:21 -05:00
Douglas Creager
94aca37ca8 skip non-inferable 2025-12-03 16:30:44 -05:00
Douglas Creager
75e9d66d4b self 2025-12-03 12:37:04 -05:00
Douglas Creager
3bcca62472 doc 2025-12-03 12:12:00 -05:00
Douglas Creager
85e6143e07 use self annotation in synthesized __init__ callable 2025-12-03 12:09:04 -05:00
Douglas Creager
77ce24a5bf allow multiple overloads/callables when inferring 2025-12-03 12:04:59 -05:00
Douglas Creager
db5834dfd7 add failing tests 2025-12-03 12:04:00 -05:00
Douglas Creager
2e46c8de06 Merge remote-tracking branch 'origin/main' into dcreager/callable-return
* origin/main:
  [ty] Reachability constraints: minor documentation fixes (#21774)
  [ty] Fix non-determinism in `ConstraintSet.specialize_constrained` (#21744)
  [ty] Improve `@override`, `@final` and Liskov checks in cases where there are multiple reachable definitions (#21767)
  [ty] Extend `invalid-explicit-override` to also cover properties decorated with `@override` that do not override anything (#21756)
  [ty] Enable LRU collection for parsed module (#21749)
  [ty] Support typevar-specialized dynamic types in generic type aliases (#21730)
  Add token based `parenthesized_ranges` implementation (#21738)
  [ty] Default-specialization of generic type aliases (#21765)
  [ty] Suppress false positives when `dataclasses.dataclass(...)(cls)` is called imperatively (#21729)
  [syntax-error] Default type parameter followed by non-default type parameter (#21657)
2025-12-03 10:48:36 -05:00
Douglas Creager
d3fd988337 fix tests 2025-12-02 21:49:03 -05:00
Douglas Creager
a0f64bd0ae even more hack 2025-12-02 21:41:55 -05:00
Douglas Creager
beb2956a14 carry over failing test from conformance suite 2025-12-02 21:32:02 -05:00
Douglas Creager
58c67fd4cd don't create T ≤ T constraints 2025-12-02 19:01:08 -05:00
Douglas Creager
a303b7a8aa Merge remote-tracking branch 'origin/main' into dcreager/callable-return
* origin/main:
  new module for parsing ranged suppressions (#21441)
  [ty] `type[T]` is assignable to an inferable typevar (#21766)
  Fix syntax error false positives for `await` outside functions (#21763)
  [ty] Improve diagnostics for unsupported comparison operations (#21737)
2025-12-02 18:42:43 -05:00
Douglas Creager
30452586ad clippity bippity 2025-12-02 18:27:16 -05:00
Douglas Creager
7bbf839325 hackity hack 2025-12-02 18:24:15 -05:00
Douglas Creager
957304ec15 mdlint 2025-12-02 15:40:43 -05:00
Douglas Creager
d88120b187 mark these as TODO 2025-12-02 14:46:29 -05:00
Douglas Creager
2b949b3e67 Merge remote-tracking branch 'origin/main' into dcreager/callable-return
* origin/main: (67 commits)
  Move `Token`, `TokenKind` and `Tokens` to `ruff-python-ast` (#21760)
  [ty] Don't confuse multiple occurrences of `typing.Self` when binding bound methods (#21754)
  Use our org-wide Renovate preset (#21759)
  Delete `my-script.py` (#21751)
  [ty] Move `all_members`, and related types/routines, out of `ide_support.rs` (#21695)
  [ty] Fix find-references for import aliases (#21736)
  [ty] add tests for workspaces (#21741)
  [ty] Stop testing the (brittle) constraint set display implementation (#21743)
  [ty] Use generator over list comprehension to avoid cast (#21748)
  [ty] Add a diagnostic for prohibited `NamedTuple` attribute overrides (#21717)
  [ty] Fix subtyping with `type[T]` and unions (#21740)
  Use `npm ci --ignore-scripts` everywhere (#21742)
  [`flake8-simplify`] Fix truthiness assumption for non-iterable arguments in tuple/list/set calls (`SIM222`, `SIM223`) (#21479)
  [`flake8-use-pathlib`] Mark fixes unsafe for return type changes (`PTH104`, `PTH105`, `PTH109`, `PTH115`) (#21440)
  [ty] Fix auto-import code action to handle pre-existing import
  Enable PEP 740 attestations when publishing to PyPI (#21735)
  [ty] Fix find references for type defined in stub (#21732)
  Use OIDC instead of codspeed token (#21719)
  [ty] Exclude `typing_extensions` from completions unless it's really available
  [ty] Fix false positives for `class F(Generic[*Ts]): ...` (#21723)
  ...
2025-12-02 14:23:15 -05:00
Douglas Creager
2c6267436f clean up the diff 2025-11-26 18:35:15 -05:00
Douglas Creager
fedc75463b this gets recursively expanded now 2025-11-26 18:35:15 -05:00
Douglas Creager
9950c126fe these need to be positional only to be assignable 2025-11-26 18:35:15 -05:00
Douglas Creager
b7fb6797b4 it works! 2025-11-26 18:35:15 -05:00
Douglas Creager
fc2f17508b use constraint set assignable 2025-11-26 18:35:15 -05:00
Douglas Creager
20ecb561bb add ConstraintSetAssignability relation 2025-11-26 18:35:15 -05:00
Douglas Creager
3b509e9015 it's a start 2025-11-26 18:35:15 -05:00
Douglas Creager
998b20f078 add for_each_path 2025-11-26 18:35:15 -05:00
Douglas Creager
544dafa66e add more sequents 2025-11-26 18:35:15 -05:00
14 changed files with 701 additions and 105 deletions

View File

@@ -43,9 +43,7 @@ async def main():
loop = asyncio.get_event_loop()
with concurrent.futures.ThreadPoolExecutor() as pool:
result = await loop.run_in_executor(pool, blocking_function)
# TODO: should be `int`
reveal_type(result) # revealed: Unknown
reveal_type(result) # revealed: int
```
### `asyncio.Task`

View File

@@ -82,8 +82,7 @@ def get_default() -> str:
reveal_type(field(default=1)) # revealed: dataclasses.Field[Literal[1]]
reveal_type(field(default=None)) # revealed: dataclasses.Field[None]
# TODO: this could ideally be `dataclasses.Field[str]` with a better generics solver
reveal_type(field(default_factory=get_default)) # revealed: dataclasses.Field[Unknown]
reveal_type(field(default_factory=get_default)) # revealed: dataclasses.Field[str]
```
## dataclass_transform field_specifiers

View File

@@ -144,11 +144,12 @@ from functools import cache
def f(x: int) -> int:
return x**2
# TODO: Should be `_lru_cache_wrapper[int]`
reveal_type(f) # revealed: _lru_cache_wrapper[Unknown]
# TODO: Should be `int`
reveal_type(f(1)) # revealed: Unknown
# TODO: revealed: _lru_cache_wrapper[int]
# revealed: _lru_cache_wrapper[int] | _lru_cache_wrapper[Unknown]
reveal_type(f)
# TODO: revealed: int
# revealed: int | Unknown
reveal_type(f(1))
```
## Lambdas as decorators

View File

@@ -11,9 +11,9 @@ classes. Uses of these items should subsequently produce a warning.
from typing_extensions import deprecated
@deprecated("use OtherClass")
def myfunc(): ...
def myfunc(x: int): ...
myfunc() # error: [deprecated] "use OtherClass"
myfunc(1) # error: [deprecated] "use OtherClass"
```
```py

View File

@@ -379,14 +379,13 @@ T = TypeVar("T")
def invoke(fn: Callable[[A], B], value: A) -> B:
return fn(value)
def identity(x: T) -> T:
def identity(x: T, /) -> T:
return x
def head(xs: list[T]) -> T:
def head(xs: list[T], /) -> T:
return xs[0]
# TODO: this should be `Literal[1]`
reveal_type(invoke(identity, 1)) # revealed: Unknown
reveal_type(invoke(identity, 1)) # revealed: Literal[1]
# TODO: this should be `Unknown | int`
reveal_type(invoke(head, [1, 2, 3])) # revealed: Unknown

View File

@@ -334,14 +334,13 @@ from typing import Callable
def invoke[A, B](fn: Callable[[A], B], value: A) -> B:
return fn(value)
def identity[T](x: T) -> T:
def identity[T](x: T, /) -> T:
return x
def head[T](xs: list[T]) -> T:
def head[T](xs: list[T], /) -> T:
return xs[0]
# TODO: this should be `Literal[1]`
reveal_type(invoke(identity, 1)) # revealed: Unknown
reveal_type(invoke(identity, 1)) # revealed: Literal[1]
# TODO: this should be `Unknown | int`
reveal_type(invoke(head, [1, 2, 3])) # revealed: Unknown
@@ -583,3 +582,102 @@ def f[T](x: T, y: Not[T]) -> T:
y = x # error: [invalid-assignment]
return x
```
## `Callable` parameters
We can recurse into the parameters and return values of `Callable` parameters to infer
specializations of a generic function.
```py
from typing import Any, Callable, NoReturn, overload, Self
def accepts_callable[**P, R](callable: Callable[P, R]) -> Callable[P, R]:
return callable
def returns_int() -> int:
raise NotImplementedError
# revealed: int
reveal_type(accepts_callable(returns_int)())
class ClassWithoutConstructor: ...
# revealed: ClassWithoutConstructor
reveal_type(accepts_callable(ClassWithoutConstructor)())
class ClassWithNew:
def __new__(cls, *args, **kwargs) -> Self:
raise NotImplementedError
# revealed: ClassWithNew
reveal_type(accepts_callable(ClassWithNew)())
class ClassWithInit:
def __init__(self) -> None: ...
# revealed: ClassWithInit
reveal_type(accepts_callable(ClassWithInit)())
class ClassWithNewAndInit:
def __new__(cls, *args, **kwargs) -> Self:
raise NotImplementedError
def __init__(self, x: int) -> None: ...
# revealed: ClassWithNewAndInit
reveal_type(accepts_callable(ClassWithNewAndInit)())
class Meta(type):
def __call__(cls, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError
class ClassWithNoReturnMetatype(metaclass=Meta):
def __new__(cls, *args: Any, **kwargs: Any) -> Self:
raise NotImplementedError
# revealed: Never
reveal_type(accepts_callable(ClassWithNoReturnMetatype)())
class Proxy: ...
class ClassWithIgnoredInit:
def __new__(cls) -> Proxy:
return Proxy()
def __init__(self, x: int) -> None: ...
# revealed: Proxy
reveal_type(accepts_callable(ClassWithIgnoredInit)())
class ClassWithOverloadedInit[T]:
t: T # invariant
@overload
def __init__(self: "ClassWithOverloadedInit[int]", x: int) -> None: ...
@overload
def __init__(self: "ClassWithOverloadedInit[str]", x: str) -> None: ...
def __init__(self, x: int | str) -> None: ...
# TODO: These unions are because we don't handle the ParamSpec in accepts_callable, so when
# inferring a specialization through the Callable we lose the information about how the parameter
# types distinguish the two overloads.
# TODO: revealed: ClassWithOverloadedInit[int]
# revealed: ClassWithOverloadedInit[int] | ClassWithOverloadedInit[str]
reveal_type(accepts_callable(ClassWithOverloadedInit)(0))
# TODO: revealed: ClassWithOverloadedInit[str]
# revealed: ClassWithOverloadedInit[int] | ClassWithOverloadedInit[str]
reveal_type(accepts_callable(ClassWithOverloadedInit)(""))
class GenericClass[T]:
t: T # invariant
def __new__(cls, x: list[T], y: list[T]) -> Self:
raise NotImplementedError
def _(x: list[str]):
# TODO: This fails because we are not propagating GenericClass's generic context into the
# Callable that we create for it.
# TODO: revealed: GenericClass[str]
# revealed: Unknown
reveal_type(accepts_callable(GenericClass)(x, x))
```

View File

@@ -304,7 +304,7 @@ x11: list[Literal[1] | Literal[2] | Literal[3]] = [1, 2, 3]
reveal_type(x11) # revealed: list[Literal[1, 2, 3]]
x12: Y[Y[Literal[1]]] = [[1]]
reveal_type(x12) # revealed: list[Y[Literal[1]]]
reveal_type(x12) # revealed: list[list[Literal[1]]]
x13: list[tuple[Literal[1], Literal[2], Literal[3]]] = [(1, 2, 3)]
reveal_type(x13) # revealed: list[tuple[Literal[1], Literal[2], Literal[3]]]

View File

@@ -15,9 +15,9 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/deprecated.md
1 | from typing_extensions import deprecated
2 |
3 | @deprecated("use OtherClass")
4 | def myfunc(): ...
4 | def myfunc(x: int): ...
5 |
6 | myfunc() # error: [deprecated] "use OtherClass"
6 | myfunc(1) # error: [deprecated] "use OtherClass"
7 | from typing_extensions import deprecated
8 |
9 | @deprecated("use BetterClass")
@@ -42,9 +42,9 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/deprecated.md
warning[deprecated]: The function `myfunc` is deprecated
--> src/mdtest_snippet.py:6:1
|
4 | def myfunc(): ...
4 | def myfunc(x: int): ...
5 |
6 | myfunc() # error: [deprecated] "use OtherClass"
6 | myfunc(1) # error: [deprecated] "use OtherClass"
| ^^^^^^ use OtherClass
7 | from typing_extensions import deprecated
|

View File

@@ -910,6 +910,22 @@ impl<'db> Type<'db> {
previous: Self,
cycle: &salsa::Cycle,
) -> Self {
// When we encounter a salsa cycle, we want to avoid oscillating between two or more types
// without converging on a fixed-point result. Most of the time, we union together the
// types from each cycle iteration to ensure that our result is monotonic, even if we
// encounter oscillation.
//
// However, there are several parts of our type inference machinery that assume that we
// infer a single Type::FunctionLiteral type for each overload of each function definition.
// So we avoid the union behavior for those cases, and instead return the inferred type of
// the last cycle iteration.
//
// TODO: If this reintroduces "too many cycle iterations" panics, then we will need to
// consider a different union-like behavior for combining function signatures to ensure
// monotonicity.
if self.is_function_literal() && previous.is_function_literal() {
return self;
}
UnionType::from_elements_cycle_recovery(db, [self, previous])
.recursive_type_normalized(db, cycle)
}
@@ -1756,7 +1772,7 @@ impl<'db> Type<'db> {
}
}
Type::ClassLiteral(class_literal) => {
Some(class_literal.default_specialization(db).into_callable(db))
Some(class_literal.identity_specialization(db).into_callable(db))
}
Type::GenericAlias(alias) => Some(ClassType::Generic(alias).into_callable(db)),
@@ -1949,6 +1965,33 @@ impl<'db> Type<'db> {
return constraints.implies_subtype_of(db, self, target);
}
// Handle the new constraint-set-based assignability relation next. Comparisons with a
// typevar are translated directly into a constraint set.
if relation.is_constraint_set_assignability() {
// A typevar satisfies a relation when...it satisfies the relation. Yes that's a
// tautology! We're moving the caller's subtyping/assignability requirement into a
// constraint set. If the typevar has an upper bound or constraints, then the relation
// only has to hold when the typevar has a valid specialization (i.e., one that
// satisfies the upper bound/constraints).
if let Type::TypeVar(bound_typevar) = self {
return ConstraintSet::constrain_typevar(
db,
bound_typevar,
Type::Never,
target,
relation,
);
} else if let Type::TypeVar(bound_typevar) = target {
return ConstraintSet::constrain_typevar(
db,
bound_typevar,
self,
Type::object(),
relation,
);
}
}
match (self, target) {
// Everything is a subtype of `object`.
(_, Type::NominalInstance(instance)) if instance.is_object() => {
@@ -2029,7 +2072,7 @@ impl<'db> Type<'db> {
);
ConstraintSet::from(match relation {
TypeRelation::Subtyping | TypeRelation::SubtypingAssuming(_) => false,
TypeRelation::Assignability => true,
TypeRelation::Assignability | TypeRelation::ConstraintSetAssignability => true,
TypeRelation::Redundancy => match target {
Type::Dynamic(_) => true,
Type::Union(union) => union.elements(db).iter().any(Type::is_dynamic),
@@ -2039,7 +2082,7 @@ impl<'db> Type<'db> {
}
(_, Type::Dynamic(_)) => ConstraintSet::from(match relation {
TypeRelation::Subtyping | TypeRelation::SubtypingAssuming(_) => false,
TypeRelation::Assignability => true,
TypeRelation::Assignability | TypeRelation::ConstraintSetAssignability => true,
TypeRelation::Redundancy => match self {
Type::Dynamic(_) => true,
Type::Intersection(intersection) => {
@@ -2303,14 +2346,19 @@ impl<'db> Type<'db> {
TypeRelation::Subtyping
| TypeRelation::Redundancy
| TypeRelation::SubtypingAssuming(_) => self,
TypeRelation::Assignability => self.bottom_materialization(db),
TypeRelation::Assignability | TypeRelation::ConstraintSetAssignability => {
self.bottom_materialization(db)
}
};
intersection.negative(db).iter().when_all(db, |&neg_ty| {
let neg_ty = match relation {
TypeRelation::Subtyping
| TypeRelation::Redundancy
| TypeRelation::SubtypingAssuming(_) => neg_ty,
TypeRelation::Assignability => neg_ty.bottom_materialization(db),
TypeRelation::Assignability
| TypeRelation::ConstraintSetAssignability => {
neg_ty.bottom_materialization(db)
}
};
self_ty.is_disjoint_from_impl(
db,
@@ -11460,6 +11508,11 @@ pub(crate) enum TypeRelation<'db> {
/// are not actually subtypes of each other. (That is, `implies_subtype_of(false, int, str)`
/// will return true!)
SubtypingAssuming(ConstraintSet<'db>),
/// A placeholder for the new assignability relation that uses constraint sets to encode
/// relationships with a typevar. This will eventually replace `Assignability`, but allows us
/// to start using the new relation in a controlled manner in some places.
ConstraintSetAssignability,
}
impl TypeRelation<'_> {
@@ -11467,6 +11520,10 @@ impl TypeRelation<'_> {
matches!(self, TypeRelation::Assignability)
}
pub(crate) const fn is_constraint_set_assignability(self) -> bool {
matches!(self, TypeRelation::ConstraintSetAssignability)
}
pub(crate) const fn is_subtyping(self) -> bool {
matches!(self, TypeRelation::Subtyping)
}
@@ -11909,6 +11966,10 @@ impl<'db> CallableTypes<'db> {
}
}
fn as_slice(&self) -> &SmallVec<[CallableType<'db>; 1]> {
&self.0
}
fn into_inner(self) -> SmallVec<[CallableType<'db>; 1]> {
self.0
}

View File

@@ -821,6 +821,23 @@ impl<'db> Bindings<'db> {
function_generic_context(bound_method.function(db))
}
Type::Callable(callable) => {
let union = UnionType::from_elements(
db,
callable
.signatures(db)
.overloads
.iter()
.filter_map(|signature| signature.generic_context)
.map(wrap_generic_context),
);
if union.is_never() {
Type::none(db)
} else {
union
}
}
Type::KnownInstance(KnownInstanceType::TypeAliasType(
TypeAliasType::PEP695(alias),
)) => alias

View File

@@ -625,7 +625,9 @@ impl<'db> ClassType<'db> {
| TypeRelation::SubtypingAssuming(_) => {
ConstraintSet::from(other.is_object(db))
}
TypeRelation::Assignability => ConstraintSet::from(!other.is_final(db)),
TypeRelation::Assignability | TypeRelation::ConstraintSetAssignability => {
ConstraintSet::from(!other.is_final(db))
}
},
// Protocol, Generic, and TypedDict are not represented by a ClassType.
@@ -1124,6 +1126,9 @@ impl<'db> ClassType<'db> {
/// constructor signature of this class.
#[salsa::tracked(cycle_initial=into_callable_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
pub(super) fn into_callable(self, db: &'db dyn Db) -> CallableTypes<'db> {
let (class_literal, _) = self.class_literal(db);
let generic_context = class_literal.generic_context(db);
let self_ty = Type::from(self);
let metaclass_dunder_call_function_symbol = self_ty
.member_lookup_with_policy(
@@ -1197,39 +1202,53 @@ impl<'db> ClassType<'db> {
// If the class defines an `__init__` method, then we synthesize a callable type with the
// same parameters as the `__init__` method after it is bound, and with the return type of
// the concrete type of `Self`.
let synthesized_dunder_init_callable =
if let Place::Defined(ty, _, _) = dunder_init_function_symbol {
let signature = match ty {
Type::FunctionLiteral(dunder_init_function) => {
Some(dunder_init_function.signature(db))
}
Type::Callable(callable) => Some(callable.signatures(db)),
_ => None,
let synthesized_dunder_init_callable = if let Place::Defined(ty, _, _) =
dunder_init_function_symbol
{
let signature = match ty {
Type::FunctionLiteral(dunder_init_function) => {
Some(dunder_init_function.signature(db))
}
Type::Callable(callable) => Some(callable.signatures(db)),
_ => None,
};
if let Some(signature) = signature {
let synthesized_signature = |signature: &Signature<'db>| {
let self_annotation = signature
.parameters()
.get_positional(0)
.and_then(Parameter::annotated_type)
.filter(|ty| {
ty.as_typevar()
.is_none_or(|bound_typevar| !bound_typevar.typevar(db).is_self(db))
});
let return_type = self_annotation.unwrap_or(correct_return_type);
let instance_ty = self_annotation.unwrap_or_else(|| Type::instance(db, self));
Signature::new_generic(
generic_context,
signature.parameters().clone(),
Some(return_type),
)
.with_definition(signature.definition())
.bind_self(db, Some(instance_ty))
};
if let Some(signature) = signature {
let synthesized_signature = |signature: &Signature<'db>| {
let instance_ty = Type::instance(db, self);
Signature::new(signature.parameters().clone(), Some(correct_return_type))
.with_definition(signature.definition())
.bind_self(db, Some(instance_ty))
};
let synthesized_dunder_init_signature = CallableSignature::from_overloads(
signature.overloads.iter().map(synthesized_signature),
);
let synthesized_dunder_init_signature = CallableSignature::from_overloads(
signature.overloads.iter().map(synthesized_signature),
);
Some(CallableType::new(
db,
synthesized_dunder_init_signature,
true,
))
} else {
None
}
Some(CallableType::new(
db,
synthesized_dunder_init_signature,
true,
))
} else {
None
};
}
} else {
None
};
match (dunder_new_function, synthesized_dunder_init_callable) {
(Some(dunder_new_function), Some(synthesized_dunder_init_callable)) => {

View File

@@ -69,19 +69,21 @@
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt::Display;
use std::ops::Range;
use std::ops::{Deref, DerefMut, Range};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use salsa::plumbing::AsId;
use crate::types::generics::{GenericContext, InferableTypeVars, Specialization};
use crate::types::generics::{
GenericContext, InferableTypeVars, PartialSpecialization, Specialization,
};
use crate::types::visitor::{TypeCollector, TypeVisitor, walk_type_with_recursion_guard};
use crate::types::{
BoundTypeVarIdentity, BoundTypeVarInstance, IntersectionType, Type, TypeRelation,
TypeVarBoundOrConstraints, UnionType, walk_bound_type_var_type,
BoundTypeVarIdentity, BoundTypeVarInstance, IntersectionType, Type, TypeContext, TypeMapping,
TypeRelation, TypeVarBoundOrConstraints, UnionType, walk_bound_type_var_type,
};
use crate::{Db, FxOrderSet};
use crate::{Db, FxOrderMap, FxOrderSet};
/// An extension trait for building constraint sets from [`Option`] values.
pub(crate) trait OptionConstraintsExtension<T> {
@@ -207,7 +209,7 @@ impl<'db> ConstraintSet<'db> {
lower.top_materialization(db),
upper.bottom_materialization(db),
),
TypeRelation::Assignability => (
TypeRelation::Assignability | TypeRelation::ConstraintSetAssignability => (
lower.bottom_materialization(db),
upper.top_materialization(db),
),
@@ -422,6 +424,10 @@ impl<'db> ConstraintSet<'db> {
Self { node }
}
pub(crate) fn for_each_path(self, db: &'db dyn Db, f: impl FnMut(&PathAssignments<'db>)) {
self.node.for_each_path(db, f);
}
pub(crate) fn range(
db: &'db dyn Db,
lower: Type<'db>,
@@ -435,6 +441,10 @@ impl<'db> ConstraintSet<'db> {
pub(crate) fn display(self, db: &'db dyn Db) -> impl Display {
self.node.simplify_for_display(db).display(db)
}
pub(crate) fn display_graph(self, db: &'db dyn Db, prefix: &dyn Display) -> impl Display {
self.node.display_graph(db, prefix)
}
}
impl From<bool> for ConstraintSet<'_> {
@@ -462,9 +472,9 @@ impl<'db> BoundTypeVarInstance<'db> {
/// lower and upper bound.
#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
pub(crate) struct ConstrainedTypeVar<'db> {
typevar: BoundTypeVarInstance<'db>,
lower: Type<'db>,
upper: Type<'db>,
pub(crate) typevar: BoundTypeVarInstance<'db>,
pub(crate) lower: Type<'db>,
pub(crate) upper: Type<'db>,
}
// The Salsa heap is tracked separately.
@@ -673,7 +683,7 @@ impl<'db> ConstrainedTypeVar<'db> {
Some(Self::new(db, self.typevar(db), lower, upper))
}
fn display(self, db: &'db dyn Db) -> impl Display {
pub(crate) fn display(self, db: &'db dyn Db) -> impl Display {
self.display_inner(db, false)
}
@@ -837,6 +847,40 @@ impl<'db> Node<'db> {
}
}
fn for_each_path(self, db: &'db dyn Db, mut f: impl FnMut(&PathAssignments<'db>)) {
match self {
Node::AlwaysTrue => {}
Node::AlwaysFalse => {}
Node::Interior(interior) => {
let map = interior.sequent_map(db);
let mut path = PathAssignments::default();
self.for_each_path_inner(db, &mut f, map, &mut path);
}
}
}
fn for_each_path_inner(
self,
db: &'db dyn Db,
f: &mut dyn FnMut(&PathAssignments<'db>),
map: &SequentMap<'db>,
path: &mut PathAssignments<'db>,
) {
match self {
Node::AlwaysTrue => f(path),
Node::AlwaysFalse => {}
Node::Interior(interior) => {
let constraint = interior.constraint(db);
path.walk_edge(db, map, constraint.when_true(), |path, _| {
interior.if_true(db).for_each_path_inner(db, f, map, path);
});
path.walk_edge(db, map, constraint.when_false(), |path, _| {
interior.if_false(db).for_each_path_inner(db, f, map, path);
});
}
}
}
/// Returns whether this BDD represent the constant function `true`.
fn is_always_satisfied(self, db: &'db dyn Db) -> bool {
match self {
@@ -1052,6 +1096,14 @@ impl<'db> Node<'db> {
self.implies(db, constraint)
}
fn typevars(self, db: &'db dyn Db) -> FxHashSet<BoundTypeVarInstance<'db>> {
let mut typevars = FxHashSet::default();
self.for_each_constraint(db, &mut |constraint| {
typevars.insert(constraint.typevar(db));
});
typevars
}
fn satisfied_by_all_typevars(
self,
db: &'db dyn Db,
@@ -1063,11 +1115,6 @@ impl<'db> Node<'db> {
Node::Interior(_) => {}
}
let mut typevars = FxHashSet::default();
self.for_each_constraint(db, &mut |constraint| {
typevars.insert(constraint.typevar(db));
});
// Returns if some specialization satisfies this constraint set.
let some_specialization_satisfies = move |specializations: Node<'db>| {
let when_satisfied = specializations.implies(db, self).and(db, specializations);
@@ -1082,7 +1129,7 @@ impl<'db> Node<'db> {
.is_always_satisfied(db)
};
for typevar in typevars {
for typevar in self.typevars(db) {
if typevar.is_inferable(db, inferable) {
// If the typevar is in inferable position, we need to verify that some valid
// specialization satisfies the constraint set.
@@ -2178,7 +2225,7 @@ fn sequent_map_cycle_initial<'db>(
/// An assignment of one BDD variable to either `true` or `false`. (When evaluating a BDD, we
/// must provide an assignment for each variable present in the BDD.)
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
enum ConstraintAssignment<'db> {
pub(crate) enum ConstraintAssignment<'db> {
Positive(ConstrainedTypeVar<'db>),
Negative(ConstrainedTypeVar<'db>),
}
@@ -2611,6 +2658,20 @@ impl<'db> SequentMap<'db> {
(bound_constraint.lower(db), constrained_upper)
}
// (CL ≤ C ≤ pivot) ∧ (pivot ≤ B ≤ BU) → (CL ≤ C ≤ B)
(constrained_lower, constrained_upper)
if constrained_upper == bound_constraint.lower(db) =>
{
(constrained_lower, Type::TypeVar(bound_typevar))
}
// (pivot ≤ C ≤ CU) ∧ (BL ≤ B ≤ pivot) → (B ≤ C ≤ CU)
(constrained_lower, constrained_upper)
if constrained_lower == bound_constraint.upper(db) =>
{
(Type::TypeVar(bound_typevar), constrained_upper)
}
_ => return,
};
@@ -2633,17 +2694,36 @@ impl<'db> SequentMap<'db> {
let left_upper = left_constraint.upper(db);
let right_lower = right_constraint.lower(db);
let right_upper = right_constraint.upper(db);
let new_constraint = |bound_typevar: BoundTypeVarInstance<'db>,
right_lower: Type<'db>,
right_upper: Type<'db>| {
let right_lower = if let Type::TypeVar(other_bound_typevar) = right_lower
&& bound_typevar.is_same_typevar_as(db, other_bound_typevar)
{
Type::Never
} else {
right_lower
};
let right_upper = if let Type::TypeVar(other_bound_typevar) = right_upper
&& bound_typevar.is_same_typevar_as(db, other_bound_typevar)
{
Type::object()
} else {
right_upper
};
ConstrainedTypeVar::new(db, bound_typevar, right_lower, right_upper)
};
let post_constraint = match (left_lower, left_upper) {
(Type::TypeVar(bound_typevar), Type::TypeVar(other_bound_typevar))
if bound_typevar.is_same_typevar_as(db, other_bound_typevar) =>
{
ConstrainedTypeVar::new(db, bound_typevar, right_lower, right_upper)
new_constraint(bound_typevar, right_lower, right_upper)
}
(Type::TypeVar(bound_typevar), _) => {
ConstrainedTypeVar::new(db, bound_typevar, Type::Never, right_upper)
new_constraint(bound_typevar, Type::Never, right_upper)
}
(_, Type::TypeVar(bound_typevar)) => {
ConstrainedTypeVar::new(db, bound_typevar, right_lower, Type::object())
new_constraint(bound_typevar, right_lower, Type::object())
}
_ => return,
};
@@ -2784,7 +2864,7 @@ impl<'db> SequentMap<'db> {
/// The collection of constraints that we know to be true or false at a certain point when
/// traversing a BDD.
#[derive(Debug, Default)]
struct PathAssignments<'db> {
pub(crate) struct PathAssignments<'db> {
assignments: FxOrderSet<ConstraintAssignment<'db>>,
}
@@ -2862,6 +2942,17 @@ impl<'db> PathAssignments<'db> {
result
}
pub(crate) fn positive_constraints(
&self,
) -> impl Iterator<Item = ConstrainedTypeVar<'db>> + '_ {
self.assignments
.iter()
.filter_map(|assignment| match assignment {
ConstraintAssignment::Positive(constraint) => Some(*constraint),
ConstraintAssignment::Negative(_) => None,
})
}
fn assignment_holds(&self, assignment: ConstraintAssignment<'db>) -> bool {
self.assignments.contains(&assignment)
}
@@ -3268,6 +3359,219 @@ impl<'db> BoundTypeVarInstance<'db> {
}
}
#[derive(Clone, Debug, Default)]
pub(crate) struct ConstraintSolutions<'db>(Vec<ConstraintSolution<'db>>);
impl<'db> Deref for ConstraintSolutions<'db> {
type Target = Vec<ConstraintSolution<'db>>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<'db> DerefMut for ConstraintSolutions<'db> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<'a, 'db> IntoIterator for &'a ConstraintSolutions<'db> {
type Item = &'a ConstraintSolution<'db>;
type IntoIter = std::slice::Iter<'a, ConstraintSolution<'db>>;
fn into_iter(self) -> Self::IntoIter {
self.0.iter()
}
}
#[derive(Clone, Debug, Default, Eq, Hash, PartialEq)]
pub struct ConstraintSolution<'db> {
assignments: FxOrderMap<BoundTypeVarInstance<'db>, (Type<'db>, Type<'db>)>,
}
impl<'db> ConstraintSolution<'db> {
pub(crate) fn get_upper_bound(
&self,
bound_typevar: BoundTypeVarInstance<'db>,
) -> Option<Type<'db>> {
self.assignments
.get(&bound_typevar)
.map(|(_, upper_bound)| *upper_bound)
}
fn add_mapping(
&mut self,
db: &'db dyn Db,
typevar: BoundTypeVarInstance<'db>,
lower_bound: Type<'db>,
upper_bound: Type<'db>,
) {
eprintln!(
" -> ADD {}{}{}",
lower_bound.display(db),
typevar.identity(db).display(db),
upper_bound.display(db),
);
let (existing_lower_bound, existing_upper_bound) = self
.assignments
.entry(typevar)
.or_insert_with(|| (Type::Never, Type::object()));
let new_lower_bound = UnionType::from_elements(db, [*existing_lower_bound, lower_bound]);
let new_upper_bound =
IntersectionType::from_elements(db, [*existing_upper_bound, upper_bound]);
*existing_lower_bound = new_lower_bound;
*existing_upper_bound = new_upper_bound;
}
fn collapse_to_single_types(&mut self) {
for (_, (lower_bound, upper_bound)) in &mut self.assignments {
// Use the lower bound if it's more "interesting", otherwise use the upper bound.
if upper_bound.is_object() && !lower_bound.is_never() {
*upper_bound = *lower_bound;
} else {
*lower_bound = *upper_bound;
}
}
}
fn close_over_typevars(&mut self, db: &'db dyn Db) {
// We have to pull this out into a separate variable to satisfy the borrow checker.
let typevars: Vec<_> = self.assignments.keys().copied().collect();
loop {
let mut any_changed = false;
for bound_typevar in &typevars {
let (existing_lower, existing_upper) = self.assignments[bound_typevar];
let updated_lower = existing_lower.apply_type_mapping(
db,
&TypeMapping::PartialSpecialization(
PartialSpecialization::FromConstraintSolution(self),
),
TypeContext::default(),
);
let updated_upper = existing_upper.apply_type_mapping(
db,
&TypeMapping::PartialSpecialization(
PartialSpecialization::FromConstraintSolution(self),
),
TypeContext::default(),
);
if updated_lower != existing_lower || updated_upper != existing_upper {
self.assignments[bound_typevar] = (updated_lower, updated_upper);
any_changed = true;
}
}
if !any_changed {
return;
}
}
}
/// Renders the solution as `[...]`, one entry per typevar: `T = X` when both
/// bounds collapsed to the same type, otherwise the lower bound, the typevar,
/// and the upper bound in sequence.
pub(crate) fn display(&self, db: &'db dyn Db) -> impl Display {
    struct Fmt<'a, 'db> {
        solution: &'a ConstraintSolution<'db>,
        db: &'db dyn Db,
    }
    impl Display for Fmt<'_, '_> {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            let db = self.db;
            // Format every assignment, comma-separated, choosing the short
            // `T = X` form whenever the two bounds are identical.
            let entries = self.solution.assignments.iter().format_with(
                ", ",
                |(bound_typevar, (lower, upper)), f| {
                    if lower == upper {
                        f(&format_args!(
                            "{} = {}",
                            bound_typevar.identity(db).display(db),
                            lower.display(db)
                        ))
                    } else {
                        f(&format_args!(
                            "{}{}{}",
                            lower.display(db),
                            bound_typevar.identity(db).display(db),
                            upper.display(db),
                        ))
                    }
                },
            );
            write!(f, "[{entries}]")
        }
    }
    Fmt { solution: self, db }
}
}
/// Borrows the solution as an iterator of
/// `(typevar, (lower_bound, upper_bound))` entries, in the map's stored order.
impl<'a, 'db> IntoIterator for &'a ConstraintSolution<'db> {
    type Item = (&'a BoundTypeVarInstance<'db>, &'a (Type<'db>, Type<'db>));
    type IntoIter = ordermap::map::Iter<'a, BoundTypeVarInstance<'db>, (Type<'db>, Type<'db>)>;

    fn into_iter(self) -> Self::IntoIter {
        // Delegate directly to the underlying assignments map.
        self.assignments.iter()
    }
}
impl<'db> ConstraintSet<'db> {
    /// Solves this constraint set for its inferable typevars, producing one
    /// [`ConstraintSolution`] per satisfiable path through the set.
    ///
    /// For each path, every positive constraint `lower ≤ T ≤ upper`
    /// contributes:
    /// - a mapping for `T` itself, when `T` is inferable;
    /// - a mapping for an inferable typevar appearing as the constraint's
    ///   lower bound (bounded above by `T`);
    /// - a mapping for an inferable typevar appearing as the constraint's
    ///   upper bound (bounded below by `T`).
    ///
    /// Each per-path solution is then collapsed to single types and closed
    /// over its own typevar assignments before being collected.
    pub(crate) fn solve_for(
        self,
        db: &'db dyn Db,
        inferable: InferableTypeVars<'_, 'db>,
    ) -> ConstraintSolutions<'db> {
        let mut solutions = ConstraintSolutions::default();
        self.for_each_path(db, |path| {
            let mut solution = ConstraintSolution::default();
            for constraint in path.positive_constraints() {
                let typevar = constraint.typevar(db);
                let lower = constraint.lower(db);
                let upper = constraint.upper(db);
                // The constrained typevar itself, if we're allowed to infer it.
                if typevar.is_inferable(db, inferable) {
                    solution.add_mapping(db, typevar, lower, upper);
                }
                // An inferable typevar used as the lower bound must be
                // assignable to `typevar`.
                if let Type::TypeVar(lower_bound_typevar) = lower
                    && lower_bound_typevar.is_inferable(db, inferable)
                {
                    solution.add_mapping(
                        db,
                        lower_bound_typevar,
                        Type::Never,
                        Type::TypeVar(typevar),
                    );
                }
                // An inferable typevar used as the upper bound must admit
                // `typevar` from below.
                if let Type::TypeVar(upper_bound_typevar) = upper
                    && upper_bound_typevar.is_inferable(db, inferable)
                {
                    solution.add_mapping(
                        db,
                        upper_bound_typevar,
                        Type::TypeVar(typevar),
                        Type::object(),
                    );
                }
            }
            solution.collapse_to_single_types();
            solution.close_over_typevars(db);
            solutions.push(solution);
        });
        solutions
    }
}
impl<'db> GenericContext<'db> {
pub(crate) fn specialize_constrained(
self,

View File

@@ -11,11 +11,13 @@ use crate::semantic_index::scope::{FileScopeId, NodeWithScopeKind, ScopeId};
use crate::semantic_index::{SemanticIndex, semantic_index};
use crate::types::class::ClassType;
use crate::types::class_base::ClassBase;
use crate::types::constraints::ConstraintSet;
use crate::types::constraints::{ConstraintSet, ConstraintSolution};
use crate::types::instance::{Protocol, ProtocolInstanceType};
use crate::types::signatures::Parameters;
use crate::types::tuple::{TupleSpec, TupleType, walk_tuple_type};
use crate::types::visitor::{TypeCollector, TypeVisitor, walk_type_with_recursion_guard};
use crate::types::visitor::{
TypeCollector, TypeVisitor, any_over_type, walk_type_with_recursion_guard,
};
use crate::types::{
ApplyTypeMappingVisitor, BoundTypeVarIdentity, BoundTypeVarInstance, ClassLiteral,
FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor,
@@ -501,7 +503,7 @@ impl<'db> GenericContext<'db> {
loop {
let mut any_changed = false;
for i in 0..len {
let partial = PartialSpecialization {
let partial = PartialSpecialization::FromGenericContext {
generic_context: self,
types: &types,
};
@@ -563,7 +565,7 @@ impl<'db> GenericContext<'db> {
// Typevars are only allowed to refer to _earlier_ typevars in their defaults. (This is
// statically enforced for PEP-695 contexts, and is explicitly called out as a
// requirement for legacy contexts.)
let partial = PartialSpecialization {
let partial = PartialSpecialization::FromGenericContext {
generic_context: self,
types: &expanded[0..idx],
};
@@ -842,7 +844,11 @@ fn has_relation_in_invariant_position<'db>(
disjointness_visitor,
),
// And A <~ B (assignability) is Bottom[A] <: Top[B]
(None, Some(base_mat), TypeRelation::Assignability) => is_subtype_in_invariant_position(
(
None,
Some(base_mat),
TypeRelation::Assignability | TypeRelation::ConstraintSetAssignability,
) => is_subtype_in_invariant_position(
db,
derived_type,
MaterializationKind::Bottom,
@@ -852,7 +858,11 @@ fn has_relation_in_invariant_position<'db>(
relation_visitor,
disjointness_visitor,
),
(Some(derived_mat), None, TypeRelation::Assignability) => is_subtype_in_invariant_position(
(
Some(derived_mat),
None,
TypeRelation::Assignability | TypeRelation::ConstraintSetAssignability,
) => is_subtype_in_invariant_position(
db,
derived_type,
derived_mat,
@@ -1289,9 +1299,12 @@ impl<'db> Specialization<'db> {
/// You will usually use [`Specialization`] instead of this type. This type is used when we need to
/// substitute types for type variables before we have fully constructed a [`Specialization`].
#[derive(Clone, Debug, Eq, Hash, PartialEq, get_size2::GetSize)]
pub struct PartialSpecialization<'a, 'db> {
generic_context: GenericContext<'db>,
types: &'a [Type<'db>],
pub enum PartialSpecialization<'a, 'db> {
FromGenericContext {
generic_context: GenericContext<'db>,
types: &'a [Type<'db>],
},
FromConstraintSolution(&'a ConstraintSolution<'db>),
}
impl<'db> PartialSpecialization<'_, 'db> {
@@ -1302,11 +1315,20 @@ impl<'db> PartialSpecialization<'_, 'db> {
db: &'db dyn Db,
bound_typevar: BoundTypeVarInstance<'db>,
) -> Option<Type<'db>> {
let index = self
.generic_context
.variables_inner(db)
.get_index_of(&bound_typevar.identity(db))?;
self.types.get(index).copied()
match self {
PartialSpecialization::FromGenericContext {
generic_context,
types,
} => {
let index = generic_context
.variables_inner(db)
.get_index_of(&bound_typevar.identity(db))?;
types.get(index).copied()
}
PartialSpecialization::FromConstraintSolution(solution) => {
solution.get_upper_bound(bound_typevar)
}
}
}
}
@@ -1363,7 +1385,7 @@ impl<'db> SpecializationBuilder<'db> {
.map(|(identity, _)| self.types.get(identity).copied());
// TODO Infer the tuple spec for a tuple type
generic_context.specialize_partial(self.db, types)
generic_context.specialize_recursive(self.db, types)
}
fn add_type_mapping(
@@ -1640,6 +1662,46 @@ impl<'db> SpecializationBuilder<'db> {
}
}
(Type::Callable(formal_callable), _) => {
eprintln!("==> {}", formal.display(self.db));
eprintln!(" {}", actual.display(self.db));
eprintln!(" {}", self.inferable.display(self.db));
let Some(actual_callables) = actual.try_upcast_to_callable(self.db) else {
eprintln!(" -> NOPE");
return Ok(());
};
let mut when = ConstraintSet::from(false);
for formal_signature in &formal_callable.signatures(self.db).overloads {
for actual_callable in actual_callables.as_slice() {
for actual_signature in &actual_callable.signatures(self.db).overloads {
eprintln!(" -> pair");
eprintln!(" {}", formal_signature.display(self.db));
eprintln!(" {}", actual_signature.display(self.db));
let x = formal_signature.when_constraint_set_assignable_to(
self.db,
actual_signature,
self.inferable,
);
eprintln!(" {}", x.display(self.db));
eprintln!(" {}", x.display_graph(self.db, &" "));
when.union(self.db, x);
}
}
}
eprintln!("--> combined");
eprintln!(" {}", when.display(self.db));
eprintln!(" {}", when.display_graph(self.db, &" "));
let solutions = when.solve_for(self.db, self.inferable);
for solution in &solutions {
eprintln!("--> solution [{}]", solution.display(self.db));
for (bound_typevar, (_, ty)) in solution {
self.add_type_mapping(*bound_typevar, *ty, polarity, &mut f);
}
}
}
// TODO: Add more forms that we can structurally induct into: type[C], callables
_ => {}
}

View File

@@ -752,7 +752,8 @@ impl<'db> Signature<'db> {
// we produce, we reduce it back down to the inferable set that the caller asked about.
// If we introduced new inferable typevars, those will be existentially quantified away
// before returning.
when.reduce_inferable(db, self_inferable.iter().chain(other_inferable.iter()))
//when.reduce_inferable(db, self_inferable.iter().chain(other_inferable.iter()))
when
}
fn is_equivalent_to_inner(
@@ -841,6 +842,22 @@ impl<'db> Signature<'db> {
result
}
pub(crate) fn when_constraint_set_assignable_to(
&self,
db: &'db dyn Db,
other: &Signature<'db>,
inferable: InferableTypeVars<'_, 'db>,
) -> ConstraintSet<'db> {
self.has_relation_to_impl(
db,
other,
inferable,
TypeRelation::ConstraintSetAssignability,
&HasRelationToVisitor::default(),
&IsDisjointVisitor::default(),
)
}
/// Implementation of subtyping and assignability for signature.
fn has_relation_to_impl(
&self,
@@ -882,7 +899,8 @@ impl<'db> Signature<'db> {
// we produce, we reduce it back down to the inferable set that the caller asked about.
// If we introduced new inferable typevars, those will be existentially quantified away
// before returning.
when.reduce_inferable(db, self_inferable.iter().chain(other_inferable.iter()))
//when.reduce_inferable(db, self_inferable.iter().chain(other_inferable.iter()))
when
}
fn has_relation_to_inner(
@@ -959,19 +977,33 @@ impl<'db> Signature<'db> {
let mut check_types = |type1: Option<Type<'db>>, type2: Option<Type<'db>>| {
let type1 = type1.unwrap_or(Type::unknown());
let type2 = type2.unwrap_or(Type::unknown());
!result
.intersect(
db,
type1.has_relation_to_impl(
eprintln!(" ~> {}{}", type1.display(db), type2.display(db));
eprintln!(
" ~> when {}",
type1
.has_relation_to_impl(
db,
type2,
inferable,
relation,
relation_visitor,
disjointness_visitor,
),
)
.is_never_satisfied(db)
)
.display(db)
);
result.intersect(
db,
type1.has_relation_to_impl(
db,
type2,
inferable,
relation,
relation_visitor,
disjointness_visitor,
),
);
eprintln!(" ~> inter {}", result.display(db));
!result.is_never_satisfied(db)
};
// Return types are covariant.
@@ -997,7 +1029,13 @@ impl<'db> Signature<'db> {
// If either of the parameter lists is gradual (`...`), then it is assignable to and from
// any other parameter list, but not a subtype or supertype of any other parameter list.
if self.parameters.is_gradual() || other.parameters.is_gradual() {
return ConstraintSet::from(relation.is_assignability());
result.intersect(
db,
ConstraintSet::from(
relation.is_assignability() || relation.is_constraint_set_assignability(),
),
);
return result;
}
let mut parameters = ParametersZip {