Compare commits
4 Commits
micha/ty-p... ... dcreager/e...

| Author | SHA1 | Date |
|---|---|---|
|  | 7f7fb50a43 |  |
|  | 0b6bd9a735 |  |
|  | c790aa7474 |  |
|  | 298e2dcb4e |  |
@@ -69,7 +69,7 @@
 use std::cell::RefCell;
 use std::cmp::Ordering;
 use std::fmt::Display;
-use std::ops::Range;
+use std::ops::{BitAnd, BitOr, Range};

 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -81,7 +81,7 @@ use crate::types::{
     BoundTypeVarIdentity, BoundTypeVarInstance, IntersectionType, Type, TypeVarBoundOrConstraints,
     UnionType, walk_bound_type_var_type,
 };
-use crate::{Db, FxOrderSet};
+use crate::{Db, FxOrderMap};

 /// An extension trait for building constraint sets from [`Option`] values.
 pub(crate) trait OptionConstraintsExtension<T> {
@@ -205,7 +205,13 @@ impl<'db> ConstraintSet<'db> {
         upper: Type<'db>,
     ) -> Self {
         Self {
-            node: ConstrainedTypeVar::new_node(db, typevar, lower, upper),
+            node: ConstrainedTypeVar::new_node(
+                db,
+                typevar,
+                lower,
+                upper,
+                ExplicitConstraint::Explicit,
+            ),
         }
     }

@@ -490,9 +496,10 @@ impl IntersectionResult<'_> {
 /// lower and upper bound.
 #[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
 pub(crate) struct ConstrainedTypeVar<'db> {
-    typevar: BoundTypeVarInstance<'db>,
-    lower: Type<'db>,
-    upper: Type<'db>,
+    pub(crate) typevar: BoundTypeVarInstance<'db>,
+    pub(crate) lower: Type<'db>,
+    pub(crate) upper: Type<'db>,
+    pub(crate) explicit: ExplicitConstraint,
 }

 // The Salsa heap is tracked separately.
@@ -501,13 +508,12 @@ impl get_size2::GetSize for ConstrainedTypeVar<'_> {}
 #[salsa::tracked]
 impl<'db> ConstrainedTypeVar<'db> {
     /// Returns a new range constraint.
-    ///
-    /// Panics if `lower` and `upper` are not both fully static.
     fn new_node(
         db: &'db dyn Db,
         typevar: BoundTypeVarInstance<'db>,
         mut lower: Type<'db>,
         mut upper: Type<'db>,
+        explicit: ExplicitConstraint,
     ) -> Node<'db> {
         // It's not useful for an upper bound to be an intersection type, or for a lower bound to
         // be a union type. Because the following equivalences hold, we can break these bounds
@@ -522,7 +528,7 @@ impl<'db> ConstrainedTypeVar<'db> {
             for lower_element in lower_union.elements(db) {
                 result = result.and_with_offset(
                     db,
-                    ConstrainedTypeVar::new_node(db, typevar, *lower_element, upper),
+                    ConstrainedTypeVar::new_node(db, typevar, *lower_element, upper, explicit),
                 );
             }
             return result;
@@ -537,13 +543,19 @@ impl<'db> ConstrainedTypeVar<'db> {
             for upper_element in upper_intersection.iter_positive(db) {
                 result = result.and_with_offset(
                     db,
-                    ConstrainedTypeVar::new_node(db, typevar, lower, upper_element),
+                    ConstrainedTypeVar::new_node(db, typevar, lower, upper_element, explicit),
                 );
             }
             for upper_element in upper_intersection.iter_negative(db) {
                 result = result.and_with_offset(
                     db,
-                    ConstrainedTypeVar::new_node(db, typevar, lower, upper_element.negate(db)),
+                    ConstrainedTypeVar::new_node(
+                        db,
+                        typevar,
+                        lower,
+                        upper_element.negate(db),
+                        explicit,
+                    ),
                 );
             }
             return result;
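The two splitting loops above (one for a union lower bound, one for an intersection upper bound) implement what the earlier comment describes. The underlying equivalences appear to be the usual subtyping rules, written here informally rather than quoted from the source:

    (A | B) <= T   holds exactly when   (A <= T) and (B <= T)
    T <= (A & B)   holds exactly when   (T <= A) and (T <= B)

so a compound bound can be split into one constraint per element, AND-ed together with `and_with_offset`, and after this change every split-off constraint carries the same `explicit` flag as the original.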
@@ -575,7 +587,7 @@ impl<'db> ConstrainedTypeVar<'db> {
             {
                 return Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, typevar, Type::Never, Type::object()),
+                    ConstrainedTypeVar::new(db, typevar, Type::Never, Type::object(), explicit),
                     1,
                 )
                 .negate(db);
@@ -627,6 +639,7 @@ impl<'db> ConstrainedTypeVar<'db> {
                         typevar,
                         Type::TypeVar(bound),
                         Type::TypeVar(bound),
+                        explicit,
                     ),
                     1,
                 )
@@ -638,12 +651,24 @@ impl<'db> ConstrainedTypeVar<'db> {
             {
                 let lower = Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, lower, Type::Never, Type::TypeVar(typevar)),
+                    ConstrainedTypeVar::new(
+                        db,
+                        lower,
+                        Type::Never,
+                        Type::TypeVar(typevar),
+                        explicit,
+                    ),
                     1,
                 );
                 let upper = Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, upper, Type::TypeVar(typevar), Type::object()),
+                    ConstrainedTypeVar::new(
+                        db,
+                        upper,
+                        Type::TypeVar(typevar),
+                        Type::object(),
+                        explicit,
+                    ),
                     1,
                 );
                 lower.and(db, upper)
@@ -653,13 +678,19 @@ impl<'db> ConstrainedTypeVar<'db> {
             (Type::TypeVar(lower), _) if typevar.can_be_bound_for(db, lower) => {
                 let lower = Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, lower, Type::Never, Type::TypeVar(typevar)),
+                    ConstrainedTypeVar::new(
+                        db,
+                        lower,
+                        Type::Never,
+                        Type::TypeVar(typevar),
+                        explicit,
+                    ),
                     1,
                 );
                 let upper = if upper.is_object() {
                     Node::AlwaysTrue
                 } else {
-                    Self::new_node(db, typevar, Type::Never, upper)
+                    Self::new_node(db, typevar, Type::Never, upper, explicit)
                 };
                 lower.and(db, upper)
             }
@@ -669,17 +700,27 @@ impl<'db> ConstrainedTypeVar<'db> {
                 let lower = if lower.is_never() {
                     Node::AlwaysTrue
                 } else {
-                    Self::new_node(db, typevar, lower, Type::object())
+                    Self::new_node(db, typevar, lower, Type::object(), explicit)
                 };
                 let upper = Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, upper, Type::TypeVar(typevar), Type::object()),
+                    ConstrainedTypeVar::new(
+                        db,
+                        upper,
+                        Type::TypeVar(typevar),
+                        Type::object(),
+                        explicit,
+                    ),
                     1,
                 );
                 lower.and(db, upper)
             }

-            _ => Node::new_constraint(db, ConstrainedTypeVar::new(db, typevar, lower, upper), 1),
+            _ => Node::new_constraint(
+                db,
+                ConstrainedTypeVar::new(db, typevar, lower, upper, explicit),
+                1,
+            ),
         }
     }

@@ -697,6 +738,7 @@ impl<'db> ConstrainedTypeVar<'db> {
             self.typevar(db),
             self.lower(db).normalized(db),
             self.upper(db).normalized(db),
+            self.explicit(db),
         )
     }

@@ -713,7 +755,12 @@ impl<'db> ConstrainedTypeVar<'db> {
     /// simplifications that we perform that operate on constraints with the same typevar, and this
     /// ensures that we can find all candidate simplifications more easily.
     fn ordering(self, db: &'db dyn Db) -> impl Ord {
-        (self.typevar(db).identity(db), self.as_id())
+        (
+            self.typevar(db).binding_context(db),
+            self.typevar(db).identity(db),
+            self.explicit(db),
+            self.as_id(),
+        )
     }

     /// Returns whether this constraint implies another — i.e., whether every type that
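The widened sort key reads naturally as a lexicographic tuple: constraints group first by the binding context and identity of their typevar, then by whether they are explicit, with the interned ID only breaking remaining ties. A small self-contained sketch of that grouping behaviour, where the tuple values are invented placeholders rather than real keys from the crate:

```rust
// Tuples compare lexicographically, so sorting by
// (binding_context, typevar_identity, explicit, id) keeps all constraints on
// the same typevar adjacent; `id` only breaks remaining ties.
fn main() {
    let mut keys = vec![
        (1, 7, true, 3),
        (0, 7, false, 9),
        (1, 7, false, 1),
        (0, 2, true, 4),
    ];
    keys.sort();
    assert_eq!(
        keys,
        vec![(0, 2, true, 4), (0, 7, false, 9), (1, 7, false, 1), (1, 7, true, 3)]
    );
}
```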
@@ -745,7 +792,13 @@ impl<'db> ConstrainedTypeVar<'db> {
             return IntersectionResult::CannotSimplify;
         }

-        IntersectionResult::Simplified(Self::new(db, self.typevar(db), lower, upper))
+        IntersectionResult::Simplified(Self::new(
+            db,
+            self.typevar(db),
+            lower,
+            upper,
+            self.explicit(db) & other.explicit(db),
+        ))
     }

     fn display(self, db: &'db dyn Db) -> impl Display {
@@ -1000,8 +1053,9 @@ impl<'db> Node<'db> {
                 // from it) causes the if_true edge to become impossible. We want to ignore
                 // impossible paths, and so we treat them as passing the "always satisfied" check.
                 let constraint = interior.constraint(db);
+                let source_order = interior.source_order(db);
                 let true_always_satisfied = path
-                    .walk_edge(db, map, constraint.when_true(), |path, _| {
+                    .walk_edge(db, map, constraint.when_true(), source_order, |path, _| {
                         interior
                             .if_true(db)
                             .is_always_satisfied_inner(db, map, path)
@@ -1012,7 +1066,7 @@ impl<'db> Node<'db> {
                 }

                 // Ditto for the if_false branch
-                path.walk_edge(db, map, constraint.when_false(), |path, _| {
+                path.walk_edge(db, map, constraint.when_false(), source_order, |path, _| {
                     interior
                         .if_false(db)
                         .is_always_satisfied_inner(db, map, path)
@@ -1049,8 +1103,9 @@ impl<'db> Node<'db> {
                 // from it) causes the if_true edge to become impossible. We want to ignore
                 // impossible paths, and so we treat them as passing the "never satisfied" check.
                 let constraint = interior.constraint(db);
+                let source_order = interior.source_order(db);
                 let true_never_satisfied = path
-                    .walk_edge(db, map, constraint.when_true(), |path, _| {
+                    .walk_edge(db, map, constraint.when_true(), source_order, |path, _| {
                         interior.if_true(db).is_never_satisfied_inner(db, map, path)
                     })
                     .unwrap_or(true);
@@ -1059,7 +1114,7 @@ impl<'db> Node<'db> {
                 }

                 // Ditto for the if_false branch
-                path.walk_edge(db, map, constraint.when_false(), |path, _| {
+                path.walk_edge(db, map, constraint.when_false(), source_order, |path, _| {
                     interior
                         .if_false(db)
                         .is_never_satisfied_inner(db, map, path)
@@ -1232,12 +1287,14 @@ impl<'db> Node<'db> {
                 bound_typevar,
                 Type::Never,
                 rhs.bottom_materialization(db),
+                ExplicitConstraint::Implicit,
             ),
             (_, Type::TypeVar(bound_typevar)) => ConstrainedTypeVar::new_node(
                 db,
                 bound_typevar,
                 lhs.top_materialization(db),
                 Type::object(),
+                ExplicitConstraint::Implicit,
             ),
             _ => panic!("at least one type should be a typevar"),
         };
@@ -1749,10 +1806,12 @@ impl<'db> Node<'db> {
             Node::AlwaysTrue => write!(f, "always"),
             Node::AlwaysFalse => write!(f, "never"),
             Node::Interior(interior) => {
+                let constraint = interior.constraint(self.db);
                 write!(
                     f,
-                    "{} {}/{}",
-                    interior.constraint(self.db).display(self.db),
+                    "{} {:?} {}/{}",
+                    constraint.display(self.db),
+                    constraint.explicit(self.db),
                     interior.source_order(self.db),
                     interior.max_source_order(self.db),
                 )?;
@@ -1809,6 +1868,42 @@ impl<'db> RepresentativeBounds<'db> {
     }
 }

+#[derive(
+    Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, get_size2::GetSize, salsa::Update,
+)]
+pub(crate) enum ExplicitConstraint {
+    Implicit,
+    Explicit,
+}
+
+impl BitAnd for ExplicitConstraint {
+    type Output = Self;
+    fn bitand(self, other: Self) -> Self {
+        match (self, other) {
+            (ExplicitConstraint::Explicit, ExplicitConstraint::Explicit) => {
+                ExplicitConstraint::Explicit
+            }
+            (ExplicitConstraint::Implicit, _) | (_, ExplicitConstraint::Implicit) => {
+                ExplicitConstraint::Implicit
+            }
+        }
+    }
+}
+
+impl BitOr for ExplicitConstraint {
+    type Output = Self;
+    fn bitor(self, other: Self) -> Self {
+        match (self, other) {
+            (ExplicitConstraint::Implicit, ExplicitConstraint::Implicit) => {
+                ExplicitConstraint::Implicit
+            }
+            (ExplicitConstraint::Explicit, _) | (_, ExplicitConstraint::Explicit) => {
+                ExplicitConstraint::Explicit
+            }
+        }
+    }
+}
+
 /// An interior node of a BDD
 #[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
 struct InteriorNode<'db> {
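The new enum records whether a constraint came from an explicit bound or was derived implicitly, and the operator impls above encode "both explicit" for `&` and "either explicit" for `|`. A minimal self-contained sketch of those semantics, using a local copy of the enum rather than the crate's Salsa-tracked types:

```rust
use std::ops::{BitAnd, BitOr};

// Local copy for illustration only; the real type lives in the constraints module.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ExplicitConstraint {
    Implicit,
    Explicit,
}

impl BitAnd for ExplicitConstraint {
    type Output = Self;
    fn bitand(self, other: Self) -> Self {
        // Explicit only if both sides are explicit.
        match (self, other) {
            (Self::Explicit, Self::Explicit) => Self::Explicit,
            _ => Self::Implicit,
        }
    }
}

impl BitOr for ExplicitConstraint {
    type Output = Self;
    fn bitor(self, other: Self) -> Self {
        // Explicit if either side is explicit.
        match (self, other) {
            (Self::Implicit, Self::Implicit) => Self::Implicit,
            _ => Self::Explicit,
        }
    }
}

fn main() {
    use ExplicitConstraint::*;
    assert_eq!(Explicit & Implicit, Implicit);
    assert_eq!(Explicit & Explicit, Explicit);
    assert_eq!(Explicit | Implicit, Explicit);
    assert_eq!(Implicit | Implicit, Implicit);
}
```

This matches how the rest of the diff combines the flags: intersecting two constraints uses `self.explicit(db) & other.explicit(db)`, so a derived constraint is only treated as explicit when both of its sources were.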
@@ -2005,6 +2100,7 @@ impl<'db> InteriorNode<'db> {
         path: &mut PathAssignments<'db>,
     ) -> Node<'db> {
         let self_constraint = self.constraint(db);
+        let self_source_order = self.source_order(db);
         if should_remove(self_constraint) {
             // If we should remove constraints involving this typevar, then we replace this node
             // with the OR of its if_false/if_true edges. That is, the result is true if there's
@@ -2012,73 +2108,90 @@ impl<'db> InteriorNode<'db> {
             //
             // We also have to check if there are any derived facts that depend on the constraint
             // we're about to remove. If so, we need to "remember" them by AND-ing them in with the
-            // corresponding branch. We currently reuse the `source_order` of the constraint being
-            // removed when we add these derived facts.
-            //
-            // TODO: This might not be stable enough, if we add more than one derived fact for this
-            // constraint. If we still see inconsistent test output, we might need a more complex
-            // way of tracking source order for derived facts.
-            let self_source_order = self.source_order(db);
+            // corresponding branch.
             let if_true = path
-                .walk_edge(db, map, self_constraint.when_true(), |path, new_range| {
-                    let branch = self
-                        .if_true(db)
-                        .abstract_one_inner(db, should_remove, map, path);
-                    path.assignments[new_range]
-                        .iter()
-                        .filter(|assignment| {
-                            // Don't add back any derived facts if they are ones that we would have
-                            // removed!
-                            !should_remove(assignment.constraint())
-                        })
-                        .fold(branch, |branch, assignment| {
-                            branch.and(
-                                db,
-                                Node::new_satisfied_constraint(db, *assignment, self_source_order),
-                            )
-                        })
-                })
+                .walk_edge(
+                    db,
+                    map,
+                    self_constraint.when_true(),
+                    self_source_order,
+                    |path, new_range| {
+                        let branch =
+                            self.if_true(db)
+                                .abstract_one_inner(db, should_remove, map, path);
+                        path.assignments[new_range]
+                            .iter()
+                            .filter(|(assignment, _)| {
+                                // Don't add back any derived facts if they are ones that we would have
+                                // removed!
+                                !should_remove(assignment.constraint())
+                            })
+                            .fold(branch, |branch, (assignment, source_order)| {
+                                branch.and(
+                                    db,
+                                    Node::new_satisfied_constraint(db, *assignment, *source_order),
+                                )
+                            })
+                    },
+                )
                 .unwrap_or(Node::AlwaysFalse);
             let if_false = path
-                .walk_edge(db, map, self_constraint.when_false(), |path, new_range| {
-                    let branch = self
-                        .if_false(db)
-                        .abstract_one_inner(db, should_remove, map, path);
-                    path.assignments[new_range]
-                        .iter()
-                        .filter(|assignment| {
-                            // Don't add back any derived facts if they are ones that we would have
-                            // removed!
-                            !should_remove(assignment.constraint())
-                        })
-                        .fold(branch, |branch, assignment| {
-                            branch.and(
-                                db,
-                                Node::new_satisfied_constraint(db, *assignment, self_source_order),
-                            )
-                        })
-                })
+                .walk_edge(
+                    db,
+                    map,
+                    self_constraint.when_false(),
+                    self_source_order,
+                    |path, new_range| {
+                        let branch =
+                            self.if_false(db)
+                                .abstract_one_inner(db, should_remove, map, path);
+                        path.assignments[new_range]
+                            .iter()
+                            .filter(|(assignment, _)| {
+                                // Don't add back any derived facts if they are ones that we would have
+                                // removed!
+                                !should_remove(assignment.constraint())
+                            })
+                            .fold(branch, |branch, (assignment, source_order)| {
+                                branch.and(
+                                    db,
+                                    Node::new_satisfied_constraint(db, *assignment, *source_order),
+                                )
+                            })
+                    },
+                )
                 .unwrap_or(Node::AlwaysFalse);
             if_true.or(db, if_false)
         } else {
             // Otherwise, we abstract the if_false/if_true edges recursively.
             let if_true = path
-                .walk_edge(db, map, self_constraint.when_true(), |path, _| {
-                    self.if_true(db)
-                        .abstract_one_inner(db, should_remove, map, path)
-                })
+                .walk_edge(
+                    db,
+                    map,
+                    self_constraint.when_true(),
+                    self_source_order,
+                    |path, _| {
+                        self.if_true(db)
+                            .abstract_one_inner(db, should_remove, map, path)
+                    },
+                )
                 .unwrap_or(Node::AlwaysFalse);
             let if_false = path
-                .walk_edge(db, map, self_constraint.when_false(), |path, _| {
-                    self.if_false(db)
-                        .abstract_one_inner(db, should_remove, map, path)
-                })
+                .walk_edge(
+                    db,
+                    map,
+                    self_constraint.when_false(),
+                    self_source_order,
+                    |path, _| {
+                        self.if_false(db)
+                            .abstract_one_inner(db, should_remove, map, path)
+                    },
+                )
                 .unwrap_or(Node::AlwaysFalse);
             // NB: We cannot use `Node::new` here, because the recursive calls might introduce new
             // derived constraints into the result, and those constraints might appear before this
             // one in the BDD ordering.
-            Node::new_constraint(db, self_constraint, self.source_order(db))
-                .ite(db, if_true, if_false)
+            Node::new_constraint(db, self_constraint, self_source_order).ite(db, if_true, if_false)
         }
     }

@@ -2241,8 +2354,13 @@ impl<'db> InteriorNode<'db> {
                 _ => continue,
             };

-            let new_constraint =
-                ConstrainedTypeVar::new(db, constrained_typevar, new_lower, new_upper);
+            let new_constraint = ConstrainedTypeVar::new(
+                db,
+                constrained_typevar,
+                new_lower,
+                new_upper,
+                ExplicitConstraint::Implicit,
+            );
             if seen_constraints.contains(&new_constraint) {
                 continue;
             }
@@ -2865,21 +2983,35 @@ impl<'db> SequentMap<'db> {
             // Case 1
             (Type::TypeVar(lower_typevar), Type::TypeVar(upper_typevar)) => {
                 if !lower_typevar.is_same_typevar_as(db, upper_typevar) {
-                    ConstrainedTypeVar::new(db, lower_typevar, Type::Never, upper)
+                    ConstrainedTypeVar::new(
+                        db,
+                        lower_typevar,
+                        Type::Never,
+                        upper,
+                        constraint.explicit(db),
+                    )
                 } else {
                     return;
                 }
             }

             // Case 2
-            (Type::TypeVar(lower_typevar), _) => {
-                ConstrainedTypeVar::new(db, lower_typevar, Type::Never, upper)
-            }
+            (Type::TypeVar(lower_typevar), _) => ConstrainedTypeVar::new(
+                db,
+                lower_typevar,
+                Type::Never,
+                upper,
+                constraint.explicit(db),
+            ),

             // Case 3
-            (_, Type::TypeVar(upper_typevar)) => {
-                ConstrainedTypeVar::new(db, upper_typevar, lower, Type::object())
-            }
+            (_, Type::TypeVar(upper_typevar)) => ConstrainedTypeVar::new(
+                db,
+                upper_typevar,
+                lower,
+                Type::object(),
+                constraint.explicit(db),
+            ),

             _ => return,
         };
@@ -2993,8 +3125,13 @@ impl<'db> SequentMap<'db> {
             _ => return,
         };

-        let post_constraint =
-            ConstrainedTypeVar::new(db, constrained_typevar, new_lower, new_upper);
+        let post_constraint = ConstrainedTypeVar::new(
+            db,
+            constrained_typevar,
+            new_lower,
+            new_upper,
+            left_constraint.explicit(db) & right_constraint.explicit(db),
+        );
         self.add_pair_implication(db, left_constraint, right_constraint, post_constraint);
         self.enqueue_constraint(post_constraint);
     }
@@ -3016,14 +3153,28 @@ impl<'db> SequentMap<'db> {
             (Type::TypeVar(bound_typevar), Type::TypeVar(other_bound_typevar))
                 if bound_typevar.is_same_typevar_as(db, other_bound_typevar) =>
             {
-                ConstrainedTypeVar::new(db, bound_typevar, right_lower, right_upper)
-            }
-            (Type::TypeVar(bound_typevar), _) => {
-                ConstrainedTypeVar::new(db, bound_typevar, Type::Never, right_upper)
-            }
-            (_, Type::TypeVar(bound_typevar)) => {
-                ConstrainedTypeVar::new(db, bound_typevar, right_lower, Type::object())
-            }
+                ConstrainedTypeVar::new(
+                    db,
+                    bound_typevar,
+                    right_lower,
+                    right_upper,
+                    left_constraint.explicit(db) & right_constraint.explicit(db),
+                )
+            }
+            (Type::TypeVar(bound_typevar), _) => ConstrainedTypeVar::new(
+                db,
+                bound_typevar,
+                Type::Never,
+                right_upper,
+                left_constraint.explicit(db) & right_constraint.explicit(db),
+            ),
+            (_, Type::TypeVar(bound_typevar)) => ConstrainedTypeVar::new(
+                db,
+                bound_typevar,
+                right_lower,
+                Type::object(),
+                left_constraint.explicit(db) & right_constraint.explicit(db),
+            ),
             _ => return,
         };
         self.add_pair_implication(db, left_constraint, right_constraint, post_constraint);
@@ -3169,8 +3320,8 @@ impl<'db> SequentMap<'db> {
 /// The collection of constraints that we know to be true or false at a certain point when
 /// traversing a BDD.
 #[derive(Debug, Default)]
-struct PathAssignments<'db> {
-    assignments: FxOrderSet<ConstraintAssignment<'db>>,
+pub(crate) struct PathAssignments<'db> {
+    assignments: FxOrderMap<ConstraintAssignment<'db>, usize>,
 }

 impl<'db> PathAssignments<'db> {
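`PathAssignments` switches from an ordered set of assignments to an ordered map from assignment to the `source_order` it was recorded with. `walk_edge` and `add_assignment` now take that order as a parameter, and `abstract_one_inner` above reads it back per assignment instead of reusing the removed constraint's order. A rough standalone sketch of the data-structure change, assuming `FxOrderMap` behaves like an insertion-ordered map in the spirit of `indexmap::IndexMap` (that equivalence, and all names below, are assumptions for illustration, not part of this diff; the sketch needs the `indexmap` crate):

```rust
use indexmap::IndexMap;

// Placeholder key type; the real key is ConstraintAssignment<'db>.
type ConstraintAssignment = &'static str;

struct PathAssignments {
    // Before: an ordered set recorded membership only.
    // After: each assignment also remembers the source_order it was added with.
    assignments: IndexMap<ConstraintAssignment, usize>,
}

impl PathAssignments {
    // Simplified: the real method also consults the SequentMap and returns a Result.
    fn add_assignment(&mut self, assignment: ConstraintAssignment, source_order: usize) -> bool {
        // `insert` returning `Some` means we already knew this assignment held.
        self.assignments.insert(assignment, source_order).is_some()
    }

    fn assignment_holds(&self, assignment: ConstraintAssignment) -> bool {
        self.assignments.contains_key(&assignment)
    }
}

fn main() {
    let mut path = PathAssignments { assignments: IndexMap::new() };
    assert!(!path.add_assignment("T <= int", 3)); // newly recorded with source_order 3
    assert!(path.assignment_holds("T <= int"));
    assert!(path.add_assignment("T <= int", 7)); // second insert reports it was already known
}
```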
@@ -3201,6 +3352,7 @@ impl<'db> PathAssignments<'db> {
         db: &'db dyn Db,
         map: &SequentMap<'db>,
         assignment: ConstraintAssignment<'db>,
+        source_order: usize,
         f: impl FnOnce(&mut Self, Range<usize>) -> R,
     ) -> Option<R> {
         // Record a snapshot of the assignments that we already knew held — both so that we can
@@ -3213,12 +3365,12 @@ impl<'db> PathAssignments<'db> {
             target: "ty_python_semantic::types::constraints::PathAssignment",
             before = %format_args!(
                 "[{}]",
-                self.assignments[..start].iter().map(|assignment| assignment.display(db)).format(", "),
+                self.assignments[..start].iter().map(|(assignment, _)| assignment.display(db)).format(", "),
             ),
             edge = %assignment.display(db),
             "walk edge",
         );
-        let found_conflict = self.add_assignment(db, map, assignment);
+        let found_conflict = self.add_assignment(db, map, assignment, source_order);
         let result = if found_conflict.is_err() {
             // If that results in the path now being impossible due to a contradiction, return
             // without invoking the callback.
@@ -3233,7 +3385,7 @@ impl<'db> PathAssignments<'db> {
                 target: "ty_python_semantic::types::constraints::PathAssignment",
                 new = %format_args!(
                     "[{}]",
-                    self.assignments[start..].iter().map(|assignment| assignment.display(db)).format(", "),
+                    self.assignments[start..].iter().map(|(assignment, _)| assignment.display(db)).format(", "),
                 ),
                 "new assignments",
             );
@@ -3248,7 +3400,7 @@ impl<'db> PathAssignments<'db> {
     }

     fn assignment_holds(&self, assignment: ConstraintAssignment<'db>) -> bool {
-        self.assignments.contains(&assignment)
+        self.assignments.contains_key(&assignment)
     }

     /// Adds a new assignment, along with any derived information that we can infer from the new
@@ -3259,26 +3411,34 @@ impl<'db> PathAssignments<'db> {
         db: &'db dyn Db,
         map: &SequentMap<'db>,
         assignment: ConstraintAssignment<'db>,
+        source_order: usize,
     ) -> Result<(), PathAssignmentConflict> {
         // First add this assignment. If it causes a conflict, return that as an error. If we've
         // already know this assignment holds, just return.
-        if self.assignments.contains(&assignment.negated()) {
+        if self.assignments.contains_key(&assignment.negated()) {
             tracing::trace!(
                 target: "ty_python_semantic::types::constraints::PathAssignment",
                 assignment = %assignment.display(db),
                 facts = %format_args!(
                     "[{}]",
-                    self.assignments.iter().map(|assignment| assignment.display(db)).format(", "),
+                    self.assignments.iter().map(|(assignment, _)| assignment.display(db)).format(", "),
                 ),
                 "found contradiction",
             );
             return Err(PathAssignmentConflict);
         }
-        if !self.assignments.insert(assignment) {
+        if self.assignments.insert(assignment, source_order).is_some() {
             return Ok(());
         }

-        // Then use our sequents to add additional facts that we know to be true.
+        // Then use our sequents to add additional facts that we know to be true. We currently
+        // reuse the `source_order` of the "real" constraint passed into `walk_edge` when we add
+        // these derived facts.
+        //
+        // TODO: This might not be stable enough, if we add more than one derived fact for this
+        // constraint. If we still see inconsistent test output, we might need a more complex
+        // way of tracking source order for derived facts.
+        //
         // TODO: This is very naive at the moment, partly for expediency, and partly because we
         // don't anticipate the sequent maps to be very large. We might consider avoiding the
         // brute-force search.
@@ -3292,7 +3452,7 @@ impl<'db> PathAssignments<'db> {
                     ante = %ante.display(db),
                     facts = %format_args!(
                         "[{}]",
-                        self.assignments.iter().map(|assignment| assignment.display(db)).format(", "),
+                        self.assignments.iter().map(|(assignment, _)| assignment.display(db)).format(", "),
                     ),
                     "found contradiction",
                 );
@@ -3311,7 +3471,7 @@ impl<'db> PathAssignments<'db> {
                     ante2 = %ante2.display(db),
                     facts = %format_args!(
                         "[{}]",
-                        self.assignments.iter().map(|assignment| assignment.display(db)).format(", "),
+                        self.assignments.iter().map(|(assignment, _)| assignment.display(db)).format(", "),
                     ),
                     "found contradiction",
                 );
@@ -3324,7 +3484,7 @@ impl<'db> PathAssignments<'db> {
                 if self.assignment_holds(ante1.when_true())
                     && self.assignment_holds(ante2.when_true())
                 {
-                    self.add_assignment(db, map, post.when_true())?;
+                    self.add_assignment(db, map, post.when_true(), source_order)?;
                 }
             }
         }
@@ -3332,7 +3492,7 @@ impl<'db> PathAssignments<'db> {
         for (ante, posts) in &map.single_implications {
             for post in posts {
                 if self.assignment_holds(ante.when_true()) {
-                    self.add_assignment(db, map, post.when_true(), source_order)?;
+                    self.add_assignment(db, map, post.when_true(), source_order)?;
                 }
             }
         }
@@ -3585,7 +3745,13 @@ impl<'db> BoundTypeVarInstance<'db> {
             None => Node::AlwaysTrue,
             Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
                 let bound = bound.top_materialization(db);
-                ConstrainedTypeVar::new_node(db, self, Type::Never, bound)
+                ConstrainedTypeVar::new_node(
+                    db,
+                    self,
+                    Type::Never,
+                    bound,
+                    ExplicitConstraint::Implicit,
+                )
             }
             Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
                 let mut specializations = Node::AlwaysFalse;
@@ -3594,7 +3760,13 @@ impl<'db> BoundTypeVarInstance<'db> {
                     let constraint_upper = constraint.top_materialization(db);
                     specializations = specializations.or_with_offset(
                         db,
-                        ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper),
+                        ConstrainedTypeVar::new_node(
+                            db,
+                            self,
+                            constraint_lower,
+                            constraint_upper,
+                            ExplicitConstraint::Implicit,
+                        ),
                     );
                 }
                 specializations
@@ -3629,7 +3801,13 @@ impl<'db> BoundTypeVarInstance<'db> {
             Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
                 let bound = bound.bottom_materialization(db);
                 (
-                    ConstrainedTypeVar::new_node(db, self, Type::Never, bound),
+                    ConstrainedTypeVar::new_node(
+                        db,
+                        self,
+                        Type::Never,
+                        bound,
+                        ExplicitConstraint::Implicit,
+                    ),
                     Vec::new(),
                 )
             }
@@ -3639,8 +3817,13 @@ impl<'db> BoundTypeVarInstance<'db> {
                 for constraint in constraints.elements(db) {
                     let constraint_lower = constraint.bottom_materialization(db);
                     let constraint_upper = constraint.top_materialization(db);
-                    let constraint =
-                        ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper);
+                    let constraint = ConstrainedTypeVar::new_node(
+                        db,
+                        self,
+                        constraint_lower,
+                        constraint_upper,
+                        ExplicitConstraint::Implicit,
+                    );
                     if constraint_lower == constraint_upper {
                         non_gradual_constraints =
                             non_gradual_constraints.or_with_offset(db, constraint);
@@ -3809,28 +3992,28 @@ mod tests {
     #[test]
     fn test_display_graph_output() {
         let expected = indoc! {r#"
-            (T = str) 3/4
-            ┡━₁ (T = bool) 4/4
-            │ ┡━₁ (U = str) 1/2
-            │ │ ┡━₁ (U = bool) 2/2
+            (T = str) Explicit 3/4
+            ┡━₁ (T = bool) Explicit 4/4
+            │ ┡━₁ (U = str) Explicit 1/2
+            │ │ ┡━₁ (U = bool) Explicit 2/2
             │ │ │ ┡━₁ always
             │ │ │ └─₀ always
-            │ │ └─₀ (U = bool) 2/2
+            │ │ └─₀ (U = bool) Explicit 2/2
             │ │ ┡━₁ always
             │ │ └─₀ never
-            │ └─₀ (U = str) 1/2
-            │ ┡━₁ (U = bool) 2/2
+            │ └─₀ (U = str) Explicit 1/2
+            │ ┡━₁ (U = bool) Explicit 2/2
             │ │ ┡━₁ always
             │ │ └─₀ always
-            │ └─₀ (U = bool) 2/2
+            │ └─₀ (U = bool) Explicit 2/2
             │ ┡━₁ always
             │ └─₀ never
-            └─₀ (T = bool) 4/4
-            ┡━₁ (U = str) 1/2
-            │ ┡━₁ (U = bool) 2/2
+            └─₀ (T = bool) Explicit 4/4
+            ┡━₁ (U = str) Explicit 1/2
+            │ ┡━₁ (U = bool) Explicit 2/2
             │ │ ┡━₁ always
             │ │ └─₀ always
-            │ └─₀ (U = bool) 2/2
+            │ └─₀ (U = bool) Explicit 2/2
             │ ┡━₁ always
             │ └─₀ never
             └─₀ never