Compare commits

...

6 Commits

Author SHA1 Message Date
Micha Reiser
fcde25773d Avoid cloning live declarations 2025-01-11 11:23:25 +01:00
Micha Reiser
c39ca8fe6d Upgrade Rust toolchain to 1.84.0 (#15408) 2025-01-11 09:51:58 +01:00
David Peter
2d82445794 [red-knot] Simplify unions of T and ~T (#15400)
## Summary

Simplify unions of `T` and `~T` to `object`.

## Test Plan

Adapted existing tests.
2025-01-10 23:00:52 +01:00
David Peter
398f2e8b0c [red-knot] Minor fixes in intersection-types tests (#15410)
## Summary

Minor fixes in intersection-types tests
2025-01-10 22:53:03 +01:00
InSync
232fbc1300 [red-knot] Understand type[Unknown] (#15409)
## Summary

Follow-up to #15194.

## Test Plan

Markdown tests.
2025-01-10 13:25:59 -08:00
Alex Waygood
c82932e580 [red-knot] Refactor KnownFunction::takes_expression_arguments() (#15406) 2025-01-10 19:09:03 +00:00
25 changed files with 167 additions and 112 deletions

View File

@@ -211,6 +211,9 @@ redundant_clone = "warn"
debug_assert_with_mut_call = "warn"
unused_peekable = "warn"
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"
[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times

View File

@@ -66,14 +66,11 @@ def _(a: Unknown, b: Any):
assert_type(b, Unknown) # fine
def _(a: type[Unknown], b: type[Any]):
# TODO: Should be `type[Unknown]`
reveal_type(a) # revealed: @Todo(unsupported type[X] special form)
# TODO: Should be fine
assert_type(a, type[Any]) # error: [type-assertion-failure]
reveal_type(a) # revealed: type[Unknown]
assert_type(a, type[Any]) # fine
reveal_type(b) # revealed: type[Any]
# TODO: Should be fine
assert_type(b, type[Unknown]) # error: [type-assertion-failure]
assert_type(b, type[Unknown]) # fine
```
## Tuples

View File

@@ -105,10 +105,10 @@ static_assert(not is_subtype_of(B2, B1))
This section covers structural properties of intersection types and documents some decisions on how
to represent mixtures of intersections and unions.
### Single-element unions
### Single-element intersections
If we have a union of a single element, we can simplify to that element. Similarly, we show an
intersection with a single negative contribution as just the negation of that element.
If we have an intersection with a single element, we can simplify to that element. Similarly, we
show an intersection with a single negative contribution as just the negation of that element.
```py
from knot_extensions import Intersection, Not
@@ -313,22 +313,24 @@ def _(
### Union of a type and its negation
Similarly, if we have both `P` and `~P` in a _union_, we could simplify that to `object`. However,
this is a rather costly operation which would require us to build the negation of each type that we
add to a union, so this is not implemented at the moment.
Similarly, if we have both `P` and `~P` in a _union_, we can simplify that to `object`.
```py
from knot_extensions import Intersection, Not
class P: ...
class Q: ...
def _(
i1: P | Not[P],
i2: Not[P] | P,
i3: P | Q | Not[P],
i4: Not[P] | Q | P,
) -> None:
# These could be simplified to `object`
reveal_type(i1) # revealed: P | ~P
reveal_type(i2) # revealed: ~P | P
reveal_type(i1) # revealed: object
reveal_type(i2) # revealed: object
reveal_type(i3) # revealed: object
reveal_type(i4) # revealed: object
```
### Negation is an involution
@@ -422,8 +424,8 @@ def example_type_bool_type_str(
#### Positive and negative contributions
If we intersect a type `X` with the negation of a disjoint type `Y`, we can remove the negative
contribution `~Y`, as it necessarily overlaps with the positive contribution `X`:
If we intersect a type `X` with the negation `~Y` of a disjoint type `Y`, we can remove the negative
contribution `~Y`, as `~Y` must fully contain the positive contribution `X` as a subtype:
```py
from knot_extensions import Intersection, Not
@@ -515,8 +517,7 @@ def _(
#### Negative type and negative subtype
For negative contributions, this property is reversed. Here we can remove superfluous
_subtypes_:
For negative contributions, this property is reversed. Here we can remove superfluous _subtypes_:
```py
from knot_extensions import Intersection, Not

View File

@@ -21,22 +21,22 @@ else:
if x and not x:
reveal_type(x) # revealed: Never
else:
reveal_type(x) # revealed: Literal[0, "", b"", -1, "foo", b"bar"] | bool | None | tuple[()]
reveal_type(x) # revealed: Literal[0, -1, "", "foo", b"", b"bar"] | bool | None | tuple[()]
if not (x and not x):
reveal_type(x) # revealed: Literal[0, "", b"", -1, "foo", b"bar"] | bool | None | tuple[()]
reveal_type(x) # revealed: Literal[0, -1, "", "foo", b"", b"bar"] | bool | None | tuple[()]
else:
reveal_type(x) # revealed: Never
if x or not x:
reveal_type(x) # revealed: Literal[-1, "foo", b"bar", 0, "", b""] | bool | None | tuple[()]
reveal_type(x) # revealed: Literal[0, -1, "", "foo", b"", b"bar"] | bool | None | tuple[()]
else:
reveal_type(x) # revealed: Never
if not (x or not x):
reveal_type(x) # revealed: Never
else:
reveal_type(x) # revealed: Literal[-1, "foo", b"bar", 0, "", b""] | bool | None | tuple[()]
reveal_type(x) # revealed: Literal[0, -1, "", "foo", b"", b"bar"] | bool | None | tuple[()]
if (isinstance(x, int) or isinstance(x, str)) and x:
reveal_type(x) # revealed: Literal[-1, True, "foo"]
@@ -87,10 +87,10 @@ def f(x: A | B):
if x and not x:
reveal_type(x) # revealed: A & ~AlwaysFalsy & ~AlwaysTruthy | B & ~AlwaysFalsy & ~AlwaysTruthy
else:
reveal_type(x) # revealed: A & ~AlwaysTruthy | B & ~AlwaysTruthy | A & ~AlwaysFalsy | B & ~AlwaysFalsy
reveal_type(x) # revealed: A | B
if x or not x:
reveal_type(x) # revealed: A & ~AlwaysFalsy | B & ~AlwaysFalsy | A & ~AlwaysTruthy | B & ~AlwaysTruthy
reveal_type(x) # revealed: A | B
else:
reveal_type(x) # revealed: A & ~AlwaysTruthy & ~AlwaysFalsy | B & ~AlwaysTruthy & ~AlwaysFalsy
```
@@ -214,10 +214,9 @@ if x and not x:
reveal_type(y) # revealed: A & ~AlwaysFalsy & ~AlwaysTruthy
else:
y = x
reveal_type(y) # revealed: A & ~AlwaysTruthy | A & ~AlwaysFalsy
reveal_type(y) # revealed: A
# TODO: It should be A. We should improve UnionBuilder or IntersectionBuilder. (issue #15023)
reveal_type(y) # revealed: A & ~AlwaysTruthy | A & ~AlwaysFalsy
reveal_type(y) # revealed: A
```
## Truthiness of classes

View File

@@ -310,7 +310,7 @@ impl SymbolState {
visibility_constraints: VisibilityConstraintPerBinding::default(),
},
declarations: SymbolDeclarations {
live_declarations: self.declarations.live_declarations.clone(),
live_declarations: Declarations::default(),
visibility_constraints: VisibilityConstraintPerDeclaration::default(),
},
};

View File

@@ -3428,37 +3428,90 @@ impl KnownFunction {
}
}
/// Returns a `u32` bitmask specifying whether or not
/// arguments given to a particular function
/// should be interpreted as type expressions or value expressions.
///
/// The argument is treated as a type expression
/// when the corresponding bit is `1`.
/// The least-significant (right-most) bit corresponds to
/// the argument at the index 0 and so on.
///
/// For example, `assert_type()` has the bitmask value of `0b10`.
/// This means the second argument is a type expression and the first a value expression.
const fn takes_type_expression_arguments(self) -> u32 {
const ALL_VALUES: u32 = 0b0;
const SINGLE_TYPE: u32 = 0b1;
const TYPE_TYPE: u32 = 0b11;
const VALUE_TYPE: u32 = 0b10;
/// Return the [`ParameterExpectations`] for this function.
const fn parameter_expectations(self) -> ParameterExpectations {
match self {
KnownFunction::IsEquivalentTo => TYPE_TYPE,
KnownFunction::IsSubtypeOf => TYPE_TYPE,
KnownFunction::IsAssignableTo => TYPE_TYPE,
KnownFunction::IsDisjointFrom => TYPE_TYPE,
KnownFunction::IsFullyStatic => SINGLE_TYPE,
KnownFunction::IsSingleton => SINGLE_TYPE,
KnownFunction::IsSingleValued => SINGLE_TYPE,
KnownFunction::AssertType => VALUE_TYPE,
_ => ALL_VALUES,
Self::IsFullyStatic | Self::IsSingleton | Self::IsSingleValued => {
ParameterExpectations::SingleTypeExpression
}
Self::IsEquivalentTo
| Self::IsSubtypeOf
| Self::IsAssignableTo
| Self::IsDisjointFrom => ParameterExpectations::TwoTypeExpressions,
Self::AssertType => ParameterExpectations::ValueExpressionAndTypeExpression,
Self::ConstraintFunction(_)
| Self::Len
| Self::Final
| Self::NoTypeCheck
| Self::RevealType
| Self::StaticAssert => ParameterExpectations::AllValueExpressions,
}
}
}
/// Describes whether the parameters in a function expect value expressions or type expressions.
///
/// Use [`ParameterExpectations::expectation_at_index`] to ask about one specific
/// parameter position.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Default)]
enum ParameterExpectations {
    /// Every parameter of the function expects a value expression
    #[default]
    AllValueExpressions,
    /// Only the first parameter expects a type expression
    SingleTypeExpression,
    /// The first two parameters expect type expressions
    TwoTypeExpressions,
    /// The first parameter expects a value expression,
    /// the second expects a type expression
    ValueExpressionAndTypeExpression,
}
impl ParameterExpectations {
    /// Query whether the parameter at `parameter_index` expects a value expression
    /// or a type expression.
    fn expectation_at_index(self, parameter_index: usize) -> ParameterExpectation {
        // First decide whether this index lies inside the range of
        // type-expression parameters for this variant, then map that
        // boolean onto the corresponding expectation.
        let is_type_expression = match self {
            Self::AllValueExpressions => false,
            Self::SingleTypeExpression => parameter_index == 0,
            Self::TwoTypeExpressions => parameter_index < 2,
            Self::ValueExpressionAndTypeExpression => parameter_index == 1,
        };
        if is_type_expression {
            ParameterExpectation::TypeExpression
        } else {
            ParameterExpectation::ValueExpression
        }
    }
}
/// Whether a single parameter in a given function expects a value expression or a [type expression]
///
/// [type expression]: https://typing.readthedocs.io/en/latest/spec/annotations.html#type-and-annotation-expressions
#[derive(Debug, Copy, Clone, PartialEq, Eq, Default)]
enum ParameterExpectation {
    /// The parameter expects a value expression
    #[default]
    ValueExpression,
    /// The parameter expects a type expression
    TypeExpression,
}
#[salsa::interned]
pub struct ModuleLiteralType<'db> {
/// The file in which this module was imported.

View File

@@ -65,6 +65,8 @@ impl<'db> UnionBuilder<'db> {
let mut to_add = ty;
let mut to_remove = SmallVec::<[usize; 2]>::new();
let ty_negated = ty.negate(self.db);
for (index, element) in self.elements.iter().enumerate() {
if Some(*element) == bool_pair {
to_add = KnownClass::Bool.to_instance(self.db);
@@ -80,6 +82,17 @@ impl<'db> UnionBuilder<'db> {
return self;
} else if element.is_subtype_of(self.db, ty) {
to_remove.push(index);
} else if ty_negated.is_subtype_of(self.db, *element) {
// We add `ty` to the union. We just checked that `~ty` is a subtype of an existing `element`.
// This also means that `~ty | ty` is a subtype of `element | ty`, because both elements in the
// first union are subtypes of the corresponding elements in the second union. But `~ty | ty` is
// just `object`. Since `object` is a subtype of `element | ty`, we can only conclude that
// `element | ty` must be `object` (object has no other supertypes). This means we can simplify
// the whole union to just `object`, since all other potential elements would also be subtypes of
// `object`.
self.elements.clear();
self.elements.push(KnownClass::Object.to_instance(self.db));
return self;
}
}
match to_remove[..] {

View File

@@ -83,6 +83,7 @@ use super::slots::check_class_slots;
use super::string_annotation::{
parse_string_annotation, BYTE_STRING_TYPE_ANNOTATION, FSTRING_TYPE_ANNOTATION,
};
use super::{ParameterExpectation, ParameterExpectations};
/// Infer all types for a [`ScopeId`], including all definitions and expressions in that scope.
/// Use when checking a scope, or needing to provide a type for an arbitrary expression in the
@@ -956,7 +957,7 @@ impl<'db> TypeInferenceBuilder<'db> {
self.infer_type_parameters(type_params);
if let Some(arguments) = class.arguments.as_deref() {
self.infer_arguments(arguments, 0b0);
self.infer_arguments(arguments, ParameterExpectations::default());
}
}
@@ -2601,18 +2602,15 @@ impl<'db> TypeInferenceBuilder<'db> {
fn infer_arguments<'a>(
&mut self,
arguments: &'a ast::Arguments,
infer_as_type_expressions: u32,
parameter_expectations: ParameterExpectations,
) -> CallArguments<'a, 'db> {
arguments
.arguments_source_order()
.enumerate()
.map(|(index, arg_or_keyword)| {
let infer_argument_type = if index < u32::BITS as usize
&& infer_as_type_expressions & (1 << index) != 0
{
Self::infer_type_expression
} else {
Self::infer_expression
let infer_argument_type = match parameter_expectations.expectation_at_index(index) {
ParameterExpectation::TypeExpression => Self::infer_type_expression,
ParameterExpectation::ValueExpression => Self::infer_expression,
};
match arg_or_keyword {
@@ -3157,13 +3155,13 @@ impl<'db> TypeInferenceBuilder<'db> {
let function_type = self.infer_expression(func);
let infer_arguments_as_type_expressions = function_type
let parameter_expectations = function_type
.into_function_literal()
.and_then(|f| f.known(self.db()))
.map(KnownFunction::takes_type_expression_arguments)
.unwrap_or(0b0);
.map(KnownFunction::parameter_expectations)
.unwrap_or_default();
let call_arguments = self.infer_arguments(arguments, infer_arguments_as_type_expressions);
let call_arguments = self.infer_arguments(arguments, parameter_expectations);
function_type
.call(self.db(), &call_arguments)
.unwrap_with_diagnostic(&self.context, call_expression.into())
@@ -5068,6 +5066,9 @@ impl<'db> TypeInferenceBuilder<'db> {
Type::KnownInstance(KnownInstanceType::Any) => {
SubclassOfType::subclass_of_any()
}
Type::KnownInstance(KnownInstanceType::Unknown) => {
SubclassOfType::subclass_of_unknown()
}
_ => todo_type!("unsupported type[X] special form"),
}
}

View File

@@ -74,7 +74,7 @@ impl Index {
DocumentKey::NotebookCell(url)
} else if Path::new(url.path())
.extension()
.map_or(false, |ext| ext.eq_ignore_ascii_case("ipynb"))
.is_some_and(|ext| ext.eq_ignore_ascii_case("ipynb"))
{
DocumentKey::Notebook(url)
} else {

View File

@@ -285,7 +285,7 @@ impl Workspace {
open_files.contains(&file)
} else if let Some(system_path) = file.path(db).as_system_path() {
self.package(db, system_path)
.map_or(false, |package| package.contains_file(db, file))
.is_some_and(|package| package.contains_file(db, file))
} else {
file.path(db).is_system_virtual_path()
}

View File

@@ -959,7 +959,7 @@ A `--config` flag must either be a path to a `.toml` configuration file
// We want to display the most helpful error to the user as possible.
if Path::new(value)
.extension()
.map_or(false, |ext| ext.eq_ignore_ascii_case("toml"))
.is_some_and(|ext| ext.eq_ignore_ascii_case("toml"))
{
if !value.contains('=') {
tip.push_str(&format!(

View File

@@ -87,13 +87,13 @@ pub trait System: Debug {
/// Returns `true` if `path` exists and is a directory.
fn is_directory(&self, path: &SystemPath) -> bool {
self.path_metadata(path)
.map_or(false, |metadata| metadata.file_type.is_directory())
.is_ok_and(|metadata| metadata.file_type.is_directory())
}
/// Returns `true` if `path` exists and is a file.
fn is_file(&self, path: &SystemPath) -> bool {
self.path_metadata(path)
.map_or(false, |metadata| metadata.file_type.is_file())
.is_ok_and(|metadata| metadata.file_type.is_file())
}
/// Returns the current working directory

View File

@@ -497,12 +497,7 @@ impl<'a> Printer<'a> {
dest: self.state.buffer.text_len(),
};
if self
.state
.source_markers
.last()
.map_or(true, |last| last != &marker)
{
if self.state.source_markers.last() != Some(&marker) {
self.state.source_markers.push(marker);
}
}

View File

@@ -540,7 +540,7 @@ fn is_docstring_section(
// The return value of the function.
// """
// ```
if previous_line.map_or(false, |line| line.trim().is_empty()) {
if previous_line.is_some_and(|line| line.trim().is_empty()) {
return true;
}

View File

@@ -38,7 +38,7 @@ pub(crate) fn fix_file(
diagnostic
.fix
.as_ref()
.map_or(false, |fix| fix.applies(required_applicability))
.is_some_and(|fix| fix.applies(required_applicability))
})
.peekable();

View File

@@ -78,11 +78,7 @@ impl<'a> Directive<'a> {
comment_start = text[..comment_start].trim_end().len();
// The next character has to be the `#` character.
if text[..comment_start]
.chars()
.last()
.map_or(true, |c| c != '#')
{
if !text[..comment_start].ends_with('#') {
continue;
}
comment_start -= '#'.len_utf8();

View File

@@ -493,7 +493,7 @@ pub(crate) fn f_strings(checker: &mut Checker, call: &ast::ExprCall, summary: &F
checker
.semantic()
.resolve_qualified_name(call.func.as_ref())
.map_or(false, |qualified_name| {
.is_some_and(|qualified_name| {
matches!(
qualified_name.segments(),
["django", "utils", "translation", "gettext" | "gettext_lazy"]

View File

@@ -145,7 +145,7 @@ pub(crate) fn super_call_with_parameters(checker: &mut Checker, call: &ast::Expr
.resolve_qualified_name(func)
.is_some_and(|name| name.segments() == ["dataclasses", "dataclass"])
{
arguments.find_keyword("slots").map_or(false, |keyword| {
arguments.find_keyword("slots").is_some_and(|keyword| {
matches!(
keyword.value,
Expr::BooleanLiteral(ast::ExprBooleanLiteral { value: true, .. })

View File

@@ -117,7 +117,7 @@ fn in_subscript_index(expr: &ExprSubscript, semantic: &SemanticModel) -> bool {
}
// E.g., `Generic[DType, Unpack[int]]`.
if parent.slice.as_tuple_expr().map_or(false, |slice| {
if parent.slice.as_tuple_expr().is_some_and(|slice| {
slice
.elts
.iter()
@@ -144,5 +144,5 @@ fn in_vararg(expr: &ExprSubscript, semantic: &SemanticModel) -> bool {
.as_ref()
.and_then(|vararg| vararg.annotation.as_ref())
.and_then(|annotation| annotation.as_subscript_expr())
.map_or(false, |annotation| annotation == expr)
== Some(expr)
}

View File

@@ -144,7 +144,7 @@ pub(crate) fn print_empty_string(checker: &mut Checker, call: &ast::ExprCall) {
let empty_separator = call
.arguments
.find_keyword("sep")
.map_or(false, |keyword| is_empty_string(&keyword.value));
.is_some_and(|keyword| is_empty_string(&keyword.value));
if !empty_separator {
return;
}

View File

@@ -1732,7 +1732,7 @@ impl StringLiteralValue {
pub fn is_unicode(&self) -> bool {
self.iter()
.next()
.map_or(false, |part| part.flags.prefix().is_unicode())
.is_some_and(|part| part.flags.prefix().is_unicode())
}
/// Returns a slice of all the [`StringLiteral`] parts contained in this value.

View File

@@ -85,7 +85,7 @@ pub(crate) struct FormatLeadingAlternateBranchComments<'a> {
impl Format<PyFormatContext<'_>> for FormatLeadingAlternateBranchComments<'_> {
fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> {
if self.last_node.map_or(false, |preceding| {
if self.last_node.is_some_and(|preceding| {
should_insert_blank_line_after_class_in_stub_file(preceding, None, f.context())
}) {
write!(f, [empty_line(), leading_comments(self.comments)])?;

View File

@@ -1002,24 +1002,21 @@ impl<'a> SemanticModel<'a> {
let value_name = UnqualifiedName::from_expr(value)?;
let (_, tail) = value_name.segments().split_first()?;
let resolved: QualifiedName = if qualified_name
.segments()
.first()
.map_or(false, |segment| *segment == ".")
{
from_relative_import(
self.module.qualified_name()?,
qualified_name.segments(),
tail,
)?
} else {
qualified_name
.segments()
.iter()
.chain(tail)
.copied()
.collect()
};
let resolved: QualifiedName =
if qualified_name.segments().first().copied() == Some(".") {
from_relative_import(
self.module.qualified_name()?,
qualified_name.segments(),
tail,
)?
} else {
qualified_name
.segments()
.iter()
.chain(tail)
.copied()
.collect()
};
Some(resolved)
}
BindingKind::Builtin => {

View File

@@ -125,7 +125,7 @@ impl Index {
DocumentKey::NotebookCell(url)
} else if Path::new(url.path())
.extension()
.map_or(false, |ext| ext.eq_ignore_ascii_case("ipynb"))
.is_some_and(|ext| ext.eq_ignore_ascii_case("ipynb"))
{
DocumentKey::Notebook(url)
} else {

View File

@@ -1,2 +1,2 @@
[toolchain]
channel = "1.83"
channel = "1.84"