Compare commits


3 Commits

Charlie Marsh · 87eec9bb51 · 2026-01-14 18:28:23 +00:00
[ty] Show dynamic NamedTuple defaults in signature (#22574)
Summary: Follow-up from https://github.com/astral-sh/ruff/pull/22327.

Micha Reiser · eaed0d9b5c · 2026-01-14 19:23:38 +01:00
[ty] Fix flaky completions (#22576)

Charlie Marsh · eb96456e1e · 2026-01-14 18:22:27 +00:00
[ty] Synthesize an empty __slots__ for named tuples (#22573)
Summary: Closes https://github.com/astral-sh/ty/issues/2490.
8 changed files with 514 additions and 427 deletions

View File

@@ -367,7 +367,7 @@ def f_wrong(c: Callable[[], None]):
# error: [unresolved-attribute] "Object of type `() -> None` has no attribute `__qualname__`"
c.__qualname__
# error: [unresolved-attribute] "Unresolved attribute `__qualname__` on type `() -> None`"
# error: [unresolved-attribute] "Unresolved attribute `__qualname__` on type `() -> None`."
c.__qualname__ = "my_callable"
```

View File

@@ -469,7 +469,8 @@ reveal_type(Point2.__new__) # revealed: (cls: type, _0: Any, _1: Any) -> Point2
# `defaults` provides default values for the rightmost fields
Person = collections.namedtuple("Person", ["name", "age", "city"], defaults=["Unknown"])
reveal_type(Person) # revealed: <class 'Person'>
-reveal_type(Person.__new__) # revealed: (cls: type, name: Any, age: Any, city: Any = ...) -> Person
+reveal_type(Person.__new__) # revealed: (cls: type, name: Any, age: Any, city: Any = "Unknown") -> Person
reveal_mro(Person) # revealed: (<class 'Person'>, <class 'tuple[Any, Any, Any]'>, <class 'object'>)
# Can create with all fields
person1 = Person("Alice", 30, "NYC")
@@ -486,7 +487,7 @@ reveal_type(Config) # revealed: <class 'Config'>
# TODO: This should emit a diagnostic since it would fail at runtime.
TooManyDefaults = collections.namedtuple("TooManyDefaults", ["x", "y"], defaults=("a", "b", "c"))
reveal_type(TooManyDefaults) # revealed: <class 'TooManyDefaults'>
-reveal_type(TooManyDefaults.__new__) # revealed: (cls: type, x: Any = ..., y: Any = ...) -> TooManyDefaults
+reveal_type(TooManyDefaults.__new__) # revealed: (cls: type, x: Any = "a", y: Any = "b") -> TooManyDefaults
# Unknown keyword arguments produce an error
# error: [unknown-argument]
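Aside: the `defaults` semantics these tests pin down are plain `collections.namedtuple` behavior and can be reproduced in stock CPython, independent of ty. A quick sketch:

```python
import collections

# `defaults` apply to the rightmost fields, so only `city` gets one here.
Person = collections.namedtuple("Person", ["name", "age", "city"], defaults=["Unknown"])

print(Person("Alice", 30))           # Person(name='Alice', age=30, city='Unknown')
print(Person._field_defaults)        # {'city': 'Unknown'}
print(Person.__new__.__defaults__)   # ('Unknown',)

# More defaults than fields fails when the class itself is created:
try:
    collections.namedtuple("TooManyDefaults", ["x", "y"], defaults=("a", "b", "c"))
except TypeError as exc:
    print(exc)                       # Got more default values than field names
```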
@@ -845,6 +846,7 @@ class Person(NamedTuple):
reveal_type(Person._field_defaults) # revealed: dict[str, Any]
reveal_type(Person._fields) # revealed: tuple[Literal["name"], Literal["age"]]
+reveal_type(Person.__slots__) # revealed: tuple[()]
reveal_type(Person._make) # revealed: bound method <class 'Person'>._make(iterable: Iterable[Any]) -> Person
reveal_type(Person._asdict) # revealed: def _asdict(self) -> dict[str, Any]
reveal_type(Person._replace) # revealed: (self: Self, *, name: str = ..., age: int | None = ...) -> Self
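Aside: the `tuple[()]` revealed for `__slots__` matches what both `typing.NamedTuple` and `collections.namedtuple` classes define at runtime, which is why instances reject new attributes. A quick check in stock CPython:

```python
import collections
from typing import NamedTuple

class Person(NamedTuple):
    name: str
    age: int = 0

Point = collections.namedtuple("Point", ["x", "y"])

print(Person.__slots__, Point.__slots__)   # () ()
alice = Person("Alice")
try:
    alice.nickname = "Al"   # no per-instance __dict__, so new attributes are rejected
except AttributeError as exc:
    print(exc)
```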
@@ -887,6 +889,8 @@ Person = namedtuple("Person", ["id", "name", "age"], defaults=[None])
alice = Person(1, "Alice", 42)
bob = Person(2, "Bob")
+reveal_type(Person.__slots__) # revealed: tuple[()]
```
## `collections.namedtuple` with tuple variable field names

View File

@@ -39,7 +39,7 @@ info: rule `unresolved-attribute` is enabled by default
```
```
-error[unresolved-attribute]: Unresolved attribute `non_existent` on type `C`
+error[unresolved-attribute]: Unresolved attribute `non_existent` on type `C`.
--> src/mdtest_snippet.py:6:1
|
5 | instance = C()

View File

@@ -3384,16 +3384,17 @@ impl<'db> Type<'db> {
.map(|class| class.class_literal(db)),
_ => None,
};
if let Some(enum_class) = enum_class
&& let Some(metadata) = enum_metadata(db, enum_class)
&& let Some(resolved_name) = metadata.resolve_member(&name)
{
return Place::bound(Type::EnumLiteral(EnumLiteralType::new(
db,
enum_class,
resolved_name,
)))
.into();
if let Some(enum_class) = enum_class {
if let Some(metadata) = enum_metadata(db, enum_class) {
if let Some(resolved_name) = metadata.resolve_member(&name) {
return Place::bound(Type::EnumLiteral(EnumLiteralType::new(
db,
enum_class,
resolved_name,
)))
.into();
}
}
}
let class_attr_plain = self.find_name_in_mro_with_policy(db, name_str, policy).expect(
@@ -5062,15 +5063,16 @@ impl<'db> Type<'db> {
let from_class_base = |base: ClassBase<'db>| {
let class = base.into_class()?;
if class.is_known(db, KnownClass::Generator)
&& let Some((_, Some(specialization))) =
if class.is_known(db, KnownClass::Generator) {
if let Some((_, Some(specialization))) =
class.static_class_literal_specialized(db, None)
&& let [_, _, return_ty] = specialization.types(db)
{
Some(*return_ty)
} else {
None
{
if let [_, _, return_ty] = specialization.types(db) {
return Some(*return_ty);
}
}
}
None
};
match self {
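Context for the hunk above: the specialization being unpacked is `Generator[YieldType, SendType, ReturnType]`, and the third type parameter is the generator's return type. A small, ty-independent illustration:

```python
from collections.abc import Generator

def countdown(n: int) -> Generator[int, None, str]:
    # Generator[YieldType, SendType, ReturnType]; the code above extracts the third one.
    while n:
        yield n
        n -= 1
    return "lift-off"

gen = countdown(2)
print(list(gen))   # [2, 1]; the "lift-off" return value is what `yield from` evaluates to,
                   # carried in StopIteration.value when the generator is driven by hand.
```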
@@ -8052,10 +8054,15 @@ impl<'db> TypeVarInstance<'db> {
let typevar_node = typevar.node(&module);
let bound =
definition_expression_type(db, definition, typevar_node.bound.as_ref()?);
let constraints = if let Some(tuple) = bound.tuple_instance_spec(db)
&& let Tuple::Fixed(tuple) = tuple.into_owned()
let constraints = if let Some(tuple) = bound
.as_nominal_instance()
.and_then(|instance| instance.tuple_spec(db))
{
tuple.owned_elements()
if let Tuple::Fixed(tuple) = tuple.into_owned() {
tuple.owned_elements()
} else {
vec![Type::unknown()].into_boxed_slice()
}
} else {
vec![Type::unknown()].into_boxed_slice()
};
@@ -9133,13 +9140,13 @@ impl<'db> AwaitError<'db> {
""
};
diag.info(format_args!("`__await__` is{possibly} not callable"));
if let Some(definition) = bindings.callable_type().definition(db)
&& let Some(definition_range) = definition.focus_range(db)
{
diag.annotate(
Annotation::secondary(definition_range.into())
.message("attribute defined here"),
);
if let Some(definition) = bindings.callable_type().definition(db) {
if let Some(definition_range) = definition.focus_range(db) {
diag.annotate(
Annotation::secondary(definition_range.into())
.message("attribute defined here"),
);
}
}
}
Self::Call(CallDunderError::PossiblyUnbound(bindings)) => {
@@ -9153,12 +9160,13 @@ impl<'db> AwaitError<'db> {
}
Self::Call(CallDunderError::MethodNotAvailable) => {
diag.info("`__await__` is missing");
if let Some(type_definition) = context_expression_type.definition(db)
&& let Some(definition_range) = type_definition.focus_range(db)
{
diag.annotate(
Annotation::secondary(definition_range.into()).message("type defined here"),
);
if let Some(type_definition) = context_expression_type.definition(db) {
if let Some(definition_range) = type_definition.focus_range(db) {
diag.annotate(
Annotation::secondary(definition_range.into())
.message("type defined here"),
);
}
}
}
Self::InvalidReturnType(return_type, bindings) => {
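Context: these diagnostics cover the awaitable protocol, where `await x` needs `type(x).__await__` to exist and be callable. A minimal awaitable (the class names are made up for illustration):

```python
import asyncio

class Ready:
    def __await__(self):
        if False:
            yield    # makes __await__ a generator; this awaitable never suspends
        return 42

class NotAwaitable:
    pass             # no __await__ at all: awaiting this hits the "missing" branch above

async def main() -> int:
    return await Ready()

print(asyncio.run(main()))   # 42
```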
@@ -11344,20 +11352,20 @@ impl<'db> ModuleLiteralType<'db> {
// if it exists. First, we need to look up the `__getattr__` function in the module's scope.
if let Some(file) = self.module(db).file(db) {
let getattr_symbol = imported_symbol(db, file, "__getattr__", None);
// If we found a __getattr__ function, try to call it with the name argument
if let Place::Defined(place) = getattr_symbol.place
&& let Ok(outcome) = place.ty.try_call(
if let Place::Defined(place) = getattr_symbol.place {
// If we found a __getattr__ function, try to call it with the name argument
if let Ok(outcome) = place.ty.try_call(
db,
&CallArguments::positional([Type::string_literal(db, name)]),
)
{
return PlaceAndQualifiers {
place: Place::Defined(DefinedPlace {
ty: outcome.return_type(db),
..place
}),
qualifiers: TypeQualifiers::FROM_MODULE_GETATTR,
};
) {
return PlaceAndQualifiers {
place: Place::Defined(DefinedPlace {
ty: outcome.return_type(db),
..place
}),
qualifiers: TypeQualifiers::FROM_MODULE_GETATTR,
};
}
}
}
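Context: the hook being called here is PEP 562's module-level `__getattr__`, which a module can define to serve attributes that an ordinary lookup misses. A made-up module as a sketch:

```python
# pkg/compat.py -- a made-up module using PEP 562's module-level __getattr__
_RENAMED = {"old_name": "new_name"}

def __getattr__(name: str) -> str:
    if name in _RENAMED:
        return _RENAMED[name]
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

# Elsewhere:
#   import pkg.compat
#   pkg.compat.old_name    # resolved through __getattr__, not the module dict
#   pkg.compat.missing     # AttributeError raised by the hook
```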
@@ -11383,10 +11391,10 @@ impl<'db> ModuleLiteralType<'db> {
// the parent module's `__init__.py` file being evaluated. That said, we have
// chosen to always have the submodule take priority. (This matches pyright's
// current behavior, but is the opposite of mypy's current behavior.)
if self.available_submodule_attributes(db).contains(name)
&& let Some(submodule) = self.resolve_submodule(db, name)
{
return Place::bound(submodule).into();
if self.available_submodule_attributes(db).contains(name) {
if let Some(submodule) = self.resolve_submodule(db, name) {
return Place::bound(submodule).into();
}
}
let place_and_qualifiers = self
@@ -12085,11 +12093,17 @@ impl<'db> UnionType<'db> {
let mut has_float = false;
let mut has_complex = false;
for element in self.elements(db) {
match element.as_nominal_instance()?.known_class(db)? {
KnownClass::Int => has_int = true,
KnownClass::Float => has_float = true,
KnownClass::Complex => has_complex = true,
_ => return None,
if let Type::NominalInstance(nominal) = element
&& let Some(known) = nominal.known_class(db)
{
match known {
KnownClass::Int => has_int = true,
KnownClass::Float => has_float = true,
KnownClass::Complex => has_complex = true,
_ => return None,
}
} else {
return None;
}
}
match (has_int, has_float, has_complex) {

View File

@@ -3244,7 +3244,10 @@ impl<'db> StaticClassLiteral<'db> {
)
})
}
(CodeGeneratorKind::NamedTuple, "__new__" | "_replace" | "__replace__" | "_fields") => {
(
CodeGeneratorKind::NamedTuple,
"__new__" | "_replace" | "__replace__" | "_fields" | "__slots__",
) => {
let fields = self.fields(db, specialization, field_policy);
let fields_iter = fields.iter().map(|(name, field)| {
let default_ty = match &field.kind {
@@ -5212,6 +5215,10 @@ fn synthesize_namedtuple_class_member<'db>(
fields.map(|(field_name, _, _)| Type::string_literal(db, &field_name));
Some(Type::heterogeneous_tuple(db, field_types))
}
"__slots__" => {
// __slots__: tuple[()] - always empty for namedtuples
Some(Type::empty_tuple(db))
}
"_replace" | "__replace__" => {
if name == "__replace__" && Program::get(db).python_version(db) < PythonVersion::PY313 {
return None;
@@ -5536,7 +5543,10 @@ impl<'db> DynamicNamedTupleLiteral<'db> {
// For fallback members from NamedTupleFallback, apply type mapping to handle
// `Self` types. The explicitly synthesized members (__new__, _fields, _replace,
// __replace__) don't need this mapping.
if matches!(name, "__new__" | "_fields" | "_replace" | "__replace__") {
if matches!(
name,
"__new__" | "_fields" | "_replace" | "__replace__" | "__slots__"
) {
result
} else {
result.map(|ty| {

View File

@@ -628,17 +628,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
for (class, class_node) in class_definitions {
// (1) Check that the class does not have a cyclic definition
if let Some(inheritance_cycle) = class.inheritance_cycle(self.db()) {
if inheritance_cycle.is_participant()
&& let Some(builder) = self
if inheritance_cycle.is_participant() {
if let Some(builder) = self
.context
.report_lint(&CYCLIC_CLASS_DEFINITION, class_node)
{
builder.into_diagnostic(format_args!(
"Cyclic definition of `{}` (class cannot inherit from itself)",
class.name(self.db())
));
{
builder.into_diagnostic(format_args!(
"Cyclic definition of `{}` (class cannot inherit from itself)",
class.name(self.db())
));
}
}
// If a class is cyclically defined, that's a sufficient error to report; the
// following checks (which are all inheritance-based) aren't even relevant.
continue;
@@ -1026,14 +1026,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
class.legacy_generic_context(self.db()),
class.inherited_legacy_generic_context(self.db()),
) {
if !inherited.is_subset_of(self.db(), legacy)
&& let Some(builder) =
if !inherited.is_subset_of(self.db(), legacy) {
if let Some(builder) =
self.context.report_lint(&INVALID_GENERIC_CLASS, class_node)
{
builder.into_diagnostic(
"`Generic` base class must include all type \
variables used in other base classes",
);
{
builder.into_diagnostic(
"`Generic` base class must include all type \
variables used in other base classes",
);
}
}
}
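Context: the diagnostic above enforces the typing rule that an explicit `Generic[...]` base must list every type variable used in the other bases; CPython raises a `TypeError` for the same mistake at class-creation time. A made-up example:

```python
from typing import Generic, TypeVar

T = TypeVar("T")
S = TypeVar("S")

class Base(Generic[T]): ...

class Ok(Base[T], Generic[T, S]): ...   # fine: Generic[...] lists every type variable in use

class Bad(Base[T], Generic[S]): ...     # error: `Generic` base class must include all type
                                        # variables used in other base classes (TypeError at runtime)
```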
@@ -1154,14 +1155,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// Annotated assignments are allowed (that's the whole point), but they're
// not allowed to have a value.
ast::Stmt::AnnAssign(ann_assign) => {
if let Some(value) = &ann_assign.value
&& let Some(builder) = self
if let Some(value) = &ann_assign.value {
if let Some(builder) = self
.context
.report_lint(&INVALID_TYPED_DICT_STATEMENT, &**value)
{
builder.into_diagnostic("TypedDict item cannot have a value");
{
builder.into_diagnostic(format_args!(
"TypedDict item cannot have a value"
));
}
}
continue;
}
// Pass statements are allowed.
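Context: the rule behind `TypedDict item cannot have a value` is that a `TypedDict` body may only contain annotations, so assigning a field default is rejected. A made-up example of what gets flagged:

```python
from typing import TypedDict

class Movie(TypedDict):
    title: str
    year: int = 1999   # error: TypedDict item cannot have a value
```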
@@ -1829,17 +1832,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
// Fall back to implicit module globals for (possibly) unbound names
if !place_and_quals.place.is_definitely_bound()
&& let PlaceExprRef::Symbol(symbol) = place
{
let symbol_id = place_id.expect_symbol();
if !place_and_quals.place.is_definitely_bound() {
if let PlaceExprRef::Symbol(symbol) = place {
let symbol_id = place_id.expect_symbol();
if self.skip_non_global_scopes(file_scope_id, symbol_id)
|| self.scope.file_scope_id(self.db()).is_global()
{
place_and_quals = place_and_quals.or_fall_back_to(self.db(), || {
module_type_implicit_global_declaration(self.db(), symbol.name())
});
if self.skip_non_global_scopes(file_scope_id, symbol_id)
|| self.scope.file_scope_id(self.db()).is_global()
{
place_and_quals = place_and_quals.or_fall_back_to(self.db(), || {
module_type_implicit_global_declaration(self.db(), symbol.name())
});
}
}
}
@@ -2618,26 +2621,25 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
returns: Option<&ast::Expr>,
deferred_expression_state: DeferredExpressionState,
) {
let Some(returns) = returns else {
return;
};
let annotated = self.infer_annotation_expression(returns, deferred_expression_state);
if let Some(returns) = returns {
let annotated = self.infer_annotation_expression(returns, deferred_expression_state);
if annotated.qualifiers.is_empty() {
return;
}
for qualifier in [
TypeQualifiers::FINAL,
TypeQualifiers::CLASS_VAR,
TypeQualifiers::INIT_VAR,
] {
if annotated.qualifiers.contains(qualifier)
&& let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, returns)
{
builder.into_diagnostic(format!(
"`{name}` is not allowed in function return type annotations",
name = qualifier.name()
));
if !annotated.qualifiers.is_empty() {
for qualifier in [
TypeQualifiers::FINAL,
TypeQualifiers::CLASS_VAR,
TypeQualifiers::INIT_VAR,
] {
if annotated.qualifiers.contains(qualifier) {
if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, returns)
{
builder.into_diagnostic(format!(
"`{name}` is not allowed in function return type annotations",
name = qualifier.name()
));
}
}
}
}
}
}
@@ -2677,28 +2679,24 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
self.defer_annotations().into(),
);
let Some(annotated) = annotated else {
return;
};
let qualifiers = annotated.qualifiers;
if qualifiers.is_empty() {
return;
}
for qualifier in [
TypeQualifiers::FINAL,
TypeQualifiers::CLASS_VAR,
TypeQualifiers::INIT_VAR,
] {
if qualifiers.contains(qualifier)
&& let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, parameter)
{
builder.into_diagnostic(format!(
"`{name}` is not allowed in function parameter annotations",
name = qualifier.name()
));
if let Some(qualifiers) = annotated.map(|annotated| annotated.qualifiers) {
if !qualifiers.is_empty() {
for qualifier in [
TypeQualifiers::FINAL,
TypeQualifiers::CLASS_VAR,
TypeQualifiers::INIT_VAR,
] {
if qualifiers.contains(qualifier) {
if let Some(builder) =
self.context.report_lint(&INVALID_TYPE_FORM, parameter)
{
builder.into_diagnostic(format!(
"`{name}` is not allowed in function parameter annotations",
name = qualifier.name()
));
}
}
}
}
}
}
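Context: the two hunks above report the same qualifier rule for return and parameter annotations. A made-up example, shown with `Final` (`ClassVar` and `InitVar` are handled identically):

```python
from typing import Final

def f(x: Final[int]) -> None: ...   # error: `Final` is not allowed in function parameter annotations

def g() -> Final[int]: ...          # error: `Final` is not allowed in function return type annotations
```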
@@ -4547,13 +4545,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
// If none are valid, emit a diagnostic for the first failing element
if !any_valid && let Some(element_ty) = intersection.positive(db).first() {
self.validate_subscript_deletion_impl(
target,
full_object_ty.or(Some(object_ty)),
*element_ty,
slice_ty,
);
if !any_valid {
if let Some(element_ty) = intersection.positive(db).first() {
self.validate_subscript_deletion_impl(
target,
full_object_ty.or(Some(object_ty)),
*element_ty,
slice_ty,
);
}
}
}
@@ -4761,13 +4761,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
};
let emit_invalid_final = |builder: &Self| {
if emit_diagnostics
&& let Some(builder) = builder.context.report_lint(&INVALID_ASSIGNMENT, target)
{
builder.into_diagnostic(format_args!(
"Cannot assign to final attribute `{attribute}` on type `{}`",
object_ty.display(db)
));
if emit_diagnostics {
if let Some(builder) = builder.context.report_lint(&INVALID_ASSIGNMENT, target) {
builder.into_diagnostic(format_args!(
"Cannot assign to final attribute `{attribute}` on type `{}`",
object_ty.display(db)
));
}
}
};
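Context: the diagnostic above fires on assignments to attributes declared `Final`. A made-up example:

```python
from typing import Final

class Config:
    retries: Final[int] = 3

cfg = Config()
cfg.retries = 5   # error: Cannot assign to final attribute `retries` on type `Config`
```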
@@ -4811,20 +4811,20 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let class_scope_id = class_literal.body_scope(db).file_scope_id(db);
let place_table = builder.index.place_table(class_scope_id);
if let Some(symbol) = place_table.symbol_by_name(attribute)
&& symbol.is_bound()
{
if emit_diagnostics
&& let Some(diag_builder) =
builder.context.report_lint(&INVALID_ASSIGNMENT, target)
{
diag_builder.into_diagnostic(format_args!(
"Cannot assign to final attribute `{attribute}` in `__init__` \
because it already has a value at class level"
));
if let Some(symbol) = place_table.symbol_by_name(attribute) {
if symbol.is_bound() {
if emit_diagnostics {
if let Some(diag_builder) =
builder.context.report_lint(&INVALID_ASSIGNMENT, target)
{
diag_builder.into_diagnostic(format_args!(
"Cannot assign to final attribute `{attribute}` in `__init__` \
because it already has a value at class level"
));
}
}
return true;
}
return true;
}
}
@@ -4851,15 +4851,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
} else {
// TODO: This is not a very helpful error message, as it does not include the underlying reason
// why the assignment is invalid. This would be a good use case for sub-diagnostics.
if emit_diagnostics
&& let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target)
{
builder.into_diagnostic(format_args!(
"Object of type `{}` is not assignable \
if emit_diagnostics {
if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target)
{
builder.into_diagnostic(format_args!(
"Object of type `{}` is not assignable \
to attribute `{attribute}` on type `{}`",
value_ty.display(self.db()),
object_ty.display(self.db()),
));
value_ty.display(self.db()),
object_ty.display(self.db()),
));
}
}
false
@@ -4883,18 +4884,18 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}) {
true
} else {
if emit_diagnostics
&& let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target)
{
// TODO: same here, see above
builder.into_diagnostic(format_args!(
"Object of type `{}` is not assignable \
if emit_diagnostics {
if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target)
{
// TODO: same here, see above
builder.into_diagnostic(format_args!(
"Object of type `{}` is not assignable \
to attribute `{attribute}` on type `{}`",
value_ty.display(self.db()),
object_ty.display(self.db()),
));
value_ty.display(self.db()),
object_ty.display(self.db()),
));
}
}
false
}
}
@@ -4911,27 +4912,26 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
Type::NominalInstance(instance) if instance.has_known_class(db, KnownClass::Super) => {
infer_value_ty(self, TypeContext::default());
if emit_diagnostics
&& let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target)
{
builder.into_diagnostic(format_args!(
"Cannot assign to attribute `{attribute}` on type `{}`",
object_ty.display(self.db()),
));
if emit_diagnostics {
if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target) {
builder.into_diagnostic(format_args!(
"Cannot assign to attribute `{attribute}` on type `{}`",
object_ty.display(self.db()),
));
}
}
false
}
Type::BoundSuper(_) => {
infer_value_ty(self, TypeContext::default());
if emit_diagnostics
&& let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target)
{
builder.into_diagnostic(format_args!(
"Cannot assign to attribute `{attribute}` on type `{}`",
object_ty.display(self.db()),
));
if emit_diagnostics {
if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target) {
builder.into_diagnostic(format_args!(
"Cannot assign to attribute `{attribute}` on type `{}`",
object_ty.display(self.db()),
));
}
}
false
}
@@ -5036,15 +5036,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// Only fall back to `__setattr__` when no explicit attribute is found.
match object_ty.class_member(db, attribute.into()) {
meta_attr @ PlaceAndQualifiers { .. } if meta_attr.is_class_var() => {
if emit_diagnostics
&& let Some(builder) =
if emit_diagnostics {
if let Some(builder) =
self.context.report_lint(&INVALID_ATTRIBUTE_ACCESS, target)
{
builder.into_diagnostic(format_args!(
"Cannot assign to ClassVar `{attribute}` \
from an instance of type `{ty}`",
ty = object_ty.display(self.db()),
));
{
builder.into_diagnostic(format_args!(
"Cannot assign to ClassVar `{attribute}` \
from an instance of type `{ty}`",
ty = object_ty.display(self.db()),
));
}
}
false
}
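Context: this branch covers writes to a `ClassVar` through an instance; the typing spec reserves such writes for the class object itself. A made-up example:

```python
from typing import ClassVar

class Counter:
    total: ClassVar[int] = 0

Counter.total = 1     # fine: ClassVar writes go through the class
Counter().total = 2   # error: Cannot assign to ClassVar `total` from an instance of type `Counter`
```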
@@ -5074,16 +5075,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
&CallArguments::positional([meta_attr_ty, object_ty, value_ty]),
);
if emit_diagnostics
&& let Err(dunder_set_failure) = dunder_set_result.as_ref()
{
report_bad_dunder_set_call(
&self.context,
dunder_set_failure,
attribute,
object_ty,
target,
);
if emit_diagnostics {
if let Err(dunder_set_failure) = dunder_set_result.as_ref() {
report_bad_dunder_set_call(
&self.context,
dunder_set_failure,
attribute,
object_ty,
target,
);
}
}
dunder_set_result.is_ok()
@@ -5176,30 +5177,32 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// If __setattr__ succeeded, allow the assignment.
Ok(_) | Err(CallDunderError::PossiblyUnbound(_)) => true,
Err(CallDunderError::CallError(..)) => {
if emit_diagnostics
&& let Some(builder) =
if emit_diagnostics {
if let Some(builder) =
self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target)
{
builder.into_diagnostic(format_args!(
"Cannot assign object of type `{}` to attribute \
`{attribute}` on type `{}` with \
custom `__setattr__` method.",
value_ty.display(db),
object_ty.display(db)
));
{
builder.into_diagnostic(format_args!(
"Cannot assign object of type `{}` to attribute \
`{attribute}` on type `{}` with \
custom `__setattr__` method.",
value_ty.display(db),
object_ty.display(db)
));
}
}
false
}
Err(CallDunderError::MethodNotAvailable) => {
if emit_diagnostics
&& let Some(builder) =
if emit_diagnostics {
if let Some(builder) =
self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target)
{
builder.into_diagnostic(format_args!(
"Unresolved attribute `{}` on type `{}`",
attribute,
object_ty.display(db)
));
{
builder.into_diagnostic(format_args!(
"Unresolved attribute `{}` on type `{}`.",
attribute,
object_ty.display(db)
));
}
}
false
}
@@ -5241,16 +5244,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
&CallArguments::positional([meta_attr_ty, object_ty, value_ty]),
);
if emit_diagnostics
&& let Err(dunder_set_failure) = dunder_set_result.as_ref()
{
report_bad_dunder_set_call(
&self.context,
dunder_set_failure,
attribute,
object_ty,
target,
);
if emit_diagnostics {
if let Err(dunder_set_failure) = dunder_set_result.as_ref() {
report_bad_dunder_set_call(
&self.context,
dunder_set_failure,
attribute,
object_ty,
target,
);
}
}
dunder_set_result.is_ok()
@@ -5403,15 +5406,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
} else {
infer_value_ty(self, TypeContext::default());
if emit_diagnostics
&& let Some(builder) =
if emit_diagnostics {
if let Some(builder) =
self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target)
{
builder.into_diagnostic(format_args!(
"Unresolved attribute `{}` on type `{}`.",
attribute,
object_ty.display(db)
));
{
builder.into_diagnostic(format_args!(
"Unresolved attribute `{}` on type `{}`.",
attribute,
object_ty.display(db)
));
}
}
false
@@ -6560,7 +6564,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
// Infer keyword arguments.
let mut defaults_count = None;
let mut default_types: Vec<Type<'db>> = vec![];
let mut rename_type = None;
for kw in keywords {
@@ -6571,11 +6575,42 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
};
match arg.id.as_str() {
"defaults" if kind.is_collections() => {
defaults_count = kw_type
.exact_tuple_instance_spec(db)
.and_then(|spec| spec.len().maximum())
.or_else(|| kw.value.as_list_expr().map(|list| list.elts.len()));
// Extract element types from AST literals (using already-inferred types)
// or fall back to the inferred tuple spec.
match &kw.value {
ast::Expr::List(list) => {
// Elements were already inferred when we inferred kw.value above.
default_types = list
.elts
.iter()
.map(|elt| self.expression_type(elt))
.collect();
}
ast::Expr::Tuple(tuple) => {
// Elements were already inferred when we inferred kw.value above.
default_types = tuple
.elts
.iter()
.map(|elt| self.expression_type(elt))
.collect();
}
_ => {
// Fall back to using the already-inferred type.
// Try to extract element types from tuple.
if let Some(spec) = kw_type.exact_tuple_instance_spec(db)
&& let Some(fixed) = spec.as_fixed_length()
{
default_types = fixed.all_elements().to_vec();
} else {
// Can't determine individual types; use Any for each element.
let count = kw_type
.exact_tuple_instance_spec(db)
.and_then(|spec| spec.len().maximum())
.unwrap_or(0);
default_types = vec![Type::any(); count];
}
}
}
// Emit diagnostic for invalid types (not Iterable[Any] | None).
let iterable_any =
KnownClass::Iterable.to_specialized_instance(db, &[Type::any()]);
@@ -6640,8 +6675,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
}
let defaults_count = defaults_count.unwrap_or_default();
// Extract name.
let name = if let Type::StringLiteral(literal) = name_type {
Name::new(literal.value(db))
@@ -6774,14 +6807,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// TODO: emit a diagnostic when `defaults_count > num_fields` (which would
// fail at runtime with `TypeError: Got more default values than field names`).
let num_fields = field_names.len();
let defaults_count = defaults_count.min(num_fields);
let defaults_count = default_types.len().min(num_fields);
let fields = field_names
.iter()
.enumerate()
.map(|(i, field_name)| {
let default =
if defaults_count > 0 && i >= num_fields - defaults_count {
Some(Type::any())
// Index into default_types: first default corresponds to first
// field that has a default.
let default_idx = i - (num_fields - defaults_count);
Some(default_types[default_idx])
} else {
None
};
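The index arithmetic introduced above aligns the collected default types with the rightmost fields. A plain-Python restatement of the same mapping (the field names and default are made up):

```python
# Field i (for i >= offset) takes default_types[i - offset]; earlier fields have no default.
field_names = ["name", "age", "city"]
default_types = ["Unknown"]   # stand-in for the inferred default types
defaults_count = min(len(default_types), len(field_names))
offset = len(field_names) - defaults_count

for i, field in enumerate(field_names):
    default = default_types[i - offset] if defaults_count and i >= offset else None
    print(field, default)     # name None / age None / city Unknown
```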
@@ -7188,15 +7224,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
if !annotated.qualifiers.is_empty() {
for qualifier in [TypeQualifiers::CLASS_VAR, TypeQualifiers::INIT_VAR] {
if annotated.qualifiers.contains(qualifier)
&& let Some(builder) = self
if annotated.qualifiers.contains(qualifier) {
if let Some(builder) = self
.context
.report_lint(&INVALID_TYPE_FORM, annotation.as_ref())
{
builder.into_diagnostic(format_args!(
"`{name}` annotations are not allowed for non-name targets",
name = qualifier.name()
));
{
builder.into_diagnostic(format_args!(
"`{name}` annotations are not allowed for non-name targets",
name = qualifier.name()
));
}
}
}
}
@@ -7240,14 +7277,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let current_scope = self.index.scope(current_scope_id);
if current_scope.kind() != ScopeKind::Class {
for qualifier in [TypeQualifiers::CLASS_VAR, TypeQualifiers::INIT_VAR] {
if declared.qualifiers.contains(qualifier)
&& let Some(builder) =
if declared.qualifiers.contains(qualifier) {
if let Some(builder) =
self.context.report_lint(&INVALID_TYPE_FORM, annotation)
{
builder.into_diagnostic(format_args!(
"`{name}` annotations are only allowed in class-body scopes",
name = qualifier.name()
));
{
builder.into_diagnostic(format_args!(
"`{name}` annotations are only allowed in class-body scopes",
name = qualifier.name()
));
}
}
}
}
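Context: this diagnostic restricts `ClassVar` and `InitVar` annotations to class bodies. A made-up example:

```python
from typing import ClassVar

class Ok:
    limit: ClassVar[int] = 10   # fine: class-body scope

def f() -> None:
    limit: ClassVar[int] = 10   # error: `ClassVar` annotations are only allowed in class-body scopes
```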
@@ -7286,11 +7324,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let is_pep_613_type_alias = declared.inner_type().is_typealias_special_form();
// Handle various singletons.
if let Some(name_expr) = target.as_name_expr()
&& let Some(special_form) =
if let Some(name_expr) = target.as_name_expr() {
if let Some(special_form) =
SpecialFormType::try_from_file_and_name(self.db(), self.file(), &name_expr.id)
{
declared.inner = Type::SpecialForm(special_form);
{
declared.inner = Type::SpecialForm(special_form);
}
}
// If the target of an assignment is not one of the place expressions we support,
@@ -9601,9 +9640,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// Simplify the inference based on a non-covariant declared type.
if let Some(elt_tcx) =
elt_tcx.filter(|_| !elt_tcx_variance[&elt_ty_identity].is_covariant())
&& inferred_elt_ty.is_assignable_to(self.db(), elt_tcx)
{
continue;
if inferred_elt_ty.is_assignable_to(self.db(), elt_tcx) {
continue;
}
}
// Convert any element literals to their promoted type form to avoid excessively large
@@ -10219,50 +10259,50 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// Special handling for `TypedDict` method calls
if let ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() {
let value_type = self.expression_type(value);
if let Type::TypedDict(typed_dict_ty) = value_type {
if matches!(attr.id.as_str(), "pop" | "setdefault") && !arguments.args.is_empty() {
// Validate the key argument for `TypedDict` methods
if let Some(first_arg) = arguments.args.first() {
if let ast::Expr::StringLiteral(ast::ExprStringLiteral {
value: key_literal,
..
}) = first_arg
{
let key = key_literal.to_str();
let items = typed_dict_ty.items(self.db());
if let Type::TypedDict(typed_dict_ty) = value_type
&& matches!(attr.id.as_str(), "pop" | "setdefault")
&& !arguments.args.is_empty()
// Validate the key argument for `TypedDict` methods
&& let Some(first_arg) = arguments.args.first()
&& let ast::Expr::StringLiteral(ast::ExprStringLiteral {
value: key_literal,
..
}) = first_arg
{
let key = key_literal.to_str();
let items = typed_dict_ty.items(self.db());
// Check if key exists
if let Some((_, field)) = items
.iter()
.find(|(field_name, _)| field_name.as_str() == key)
{
// Key exists - check if it's a `pop()` on a required field
if attr.id.as_str() == "pop" && field.is_required() {
report_cannot_pop_required_field_on_typed_dict(
&self.context,
first_arg.into(),
Type::TypedDict(typed_dict_ty),
key,
);
return Type::unknown();
// Check if key exists
if let Some((_, field)) = items
.iter()
.find(|(field_name, _)| field_name.as_str() == key)
{
// Key exists - check if it's a `pop()` on a required field
if attr.id.as_str() == "pop" && field.is_required() {
report_cannot_pop_required_field_on_typed_dict(
&self.context,
first_arg.into(),
Type::TypedDict(typed_dict_ty),
key,
);
return Type::unknown();
}
} else {
// Key not found, report error with suggestion and return early
let key_ty = Type::string_literal(self.db(), key);
report_invalid_key_on_typed_dict(
&self.context,
first_arg.into(),
first_arg.into(),
Type::TypedDict(typed_dict_ty),
None,
key_ty,
items,
);
// Return `Unknown` to prevent the overload system from generating its own error
return Type::unknown();
}
}
}
} else {
// Key not found, report error with suggestion and return early
let key_ty = Type::string_literal(self.db(), key);
report_invalid_key_on_typed_dict(
&self.context,
first_arg.into(),
first_arg.into(),
Type::TypedDict(typed_dict_ty),
None,
key_ty,
items,
);
// Return `Unknown` to prevent the overload system from generating its own error
return Type::unknown();
}
}
}
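Context: the restructured block above validates the key argument of `TypedDict.pop()`/`setdefault()`. Roughly the two cases it distinguishes, as a made-up example:

```python
from typing import TypedDict

class Movie(TypedDict):
    title: str   # required
    year: int    # required

m: Movie = {"title": "Heat", "year": 1995}
m.pop("title")                   # error: `title` is required, so popping it would break the type
m.setdefault("genre", "crime")   # error: `genre` is not a key of `Movie`
```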
@@ -10439,18 +10479,18 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
);
// Validate `TypedDict` constructor calls after argument type inference
if let Type::ClassLiteral(class_literal) = callable_type
&& class_literal.is_typed_dict(self.db())
{
let typed_dict_type = Type::typed_dict(ClassType::NonGeneric(class_literal));
if let Some(typed_dict) = typed_dict_type.as_typed_dict() {
validate_typed_dict_constructor(
&self.context,
typed_dict,
arguments,
func.as_ref().into(),
|expr| self.expression_type(expr),
);
if let Some(class_literal) = callable_type.as_class_literal() {
if class_literal.is_typed_dict(self.db()) {
let typed_dict_type = Type::typed_dict(ClassType::NonGeneric(class_literal));
if let Some(typed_dict) = typed_dict_type.as_typed_dict() {
validate_typed_dict_constructor(
&self.context,
typed_dict,
arguments,
func.as_ref().into(),
|expr| self.expression_type(expr),
);
}
}
}
@@ -10863,24 +10903,26 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let place = PlaceAndQualifiers::from(local_scope_place).or_fall_back_to(db, || {
let mut symbol_resolves_locally = false;
if let Some(symbol) = place_expr.as_symbol()
&& let Some(symbol_id) = place_table.symbol_id(symbol.name())
{
// Footgun: `place_expr` and `symbol` were probably constructed with all-zero
// flags. We need to read the place table to get correct flags.
symbol_resolves_locally = place_table.symbol(symbol_id).is_local();
// If we try to access a variable in a class before it has been defined, the
// lookup will fall back to global. See the comment on `Symbol::is_local`.
let fallback_to_global =
scope.node(db).scope_kind().is_class() && symbol_resolves_locally;
if self.skip_non_global_scopes(file_scope_id, symbol_id) || fallback_to_global {
return global_symbol(self.db(), self.file(), symbol.name()).map_type(|ty| {
self.narrow_place_with_applicable_constraints(
place_expr,
ty,
&constraint_keys,
)
});
if let Some(symbol) = place_expr.as_symbol() {
if let Some(symbol_id) = place_table.symbol_id(symbol.name()) {
// Footgun: `place_expr` and `symbol` were probably constructed with all-zero
// flags. We need to read the place table to get correct flags.
symbol_resolves_locally = place_table.symbol(symbol_id).is_local();
// If we try to access a variable in a class before it has been defined, the
// lookup will fall back to global. See the comment on `Symbol::is_local`.
let fallback_to_global =
scope.node(db).scope_kind().is_class() && symbol_resolves_locally;
if self.skip_non_global_scopes(file_scope_id, symbol_id) || fallback_to_global {
return global_symbol(self.db(), self.file(), symbol.name()).map_type(
|ty| {
self.narrow_place_with_applicable_constraints(
place_expr,
ty,
&constraint_keys,
)
},
);
}
}
}
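Context: the comment above describes real CPython scoping; a name read in a class body before its local binding falls back to the global scope. A small, ty-independent illustration:

```python
x = "global"

class C:
    before = x   # the class-local `x` below is not bound yet, so this reads the global one
    x = "class"
    after = x    # now the class-local binding shadows the global

print(C.before, C.after)   # global class
```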
@@ -11977,12 +12019,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
| (Type::IntLiteral(n), Type::StringLiteral(s), ast::Operator::Mult) => {
let ty = if n < 1 {
Type::string_literal(self.db(), "")
} else if let Ok(n) = usize::try_from(n)
&& n.checked_mul(s.value(self.db()).len())
} else if let Ok(n) = usize::try_from(n) {
if n.checked_mul(s.value(self.db()).len())
.is_some_and(|new_length| new_length <= Self::MAX_STRING_LITERAL_SIZE)
{
let new_literal = s.value(self.db()).repeat(n);
Type::string_literal(self.db(), &new_literal)
{
let new_literal = s.value(self.db()).repeat(n);
Type::string_literal(self.db(), &new_literal)
} else {
Type::LiteralString
}
} else {
Type::LiteralString
};
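Context: the inference above mirrors CPython's `int * str` semantics, keeping small results as precise string literals and widening oversized ones; the size cutoff itself is ty-internal. A quick sketch:

```python
s = "ab"
print(-1 * s)   # ''        -- any factor below 1 gives the empty string
print(3 * s)    # 'ababab'  -- small results can stay precise string literals
big = 10_000_000 * s        # still a plain str at runtime, but too large to keep as a literal type
```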
@@ -13417,12 +13462,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
)));
}
Type::SpecialForm(SpecialFormType::Optional) => {
if matches!(**slice, ast::Expr::Tuple(_))
&& let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript)
{
builder.into_diagnostic(format_args!(
"`typing.Optional` requires exactly one argument"
));
if matches!(**slice, ast::Expr::Tuple(_)) {
if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) {
builder.into_diagnostic(format_args!(
"`typing.Optional` requires exactly one argument"
));
}
}
let ty = self.infer_expression(slice, TypeContext::default());
@@ -13459,13 +13504,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
),
));
if is_empty
&& let Some(builder) =
if is_empty {
if let Some(builder) =
self.context.report_lint(&INVALID_TYPE_FORM, subscript)
{
builder.into_diagnostic(
"`typing.Union` requires at least one type argument",
);
{
builder.into_diagnostic(
"`typing.Union` requires at least one type argument",
);
}
}
return union_type;
@@ -13583,15 +13629,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
..
}) = **slice
{
if arguments.len() != 2
&& let Some(builder) =
if arguments.len() != 2 {
if let Some(builder) =
self.context.report_lint(&INVALID_TYPE_FORM, subscript)
{
builder.into_diagnostic(format_args!(
"`typing.{}` requires exactly two arguments, got {}",
special_form.name(),
arguments.len()
));
{
builder.into_diagnostic(format_args!(
"`typing.{}` requires exactly two arguments, got {}",
special_form.name(),
arguments.len()
));
}
}
if let [first_expr, second_expr] = &arguments[..] {

View File

@@ -163,6 +163,13 @@ export default function Chrome({
[workspace, files.index, onRemoveFile],
);
const handleChange = useCallback(
(content: string) => {
onChangeFile(workspace, content);
},
[onChangeFile, workspace],
);
const { defaultLayout, onLayoutChange } = useDefaultLayout({
groupId: "editor-diagnostics",
storage: localStorage,
@@ -221,7 +228,7 @@ export default function Chrome({
diagnostics={checkResult.diagnostics}
workspace={workspace}
onMount={handleEditorMount}
onChange={(content) => onChangeFile(workspace, content)}
onChange={handleChange}
onOpenFile={onSelectFile}
onVendoredFileChange={onSelectVendoredFile}
onBackToUserFile={handleBackToUserFile}

View File

@@ -58,7 +58,7 @@ export default function Playground() {
}
}, [files]);
const handleFileAdded = (workspace: Workspace, name: string) => {
const handleFileAdded = useCallback((workspace: Workspace, name: string) => {
let handle = null;
if (name === SETTINGS_FILE_NAME) {
@@ -68,69 +68,74 @@ export default function Playground() {
}
dispatchFiles({ type: "add", name, handle, content: "" });
};
}, []);
const handleFileChanged = (workspace: Workspace, content: string) => {
if (files.selected == null) {
return;
}
const handleFileChanged = useCallback(
(workspace: Workspace, content: string) => {
if (files.selected == null) {
return;
}
dispatchFiles({
type: "change",
id: files.selected,
content,
});
const handle = files.handles[files.selected];
const handle = files.handles[files.selected];
if (handle != null) {
updateFile(workspace, handle, content, setError);
} else if (fileName === SETTINGS_FILE_NAME) {
updateOptions(workspace, content, setError);
}
if (handle != null) {
updateFile(workspace, handle, content, setError);
} else if (fileName === SETTINGS_FILE_NAME) {
updateOptions(workspace, content, setError);
}
};
dispatchFiles({
type: "change",
id: files.selected,
content,
});
},
[fileName, files.handles, files.selected],
);
const handleFileRenamed = (
workspace: Workspace,
file: FileId,
newName: string,
) => {
if (newName.startsWith("/")) {
setError("File names cannot start with '/'.");
return;
}
if (newName.startsWith("vendored:")) {
setError("File names cannot start with 'vendored:'.");
return;
}
const handleFileRenamed = useCallback(
(workspace: Workspace, file: FileId, newName: string) => {
if (newName.startsWith("/")) {
setError("File names cannot start with '/'.");
return;
}
if (newName.startsWith("vendored:")) {
setError("File names cannot start with 'vendored:'.");
return;
}
const handle = files.handles[file];
let newHandle: FileHandle | null = null;
if (handle == null) {
updateOptions(workspace, null, setError);
} else {
workspace.closeFile(handle);
}
const handle = files.handles[file];
let newHandle: FileHandle | null = null;
if (handle == null) {
updateOptions(workspace, null, setError);
} else {
workspace.closeFile(handle);
}
if (newName === SETTINGS_FILE_NAME) {
updateOptions(workspace, files.contents[file], setError);
} else {
newHandle = workspace.openFile(newName, files.contents[file]);
}
if (newName === SETTINGS_FILE_NAME) {
updateOptions(workspace, files.contents[file], setError);
} else {
newHandle = workspace.openFile(newName, files.contents[file]);
}
dispatchFiles({ type: "rename", id: file, to: newName, newHandle });
};
dispatchFiles({ type: "rename", id: file, to: newName, newHandle });
},
[files.contents, files.handles],
);
const handleFileRemoved = (workspace: Workspace, file: FileId) => {
const handle = files.handles[file];
if (handle == null) {
updateOptions(workspace, null, setError);
} else {
workspace.closeFile(handle);
}
const handleFileRemoved = useCallback(
(workspace: Workspace, file: FileId) => {
const handle = files.handles[file];
if (handle == null) {
updateOptions(workspace, null, setError);
} else {
workspace.closeFile(handle);
}
dispatchFiles({ type: "remove", id: file });
};
dispatchFiles({ type: "remove", id: file });
},
[files.handles],
);
const handleFileSelected = useCallback((file: FileId) => {
dispatchFiles({ type: "selectFile", id: file });