From 0fc4e8f795f2cc4ad0ea2b3c3d592ed50da29caf Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Wed, 18 Dec 2024 13:22:33 +0100 Subject: [PATCH] Introduce `InferContext` (#14956) ## Summary I'm currently on the fence about landing the #14760 PR because it's unclear how we'd support tracking used and unused suppression comments in a performant way: * Salsa adds an "untracked" dependency to every query reading accumulated values. This has the effect that the query re-runs on every revision. For example, a possible future query `unused_suppression_comments(db, file)` would re-run on every incremental change and for every file. I don't expect the operation itself to be expensive, but it all adds up in a project with 100k+ files * Salsa collects the accumulated values by traversing the entire query dependency graph. It can skip over sub-graphs if it is known that they contain no accumulated values. This makes accumulators a great tool for when they are rare; diagnostics are a good example. Unfortunately, suppressions are more common, and they often appear in many different files, making the "skip over subgraphs" optimization less effective. Because of that, I want to wait to adopt salsa accumulators for type check diagnostics (we could start using them for other diagnostics) until we have very specific reasons that justify regressing incremental check performance. This PR does a "small" refactor that brings us closer to what I have in #14760 but without using accumulators. To emit a diagnostic, a method needs: * Access to the db * Access to the currently checked file This PR introduces a new `InferContext` that holds on to the db, the current file, and the reported diagnostics. It replaces the `TypeCheckDiagnosticsBuilder`. We pass the `InferContext` instead of the `db` to methods that *might* emit diagnostics. This simplifies some of the `Outcome` methods, which can now be called with a context instead of a `db` and the diagnostics builder. 
Having the `db` and the file on a single type like this would also be useful when using accumulators. This PR doesn't solve the issue that the `Outcome` types feel somewhat complicated nor that it can be annoying when you need to report a `Diagnostic`, but you don't have access to an `InferContext` (or the file). However, I also believe that accumulators won't solve these problems because: * Even with accumulators, it's necessary to have a reference to the file that's being checked. The struggle would be to get a reference to that file rather than getting a reference to `InferContext`. * Users of the `HasTy` trait (e.g., a linter) don't want to bother getting the `File` when calling `Type::return_ty` because they aren't interested in the created diagnostics. They just want to know what calling the current expression would return (and if it even is a callable). This is what the different methods of `Outcome` enable today. I can ask for the return type without needing extra data that's only relevant for emitting a diagnostic. A shortcoming of this approach is that it is now a bit confusing when to pass `db` and when an `InferContext`. An option is that we'd make the `file` on `InferContext` optional (it won't collect any diagnostics if `None`) and change all methods on `Type` to take `InferContext` as the first argument instead of a `db`. I'm interested in your opinion on this. Accumulators are definitely harder to use incorrectly because they remove the need to merge the diagnostics explicitly and there's no risk that we accidentally merge the diagnostics twice, resulting in duplicated diagnostics. I still value performance over making our life slightly easier. 
--- Cargo.lock | 1 + crates/red_knot_python_semantic/Cargo.toml | 2 +- .../src/suppression.rs | 50 + crates/red_knot_python_semantic/src/types.rs | 19 +- .../src/types/call.rs | 49 +- .../src/types/context.rs | 131 +++ .../src/types/diagnostic.rs | 367 +++---- .../src/types/infer.rs | 921 +++++++++--------- .../src/types/string_annotation.rs | 29 +- .../src/types/unpacker.rs | 44 +- 10 files changed, 868 insertions(+), 745 deletions(-) create mode 100644 crates/red_knot_python_semantic/src/suppression.rs create mode 100644 crates/red_knot_python_semantic/src/types/context.rs diff --git a/Cargo.lock b/Cargo.lock index 3f5155adf7..53c84f4599 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2288,6 +2288,7 @@ dependencies = [ "compact_str", "countme", "dir-test", + "drop_bomb", "hashbrown 0.15.2", "indexmap", "insta", diff --git a/crates/red_knot_python_semantic/Cargo.toml b/crates/red_knot_python_semantic/Cargo.toml index 6ed49b1f71..d56a6a6ddf 100644 --- a/crates/red_knot_python_semantic/Cargo.toml +++ b/crates/red_knot_python_semantic/Cargo.toml @@ -26,6 +26,7 @@ bitflags = { workspace = true } camino = { workspace = true } compact_str = { workspace = true } countme = { workspace = true } +drop_bomb = { workspace = true } indexmap = { workspace = true } itertools = { workspace = true } ordermap = { workspace = true } @@ -58,4 +59,3 @@ serde = ["ruff_db/serde", "dep:serde"] [lints] workspace = true - diff --git a/crates/red_knot_python_semantic/src/suppression.rs b/crates/red_knot_python_semantic/src/suppression.rs new file mode 100644 index 0000000000..c87cbfc8bc --- /dev/null +++ b/crates/red_knot_python_semantic/src/suppression.rs @@ -0,0 +1,50 @@ +use salsa; + +use ruff_db::{files::File, parsed::comment_ranges, source::source_text}; +use ruff_index::{newtype_index, IndexVec}; + +use crate::{lint::LintId, Db}; + +#[salsa::tracked(return_ref)] +pub(crate) fn suppressions(db: &dyn Db, file: File) -> IndexVec { + let comments = comment_ranges(db.upcast(), file); + let 
source = source_text(db.upcast(), file); + + let mut suppressions = IndexVec::default(); + + for range in comments { + let text = &source[range]; + + if text.starts_with("# type: ignore") { + suppressions.push(Suppression { + target: None, + kind: SuppressionKind::TypeIgnore, + }); + } else if text.starts_with("# knot: ignore") { + suppressions.push(Suppression { + target: None, + kind: SuppressionKind::KnotIgnore, + }); + } + } + + suppressions +} + +#[newtype_index] +pub(crate) struct SuppressionIndex; + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub(crate) struct Suppression { + target: Option, + kind: SuppressionKind, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] +pub(crate) enum SuppressionKind { + /// A `type: ignore` comment + TypeIgnore, + + /// A `knot: ignore` comment + KnotIgnore, +} diff --git a/crates/red_knot_python_semantic/src/types.rs b/crates/red_knot_python_semantic/src/types.rs index cf5e0833d4..5e68263cc0 100644 --- a/crates/red_knot_python_semantic/src/types.rs +++ b/crates/red_knot_python_semantic/src/types.rs @@ -1,5 +1,7 @@ use std::hash::Hash; +use context::InferContext; +use diagnostic::{report_not_iterable, report_not_iterable_possibly_unbound}; use indexmap::IndexSet; use itertools::Itertools; use ruff_db::diagnostic::Severity; @@ -29,7 +31,7 @@ use crate::stdlib::{ use crate::symbol::{Boundness, Symbol}; use crate::types::call::{CallDunderResult, CallOutcome}; use crate::types::class_base::ClassBase; -use crate::types::diagnostic::{TypeCheckDiagnosticsBuilder, INVALID_TYPE_FORM}; +use crate::types::diagnostic::INVALID_TYPE_FORM; use crate::types::mro::{Mro, MroError, MroIterator}; use crate::types::narrow::narrowing_constraint; use crate::{Db, FxOrderSet, Module, Program, PythonVersion}; @@ -37,6 +39,7 @@ use crate::{Db, FxOrderSet, Module, Program, PythonVersion}; mod builder; mod call; mod class_base; +mod context; mod diagnostic; mod display; mod infer; @@ -2177,17 +2180,13 @@ pub struct 
InvalidTypeExpressionError<'db> { } impl<'db> InvalidTypeExpressionError<'db> { - fn into_fallback_type( - self, - diagnostics: &mut TypeCheckDiagnosticsBuilder, - node: &ast::Expr, - ) -> Type<'db> { + fn into_fallback_type(self, context: &InferContext, node: &ast::Expr) -> Type<'db> { let InvalidTypeExpressionError { fallback_type, invalid_expressions, } = self; for error in invalid_expressions { - diagnostics.add_lint( + context.report_lint( &INVALID_TYPE_FORM, node.into(), format_args!("{}", error.reason()), @@ -2827,20 +2826,20 @@ enum IterationOutcome<'db> { impl<'db> IterationOutcome<'db> { fn unwrap_with_diagnostic( self, + context: &InferContext<'db>, iterable_node: ast::AnyNodeRef, - diagnostics: &mut TypeCheckDiagnosticsBuilder<'db>, ) -> Type<'db> { match self { Self::Iterable { element_ty } => element_ty, Self::NotIterable { not_iterable_ty } => { - diagnostics.add_not_iterable(iterable_node, not_iterable_ty); + report_not_iterable(context, iterable_node, not_iterable_ty); Type::Unknown } Self::PossiblyUnboundDunderIter { iterable_ty, element_ty, } => { - diagnostics.add_not_iterable_possibly_unbound(iterable_node, iterable_ty); + report_not_iterable_possibly_unbound(context, iterable_node, iterable_ty); element_ty } } diff --git a/crates/red_knot_python_semantic/src/types/call.rs b/crates/red_knot_python_semantic/src/types/call.rs index b573db1139..87d16d9478 100644 --- a/crates/red_knot_python_semantic/src/types/call.rs +++ b/crates/red_knot_python_semantic/src/types/call.rs @@ -1,4 +1,5 @@ -use super::diagnostic::{TypeCheckDiagnosticsBuilder, CALL_NON_CALLABLE}; +use super::context::InferContext; +use super::diagnostic::CALL_NON_CALLABLE; use super::{Severity, Type, TypeArrayDisplay, UnionBuilder}; use crate::Db; use ruff_db::diagnostic::DiagnosticId; @@ -86,24 +87,23 @@ impl<'db> CallOutcome<'db> { } /// Get the return type of the call, emitting default diagnostics if needed. 
- pub(super) fn unwrap_with_diagnostic<'a>( + pub(super) fn unwrap_with_diagnostic( &self, - db: &'db dyn Db, + context: &InferContext<'db>, node: ast::AnyNodeRef, - diagnostics: &'a mut TypeCheckDiagnosticsBuilder<'db>, ) -> Type<'db> { - match self.return_ty_result(db, node, diagnostics) { + match self.return_ty_result(context, node) { Ok(return_ty) => return_ty, Err(NotCallableError::Type { not_callable_ty, return_ty, }) => { - diagnostics.add_lint( + context.report_lint( &CALL_NON_CALLABLE, node, format_args!( "Object of type `{}` is not callable", - not_callable_ty.display(db) + not_callable_ty.display(context.db()) ), ); return_ty @@ -113,13 +113,13 @@ impl<'db> CallOutcome<'db> { called_ty, return_ty, }) => { - diagnostics.add_lint( + context.report_lint( &CALL_NON_CALLABLE, node, format_args!( "Object of type `{}` is not callable (due to union element `{}`)", - called_ty.display(db), - not_callable_ty.display(db), + called_ty.display(context.db()), + not_callable_ty.display(context.db()), ), ); return_ty @@ -129,13 +129,13 @@ impl<'db> CallOutcome<'db> { called_ty, return_ty, }) => { - diagnostics.add_lint( + context.report_lint( &CALL_NON_CALLABLE, node, format_args!( "Object of type `{}` is not callable (due to union elements {})", - called_ty.display(db), - not_callable_tys.display(db), + called_ty.display(context.db()), + not_callable_tys.display(context.db()), ), ); return_ty @@ -144,12 +144,12 @@ impl<'db> CallOutcome<'db> { callable_ty: called_ty, return_ty, }) => { - diagnostics.add_lint( + context.report_lint( &CALL_NON_CALLABLE, node, format_args!( "Object of type `{}` is not callable (possibly unbound `__call__` method)", - called_ty.display(db) + called_ty.display(context.db()) ), ); return_ty @@ -158,11 +158,10 @@ impl<'db> CallOutcome<'db> { } /// Get the return type of the call as a result. 
- pub(super) fn return_ty_result<'a>( + pub(super) fn return_ty_result( &self, - db: &'db dyn Db, + context: &InferContext<'db>, node: ast::AnyNodeRef, - diagnostics: &'a mut TypeCheckDiagnosticsBuilder<'db>, ) -> Result, NotCallableError<'db>> { match self { Self::Callable { return_ty } => Ok(*return_ty), @@ -170,11 +169,11 @@ impl<'db> CallOutcome<'db> { return_ty, revealed_ty, } => { - diagnostics.add( + context.report_diagnostic( node, DiagnosticId::RevealedType, Severity::Info, - format_args!("Revealed type is `{}`", revealed_ty.display(db)), + format_args!("Revealed type is `{}`", revealed_ty.display(context.db())), ); Ok(*return_ty) } @@ -187,14 +186,16 @@ impl<'db> CallOutcome<'db> { call_outcome, } => Err(NotCallableError::PossiblyUnboundDunderCall { callable_ty: *called_ty, - return_ty: call_outcome.return_ty(db).unwrap_or(Type::Unknown), + return_ty: call_outcome + .return_ty(context.db()) + .unwrap_or(Type::Unknown), }), Self::Union { outcomes, called_ty, } => { let mut not_callable = vec![]; - let mut union_builder = UnionBuilder::new(db); + let mut union_builder = UnionBuilder::new(context.db()); let mut revealed = false; for outcome in outcomes { let return_ty = match outcome { @@ -210,10 +211,10 @@ impl<'db> CallOutcome<'db> { *return_ty } else { revealed = true; - outcome.unwrap_with_diagnostic(db, node, diagnostics) + outcome.unwrap_with_diagnostic(context, node) } } - _ => outcome.unwrap_with_diagnostic(db, node, diagnostics), + _ => outcome.unwrap_with_diagnostic(context, node), }; union_builder = union_builder.add(return_ty); } diff --git a/crates/red_knot_python_semantic/src/types/context.rs b/crates/red_knot_python_semantic/src/types/context.rs new file mode 100644 index 0000000000..efd0c651be --- /dev/null +++ b/crates/red_knot_python_semantic/src/types/context.rs @@ -0,0 +1,131 @@ +use std::fmt; + +use drop_bomb::DebugDropBomb; +use ruff_db::{ + diagnostic::{DiagnosticId, Severity}, + files::File, +}; +use ruff_python_ast::AnyNodeRef; +use 
ruff_text_size::Ranged; + +use crate::{ + lint::{LintId, LintMetadata}, + Db, +}; + +use super::{TypeCheckDiagnostic, TypeCheckDiagnostics}; + +/// Context for inferring the types of a single file. +/// +/// One context exists for at least for every inferred region but it's +/// possible that inferring a sub-region, like an unpack assignment, creates +/// a sub-context. +/// +/// Tracks the reported diagnostics of the inferred region. +/// +/// ## Consuming +/// It's important that the context is explicitly consumed before dropping by calling +/// [`InferContext::finish`] and the returned diagnostics must be stored +/// on the current [`TypeInference`](super::infer::TypeInference) result. +pub(crate) struct InferContext<'db> { + db: &'db dyn Db, + file: File, + diagnostics: std::cell::RefCell, + bomb: DebugDropBomb, +} + +impl<'db> InferContext<'db> { + pub(crate) fn new(db: &'db dyn Db, file: File) -> Self { + Self { + db, + file, + diagnostics: std::cell::RefCell::new(TypeCheckDiagnostics::default()), + bomb: DebugDropBomb::new("`InferContext` needs to be explicitly consumed by calling `::finish` to prevent accidental loss of diagnostics."), + } + } + + /// The file for which the types are inferred. + pub(crate) fn file(&self) -> File { + self.file + } + + pub(crate) fn db(&self) -> &'db dyn Db { + self.db + } + + pub(crate) fn extend(&mut self, other: &T) + where + T: WithDiagnostics, + { + self.diagnostics + .get_mut() + .extend(other.diagnostics().iter().cloned()); + } + + /// Reports a lint located at `node`. + pub(super) fn report_lint( + &self, + lint: &'static LintMetadata, + node: AnyNodeRef, + message: std::fmt::Arguments, + ) { + // Skip over diagnostics if the rule is disabled. + let Some(severity) = self.db.rule_selection().severity(LintId::of(lint)) else { + return; + }; + + self.report_diagnostic(node, DiagnosticId::Lint(lint.name()), severity, message); + } + + /// Adds a new diagnostic. 
+ /// + /// The diagnostic does not get added if the rule isn't enabled for this file. + pub(super) fn report_diagnostic( + &self, + node: AnyNodeRef, + id: DiagnosticId, + severity: Severity, + message: std::fmt::Arguments, + ) { + if !self.db.is_file_open(self.file) { + return; + } + + // TODO: Don't emit the diagnostic if: + // * The enclosing node contains any syntax errors + // * The rule is disabled for this file. We probably want to introduce a new query that + // returns a rule selector for a given file that respects the package's settings, + // any global pragma comments in the file, and any per-file-ignores. + // * Check for suppression comments, bump a counter if the diagnostic is suppressed. + + self.diagnostics.borrow_mut().push(TypeCheckDiagnostic { + file: self.file, + id, + message: message.to_string(), + range: node.range(), + severity, + }); + } + + #[must_use] + pub(crate) fn finish(mut self) -> TypeCheckDiagnostics { + self.bomb.defuse(); + let mut diagnostics = self.diagnostics.into_inner(); + diagnostics.shrink_to_fit(); + diagnostics + } +} + +impl fmt::Debug for InferContext<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("TyContext") + .field("file", &self.file) + .field("diagnostics", &self.diagnostics) + .field("defused", &self.bomb) + .finish() + } +} + +pub(crate) trait WithDiagnostics { + fn diagnostics(&self) -> &TypeCheckDiagnostics; +} diff --git a/crates/red_knot_python_semantic/src/types/diagnostic.rs b/crates/red_knot_python_semantic/src/types/diagnostic.rs index 1e8e62f2c2..21e7b1d022 100644 --- a/crates/red_knot_python_semantic/src/types/diagnostic.rs +++ b/crates/red_knot_python_semantic/src/types/diagnostic.rs @@ -1,11 +1,11 @@ -use crate::lint::{Level, LintId, LintMetadata, LintRegistryBuilder, LintStatus}; +use crate::declare_lint; +use crate::lint::{Level, LintRegistryBuilder, LintStatus}; use crate::types::string_annotation::{ BYTE_STRING_TYPE_ANNOTATION, 
ESCAPE_CHARACTER_IN_FORWARD_ANNOTATION, FSTRING_TYPE_ANNOTATION, IMPLICIT_CONCATENATED_STRING_TYPE_ANNOTATION, INVALID_SYNTAX_IN_FORWARD_ANNOTATION, RAW_STRING_TYPE_ANNOTATION, }; use crate::types::{ClassLiteralType, Type}; -use crate::{declare_lint, Db}; use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity}; use ruff_db::files::File; use ruff_python_ast::{self as ast, AnyNodeRef}; @@ -15,6 +15,8 @@ use std::fmt::Formatter; use std::ops::Deref; use std::sync::Arc; +use super::context::InferContext; + /// Registers all known type check lints. pub(crate) fn register_lints(registry: &mut LintRegistryBuilder) { registry.register_lint(&CALL_NON_CALLABLE); @@ -564,223 +566,158 @@ impl<'a> IntoIterator for &'a TypeCheckDiagnostics { } } -pub(super) struct TypeCheckDiagnosticsBuilder<'db> { - db: &'db dyn Db, - file: File, - diagnostics: TypeCheckDiagnostics, +/// Emit a diagnostic declaring that the object represented by `node` is not iterable +pub(super) fn report_not_iterable(context: &InferContext, node: AnyNodeRef, not_iterable_ty: Type) { + context.report_lint( + &NOT_ITERABLE, + node, + format_args!( + "Object of type `{}` is not iterable", + not_iterable_ty.display(context.db()) + ), + ); } -impl<'db> TypeCheckDiagnosticsBuilder<'db> { - pub(super) fn new(db: &'db dyn Db, file: File) -> Self { - Self { - db, - file, - diagnostics: TypeCheckDiagnostics::default(), +/// Emit a diagnostic declaring that the object represented by `node` is not iterable +/// because its `__iter__` method is possibly unbound. +pub(super) fn report_not_iterable_possibly_unbound( + context: &InferContext, + node: AnyNodeRef, + element_ty: Type, +) { + context.report_lint( + &NOT_ITERABLE, + node, + format_args!( + "Object of type `{}` is not iterable because its `__iter__` method is possibly unbound", + element_ty.display(context.db()) + ), + ); +} + +/// Emit a diagnostic declaring that an index is out of bounds for a tuple. 
+pub(super) fn report_index_out_of_bounds( + context: &InferContext, + kind: &'static str, + node: AnyNodeRef, + tuple_ty: Type, + length: usize, + index: i64, +) { + context.report_lint( + &INDEX_OUT_OF_BOUNDS, + node, + format_args!( + "Index {index} is out of bounds for {kind} `{}` with length {length}", + tuple_ty.display(context.db()) + ), + ); +} + +/// Emit a diagnostic declaring that a type does not support subscripting. +pub(super) fn report_non_subscriptable( + context: &InferContext, + node: AnyNodeRef, + non_subscriptable_ty: Type, + method: &str, +) { + context.report_lint( + &NON_SUBSCRIPTABLE, + node, + format_args!( + "Cannot subscript object of type `{}` with no `{method}` method", + non_subscriptable_ty.display(context.db()) + ), + ); +} + +pub(super) fn report_unresolved_module<'db>( + context: &InferContext, + import_node: impl Into>, + level: u32, + module: Option<&str>, +) { + context.report_lint( + &UNRESOLVED_IMPORT, + import_node.into(), + format_args!( + "Cannot resolve import `{}{}`", + ".".repeat(level as usize), + module.unwrap_or_default() + ), + ); +} + +pub(super) fn report_slice_step_size_zero(context: &InferContext, node: AnyNodeRef) { + context.report_lint( + &ZERO_STEPSIZE_IN_SLICE, + node, + format_args!("Slice step size can not be zero"), + ); +} + +pub(super) fn report_invalid_assignment( + context: &InferContext, + node: AnyNodeRef, + declared_ty: Type, + assigned_ty: Type, +) { + match declared_ty { + Type::ClassLiteral(ClassLiteralType { class }) => { + context.report_lint(&INVALID_ASSIGNMENT, node, format_args!( + "Implicit shadowing of class `{}`; annotate to make it explicit if this is intentional", + class.name(context.db()))); } - } - - /// Emit a diagnostic declaring that the object represented by `node` is not iterable - pub(super) fn add_not_iterable(&mut self, node: AnyNodeRef, not_iterable_ty: Type<'db>) { - self.add_lint( - &NOT_ITERABLE, - node, - format_args!( - "Object of type `{}` is not iterable", - 
not_iterable_ty.display(self.db) - ), - ); - } - - /// Emit a diagnostic declaring that the object represented by `node` is not iterable - /// because its `__iter__` method is possibly unbound. - pub(super) fn add_not_iterable_possibly_unbound( - &mut self, - node: AnyNodeRef, - element_ty: Type<'db>, - ) { - self.add_lint( - &NOT_ITERABLE, - node, - format_args!( - "Object of type `{}` is not iterable because its `__iter__` method is possibly unbound", - element_ty.display(self.db) - ), - ); - } - - /// Emit a diagnostic declaring that an index is out of bounds for a tuple. - pub(super) fn add_index_out_of_bounds( - &mut self, - kind: &'static str, - node: AnyNodeRef, - tuple_ty: Type<'db>, - length: usize, - index: i64, - ) { - self.add_lint( - &INDEX_OUT_OF_BOUNDS, - node, - format_args!( - "Index {index} is out of bounds for {kind} `{}` with length {length}", - tuple_ty.display(self.db) - ), - ); - } - - /// Emit a diagnostic declaring that a type does not support subscripting. - pub(super) fn add_non_subscriptable( - &mut self, - node: AnyNodeRef, - non_subscriptable_ty: Type<'db>, - method: &str, - ) { - self.add_lint( - &NON_SUBSCRIPTABLE, - node, - format_args!( - "Cannot subscript object of type `{}` with no `{method}` method", - non_subscriptable_ty.display(self.db) - ), - ); - } - - pub(super) fn add_unresolved_module( - &mut self, - import_node: impl Into>, - level: u32, - module: Option<&str>, - ) { - self.add_lint( - &UNRESOLVED_IMPORT, - import_node.into(), - format_args!( - "Cannot resolve import `{}{}`", - ".".repeat(level as usize), - module.unwrap_or_default() - ), - ); - } - - pub(super) fn add_slice_step_size_zero(&mut self, node: AnyNodeRef) { - self.add_lint( - &ZERO_STEPSIZE_IN_SLICE, - node, - format_args!("Slice step size can not be zero"), - ); - } - - pub(super) fn add_invalid_assignment( - &mut self, - node: AnyNodeRef, - declared_ty: Type<'db>, - assigned_ty: Type<'db>, - ) { - match declared_ty { - Type::ClassLiteral(ClassLiteralType 
{ class }) => { - self.add_lint(&INVALID_ASSIGNMENT, node, format_args!( - "Implicit shadowing of class `{}`; annotate to make it explicit if this is intentional", - class.name(self.db))); - } - Type::FunctionLiteral(function) => { - self.add_lint(&INVALID_ASSIGNMENT, node, format_args!( - "Implicit shadowing of function `{}`; annotate to make it explicit if this is intentional", - function.name(self.db))); - } - _ => { - self.add_lint( - &INVALID_ASSIGNMENT, - node, - format_args!( - "Object of type `{}` is not assignable to `{}`", - assigned_ty.display(self.db), - declared_ty.display(self.db), - ), - ); - } + Type::FunctionLiteral(function) => { + context.report_lint(&INVALID_ASSIGNMENT, node, format_args!( + "Implicit shadowing of function `{}`; annotate to make it explicit if this is intentional", + function.name(context.db()))); } - } - - pub(super) fn add_possibly_unresolved_reference(&mut self, expr_name_node: &ast::ExprName) { - let ast::ExprName { id, .. } = expr_name_node; - - self.add_lint( - &POSSIBLY_UNRESOLVED_REFERENCE, - expr_name_node.into(), - format_args!("Name `{id}` used when possibly not defined"), - ); - } - - pub(super) fn add_unresolved_reference(&mut self, expr_name_node: &ast::ExprName) { - let ast::ExprName { id, .. } = expr_name_node; - - self.add_lint( - &UNRESOLVED_REFERENCE, - expr_name_node.into(), - format_args!("Name `{id}` used when not defined"), - ); - } - - pub(super) fn add_invalid_exception_caught(&mut self, db: &dyn Db, node: &ast::Expr, ty: Type) { - self.add_lint( - &INVALID_EXCEPTION_CAUGHT, - node.into(), - format_args!( - "Cannot catch object of type `{}` in an exception handler \ - (must be a `BaseException` subclass or a tuple of `BaseException` subclasses)", - ty.display(db) - ), - ); - } - - pub(super) fn add_lint( - &mut self, - lint: &'static LintMetadata, - node: AnyNodeRef, - message: std::fmt::Arguments, - ) { - // Skip over diagnostics if the rule is disabled. 
- let Some(severity) = self.db.rule_selection().severity(LintId::of(lint)) else { - return; - }; - - self.add(node, DiagnosticId::Lint(lint.name()), severity, message); - } - - /// Adds a new diagnostic. - /// - /// The diagnostic does not get added if the rule isn't enabled for this file. - pub(super) fn add( - &mut self, - node: AnyNodeRef, - id: DiagnosticId, - severity: Severity, - message: std::fmt::Arguments, - ) { - if !self.db.is_file_open(self.file) { - return; + _ => { + context.report_lint( + &INVALID_ASSIGNMENT, + node, + format_args!( + "Object of type `{}` is not assignable to `{}`", + assigned_ty.display(context.db()), + declared_ty.display(context.db()), + ), + ); } - - // TODO: Don't emit the diagnostic if: - // * The enclosing node contains any syntax errors - // * The rule is disabled for this file. We probably want to introduce a new query that - // returns a rule selector for a given file that respects the package's settings, - // any global pragma comments in the file, and any per-file-ignores. - - self.diagnostics.push(TypeCheckDiagnostic { - file: self.file, - id, - message: message.to_string(), - range: node.range(), - severity, - }); - } - - pub(super) fn extend(&mut self, diagnostics: &TypeCheckDiagnostics) { - self.diagnostics.extend(diagnostics); - } - - pub(super) fn finish(mut self) -> TypeCheckDiagnostics { - self.diagnostics.shrink_to_fit(); - self.diagnostics } } + +pub(super) fn report_possibly_unresolved_reference( + context: &InferContext, + expr_name_node: &ast::ExprName, +) { + let ast::ExprName { id, .. } = expr_name_node; + + context.report_lint( + &POSSIBLY_UNRESOLVED_REFERENCE, + expr_name_node.into(), + format_args!("Name `{id}` used when possibly not defined"), + ); +} + +pub(super) fn report_unresolved_reference(context: &InferContext, expr_name_node: &ast::ExprName) { + let ast::ExprName { id, .. 
} = expr_name_node; + + context.report_lint( + &UNRESOLVED_REFERENCE, + expr_name_node.into(), + format_args!("Name `{id}` used when not defined"), + ); +} + +pub(super) fn report_invalid_exception_caught(context: &InferContext, node: &ast::Expr, ty: Type) { + context.report_lint( + &INVALID_EXCEPTION_CAUGHT, + node.into(), + format_args!( + "Cannot catch object of type `{}` in an exception handler \ + (must be a `BaseException` subclass or a tuple of `BaseException` subclasses)", + ty.display(context.db()) + ), + ); +} diff --git a/crates/red_knot_python_semantic/src/types/infer.rs b/crates/red_knot_python_semantic/src/types/infer.rs index 66ef29505c..9eafda8d56 100644 --- a/crates/red_knot_python_semantic/src/types/infer.rs +++ b/crates/red_knot_python_semantic/src/types/infer.rs @@ -51,7 +51,7 @@ use crate::semantic_index::SemanticIndex; use crate::stdlib::builtins_module_scope; use crate::types::class_base::ClassBase; use crate::types::diagnostic::{ - TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder, CALL_NON_CALLABLE, + report_invalid_assignment, report_unresolved_module, TypeCheckDiagnostics, CALL_NON_CALLABLE, CALL_POSSIBLY_UNBOUND_METHOD, CONFLICTING_DECLARATIONS, CONFLICTING_METACLASS, CYCLIC_CLASS_DEFINITION, DIVISION_BY_ZERO, DUPLICATE_BASE, INCONSISTENT_MRO, INVALID_BASE, INVALID_CONTEXT_MANAGER, INVALID_DECLARATION, INVALID_PARAMETER_DEFAULT, INVALID_TYPE_FORM, @@ -72,6 +72,11 @@ use crate::unpack::Unpack; use crate::util::subscript::{PyIndex, PySlice}; use crate::Db; +use super::context::{InferContext, WithDiagnostics}; +use super::diagnostic::{ + report_index_out_of_bounds, report_invalid_exception_caught, report_non_subscriptable, + report_possibly_unresolved_reference, report_slice_step_size_zero, report_unresolved_reference, +}; use super::string_annotation::{ parse_string_annotation, BYTE_STRING_TYPE_ANNOTATION, FSTRING_TYPE_ANNOTATION, }; @@ -283,6 +288,12 @@ impl<'db> TypeInference<'db> { } } +impl WithDiagnostics for TypeInference<'_> { + fn 
diagnostics(&self) -> &TypeCheckDiagnostics { + &self.diagnostics + } +} + /// Whether the intersection type is on the left or right side of the comparison. #[derive(Debug, Clone, Copy)] enum IntersectionOn { @@ -337,13 +348,10 @@ enum IntersectionOn { /// assignment, type narrowing guard), we use the [`infer_expression_types()`] query to ensure we /// don't infer its types more than once. pub(super) struct TypeInferenceBuilder<'db> { - db: &'db dyn Db, + context: InferContext<'db>, index: &'db SemanticIndex<'db>, region: InferenceRegion<'db>, - // Cached lookups - file: File, - /// The type inference results types: TypeInference<'db>, @@ -357,8 +365,6 @@ pub(super) struct TypeInferenceBuilder<'db> { /// expression could be deferred if the file has `from __future__ import annotations` import or /// is a stub file but we're still in a non-deferred region. deferred_state: DeferredExpressionState, - - diagnostics: TypeCheckDiagnosticsBuilder<'db>, } impl<'db> TypeInferenceBuilder<'db> { @@ -383,13 +389,11 @@ impl<'db> TypeInferenceBuilder<'db> { }; Self { - db, + context: InferContext::new(db, file), index, region, - file, deferred_state: DeferredExpressionState::None, types: TypeInference::empty(scope), - diagnostics: TypeCheckDiagnosticsBuilder::new(db, file), } } @@ -402,7 +406,15 @@ impl<'db> TypeInferenceBuilder<'db> { .extend(inference.declarations.iter()); self.types.expressions.extend(inference.expressions.iter()); self.types.deferred.extend(inference.deferred.iter()); - self.diagnostics.extend(&inference.diagnostics); + self.context.extend(inference); + } + + fn file(&self) -> File { + self.context.file() + } + + fn db(&self) -> &'db dyn Db { + self.context.db() } fn scope(&self) -> ScopeId<'db> { @@ -412,7 +424,7 @@ impl<'db> TypeInferenceBuilder<'db> { /// Are we currently inferring types in file with deferred types? 
/// This is true for stub files and files with `__future__.annotations` fn are_all_types_deferred(&self) -> bool { - self.index.has_future_annotations() || self.file.is_stub(self.db.upcast()) + self.index.has_future_annotations() || self.file().is_stub(self.db().upcast()) } /// Are we currently inferring deferred types? @@ -428,7 +440,7 @@ impl<'db> TypeInferenceBuilder<'db> { #[track_caller] fn expression_ty(&self, expr: &ast::Expr) -> Type<'db> { self.types - .expression_ty(expr.scoped_expression_id(self.db, self.scope())) + .expression_ty(expr.scoped_expression_id(self.db(), self.scope())) } /// Get the type of an expression from any scope in the same file. @@ -445,11 +457,11 @@ impl<'db> TypeInferenceBuilder<'db> { /// already in progress for that scope (further up the stack). fn file_expression_ty(&self, expression: &ast::Expr) -> Type<'db> { let file_scope = self.index.expression_scope_id(expression); - let expr_scope = file_scope.to_scope_id(self.db, self.file); - let expr_id = expression.scoped_expression_id(self.db, expr_scope); + let expr_scope = file_scope.to_scope_id(self.db(), self.file()); + let expr_id = expression.scoped_expression_id(self.db(), expr_scope); match self.region { InferenceRegion::Scope(scope) if scope == expr_scope => self.expression_ty(expression), - _ => infer_scope_types(self.db, expr_scope).expression_ty(expr_id), + _ => infer_scope_types(self.db(), expr_scope).expression_ty(expr_id), } } @@ -464,10 +476,10 @@ impl<'db> TypeInferenceBuilder<'db> { } fn infer_region_scope(&mut self, scope: ScopeId<'db>) { - let node = scope.node(self.db); + let node = scope.node(self.db()); match node { NodeWithScopeKind::Module => { - let parsed = parsed_module(self.db.upcast(), self.file); + let parsed = parsed_module(self.db().upcast(), self.file()); self.infer_module(parsed.syntax()); } NodeWithScopeKind::Function(function) => self.infer_function_body(function.node()), @@ -502,7 +514,7 @@ impl<'db> TypeInferenceBuilder<'db> { // Infer the 
deferred types for the definitions here to consider the end-of-scope // semantics. for definition in std::mem::take(&mut self.types.deferred) { - self.extend(infer_deferred_types(self.db, definition)); + self.extend(infer_deferred_types(self.db(), definition)); } assert!( self.types.deferred.is_empty(), @@ -530,7 +542,7 @@ impl<'db> TypeInferenceBuilder<'db> { .iter() .filter_map(|(definition, ty)| { // Filter out class literals that result from imports - if let DefinitionKind::Class(class) = definition.kind(self.db) { + if let DefinitionKind::Class(class) = definition.kind(self.db()) { ty.into_class_literal().map(|ty| (ty.class, class.node())) } else { None @@ -540,14 +552,14 @@ impl<'db> TypeInferenceBuilder<'db> { // Iterate through all class definitions in this scope. for (class, class_node) in class_definitions { // (1) Check that the class does not have a cyclic definition - if class.is_cyclically_defined(self.db) { - self.diagnostics.add_lint( + if class.is_cyclically_defined(self.db()) { + self.context.report_lint( &CYCLIC_CLASS_DEFINITION, class_node.into(), format_args!( "Cyclic definition of `{}` or bases of `{}` (class cannot inherit from itself)", - class.name(self.db), - class.name(self.db) + class.name(self.db()), + class.name(self.db()) ), ); // Attempting to determine the MRO of a class or if the class has a metaclass conflict @@ -556,45 +568,45 @@ impl<'db> TypeInferenceBuilder<'db> { } // (2) Check that the class's MRO is resolvable - if let Err(mro_error) = class.try_mro(self.db).as_ref() { + if let Err(mro_error) = class.try_mro(self.db()).as_ref() { match mro_error.reason() { MroErrorKind::DuplicateBases(duplicates) => { let base_nodes = class_node.bases(); for (index, duplicate) in duplicates { - self.diagnostics.add_lint( + self.context.report_lint( &DUPLICATE_BASE, (&base_nodes[*index]).into(), - format_args!("Duplicate base class `{}`", duplicate.name(self.db)), + format_args!("Duplicate base class `{}`", duplicate.name(self.db())), ); } } 
MroErrorKind::InvalidBases(bases) => { let base_nodes = class_node.bases(); for (index, base_ty) in bases { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_BASE, (&base_nodes[*index]).into(), format_args!( "Invalid class base with type `{}` (all bases must be a class, `Any`, `Unknown` or `Todo`)", - base_ty.display(self.db) + base_ty.display(self.db()) ), ); } } - MroErrorKind::UnresolvableMro { bases_list } => self.diagnostics.add_lint( + MroErrorKind::UnresolvableMro { bases_list } => self.context.report_lint( &INCONSISTENT_MRO, class_node.into(), format_args!( "Cannot create a consistent method resolution order (MRO) for class `{}` with bases list `[{}]`", - class.name(self.db), - bases_list.iter().map(|base| base.display(self.db)).join(", ") + class.name(self.db()), + bases_list.iter().map(|base| base.display(self.db())).join(", ") ), ) } } // (3) Check that the class's metaclass can be determined without error. - if let Err(metaclass_error) = class.try_metaclass(self.db) { + if let Err(metaclass_error) = class.try_metaclass(self.db()) { match metaclass_error.reason() { MetaclassErrorKind::Conflict { candidate1: @@ -611,32 +623,32 @@ impl<'db> TypeInferenceBuilder<'db> { } => { let node = class_node.into(); if *candidate1_is_base_class { - self.diagnostics.add_lint( + self.context.report_lint( &CONFLICTING_METACLASS, node, format_args!( "The metaclass of a derived class (`{class}`) must be a subclass of the metaclasses of all its bases, \ but `{metaclass1}` (metaclass of base class `{base1}`) and `{metaclass2}` (metaclass of base class `{base2}`) \ have no subclass relationship", - class = class.name(self.db), - metaclass1 = metaclass1.name(self.db), - base1 = class1.name(self.db), - metaclass2 = metaclass2.name(self.db), - base2 = class2.name(self.db), + class = class.name(self.db()), + metaclass1 = metaclass1.name(self.db()), + base1 = class1.name(self.db()), + metaclass2 = metaclass2.name(self.db()), + base2 = class2.name(self.db()), ), ); 
} else { - self.diagnostics.add_lint( + self.context.report_lint( &CONFLICTING_METACLASS, node, format_args!( "The metaclass of a derived class (`{class}`) must be a subclass of the metaclasses of all its bases, \ but `{metaclass_of_class}` (metaclass of `{class}`) and `{metaclass_of_base}` (metaclass of base class `{base}`) \ have no subclass relationship", - class = class.name(self.db), - metaclass_of_class = metaclass1.name(self.db), - metaclass_of_base = metaclass2.name(self.db), - base = class2.name(self.db), + class = class.name(self.db()), + metaclass_of_class = metaclass1.name(self.db()), + metaclass_of_base = metaclass2.name(self.db()), + base = class2.name(self.db()), ), ); } @@ -647,7 +659,7 @@ impl<'db> TypeInferenceBuilder<'db> { } fn infer_region_definition(&mut self, definition: Definition<'db>) { - match definition.kind(self.db) { + match definition.kind(self.db()) { DefinitionKind::Function(function) => { self.infer_function_definition(function.node(), definition); } @@ -743,7 +755,7 @@ impl<'db> TypeInferenceBuilder<'db> { // to use end-of-scope semantics. This would require custom and possibly a complex // implementation to allow this "split" to happen. - match definition.kind(self.db) { + match definition.kind(self.db()) { DefinitionKind::Function(function) => self.infer_function_deferred(function.node()), DefinitionKind::Class(class) => self.infer_class_deferred(class.node()), _ => {} @@ -751,7 +763,7 @@ impl<'db> TypeInferenceBuilder<'db> { } fn infer_region_expression(&mut self, expression: Expression<'db>) { - self.infer_expression_impl(expression.node_ref(self.db)); + self.infer_expression_impl(expression.node_ref(self.db())); } /// Raise a diagnostic if the given type cannot be divided by zero. 
@@ -762,7 +774,7 @@ impl<'db> TypeInferenceBuilder<'db> { Type::BooleanLiteral(_) | Type::IntLiteral(_) => {} Type::Instance(InstanceType { class }) if matches!( - class.known(self.db), + class.known(self.db()), Some(KnownClass::Float | KnownClass::Int | KnownClass::Bool) ) => {} _ => return, @@ -775,19 +787,19 @@ impl<'db> TypeInferenceBuilder<'db> { _ => return, }; - self.diagnostics.add_lint( + self.context.report_lint( &DIVISION_BY_ZERO, expr.into(), format_args!( "Cannot {op} object of type `{}` {by_zero}", - left.display(self.db) + left.display(self.db()) ), ); } fn add_binding(&mut self, node: AnyNodeRef, binding: Definition<'db>, ty: Type<'db>) { - debug_assert!(binding.is_binding(self.db)); - let use_def = self.index.use_def_map(binding.file_scope(self.db)); + debug_assert!(binding.is_binding(self.db())); + let use_def = self.index.use_def_map(binding.file_scope(self.db())); let declarations = use_def.declarations_at_binding(binding); let undeclared_ty = if declarations.may_be_undeclared() { Some(Type::Unknown) @@ -795,25 +807,24 @@ impl<'db> TypeInferenceBuilder<'db> { None }; let mut bound_ty = ty; - let declared_ty = declarations_ty(self.db, declarations, undeclared_ty).unwrap_or_else( + let declared_ty = declarations_ty(self.db(), declarations, undeclared_ty).unwrap_or_else( |(ty, conflicting)| { // TODO point out the conflicting declarations in the diagnostic? 
- let symbol_table = self.index.symbol_table(binding.file_scope(self.db)); - let symbol_name = symbol_table.symbol(binding.symbol(self.db)).name(); - self.diagnostics.add_lint( + let symbol_table = self.index.symbol_table(binding.file_scope(self.db())); + let symbol_name = symbol_table.symbol(binding.symbol(self.db())).name(); + self.context.report_lint( &CONFLICTING_DECLARATIONS, node, format_args!( "Conflicting declared types for `{symbol_name}`: {}", - conflicting.display(self.db) + conflicting.display(self.db()) ), ); ty }, ); - if !bound_ty.is_assignable_to(self.db, declared_ty) { - self.diagnostics - .add_invalid_assignment(node, declared_ty, bound_ty); + if !bound_ty.is_assignable_to(self.db(), declared_ty) { + report_invalid_assignment(&self.context, node, declared_ty, bound_ty); // allow declarations to override inference in case of invalid assignment bound_ty = declared_ty; }; @@ -822,21 +833,21 @@ impl<'db> TypeInferenceBuilder<'db> { } fn add_declaration(&mut self, node: AnyNodeRef, declaration: Definition<'db>, ty: Type<'db>) { - debug_assert!(declaration.is_declaration(self.db)); - let use_def = self.index.use_def_map(declaration.file_scope(self.db)); + debug_assert!(declaration.is_declaration(self.db())); + let use_def = self.index.use_def_map(declaration.file_scope(self.db())); let prior_bindings = use_def.bindings_at_declaration(declaration); // unbound_ty is Never because for this check we don't care about unbound - let inferred_ty = bindings_ty(self.db, prior_bindings).unwrap_or(Type::Never); - let ty = if inferred_ty.is_assignable_to(self.db, ty) { + let inferred_ty = bindings_ty(self.db(), prior_bindings).unwrap_or(Type::Never); + let ty = if inferred_ty.is_assignable_to(self.db(), ty) { ty } else { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_DECLARATION, node, format_args!( "Cannot declare type `{}` for inferred type `{}`", - ty.display(self.db), - inferred_ty.display(self.db) + ty.display(self.db()), + 
inferred_ty.display(self.db()) ), ); Type::Unknown @@ -851,13 +862,12 @@ impl<'db> TypeInferenceBuilder<'db> { declared_ty: Type<'db>, inferred_ty: Type<'db>, ) { - debug_assert!(definition.is_binding(self.db)); - debug_assert!(definition.is_declaration(self.db)); - let inferred_ty = if inferred_ty.is_assignable_to(self.db, declared_ty) { + debug_assert!(definition.is_binding(self.db())); + debug_assert!(definition.is_declaration(self.db())); + let inferred_ty = if inferred_ty.is_assignable_to(self.db(), declared_ty) { inferred_ty } else { - self.diagnostics - .add_invalid_assignment(node, declared_ty, inferred_ty); + report_invalid_assignment(&self.context, node, declared_ty, inferred_ty); // if the assignment is invalid, fall back to assuming the annotation is correct declared_ty }; @@ -975,7 +985,7 @@ impl<'db> TypeInferenceBuilder<'db> { fn infer_definition(&mut self, node: impl Into) { let definition = self.index.definition(node); - let result = infer_definition_types(self.db, definition); + let result = infer_definition_types(self.db(), definition); self.extend(result); } @@ -1025,15 +1035,15 @@ impl<'db> TypeInferenceBuilder<'db> { } } - let function_kind = KnownFunction::from_definition(self.db, definition, name); + let function_kind = KnownFunction::from_definition(self.db(), definition, name); let body_scope = self .index .node_scope(NodeWithScopeRef::Function(function)) - .to_scope_id(self.db, self.file); + .to_scope_id(self.db(), self.file()); let function_ty = Type::FunctionLiteral(FunctionType::new( - self.db, + self.db(), &name.id, function_kind, body_scope, @@ -1132,15 +1142,15 @@ impl<'db> TypeInferenceBuilder<'db> { if let Some(annotation) = parameter.annotation.as_ref() { let declared_ty = self.file_expression_ty(annotation); let inferred_ty = if let Some(default_ty) = default_ty { - if default_ty.is_assignable_to(self.db, declared_ty) { - UnionType::from_elements(self.db, [declared_ty, default_ty]) + if default_ty.is_assignable_to(self.db(), 
declared_ty) { + UnionType::from_elements(self.db(), [declared_ty, default_ty]) } else { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_PARAMETER_DEFAULT, parameter_with_default.into(), format_args!( "Default value of type `{}` is not assignable to annotated parameter type `{}`", - default_ty.display(self.db), declared_ty.display(self.db)), + default_ty.display(self.db()), declared_ty.display(self.db())), ); declared_ty } @@ -1155,7 +1165,7 @@ impl<'db> TypeInferenceBuilder<'db> { ); } else { let ty = if let Some(default_ty) = default_ty { - UnionType::from_elements(self.db, [Type::Unknown, default_ty]) + UnionType::from_elements(self.db(), [Type::Unknown, default_ty]) } else { Type::Unknown }; @@ -1176,14 +1186,14 @@ impl<'db> TypeInferenceBuilder<'db> { if let Some(annotation) = parameter.annotation.as_ref() { let _annotated_ty = self.file_expression_ty(annotation); // TODO `tuple[annotated_ty, ...]` - let ty = KnownClass::Tuple.to_instance(self.db); + let ty = KnownClass::Tuple.to_instance(self.db()); self.add_declaration_with_binding(parameter.into(), definition, ty, ty); } else { self.add_binding( parameter.into(), definition, // TODO `tuple[Unknown, ...]` - KnownClass::Tuple.to_instance(self.db), + KnownClass::Tuple.to_instance(self.db()), ); } } @@ -1201,14 +1211,14 @@ impl<'db> TypeInferenceBuilder<'db> { if let Some(annotation) = parameter.annotation.as_ref() { let _annotated_ty = self.file_expression_ty(annotation); // TODO `dict[str, annotated_ty]` - let ty = KnownClass::Dict.to_instance(self.db); + let ty = KnownClass::Dict.to_instance(self.db()); self.add_declaration_with_binding(parameter.into(), definition, ty, ty); } else { self.add_binding( parameter.into(), definition, // TODO `dict[str, Unknown]` - KnownClass::Dict.to_instance(self.db), + KnownClass::Dict.to_instance(self.db()), ); } } @@ -1238,11 +1248,11 @@ impl<'db> TypeInferenceBuilder<'db> { let body_scope = self .index .node_scope(NodeWithScopeRef::Class(class_node)) - 
.to_scope_id(self.db, self.file); + .to_scope_id(self.db(), self.file()); - let maybe_known_class = KnownClass::try_from_file(self.db, self.file, name); + let maybe_known_class = KnownClass::try_from_file(self.db(), self.file(), name); - let class = Class::new(self.db, &name.id, body_scope, maybe_known_class); + let class = Class::new(self.db(), &name.id, body_scope, maybe_known_class); let class_ty = Type::class_literal(class); self.add_declaration_with_binding(class_node.into(), definition, class_ty, class_ty); @@ -1290,11 +1300,11 @@ impl<'db> TypeInferenceBuilder<'db> { let rhs_scope = self .index .node_scope(NodeWithScopeRef::TypeAlias(type_alias)) - .to_scope_id(self.db, self.file); + .to_scope_id(self.db(), self.file()); let type_alias_ty = Type::KnownInstance(KnownInstanceType::TypeAliasType(TypeAliasType::new( - self.db, + self.db(), &type_alias.name.as_name_expr().unwrap().id, rhs_scope, ))); @@ -1416,7 +1426,7 @@ impl<'db> TypeInferenceBuilder<'db> { ); self.types.expressions.insert( - target.scoped_expression_id(self.db, self.scope()), + target.scoped_expression_id(self.db(), self.scope()), target_ty, ); self.add_binding(target.into(), definition, target_ty); @@ -1439,56 +1449,56 @@ impl<'db> TypeInferenceBuilder<'db> { return todo_type!("async with statement"); } - let context_manager_ty = context_expression_ty.to_meta_type(self.db); + let context_manager_ty = context_expression_ty.to_meta_type(self.db()); - let enter = context_manager_ty.member(self.db, "__enter__"); - let exit = context_manager_ty.member(self.db, "__exit__"); + let enter = context_manager_ty.member(self.db(), "__enter__"); + let exit = context_manager_ty.member(self.db(), "__exit__"); // TODO: Make use of Protocols when we support it (the manager be assignable to `contextlib.AbstractContextManager`). 
match (enter, exit) { (Symbol::Unbound, Symbol::Unbound) => { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_CONTEXT_MANAGER, context_expression.into(), format_args!( "Object of type `{}` cannot be used with `with` because it doesn't implement `__enter__` and `__exit__`", - context_expression_ty.display(self.db) + context_expression_ty.display(self.db()) ), ); Type::Unknown } (Symbol::Unbound, _) => { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_CONTEXT_MANAGER, context_expression.into(), format_args!( "Object of type `{}` cannot be used with `with` because it doesn't implement `__enter__`", - context_expression_ty.display(self.db) + context_expression_ty.display(self.db()) ), ); Type::Unknown } (Symbol::Type(enter_ty, enter_boundness), exit) => { if enter_boundness == Boundness::PossiblyUnbound { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_CONTEXT_MANAGER, context_expression.into(), format_args!( "Object of type `{context_expression}` cannot be used with `with` because the method `__enter__` is possibly unbound", - context_expression = context_expression_ty.display(self.db), + context_expression = context_expression_ty.display(self.db()), ), ); } let target_ty = enter_ty - .call(self.db, &[context_expression_ty]) - .return_ty_result(self.db, context_expression.into(), &mut self.diagnostics) + .call(self.db(), &[context_expression_ty]) + .return_ty_result(&self.context, context_expression.into()) .unwrap_or_else(|err| { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_CONTEXT_MANAGER, context_expression.into(), format_args!(" - Object of type `{context_expression}` cannot be used with `with` because the method `__enter__` of type `{enter_ty}` is not callable", context_expression = context_expression_ty.display(self.db), enter_ty = enter_ty.display(self.db) + Object of type `{context_expression}` cannot be used with `with` because the method `__enter__` of type `{enter_ty}` is not 
callable", context_expression = context_expression_ty.display(self.db()), enter_ty = enter_ty.display(self.db()) ), ); err.return_ty() @@ -1496,12 +1506,12 @@ impl<'db> TypeInferenceBuilder<'db> { match exit { Symbol::Unbound => { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_CONTEXT_MANAGER, context_expression.into(), format_args!( "Object of type `{}` cannot be used with `with` because it doesn't implement `__exit__`", - context_expression_ty.display(self.db) + context_expression_ty.display(self.db()) ), ); } @@ -1509,40 +1519,36 @@ impl<'db> TypeInferenceBuilder<'db> { // TODO: Use the `exit_ty` to determine if any raised exception is suppressed. if exit_boundness == Boundness::PossiblyUnbound { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_CONTEXT_MANAGER, context_expression.into(), format_args!( "Object of type `{context_expression}` cannot be used with `with` because the method `__exit__` is possibly unbound", - context_expression = context_expression_ty.display(self.db), + context_expression = context_expression_ty.display(self.db()), ), ); } if exit_ty .call( - self.db, + self.db(), &[ context_manager_ty, - Type::none(self.db), - Type::none(self.db), - Type::none(self.db), + Type::none(self.db()), + Type::none(self.db()), + Type::none(self.db()), ], ) - .return_ty_result( - self.db, - context_expression.into(), - &mut self.diagnostics, - ) + .return_ty_result(&self.context, context_expression.into()) .is_err() { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_CONTEXT_MANAGER, context_expression.into(), format_args!( "Object of type `{context_expression}` cannot be used with `with` because the method `__exit__` of type `{exit_ty}` is not callable", - context_expression = context_expression_ty.display(self.db), - exit_ty = exit_ty.display(self.db), + context_expression = context_expression_ty.display(self.db()), + exit_ty = exit_ty.display(self.db()), ), ); } @@ -1569,33 +1575,33 @@ impl<'db> 
TypeInferenceBuilder<'db> { // it will actually be the type of the generic parameters to `BaseExceptionGroup` or `ExceptionGroup`. let symbol_ty = if let Type::Tuple(tuple) = node_ty { let type_base_exception = KnownClass::BaseException - .to_subclass_of(self.db) + .to_subclass_of(self.db()) .unwrap_or(Type::Unknown); - let mut builder = UnionBuilder::new(self.db); - for element in tuple.elements(self.db).iter().copied() { - builder = builder.add(if element.is_assignable_to(self.db, type_base_exception) { - element.to_instance(self.db) - } else { - if let Some(node) = node { - self.diagnostics - .add_invalid_exception_caught(self.db, node, element); - } - Type::Unknown - }); + let mut builder = UnionBuilder::new(self.db()); + for element in tuple.elements(self.db()).iter().copied() { + builder = builder.add( + if element.is_assignable_to(self.db(), type_base_exception) { + element.to_instance(self.db()) + } else { + if let Some(node) = node { + report_invalid_exception_caught(&self.context, node, element); + } + Type::Unknown + }, + ); } builder.build() - } else if node_ty.is_subtype_of(self.db, KnownClass::Tuple.to_instance(self.db)) { + } else if node_ty.is_subtype_of(self.db(), KnownClass::Tuple.to_instance(self.db())) { todo_type!("Homogeneous tuple in exception handler") } else { let type_base_exception = KnownClass::BaseException - .to_subclass_of(self.db) + .to_subclass_of(self.db()) .unwrap_or(Type::Unknown); - if node_ty.is_assignable_to(self.db, type_base_exception) { - node_ty.to_instance(self.db) + if node_ty.is_assignable_to(self.db(), type_base_exception) { + node_ty.to_instance(self.db()) } else { if let Some(node) = node { - self.diagnostics - .add_invalid_exception_caught(self.db, node, node_ty); + report_invalid_exception_caught(&self.context, node, node_ty); } Type::Unknown } @@ -1606,7 +1612,7 @@ impl<'db> TypeInferenceBuilder<'db> { // (needs support for homogeneous tuples). 
// // TODO: should be generic with `symbol_ty` as the generic parameter - KnownClass::BaseExceptionGroup.to_instance(self.db) + KnownClass::BaseExceptionGroup.to_instance(self.db()) } else { symbol_ty }; @@ -1632,7 +1638,7 @@ impl<'db> TypeInferenceBuilder<'db> { let bound_or_constraint = match bound.as_deref() { Some(expr @ ast::Expr::Tuple(ast::ExprTuple { elts, .. })) => { if elts.len() < 2 { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_TYPE_VARIABLE_CONSTRAINTS, expr.into(), format_args!("TypeVar must have at least two constrained types"), @@ -1641,7 +1647,7 @@ impl<'db> TypeInferenceBuilder<'db> { None } else { let tuple = TupleType::new( - self.db, + self.db(), elts.iter() .map(|expr| self.infer_type_expression(expr)) .collect::>(), @@ -1658,7 +1664,7 @@ impl<'db> TypeInferenceBuilder<'db> { }; let default_ty = self.infer_optional_type_expression(default.as_deref()); let ty = Type::KnownInstance(KnownInstanceType::TypeVar(TypeVarInstance::new( - self.db, + self.db(), name.id.clone(), bound_or_constraint, default_ty, @@ -1824,15 +1830,15 @@ impl<'db> TypeInferenceBuilder<'db> { self.infer_standalone_expression(value); let value_ty = self.expression_ty(value); - let name_ast_id = name.scoped_expression_id(self.db, self.scope()); + let name_ast_id = name.scoped_expression_id(self.db(), self.scope()); let mut target_ty = match assignment.target() { TargetKind::Sequence(unpack) => { - let unpacked = infer_unpack_types(self.db, unpack); + let unpacked = infer_unpack_types(self.db(), unpack); // Only copy the diagnostics if this is the first assignment to avoid duplicating the // unpack assignments. 
if assignment.is_first() { - self.diagnostics.extend(unpacked.diagnostics()); + self.context.extend(unpacked); } unpacked.get(name_ast_id).unwrap_or(Type::Unknown) @@ -1840,7 +1846,7 @@ impl<'db> TypeInferenceBuilder<'db> { TargetKind::Name => value_ty, }; - if let Some(known_instance) = file_to_module(self.db, definition.file(self.db)) + if let Some(known_instance) = file_to_module(self.db(), definition.file(self.db())) .as_ref() .and_then(|module| KnownInstanceType::try_from_module_and_symbol(module, &name.id)) { @@ -1890,9 +1896,9 @@ impl<'db> TypeInferenceBuilder<'db> { // Handle various singletons. if let Type::Instance(InstanceType { class }) = annotation_ty { - if class.is_known(self.db, KnownClass::SpecialForm) { + if class.is_known(self.db(), KnownClass::SpecialForm) { if let Some(name_expr) = target.as_name_expr() { - if let Some(known_instance) = file_to_module(self.db, self.file) + if let Some(known_instance) = file_to_module(self.db(), self.file()) .as_ref() .and_then(|module| { KnownInstanceType::try_from_module_and_symbol(module, &name_expr.id) @@ -1939,29 +1945,27 @@ impl<'db> TypeInferenceBuilder<'db> { let op = assignment.op; match target_type { Type::Union(union) => { - return union.map(self.db, |&target_type| { + return union.map(self.db(), |&target_type| { self.infer_augmented_op(assignment, target_type, value_type) }) } Type::Instance(InstanceType { class }) => { if let Symbol::Type(class_member, boundness) = - class.class_member(self.db, op.in_place_dunder()) + class.class_member(self.db(), op.in_place_dunder()) { - let call = class_member.call(self.db, &[target_type, value_type]); - let augmented_return_ty = match call.return_ty_result( - self.db, - AnyNodeRef::StmtAugAssign(assignment), - &mut self.diagnostics, - ) { + let call = class_member.call(self.db(), &[target_type, value_type]); + let augmented_return_ty = match call + .return_ty_result(&self.context, AnyNodeRef::StmtAugAssign(assignment)) + { Ok(t) => t, Err(e) => { - 
self.diagnostics.add_lint( + self.context.report_lint( &UNSUPPORTED_OPERATOR, assignment.into(), format_args!( "Operator `{op}=` is unsupported between objects of type `{}` and `{}`", - target_type.display(self.db), - value_type.display(self.db) + target_type.display(self.db()), + value_type.display(self.db()) ), ); e.return_ty() @@ -1976,20 +1980,20 @@ impl<'db> TypeInferenceBuilder<'db> { let binary_return_ty = self.infer_binary_expression_type(left_ty, right_ty, op) .unwrap_or_else(|| { - self.diagnostics.add_lint( + self.context.report_lint( &UNSUPPORTED_OPERATOR, assignment.into(), format_args!( "Operator `{op}=` is unsupported between objects of type `{}` and `{}`", - left_ty.display(self.db), - right_ty.display(self.db) + left_ty.display(self.db()), + right_ty.display(self.db()) ), ); Type::Unknown }); UnionType::from_elements( - self.db, + self.db(), [augmented_return_ty, binary_return_ty], ) } @@ -2005,13 +2009,13 @@ impl<'db> TypeInferenceBuilder<'db> { self.infer_binary_expression_type(left_ty, right_ty, op) .unwrap_or_else(|| { - self.diagnostics.add_lint( + self.context.report_lint( &UNSUPPORTED_OPERATOR, assignment.into(), format_args!( "Operator `{op}=` is unsupported between objects of type `{}` and `{}`", - left_ty.display(self.db), - right_ty.display(self.db) + left_ty.display(self.db()), + right_ty.display(self.db()) ), ); Type::Unknown @@ -2090,8 +2094,8 @@ impl<'db> TypeInferenceBuilder<'db> { todo_type!("async iterables/iterators") } else { iterable_ty - .iterate(self.db) - .unwrap_with_diagnostic(iterable.into(), &mut self.diagnostics) + .iterate(self.db()) + .unwrap_with_diagnostic(&self.context, iterable.into()) }; self.store_expression_type(target, loop_var_value_ty); @@ -2135,7 +2139,7 @@ impl<'db> TypeInferenceBuilder<'db> { // Resolve the module being imported. 
let Some(full_module_ty) = self.module_ty_from_name(&full_module_name) else { - self.diagnostics.add_unresolved_module(alias, 0, Some(name)); + report_unresolved_module(&self.context, alias, 0, Some(name)); self.add_unknown_declaration_with_binding(alias.into(), definition); return; }; @@ -2213,7 +2217,7 @@ impl<'db> TypeInferenceBuilder<'db> { tail: Option<&str>, level: NonZeroU32, ) -> Result { - let module = file_to_module(self.db, self.file) + let module = file_to_module(self.db(), self.file()) .ok_or(ModuleNameResolutionError::UnknownCurrentModule)?; let mut level = level.get(); if module.kind().is_package() { @@ -2249,7 +2253,7 @@ impl<'db> TypeInferenceBuilder<'db> { "Resolving imported object `{}` from module `{}` relative to file `{}`", alias.name, format_import_from_module(level.get(), module), - self.file.path(self.db), + self.file().path(self.db()), ); self.relative_module_name(module, level) } else { @@ -2276,8 +2280,7 @@ impl<'db> TypeInferenceBuilder<'db> { "Relative module resolution `{}` failed: too many leading dots", format_import_from_module(*level, module), ); - self.diagnostics - .add_unresolved_module(import_from, *level, module); + report_unresolved_module(&self.context, import_from, *level, module); self.add_unknown_declaration_with_binding(alias.into(), definition); return; } @@ -2285,18 +2288,16 @@ impl<'db> TypeInferenceBuilder<'db> { tracing::debug!( "Relative module resolution `{}` failed; could not resolve file `{}` to a module", format_import_from_module(*level, module), - self.file.path(self.db) + self.file().path(self.db()) ); - self.diagnostics - .add_unresolved_module(import_from, *level, module); + report_unresolved_module(&self.context, import_from, *level, module); self.add_unknown_declaration_with_binding(alias.into(), definition); return; } }; let Some(module_ty) = self.module_ty_from_name(&module_name) else { - self.diagnostics - .add_unresolved_module(import_from, *level, module); + report_unresolved_module(&self.context, 
import_from, *level, module); self.add_unknown_declaration_with_binding(alias.into(), definition); return; }; @@ -2308,11 +2309,11 @@ impl<'db> TypeInferenceBuilder<'db> { } = alias; // First try loading the requested attribute from the module. - if let Symbol::Type(ty, boundness) = module_ty.member(self.db, name) { + if let Symbol::Type(ty, boundness) = module_ty.member(self.db(), name) { if boundness == Boundness::PossiblyUnbound { // TODO: Consider loading _both_ the attribute and any submodule and unioning them // together if the attribute exists but is possibly-unbound. - self.diagnostics.add_lint( + self.context.report_lint( &POSSIBLY_UNBOUND_IMPORT, AnyNodeRef::Alias(alias), format_args!("Member `{name}` of module `{module_name}` is possibly unbound",), @@ -2351,7 +2352,7 @@ impl<'db> TypeInferenceBuilder<'db> { } } - self.diagnostics.add_lint( + self.context.report_lint( &UNRESOLVED_IMPORT, AnyNodeRef::Alias(alias), format_args!("Module `{module_name}` has no member `{name}`",), @@ -2371,8 +2372,8 @@ impl<'db> TypeInferenceBuilder<'db> { } fn module_ty_from_name(&self, module_name: &ModuleName) -> Option> { - resolve_module(self.db, module_name) - .map(|module| Type::module_literal(self.db, self.file, module)) + resolve_module(self.db(), module_name) + .map(|module| Type::module_literal(self.db(), self.file(), module)) } fn infer_decorator(&mut self, decorator: &ast::Decorator) -> Type<'db> { @@ -2422,14 +2423,14 @@ impl<'db> TypeInferenceBuilder<'db> { fn infer_standalone_expression(&mut self, expression: &ast::Expr) -> Type<'db> { let standalone_expression = self.index.expression(expression); - let types = infer_expression_types(self.db, standalone_expression); + let types = infer_expression_types(self.db(), standalone_expression); self.extend(types); self.expression_ty(expression) } fn infer_expression_impl(&mut self, expression: &ast::Expr) -> Type<'db> { let ty = match expression { - ast::Expr::NoneLiteral(ast::ExprNoneLiteral { range: _ }) => 
Type::none(self.db), + ast::Expr::NoneLiteral(ast::ExprNoneLiteral { range: _ }) => Type::none(self.db()), ast::Expr::NumberLiteral(literal) => self.infer_number_literal_expression(literal), ast::Expr::BooleanLiteral(literal) => self.infer_boolean_literal_expression(literal), ast::Expr::StringLiteral(literal) => self.infer_string_literal_expression(literal), @@ -2480,7 +2481,7 @@ impl<'db> TypeInferenceBuilder<'db> { // on the string expression itself that represents the annotation. return; } - let expr_id = expression.scoped_expression_id(self.db, self.scope()); + let expr_id = expression.scoped_expression_id(self.db(), self.scope()); let previous = self.types.expressions.insert(expr_id, ty); assert_eq!(previous, None); } @@ -2492,12 +2493,12 @@ impl<'db> TypeInferenceBuilder<'db> { ast::Number::Int(n) => n .as_i64() .map(Type::IntLiteral) - .unwrap_or_else(|| KnownClass::Int.to_instance(self.db)), - ast::Number::Float(_) => KnownClass::Float.to_instance(self.db), - ast::Number::Complex { .. } => builtins_symbol(self.db, "complex") + .unwrap_or_else(|| KnownClass::Int.to_instance(self.db())), + ast::Number::Float(_) => KnownClass::Float.to_instance(self.db()), + ast::Number::Complex { .. 
} => builtins_symbol(self.db(), "complex") .ignore_possibly_unbound() .unwrap_or(Type::Unknown) - .to_instance(self.db), + .to_instance(self.db()), } } @@ -2510,7 +2511,7 @@ impl<'db> TypeInferenceBuilder<'db> { fn infer_string_literal_expression(&mut self, literal: &ast::ExprStringLiteral) -> Type<'db> { if literal.value.len() <= Self::MAX_STRING_LITERAL_SIZE { - Type::string_literal(self.db, literal.value.to_str()) + Type::string_literal(self.db(), literal.value.to_str()) } else { Type::LiteralString } @@ -2519,7 +2520,7 @@ impl<'db> TypeInferenceBuilder<'db> { fn infer_bytes_literal_expression(&mut self, literal: &ast::ExprBytesLiteral) -> Type<'db> { // TODO: ignoring r/R prefixes for now, should normalize bytes values let bytes: Vec = literal.value.bytes().collect(); - Type::bytes_literal(self.db, &bytes) + Type::bytes_literal(self.db(), &bytes) } fn infer_fstring_expression(&mut self, fstring: &ast::ExprFString) -> Type<'db> { @@ -2559,8 +2560,8 @@ impl<'db> TypeInferenceBuilder<'db> { if !conversion.is_none() || format_spec.is_some() { collector.add_expression(); } else { - if let Type::StringLiteral(literal) = ty.str(self.db) { - collector.push_str(literal.value(self.db)); + if let Type::StringLiteral(literal) = ty.str(self.db()) { + collector.push_str(literal.value(self.db())); } else { collector.add_expression(); } @@ -2574,14 +2575,14 @@ impl<'db> TypeInferenceBuilder<'db> { } } } - collector.ty(self.db) + collector.ty(self.db()) } fn infer_ellipsis_literal_expression( &mut self, _literal: &ast::ExprEllipsisLiteral, ) -> Type<'db> { - builtins_symbol(self.db, "Ellipsis") + builtins_symbol(self.db(), "Ellipsis") .ignore_possibly_unbound() .unwrap_or(Type::Unknown) } @@ -2597,7 +2598,7 @@ impl<'db> TypeInferenceBuilder<'db> { let element_types: Vec> = elts.iter().map(|elt| self.infer_expression(elt)).collect(); - Type::tuple(self.db, &element_types) + Type::tuple(self.db(), &element_types) } fn infer_list_expression(&mut self, list: &ast::ExprList) -> 
Type<'db> { @@ -2612,7 +2613,7 @@ impl<'db> TypeInferenceBuilder<'db> { } // TODO generic - KnownClass::List.to_instance(self.db) + KnownClass::List.to_instance(self.db()) } fn infer_set_expression(&mut self, set: &ast::ExprSet) -> Type<'db> { @@ -2623,7 +2624,7 @@ impl<'db> TypeInferenceBuilder<'db> { } // TODO generic - KnownClass::Set.to_instance(self.db) + KnownClass::Set.to_instance(self.db()) } fn infer_dict_expression(&mut self, dict: &ast::ExprDict) -> Type<'db> { @@ -2635,7 +2636,7 @@ impl<'db> TypeInferenceBuilder<'db> { } // TODO generic - KnownClass::Dict.to_instance(self.db) + KnownClass::Dict.to_instance(self.db()) } /// Infer the type of the `iter` expression of the first comprehension. @@ -2791,7 +2792,7 @@ impl<'db> TypeInferenceBuilder<'db> { definition: Definition<'db>, ) { let expression = self.index.expression(iterable); - let result = infer_expression_types(self.db, expression); + let result = infer_expression_types(self.db(), expression); // Two things are different if it's the first comprehension: // (1) We must lookup the `ScopedExpressionId` of the iterable expression in the outer scope, @@ -2802,13 +2803,13 @@ impl<'db> TypeInferenceBuilder<'db> { let iterable_ty = if is_first { let lookup_scope = self .index - .parent_scope_id(self.scope().file_scope_id(self.db)) + .parent_scope_id(self.scope().file_scope_id(self.db())) .expect("A comprehension should never be the top-level scope") - .to_scope_id(self.db, self.file); - result.expression_ty(iterable.scoped_expression_id(self.db, lookup_scope)) + .to_scope_id(self.db(), self.file()); + result.expression_ty(iterable.scoped_expression_id(self.db(), lookup_scope)) } else { self.extend(result); - result.expression_ty(iterable.scoped_expression_id(self.db, self.scope())) + result.expression_ty(iterable.scoped_expression_id(self.db(), self.scope())) }; let target_ty = if is_async { @@ -2816,12 +2817,12 @@ impl<'db> TypeInferenceBuilder<'db> { todo_type!("async iterables/iterators") } else { 
iterable_ty - .iterate(self.db) - .unwrap_with_diagnostic(iterable.into(), &mut self.diagnostics) + .iterate(self.db()) + .unwrap_with_diagnostic(&self.context, iterable.into()) }; self.types.expressions.insert( - target.scoped_expression_id(self.db, self.scope()), + target.scoped_expression_id(self.db(), self.scope()), target_ty, ); self.add_binding(target.into(), definition, target_ty); @@ -2831,7 +2832,7 @@ impl<'db> TypeInferenceBuilder<'db> { // See https://peps.python.org/pep-0572/#differences-between-assignment-expressions-and-assignment-statements if named.target.is_name_expr() { let definition = self.index.definition(named); - let result = infer_definition_types(self.db, definition); + let result = infer_definition_types(self.db(), definition); self.extend(result); result.binding_ty(definition) } else { @@ -2873,10 +2874,10 @@ impl<'db> TypeInferenceBuilder<'db> { let body_ty = self.infer_expression(body); let orelse_ty = self.infer_expression(orelse); - match test_ty.bool(self.db) { + match test_ty.bool(self.db()) { Truthiness::AlwaysTrue => body_ty, Truthiness::AlwaysFalse => orelse_ty, - Truthiness::Ambiguous => UnionType::from_elements(self.db, [body_ty, orelse_ty]), + Truthiness::Ambiguous => UnionType::from_elements(self.db(), [body_ty, orelse_ty]), } } @@ -2917,8 +2918,8 @@ impl<'db> TypeInferenceBuilder<'db> { let arg_types = self.infer_arguments(arguments); let function_type = self.infer_expression(func); function_type - .call(self.db, arg_types.as_slice()) - .unwrap_with_diagnostic(self.db, func.as_ref().into(), &mut self.diagnostics) + .call(self.db(), arg_types.as_slice()) + .unwrap_with_diagnostic(&self.context, func.as_ref().into()) } fn infer_starred_expression(&mut self, starred: &ast::ExprStarred) -> Type<'db> { @@ -2930,8 +2931,8 @@ impl<'db> TypeInferenceBuilder<'db> { let iterable_ty = self.infer_expression(value); iterable_ty - .iterate(self.db) - .unwrap_with_diagnostic(value.as_ref().into(), &mut self.diagnostics); + 
.iterate(self.db()) + .unwrap_with_diagnostic(&self.context, value.as_ref().into()); // TODO todo_type!("starred expression") @@ -2951,8 +2952,8 @@ impl<'db> TypeInferenceBuilder<'db> { let iterable_ty = self.infer_expression(value); iterable_ty - .iterate(self.db) - .unwrap_with_diagnostic(value.as_ref().into(), &mut self.diagnostics); + .iterate(self.db()) + .unwrap_with_diagnostic(&self.context, value.as_ref().into()); // TODO get type from `ReturnType` of generator todo_type!() @@ -2970,7 +2971,7 @@ impl<'db> TypeInferenceBuilder<'db> { /// Look up a name reference that isn't bound in the local scope. fn lookup_name(&mut self, name_node: &ast::ExprName) -> Symbol<'db> { let ast::ExprName { id: name, .. } = name_node; - let file_scope_id = self.scope().file_scope_id(self.db); + let file_scope_id = self.scope().file_scope_id(self.db()); let is_bound = if let Some(symbol) = self.index.symbol_table(file_scope_id).symbol_by_name(name) { symbol.is_bound() @@ -2985,15 +2986,16 @@ impl<'db> TypeInferenceBuilder<'db> { // In function-like scopes, any local variable (symbol that is bound in this scope) can // only have a definition in this scope, or error; it never references another scope. // (At runtime, it would use the `LOAD_FAST` opcode.) - if !is_bound || !self.scope().is_function_like(self.db) { + if !is_bound || !self.scope().is_function_like(self.db()) { // Walk up parent scopes looking for a possible enclosing scope that may have a // definition of this name visible to us (would be `LOAD_DEREF` at runtime.) for (enclosing_scope_file_id, _) in self.index.ancestor_scopes(file_scope_id) { // Class scopes are not visible to nested scopes, and we need to handle global // scope differently (because an unbound name there falls back to builtins), so // check only function-like scopes. 
- let enclosing_scope_id = enclosing_scope_file_id.to_scope_id(self.db, self.file); - if !enclosing_scope_id.is_function_like(self.db) { + let enclosing_scope_id = + enclosing_scope_file_id.to_scope_id(self.db(), self.file()); + if !enclosing_scope_id.is_function_like(self.db()) { continue; } let enclosing_symbol_table = self.index.symbol_table(enclosing_scope_file_id); @@ -3006,7 +3008,7 @@ impl<'db> TypeInferenceBuilder<'db> { // runtime, it is the scope that creates the cell for our closure.) If the name // isn't bound in that scope, we should get an unbound name, not continue // falling back to other scopes / globals / builtins. - return symbol(self.db, enclosing_scope_id, name); + return symbol(self.db(), enclosing_scope_id, name); } } @@ -3015,25 +3017,25 @@ impl<'db> TypeInferenceBuilder<'db> { let global_symbol = if file_scope_id.is_global() { Symbol::Unbound } else { - global_symbol(self.db, self.file, name) + global_symbol(self.db(), self.file(), name) }; // Fallback to builtins (without infinite recursion if we're already in builtins.) 
if global_symbol.possibly_unbound() - && Some(self.scope()) != builtins_module_scope(self.db) + && Some(self.scope()) != builtins_module_scope(self.db()) { - let mut builtins_symbol = builtins_symbol(self.db, name); + let mut builtins_symbol = builtins_symbol(self.db(), name); if builtins_symbol.is_unbound() && name == "reveal_type" { - self.diagnostics.add_lint( + self.context.report_lint( &UNDEFINED_REVEAL, name_node.into(), format_args!( "`reveal_type` used without importing it; this is allowed for debugging convenience but will fail at runtime"), ); - builtins_symbol = typing_extensions_symbol(self.db, name); + builtins_symbol = typing_extensions_symbol(self.db(), name); } - global_symbol.or_fall_back_to(self.db, &builtins_symbol) + global_symbol.or_fall_back_to(self.db(), &builtins_symbol) } else { global_symbol } @@ -3050,14 +3052,14 @@ impl<'db> TypeInferenceBuilder<'db> { ctx: _, } = name; - let file_scope_id = self.scope().file_scope_id(self.db); + let file_scope_id = self.scope().file_scope_id(self.db()); let use_def = self.index.use_def_map(file_scope_id); // If we're inferring types of deferred expressions, always treat them as public symbols let (bindings_ty, boundness) = if self.is_deferred() { if let Some(symbol) = self.index.symbol_table(file_scope_id).symbol_id_by_name(id) { ( - bindings_ty(self.db, use_def.public_bindings(symbol)), + bindings_ty(self.db(), use_def.public_bindings(symbol)), use_def.public_boundness(symbol), ) } else { @@ -3068,9 +3070,9 @@ impl<'db> TypeInferenceBuilder<'db> { (None, Boundness::PossiblyUnbound) } } else { - let use_id = name.scoped_use_id(self.db, self.scope()); + let use_id = name.scoped_use_id(self.db(), self.scope()); ( - bindings_ty(self.db, use_def.bindings_at_use(use_id)), + bindings_ty(self.db(), use_def.bindings_at_use(use_id)), use_def.use_boundness(use_id), ) }; @@ -3079,18 +3081,18 @@ impl<'db> TypeInferenceBuilder<'db> { match self.lookup_name(name) { Symbol::Type(looked_up_ty, looked_up_boundness) => { 
if looked_up_boundness == Boundness::PossiblyUnbound { - self.diagnostics.add_possibly_unresolved_reference(name); + report_possibly_unresolved_reference(&self.context, name); } bindings_ty - .map(|ty| UnionType::from_elements(self.db, [ty, looked_up_ty])) + .map(|ty| UnionType::from_elements(self.db(), [ty, looked_up_ty])) .unwrap_or(looked_up_ty) } Symbol::Unbound => { if bindings_ty.is_some() { - self.diagnostics.add_possibly_unresolved_reference(name); + report_possibly_unresolved_reference(&self.context, name); } else { - self.diagnostics.add_unresolved_reference(name); + report_unresolved_reference(&self.context, name); } bindings_ty.unwrap_or(Type::Unknown) } @@ -3118,16 +3120,16 @@ impl<'db> TypeInferenceBuilder<'db> { } = attribute; let value_ty = self.infer_expression(value); - match value_ty.member(self.db, &attr.id) { + match value_ty.member(self.db(), &attr.id) { Symbol::Type(member_ty, boundness) => { if boundness == Boundness::PossiblyUnbound { - self.diagnostics.add_lint( + self.context.report_lint( &POSSIBLY_UNBOUND_ATTRIBUTE, attribute.into(), format_args!( "Attribute `{}` on type `{}` is possibly unbound", attr.id, - value_ty.display(self.db), + value_ty.display(self.db()), ), ); } @@ -3135,12 +3137,12 @@ impl<'db> TypeInferenceBuilder<'db> { member_ty } Symbol::Unbound => { - self.diagnostics.add_lint( + self.context.report_lint( &UNRESOLVED_ATTRIBUTE, attribute.into(), format_args!( "Type `{}` has no attribute `{}`", - value_ty.display(self.db), + value_ty.display(self.db()), attr.id ), ); @@ -3188,7 +3190,7 @@ impl<'db> TypeInferenceBuilder<'db> { (UnaryOp::USub, Type::BooleanLiteral(bool)) => Type::IntLiteral(-i64::from(bool)), (UnaryOp::Invert, Type::BooleanLiteral(bool)) => Type::IntLiteral(!i64::from(bool)), - (UnaryOp::Not, ty) => ty.bool(self.db).negate().into_type(self.db), + (UnaryOp::Not, ty) => ty.bool(self.db()).negate().into_type(self.db()), (_, Type::Any) => Type::Any, (_, Type::Unknown) => Type::Unknown, ( @@ -3205,35 +3207,31 @@ 
impl<'db> TypeInferenceBuilder<'db> { }; if let Symbol::Type(class_member, _) = - class.class_member(self.db, unary_dunder_method) + class.class_member(self.db(), unary_dunder_method) { - let call = class_member.call(self.db, &[operand_type]); + let call = class_member.call(self.db(), &[operand_type]); - match call.return_ty_result( - self.db, - AnyNodeRef::ExprUnaryOp(unary), - &mut self.diagnostics, - ) { + match call.return_ty_result(&self.context, AnyNodeRef::ExprUnaryOp(unary)) { Ok(t) => t, Err(e) => { - self.diagnostics.add_lint( + self.context.report_lint( &UNSUPPORTED_OPERATOR, unary.into(), format_args!( "Unary operator `{op}` is unsupported for type `{}`", - operand_type.display(self.db), + operand_type.display(self.db()), ), ); e.return_ty() } } } else { - self.diagnostics.add_lint( + self.context.report_lint( &UNSUPPORTED_OPERATOR, unary.into(), format_args!( "Unary operator `{op}` is unsupported for type `{}`", - operand_type.display(self.db), + operand_type.display(self.db()), ), ); @@ -3269,13 +3267,13 @@ impl<'db> TypeInferenceBuilder<'db> { self.infer_binary_expression_type(left_ty, right_ty, *op) .unwrap_or_else(|| { - self.diagnostics.add_lint( + self.context.report_lint( &UNSUPPORTED_OPERATOR, binary.into(), format_args!( "Operator `{op}` is unsupported between objects of type `{}` and `{}`", - left_ty.display(self.db), - right_ty.display(self.db) + left_ty.display(self.db()), + right_ty.display(self.db()) ), ); Type::Unknown @@ -3297,35 +3295,35 @@ impl<'db> TypeInferenceBuilder<'db> { (Type::IntLiteral(n), Type::IntLiteral(m), ast::Operator::Add) => Some( n.checked_add(m) .map(Type::IntLiteral) - .unwrap_or_else(|| KnownClass::Int.to_instance(self.db)), + .unwrap_or_else(|| KnownClass::Int.to_instance(self.db())), ), (Type::IntLiteral(n), Type::IntLiteral(m), ast::Operator::Sub) => Some( n.checked_sub(m) .map(Type::IntLiteral) - .unwrap_or_else(|| KnownClass::Int.to_instance(self.db)), + .unwrap_or_else(|| 
KnownClass::Int.to_instance(self.db())), ), (Type::IntLiteral(n), Type::IntLiteral(m), ast::Operator::Mult) => Some( n.checked_mul(m) .map(Type::IntLiteral) - .unwrap_or_else(|| KnownClass::Int.to_instance(self.db)), + .unwrap_or_else(|| KnownClass::Int.to_instance(self.db())), ), (Type::IntLiteral(_), Type::IntLiteral(_), ast::Operator::Div) => { - Some(KnownClass::Float.to_instance(self.db)) + Some(KnownClass::Float.to_instance(self.db())) } (Type::IntLiteral(n), Type::IntLiteral(m), ast::Operator::FloorDiv) => Some( n.checked_div(m) .map(Type::IntLiteral) - .unwrap_or_else(|| KnownClass::Int.to_instance(self.db)), + .unwrap_or_else(|| KnownClass::Int.to_instance(self.db())), ), (Type::IntLiteral(n), Type::IntLiteral(m), ast::Operator::Mod) => Some( n.checked_rem(m) .map(Type::IntLiteral) - .unwrap_or_else(|| KnownClass::Int.to_instance(self.db)), + .unwrap_or_else(|| KnownClass::Int.to_instance(self.db())), ), (Type::IntLiteral(n), Type::IntLiteral(m), ast::Operator::Pow) => { @@ -3334,21 +3332,21 @@ impl<'db> TypeInferenceBuilder<'db> { Ok(m) => n .checked_pow(m) .map(Type::IntLiteral) - .unwrap_or_else(|| KnownClass::Int.to_instance(self.db)), - Err(_) => KnownClass::Int.to_instance(self.db), + .unwrap_or_else(|| KnownClass::Int.to_instance(self.db())), + Err(_) => KnownClass::Int.to_instance(self.db()), }) } (Type::BytesLiteral(lhs), Type::BytesLiteral(rhs), ast::Operator::Add) => { - let bytes = [&**lhs.value(self.db), &**rhs.value(self.db)].concat(); - Some(Type::bytes_literal(self.db, &bytes)) + let bytes = [&**lhs.value(self.db()), &**rhs.value(self.db())].concat(); + Some(Type::bytes_literal(self.db(), &bytes)) } (Type::StringLiteral(lhs), Type::StringLiteral(rhs), ast::Operator::Add) => { - let lhs_value = lhs.value(self.db).to_string(); - let rhs_value = rhs.value(self.db).as_ref(); + let lhs_value = lhs.value(self.db()).to_string(); + let rhs_value = rhs.value(self.db()).as_ref(); let ty = if lhs_value.len() + rhs_value.len() <= 
Self::MAX_STRING_LITERAL_SIZE { - Type::string_literal(self.db, &(lhs_value + rhs_value)) + Type::string_literal(self.db(), &(lhs_value + rhs_value)) } else { Type::LiteralString }; @@ -3364,13 +3362,13 @@ impl<'db> TypeInferenceBuilder<'db> { (Type::StringLiteral(s), Type::IntLiteral(n), ast::Operator::Mult) | (Type::IntLiteral(n), Type::StringLiteral(s), ast::Operator::Mult) => { let ty = if n < 1 { - Type::string_literal(self.db, "") + Type::string_literal(self.db(), "") } else if let Ok(n) = usize::try_from(n) { - if n.checked_mul(s.value(self.db).len()) + if n.checked_mul(s.value(self.db()).len()) .is_some_and(|new_length| new_length <= Self::MAX_STRING_LITERAL_SIZE) { - let new_literal = s.value(self.db).repeat(n); - Type::string_literal(self.db, &new_literal) + let new_literal = s.value(self.db()).repeat(n); + Type::string_literal(self.db(), &new_literal) } else { Type::LiteralString } @@ -3383,78 +3381,79 @@ impl<'db> TypeInferenceBuilder<'db> { (Type::LiteralString, Type::IntLiteral(n), ast::Operator::Mult) | (Type::IntLiteral(n), Type::LiteralString, ast::Operator::Mult) => { let ty = if n < 1 { - Type::string_literal(self.db, "") + Type::string_literal(self.db(), "") } else { Type::LiteralString }; Some(ty) } - (Type::Instance(_), Type::IntLiteral(_), op) => { - self.infer_binary_expression_type(left_ty, KnownClass::Int.to_instance(self.db), op) - } + (Type::Instance(_), Type::IntLiteral(_), op) => self.infer_binary_expression_type( + left_ty, + KnownClass::Int.to_instance(self.db()), + op, + ), (Type::IntLiteral(_), Type::Instance(_), op) => self.infer_binary_expression_type( - KnownClass::Int.to_instance(self.db), + KnownClass::Int.to_instance(self.db()), right_ty, op, ), (Type::Instance(_), Type::Tuple(_), op) => self.infer_binary_expression_type( left_ty, - KnownClass::Tuple.to_instance(self.db), + KnownClass::Tuple.to_instance(self.db()), op, ), (Type::Tuple(_), Type::Instance(_), op) => self.infer_binary_expression_type( - 
KnownClass::Tuple.to_instance(self.db), + KnownClass::Tuple.to_instance(self.db()), right_ty, op, ), - (Type::Instance(_), Type::StringLiteral(_) | Type::LiteralString, op) => { - self.infer_binary_expression_type(left_ty, KnownClass::Str.to_instance(self.db), op) - } + (Type::Instance(_), Type::StringLiteral(_) | Type::LiteralString, op) => self + .infer_binary_expression_type(left_ty, KnownClass::Str.to_instance(self.db()), op), (Type::StringLiteral(_) | Type::LiteralString, Type::Instance(_), op) => self - .infer_binary_expression_type(KnownClass::Str.to_instance(self.db), right_ty, op), + .infer_binary_expression_type(KnownClass::Str.to_instance(self.db()), right_ty, op), (Type::Instance(_), Type::BytesLiteral(_), op) => self.infer_binary_expression_type( left_ty, - KnownClass::Bytes.to_instance(self.db), + KnownClass::Bytes.to_instance(self.db()), op, ), (Type::BytesLiteral(_), Type::Instance(_), op) => self.infer_binary_expression_type( - KnownClass::Bytes.to_instance(self.db), + KnownClass::Bytes.to_instance(self.db()), right_ty, op, ), (Type::Instance(left), Type::Instance(right), op) => { - if left != right && right.is_subtype_of(self.db, left) { + if left != right && right.is_subtype_of(self.db(), left) { let reflected_dunder = op.reflected_dunder(); - let rhs_reflected = right.class.class_member(self.db, reflected_dunder); + let rhs_reflected = right.class.class_member(self.db(), reflected_dunder); if !rhs_reflected.is_unbound() - && rhs_reflected != left.class.class_member(self.db, reflected_dunder) + && rhs_reflected != left.class.class_member(self.db(), reflected_dunder) { return right_ty - .call_dunder(self.db, reflected_dunder, &[right_ty, left_ty]) - .return_ty(self.db) + .call_dunder(self.db(), reflected_dunder, &[right_ty, left_ty]) + .return_ty(self.db()) .or_else(|| { left_ty - .call_dunder(self.db, op.dunder(), &[left_ty, right_ty]) - .return_ty(self.db) + .call_dunder(self.db(), op.dunder(), &[left_ty, right_ty]) + .return_ty(self.db()) }); } 
} let call_on_left_instance = if let Symbol::Type(class_member, _) = - left.class.class_member(self.db, op.dunder()) + left.class.class_member(self.db(), op.dunder()) { class_member - .call(self.db, &[left_ty, right_ty]) - .return_ty(self.db) + .call(self.db(), &[left_ty, right_ty]) + .return_ty(self.db()) } else { None }; @@ -3464,11 +3463,11 @@ impl<'db> TypeInferenceBuilder<'db> { None } else { if let Symbol::Type(class_member, _) = - right.class.class_member(self.db, op.reflected_dunder()) + right.class.class_member(self.db(), op.reflected_dunder()) { class_member - .call(self.db, &[right_ty, left_ty]) - .return_ty(self.db) + .call(self.db(), &[right_ty, left_ty]) + .return_ty(self.db()) } else { None } @@ -3499,7 +3498,7 @@ impl<'db> TypeInferenceBuilder<'db> { values, } = bool_op; Self::infer_chained_boolean_types( - self.db, + self.db(), *op, values.iter().enumerate().map(|(index, value)| { if index == values.len() - 1 { @@ -3566,7 +3565,7 @@ impl<'db> TypeInferenceBuilder<'db> { // As some operators (==, !=, <, <=, >, >=) *can* return an arbitrary type, the logic below // is shared with the one in `infer_binary_type_comparison`. 
Self::infer_chained_boolean_types( - self.db, + self.db(), ast::BoolOp::And, std::iter::once(&**left) .chain(comparators) @@ -3579,21 +3578,21 @@ impl<'db> TypeInferenceBuilder<'db> { self.infer_binary_type_comparison(left_ty, *op, right_ty) .unwrap_or_else(|error| { // Handle unsupported operators (diagnostic, `bool`/`Unknown` outcome) - self.diagnostics.add_lint( + self.context.report_lint( &UNSUPPORTED_OPERATOR, AnyNodeRef::ExprCompare(compare), format_args!( "Operator `{}` is not supported for types `{}` and `{}`{}", error.op, - error.left_ty.display(self.db), - error.right_ty.display(self.db), + error.left_ty.display(self.db()), + error.right_ty.display(self.db()), if (left_ty, right_ty) == (error.left_ty, error.right_ty) { String::new() } else { format!( ", in comparing `{}` with `{}`", - left_ty.display(self.db), - right_ty.display(self.db) + left_ty.display(self.db()), + right_ty.display(self.db()) ) } ), @@ -3604,7 +3603,7 @@ impl<'db> TypeInferenceBuilder<'db> { ast::CmpOp::In | ast::CmpOp::NotIn | ast::CmpOp::Is - | ast::CmpOp::IsNot => KnownClass::Bool.to_instance(self.db), + | ast::CmpOp::IsNot => KnownClass::Bool.to_instance(self.db()), // Other operators can return arbitrary types _ => Type::Unknown, } @@ -3624,7 +3623,7 @@ impl<'db> TypeInferenceBuilder<'db> { // If a comparison yields a definitive true/false answer on a (positive) part // of an intersection type, it will also yield a definitive answer on the full // intersection type, which is even more specific. 
- for pos in intersection.positive(self.db) { + for pos in intersection.positive(self.db()) { let result = match intersection_on { IntersectionOn::Left => self.infer_binary_type_comparison(*pos, op, other)?, IntersectionOn::Right => self.infer_binary_type_comparison(other, op, *pos)?, @@ -3636,7 +3635,7 @@ impl<'db> TypeInferenceBuilder<'db> { // For negative contributions to the intersection type, there are only a few // special cases that allow us to narrow down the result type of the comparison. - for neg in intersection.negative(self.db) { + for neg in intersection.negative(self.db()) { let result = match intersection_on { IntersectionOn::Left => self.infer_binary_type_comparison(*neg, op, other).ok(), IntersectionOn::Right => self.infer_binary_type_comparison(other, op, *neg).ok(), @@ -3697,8 +3696,8 @@ impl<'db> TypeInferenceBuilder<'db> { // // we would get a result type `Literal[True]` which is too narrow. // - let mut builder = IntersectionBuilder::new(self.db); - for pos in intersection.positive(self.db) { + let mut builder = IntersectionBuilder::new(self.db()); + for pos in intersection.positive(self.db()) { let result = match intersection_on { IntersectionOn::Left => self.infer_binary_type_comparison(*pos, op, other)?, IntersectionOn::Right => self.infer_binary_type_comparison(other, op, *pos)?, @@ -3727,15 +3726,15 @@ impl<'db> TypeInferenceBuilder<'db> { // - `[ast::CompOp::IsNot]`: return `true` if unequal, `bool` if equal match (left, right) { (Type::Union(union), other) => { - let mut builder = UnionBuilder::new(self.db); - for element in union.elements(self.db) { + let mut builder = UnionBuilder::new(self.db()); + for element in union.elements(self.db()) { builder = builder.add(self.infer_binary_type_comparison(*element, op, other)?); } Ok(builder.build()) } (other, Type::Union(union)) => { - let mut builder = UnionBuilder::new(self.db); - for element in union.elements(self.db) { + let mut builder = UnionBuilder::new(self.db()); + for element in 
union.elements(self.db()) { builder = builder.add(self.infer_binary_type_comparison(other, op, *element)?); } Ok(builder.build()) @@ -3765,14 +3764,14 @@ impl<'db> TypeInferenceBuilder<'db> { ast::CmpOp::GtE => Ok(Type::BooleanLiteral(n >= m)), ast::CmpOp::Is => { if n == m { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } else { Ok(Type::BooleanLiteral(false)) } } ast::CmpOp::IsNot => { if n == m { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } else { Ok(Type::BooleanLiteral(true)) } @@ -3785,10 +3784,10 @@ impl<'db> TypeInferenceBuilder<'db> { }), }, (Type::IntLiteral(_), Type::Instance(_)) => { - self.infer_binary_type_comparison(KnownClass::Int.to_instance(self.db), op, right) + self.infer_binary_type_comparison(KnownClass::Int.to_instance(self.db()), op, right) } (Type::Instance(_), Type::IntLiteral(_)) => { - self.infer_binary_type_comparison(left, op, KnownClass::Int.to_instance(self.db)) + self.infer_binary_type_comparison(left, op, KnownClass::Int.to_instance(self.db())) } // Booleans are coded as integers (False = 0, True = 1) @@ -3810,8 +3809,8 @@ impl<'db> TypeInferenceBuilder<'db> { ), (Type::StringLiteral(salsa_s1), Type::StringLiteral(salsa_s2)) => { - let s1 = salsa_s1.value(self.db); - let s2 = salsa_s2.value(self.db); + let s1 = salsa_s1.value(self.db()); + let s2 = salsa_s2.value(self.db()); match op { ast::CmpOp::Eq => Ok(Type::BooleanLiteral(s1 == s2)), ast::CmpOp::NotEq => Ok(Type::BooleanLiteral(s1 != s2)), @@ -3823,14 +3822,14 @@ impl<'db> TypeInferenceBuilder<'db> { ast::CmpOp::NotIn => Ok(Type::BooleanLiteral(!s2.contains(s1.as_ref()))), ast::CmpOp::Is => { if s1 == s2 { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } else { Ok(Type::BooleanLiteral(false)) } } ast::CmpOp::IsNot => { if s1 == s2 { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } else { 
Ok(Type::BooleanLiteral(true)) } @@ -3838,22 +3837,22 @@ impl<'db> TypeInferenceBuilder<'db> { } } (Type::StringLiteral(_), _) => { - self.infer_binary_type_comparison(KnownClass::Str.to_instance(self.db), op, right) + self.infer_binary_type_comparison(KnownClass::Str.to_instance(self.db()), op, right) } (_, Type::StringLiteral(_)) => { - self.infer_binary_type_comparison(left, op, KnownClass::Str.to_instance(self.db)) + self.infer_binary_type_comparison(left, op, KnownClass::Str.to_instance(self.db())) } (Type::LiteralString, _) => { - self.infer_binary_type_comparison(KnownClass::Str.to_instance(self.db), op, right) + self.infer_binary_type_comparison(KnownClass::Str.to_instance(self.db()), op, right) } (_, Type::LiteralString) => { - self.infer_binary_type_comparison(left, op, KnownClass::Str.to_instance(self.db)) + self.infer_binary_type_comparison(left, op, KnownClass::Str.to_instance(self.db())) } (Type::BytesLiteral(salsa_b1), Type::BytesLiteral(salsa_b2)) => { - let b1 = &**salsa_b1.value(self.db); - let b2 = &**salsa_b2.value(self.db); + let b1 = &**salsa_b1.value(self.db()); + let b2 = &**salsa_b2.value(self.db()); match op { ast::CmpOp::Eq => Ok(Type::BooleanLiteral(b1 == b2)), ast::CmpOp::NotEq => Ok(Type::BooleanLiteral(b1 != b2)), @@ -3869,40 +3868,44 @@ impl<'db> TypeInferenceBuilder<'db> { } ast::CmpOp::Is => { if b1 == b2 { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } else { Ok(Type::BooleanLiteral(false)) } } ast::CmpOp::IsNot => { if b1 == b2 { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } else { Ok(Type::BooleanLiteral(true)) } } } } - (Type::BytesLiteral(_), _) => { - self.infer_binary_type_comparison(KnownClass::Bytes.to_instance(self.db), op, right) - } - (_, Type::BytesLiteral(_)) => { - self.infer_binary_type_comparison(left, op, KnownClass::Bytes.to_instance(self.db)) - } + (Type::BytesLiteral(_), _) => self.infer_binary_type_comparison( + 
KnownClass::Bytes.to_instance(self.db()), + op, + right, + ), + (_, Type::BytesLiteral(_)) => self.infer_binary_type_comparison( + left, + op, + KnownClass::Bytes.to_instance(self.db()), + ), (Type::Tuple(_), Type::Instance(InstanceType { class })) - if class.is_known(self.db, KnownClass::VersionInfo) => + if class.is_known(self.db(), KnownClass::VersionInfo) => { - self.infer_binary_type_comparison(left, op, Type::version_info_tuple(self.db)) + self.infer_binary_type_comparison(left, op, Type::version_info_tuple(self.db())) } (Type::Instance(InstanceType { class }), Type::Tuple(_)) - if class.is_known(self.db, KnownClass::VersionInfo) => + if class.is_known(self.db(), KnownClass::VersionInfo) => { - self.infer_binary_type_comparison(Type::version_info_tuple(self.db), op, right) + self.infer_binary_type_comparison(Type::version_info_tuple(self.db()), op, right) } (Type::Tuple(lhs), Type::Tuple(rhs)) => { // Note: This only works on heterogeneous tuple types. - let lhs_elements = lhs.elements(self.db); - let rhs_elements = rhs.elements(self.db); + let lhs_elements = lhs.elements(self.db()); + let rhs_elements = rhs.elements(self.db()); let mut tuple_rich_comparison = |op| self.infer_tuple_rich_comparison(lhs_elements, op, rhs_elements); @@ -3927,7 +3930,7 @@ impl<'db> TypeInferenceBuilder<'db> { match eq_result { todo @ Type::Todo(_) => return Ok(todo), - ty => match ty.bool(self.db) { + ty => match ty.bool(self.db()) { Truthiness::AlwaysTrue => eq_count += 1, Truthiness::AlwaysFalse => not_eq_count += 1, Truthiness::Ambiguous => (), @@ -3940,7 +3943,7 @@ impl<'db> TypeInferenceBuilder<'db> { } else if not_eq_count == rhs_elements.len() { Ok(Type::BooleanLiteral(op.is_not_in())) } else { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } } ast::CmpOp::Is | ast::CmpOp::IsNot => { @@ -3952,9 +3955,9 @@ impl<'db> TypeInferenceBuilder<'db> { Ok(match eq_result { todo @ Type::Todo(_) => todo, - ty => match ty.bool(self.db) { + ty 
=> match ty.bool(self.db()) { Truthiness::AlwaysFalse => Type::BooleanLiteral(op.is_is_not()), - _ => KnownClass::Bool.to_instance(self.db), + _ => KnownClass::Bool.to_instance(self.db()), }, }) } @@ -3964,9 +3967,9 @@ impl<'db> TypeInferenceBuilder<'db> { // Lookup the rich comparison `__dunder__` methods on instances (Type::Instance(left_instance), Type::Instance(right_instance)) => { let rich_comparison = - |op| perform_rich_comparison(self.db, left_instance, right_instance, op); + |op| perform_rich_comparison(self.db(), left_instance, right_instance, op); let membership_test_comparison = |op| { - perform_membership_test_comparison(self.db, left_instance, right_instance, op) + perform_membership_test_comparison(self.db(), left_instance, right_instance, op) }; match op { ast::CmpOp::Eq => rich_comparison(RichCompareOperator::Eq), @@ -3980,32 +3983,32 @@ impl<'db> TypeInferenceBuilder<'db> { membership_test_comparison(MembershipTestCompareOperator::NotIn) } ast::CmpOp::Is => { - if left.is_disjoint_from(self.db, right) { + if left.is_disjoint_from(self.db(), right) { Ok(Type::BooleanLiteral(false)) - } else if left.is_singleton(self.db) - && left.is_equivalent_to(self.db, right) + } else if left.is_singleton(self.db()) + && left.is_equivalent_to(self.db(), right) { Ok(Type::BooleanLiteral(true)) } else { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } } ast::CmpOp::IsNot => { - if left.is_disjoint_from(self.db, right) { + if left.is_disjoint_from(self.db(), right) { Ok(Type::BooleanLiteral(true)) - } else if left.is_singleton(self.db) - && left.is_equivalent_to(self.db, right) + } else if left.is_singleton(self.db()) + && left.is_equivalent_to(self.db(), right) { Ok(Type::BooleanLiteral(false)) } else { - Ok(KnownClass::Bool.to_instance(self.db)) + Ok(KnownClass::Bool.to_instance(self.db())) } } } } // TODO: handle more types _ => match op { - ast::CmpOp::Is | ast::CmpOp::IsNot => 
Ok(KnownClass::Bool.to_instance(self.db)), + ast::CmpOp::Is | ast::CmpOp::IsNot => Ok(KnownClass::Bool.to_instance(self.db())), _ => Ok(todo_type!()), }, } @@ -4025,7 +4028,7 @@ impl<'db> TypeInferenceBuilder<'db> { let left_iter = left.iter().copied(); let right_iter = right.iter().copied(); - let mut builder = UnionBuilder::new(self.db); + let mut builder = UnionBuilder::new(self.db()); for (l_ty, r_ty) in left_iter.zip(right_iter) { let pairwise_eq_result = self @@ -4035,7 +4038,7 @@ impl<'db> TypeInferenceBuilder<'db> { match pairwise_eq_result { // If propagation is required, return the result as is todo @ Type::Todo(_) => return Ok(todo), - ty => match ty.bool(self.db) { + ty => match ty.bool(self.db()) { // - AlwaysTrue : Continue to the next pair for lexicographic comparison Truthiness::AlwaysTrue => continue, // - AlwaysFalse: @@ -4110,22 +4113,23 @@ impl<'db> TypeInferenceBuilder<'db> { ( Type::Instance(InstanceType { class }), Type::IntLiteral(_) | Type::BooleanLiteral(_) | Type::SliceLiteral(_), - ) if class.is_known(self.db, KnownClass::VersionInfo) => self + ) if class.is_known(self.db(), KnownClass::VersionInfo) => self .infer_subscript_expression_types( value_node, - Type::version_info_tuple(self.db), + Type::version_info_tuple(self.db()), slice_ty, ), // Ex) Given `("a", "b", "c", "d")[1]`, return `"b"` (Type::Tuple(tuple_ty), Type::IntLiteral(int)) if i32::try_from(int).is_ok() => { - let elements = tuple_ty.elements(self.db); + let elements = tuple_ty.elements(self.db()); elements .iter() .py_index(i32::try_from(int).expect("checked in branch arm")) .copied() .unwrap_or_else(|_| { - self.diagnostics.add_index_out_of_bounds( + report_index_out_of_bounds( + &self.context, "tuple", value_node.into(), value_ty, @@ -4137,14 +4141,14 @@ impl<'db> TypeInferenceBuilder<'db> { } // Ex) Given `("a", 1, Null)[0:2]`, return `("a", 1)` (Type::Tuple(tuple_ty), Type::SliceLiteral(slice_ty)) => { - let elements = tuple_ty.elements(self.db); - let (start, stop, 
step) = slice_ty.as_tuple(self.db); + let elements = tuple_ty.elements(self.db()); + let (start, stop, step) = slice_ty.as_tuple(self.db()); if let Ok(new_elements) = elements.py_slice(start, stop, step) { let new_elements: Vec<_> = new_elements.copied().collect(); - Type::tuple(self.db, &new_elements) + Type::tuple(self.db(), &new_elements) } else { - self.diagnostics.add_slice_step_size_zero(value_node.into()); + report_slice_step_size_zero(&self.context, value_node.into()); Type::Unknown } } @@ -4152,13 +4156,14 @@ impl<'db> TypeInferenceBuilder<'db> { (Type::StringLiteral(literal_ty), Type::IntLiteral(int)) if i32::try_from(int).is_ok() => { - let literal_value = literal_ty.value(self.db); + let literal_value = literal_ty.value(self.db()); literal_value .chars() .py_index(i32::try_from(int).expect("checked in branch arm")) - .map(|ch| Type::string_literal(self.db, &ch.to_string())) + .map(|ch| Type::string_literal(self.db(), &ch.to_string())) .unwrap_or_else(|_| { - self.diagnostics.add_index_out_of_bounds( + report_index_out_of_bounds( + &self.context, "string", value_node.into(), value_ty, @@ -4170,15 +4175,15 @@ impl<'db> TypeInferenceBuilder<'db> { } // Ex) Given `"value"[1:3]`, return `"al"` (Type::StringLiteral(literal_ty), Type::SliceLiteral(slice_ty)) => { - let literal_value = literal_ty.value(self.db); - let (start, stop, step) = slice_ty.as_tuple(self.db); + let literal_value = literal_ty.value(self.db()); + let (start, stop, step) = slice_ty.as_tuple(self.db()); let chars: Vec<_> = literal_value.chars().collect(); let result = if let Ok(new_chars) = chars.py_slice(start, stop, step) { let literal: String = new_chars.collect(); - Type::string_literal(self.db, &literal) + Type::string_literal(self.db(), &literal) } else { - self.diagnostics.add_slice_step_size_zero(value_node.into()); + report_slice_step_size_zero(&self.context, value_node.into()); Type::Unknown }; result @@ -4187,13 +4192,14 @@ impl<'db> TypeInferenceBuilder<'db> { 
(Type::BytesLiteral(literal_ty), Type::IntLiteral(int)) if i32::try_from(int).is_ok() => { - let literal_value = literal_ty.value(self.db); + let literal_value = literal_ty.value(self.db()); literal_value .iter() .py_index(i32::try_from(int).expect("checked in branch arm")) - .map(|byte| Type::bytes_literal(self.db, &[*byte])) + .map(|byte| Type::bytes_literal(self.db(), &[*byte])) .unwrap_or_else(|_| { - self.diagnostics.add_index_out_of_bounds( + report_index_out_of_bounds( + &self.context, "bytes literal", value_node.into(), value_ty, @@ -4205,14 +4211,14 @@ impl<'db> TypeInferenceBuilder<'db> { } // Ex) Given `b"value"[1:3]`, return `b"al"` (Type::BytesLiteral(literal_ty), Type::SliceLiteral(slice_ty)) => { - let literal_value = literal_ty.value(self.db); - let (start, stop, step) = slice_ty.as_tuple(self.db); + let literal_value = literal_ty.value(self.db()); + let (start, stop, step) = slice_ty.as_tuple(self.db()); if let Ok(new_bytes) = literal_value.py_slice(start, stop, step) { let new_bytes: Vec = new_bytes.copied().collect(); - Type::bytes_literal(self.db, &new_bytes) + Type::bytes_literal(self.db(), &new_bytes) } else { - self.diagnostics.add_slice_step_size_zero(value_node.into()); + report_slice_step_size_zero(&self.context, value_node.into()); Type::Unknown } } @@ -4227,36 +4233,36 @@ impl<'db> TypeInferenceBuilder<'db> { ), (value_ty, slice_ty) => { // Resolve the value to its class. - let value_meta_ty = value_ty.to_meta_type(self.db); + let value_meta_ty = value_ty.to_meta_type(self.db()); // If the class defines `__getitem__`, return its return type. 
// // See: https://docs.python.org/3/reference/datamodel.html#class-getitem-versus-getitem - match value_meta_ty.member(self.db, "__getitem__") { + match value_meta_ty.member(self.db(), "__getitem__") { Symbol::Unbound => {} Symbol::Type(dunder_getitem_method, boundness) => { if boundness == Boundness::PossiblyUnbound { - self.diagnostics.add_lint( + self.context.report_lint( &CALL_POSSIBLY_UNBOUND_METHOD, value_node.into(), format_args!( "Method `__getitem__` of type `{}` is possibly unbound", - value_ty.display(self.db), + value_ty.display(self.db()), ), ); } return dunder_getitem_method - .call(self.db, &[slice_ty]) - .return_ty_result(self.db, value_node.into(), &mut self.diagnostics) + .call(self.db(), &[slice_ty]) + .return_ty_result(&self.context, value_node.into()) .unwrap_or_else(|err| { - self.diagnostics.add_lint( + self.context.report_lint( &CALL_NON_CALLABLE, value_node.into(), format_args!( "Method `__getitem__` of type `{}` is not callable on object of type `{}`", - err.called_ty().display(self.db), - value_ty.display(self.db), + err.called_ty().display(self.db()), + value_ty.display(self.db()), ), ); err.return_ty() @@ -4273,34 +4279,35 @@ impl<'db> TypeInferenceBuilder<'db> { // even if the target version is Python 3.8 or lower, // despite the fact that there will be no corresponding `__class_getitem__` // method in these `sys.version_info` branches. 
- if value_ty.is_subtype_of(self.db, KnownClass::Type.to_instance(self.db)) { - let dunder_class_getitem_method = value_ty.member(self.db, "__class_getitem__"); + if value_ty.is_subtype_of(self.db(), KnownClass::Type.to_instance(self.db())) { + let dunder_class_getitem_method = + value_ty.member(self.db(), "__class_getitem__"); match dunder_class_getitem_method { Symbol::Unbound => {} Symbol::Type(ty, boundness) => { if boundness == Boundness::PossiblyUnbound { - self.diagnostics.add_lint( + self.context.report_lint( &CALL_POSSIBLY_UNBOUND_METHOD, value_node.into(), format_args!( "Method `__class_getitem__` of type `{}` is possibly unbound", - value_ty.display(self.db), + value_ty.display(self.db()), ), ); } return ty - .call(self.db, &[slice_ty]) - .return_ty_result(self.db, value_node.into(), &mut self.diagnostics) + .call(self.db(), &[slice_ty]) + .return_ty_result(&self.context, value_node.into()) .unwrap_or_else(|err| { - self.diagnostics.add_lint( + self.context.report_lint( &CALL_NON_CALLABLE, value_node.into(), format_args!( "Method `__class_getitem__` of type `{}` is not callable on object of type `{}`", - err.called_ty().display(self.db), - value_ty.display(self.db), + err.called_ty().display(self.db()), + value_ty.display(self.db()), ), ); err.return_ty() @@ -4308,18 +4315,20 @@ impl<'db> TypeInferenceBuilder<'db> { } } - if matches!(value_ty, Type::ClassLiteral(ClassLiteralType { class }) if class.is_known(self.db, KnownClass::Type)) + if matches!(value_ty, Type::ClassLiteral(ClassLiteralType { class }) if class.is_known(self.db(), KnownClass::Type)) { - return KnownClass::GenericAlias.to_instance(self.db); + return KnownClass::GenericAlias.to_instance(self.db()); } - self.diagnostics.add_non_subscriptable( + report_non_subscriptable( + &self.context, value_node.into(), value_ty, "__class_getitem__", ); } else { - self.diagnostics.add_non_subscriptable( + report_non_subscriptable( + &self.context, value_node.into(), value_ty, "__getitem__", @@ -4355,7 
+4364,7 @@ impl<'db> TypeInferenceBuilder<'db> { }, Some(Type::BooleanLiteral(b)) => SliceArg::Arg(Some(i32::from(b))), Some(Type::Instance(InstanceType { class })) - if class.is_known(self.db, KnownClass::NoneType) => + if class.is_known(self.db(), KnownClass::NoneType) => { SliceArg::Arg(None) } @@ -4369,9 +4378,9 @@ impl<'db> TypeInferenceBuilder<'db> { type_to_slice_argument(ty_step), ) { (SliceArg::Arg(lower), SliceArg::Arg(upper), SliceArg::Arg(step)) => { - Type::SliceLiteral(SliceLiteralType::new(self.db, lower, upper, step)) + Type::SliceLiteral(SliceLiteralType::new(self.db(), lower, upper, step)) } - _ => KnownClass::Slice.to_instance(self.db), + _ => KnownClass::Slice.to_instance(self.db()), } } @@ -4391,7 +4400,7 @@ impl<'db> TypeInferenceBuilder<'db> { pub(super) fn finish(mut self) -> TypeInference<'db> { self.infer_region(); - self.types.diagnostics = self.diagnostics.finish(); + self.types.diagnostics = self.context.finish(); self.types.shrink_to_fit(); self.types } @@ -4436,7 +4445,7 @@ impl<'db> TypeInferenceBuilder<'db> { ast::Expr::Starred(starred) => self.infer_starred_expression(starred), ast::Expr::BytesLiteral(bytes) => { - self.diagnostics.add_lint( + self.context.report_lint( &BYTE_STRING_TYPE_ANNOTATION, bytes.into(), format_args!("Type expressions cannot use bytes literal"), @@ -4445,7 +4454,7 @@ impl<'db> TypeInferenceBuilder<'db> { } ast::Expr::FString(fstring) => { - self.diagnostics.add_lint( + self.context.report_lint( &FSTRING_TYPE_ANNOTATION, fstring.into(), format_args!("Type expressions cannot use f-strings"), @@ -4466,18 +4475,15 @@ impl<'db> TypeInferenceBuilder<'db> { /// Infer the type of a string annotation expression. 
fn infer_string_annotation_expression(&mut self, string: &ast::ExprStringLiteral) -> Type<'db> { - match parse_string_annotation(self.db, self.file, string) { - Ok(parsed) => { + match parse_string_annotation(&self.context, string) { + Some(parsed) => { // String annotations are always evaluated in the deferred context. self.infer_annotation_expression( parsed.expr(), DeferredExpressionState::InStringAnnotation, ) } - Err(diagnostics) => { - self.diagnostics.extend(&diagnostics); - Type::Unknown - } + None => Type::Unknown, } } } @@ -4523,10 +4529,8 @@ impl<'db> TypeInferenceBuilder<'db> { ast::Expr::Name(name) => match name.ctx { ast::ExprContext::Load => self .infer_name_expression(name) - .in_type_expression(self.db) - .unwrap_or_else(|error| { - error.into_fallback_type(&mut self.diagnostics, expression) - }), + .in_type_expression(self.db()) + .unwrap_or_else(|error| error.into_fallback_type(&self.context, expression)), ast::ExprContext::Invalid => Type::Unknown, ast::ExprContext::Store | ast::ExprContext::Del => todo_type!(), }, @@ -4534,15 +4538,13 @@ impl<'db> TypeInferenceBuilder<'db> { ast::Expr::Attribute(attribute_expression) => match attribute_expression.ctx { ast::ExprContext::Load => self .infer_attribute_expression(attribute_expression) - .in_type_expression(self.db) - .unwrap_or_else(|error| { - error.into_fallback_type(&mut self.diagnostics, expression) - }), + .in_type_expression(self.db()) + .unwrap_or_else(|error| error.into_fallback_type(&self.context, expression)), ast::ExprContext::Invalid => Type::Unknown, ast::ExprContext::Store | ast::ExprContext::Del => todo_type!(), }, - ast::Expr::NoneLiteral(_literal) => Type::none(self.db), + ast::Expr::NoneLiteral(_literal) => Type::none(self.db()), // https://typing.readthedocs.io/en/latest/spec/annotations.html#string-annotations ast::Expr::StringLiteral(string) => self.infer_string_type_expression(string), @@ -4570,7 +4572,7 @@ impl<'db> TypeInferenceBuilder<'db> { match value_ty { 
Type::ClassLiteral(class_literal_ty) => { - match class_literal_ty.class.known(self.db) { + match class_literal_ty.class.known(self.db()) { Some(KnownClass::Tuple) => self.infer_tuple_type_expression(slice), Some(KnownClass::Type) => self.infer_subclass_of_type_expression(slice), _ => self.infer_subscript_type_expression(subscript, value_ty), @@ -4586,7 +4588,7 @@ impl<'db> TypeInferenceBuilder<'db> { ast::Operator::BitOr => { let left_ty = self.infer_type_expression(&binary.left); let right_ty = self.infer_type_expression(&binary.right); - UnionType::from_elements(self.db, [left_ty, right_ty]) + UnionType::from_elements(self.db(), [left_ty, right_ty]) } // anything else is an invalid annotation: _ => { @@ -4695,18 +4697,15 @@ impl<'db> TypeInferenceBuilder<'db> { /// Infer the type of a string type expression. fn infer_string_type_expression(&mut self, string: &ast::ExprStringLiteral) -> Type<'db> { - match parse_string_annotation(self.db, self.file, string) { - Ok(parsed) => { + match parse_string_annotation(&self.context, string) { + Some(parsed) => { // String annotations are always evaluated in the deferred context. self.infer_type_expression_with_state( parsed.expr(), DeferredExpressionState::InStringAnnotation, ) } - Err(diagnostics) => { - self.diagnostics.extend(&diagnostics); - Type::Unknown - } + None => Type::Unknown, } } @@ -4749,7 +4748,7 @@ impl<'db> TypeInferenceBuilder<'db> { let ty = if return_todo { todo_type!("full tuple[...] 
support") } else { - Type::tuple(self.db, &element_types) + Type::tuple(self.db(), &element_types) }; // Here, we store the type for the inner `int, str` tuple-expression, @@ -4764,7 +4763,7 @@ impl<'db> TypeInferenceBuilder<'db> { if element_could_alter_type_of_whole_tuple(single_element, single_element_ty) { todo_type!() } else { - Type::tuple(self.db, [single_element_ty]) + Type::tuple(self.db(), [single_element_ty]) } } } @@ -4785,7 +4784,7 @@ impl<'db> TypeInferenceBuilder<'db> { } ast::Expr::BinOp(binary) if binary.op == ast::Operator::BitOr => { let union_ty = UnionType::from_elements( - self.db, + self.db(), [ self.infer_subclass_of_type_expression(&binary.left), self.infer_subclass_of_type_expression(&binary.right), @@ -4797,7 +4796,7 @@ impl<'db> TypeInferenceBuilder<'db> { } ast::Expr::Tuple(_) => { self.infer_type_expression(slice); - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_TYPE_FORM, slice.into(), format_args!("type[...] must have exactly one type argument"), @@ -4813,7 +4812,7 @@ impl<'db> TypeInferenceBuilder<'db> { Type::KnownInstance(KnownInstanceType::Union) => match &**parameters { ast::Expr::Tuple(tuple) => { let ty = UnionType::from_elements( - self.db, + self.db(), tuple .iter() .map(|element| self.infer_subclass_of_type_expression(element)), @@ -4874,13 +4873,13 @@ impl<'db> TypeInferenceBuilder<'db> { let arguments_slice = &*subscript.slice; match known_instance { KnownInstanceType::Annotated => { - let mut report_invalid_arguments = || { - self.diagnostics.add_lint( + let report_invalid_arguments = || { + self.context.report_lint( &INVALID_TYPE_FORM, subscript.into(), format_args!( "Special form `{}` expected at least 2 arguments (one type and at least one metadata element)", - known_instance.repr(self.db) + known_instance.repr(self.db()) ), ); }; @@ -4920,7 +4919,7 @@ impl<'db> TypeInferenceBuilder<'db> { Ok(ty) => ty, Err(nodes) => { for node in nodes { - self.diagnostics.add_lint( + self.context.report_lint( 
&INVALID_TYPE_FORM, node.into(), format_args!( @@ -4935,12 +4934,12 @@ impl<'db> TypeInferenceBuilder<'db> { } KnownInstanceType::Optional => { let param_type = self.infer_type_expression(arguments_slice); - UnionType::from_elements(self.db, [param_type, Type::none(self.db)]) + UnionType::from_elements(self.db(), [param_type, Type::none(self.db())]) } KnownInstanceType::Union => match arguments_slice { ast::Expr::Tuple(t) => { let union_ty = UnionType::from_elements( - self.db, + self.db(), t.iter().map(|elt| self.infer_type_expression(elt)), ); self.store_expression_type(arguments_slice, union_ty); @@ -4964,39 +4963,39 @@ impl<'db> TypeInferenceBuilder<'db> { // TODO: Generics KnownInstanceType::ChainMap => { self.infer_type_expression(arguments_slice); - KnownClass::ChainMap.to_instance(self.db) + KnownClass::ChainMap.to_instance(self.db()) } KnownInstanceType::OrderedDict => { self.infer_type_expression(arguments_slice); - KnownClass::OrderedDict.to_instance(self.db) + KnownClass::OrderedDict.to_instance(self.db()) } KnownInstanceType::Dict => { self.infer_type_expression(arguments_slice); - KnownClass::Dict.to_instance(self.db) + KnownClass::Dict.to_instance(self.db()) } KnownInstanceType::List => { self.infer_type_expression(arguments_slice); - KnownClass::List.to_instance(self.db) + KnownClass::List.to_instance(self.db()) } KnownInstanceType::DefaultDict => { self.infer_type_expression(arguments_slice); - KnownClass::DefaultDict.to_instance(self.db) + KnownClass::DefaultDict.to_instance(self.db()) } KnownInstanceType::Counter => { self.infer_type_expression(arguments_slice); - KnownClass::Counter.to_instance(self.db) + KnownClass::Counter.to_instance(self.db()) } KnownInstanceType::Set => { self.infer_type_expression(arguments_slice); - KnownClass::Set.to_instance(self.db) + KnownClass::Set.to_instance(self.db()) } KnownInstanceType::FrozenSet => { self.infer_type_expression(arguments_slice); - KnownClass::FrozenSet.to_instance(self.db) + 
KnownClass::FrozenSet.to_instance(self.db()) } KnownInstanceType::Deque => { self.infer_type_expression(arguments_slice); - KnownClass::Deque.to_instance(self.db) + KnownClass::Deque.to_instance(self.db()) } KnownInstanceType::ReadOnly => { @@ -5036,34 +5035,34 @@ impl<'db> TypeInferenceBuilder<'db> { todo_type!("Unpack[] special form") } KnownInstanceType::NoReturn | KnownInstanceType::Never | KnownInstanceType::Any => { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_TYPE_FORM, subscript.into(), format_args!( "Type `{}` expected no type parameter", - known_instance.repr(self.db) + known_instance.repr(self.db()) ), ); Type::Unknown } KnownInstanceType::TypingSelf | KnownInstanceType::TypeAlias => { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_TYPE_FORM, subscript.into(), format_args!( "Special form `{}` expected no type parameter", - known_instance.repr(self.db) + known_instance.repr(self.db()) ), ); Type::Unknown } KnownInstanceType::LiteralString => { - self.diagnostics.add_lint( + self.context.report_lint( &INVALID_TYPE_FORM, subscript.into(), format_args!( "Type `{}` expected no type parameter. 
Did you mean to use `Literal[...]` instead?", - known_instance.repr(self.db) + known_instance.repr(self.db()) ), ); Type::Unknown @@ -5096,7 +5095,7 @@ impl<'db> TypeInferenceBuilder<'db> { } ast::Expr::Tuple(tuple) if !tuple.parenthesized => { let mut errors = vec![]; - let mut builder = UnionBuilder::new(self.db); + let mut builder = UnionBuilder::new(self.db()); for elt in tuple { match self.infer_literal_parameter_type(elt) { Ok(ty) => { @@ -5134,7 +5133,7 @@ impl<'db> TypeInferenceBuilder<'db> { let value_ty = self.infer_expression(value); // TODO: Check that value type is enum otherwise return None value_ty - .member(self.db, &attr.id) + .member(self.db(), &attr.id) .ignore_possibly_unbound() .unwrap_or(Type::Unknown) } diff --git a/crates/red_knot_python_semantic/src/types/string_annotation.rs b/crates/red_knot_python_semantic/src/types/string_annotation.rs index 6ae6b66581..52096a7311 100644 --- a/crates/red_knot_python_semantic/src/types/string_annotation.rs +++ b/crates/red_knot_python_semantic/src/types/string_annotation.rs @@ -1,13 +1,13 @@ -use ruff_db::files::File; use ruff_db::source::source_text; use ruff_python_ast::str::raw_contents; use ruff_python_ast::{self as ast, ModExpression, StringFlags}; use ruff_python_parser::{parse_expression_range, Parsed}; use ruff_text_size::Ranged; +use crate::declare_lint; use crate::lint::{Level, LintStatus}; -use crate::types::diagnostic::{TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder}; -use crate::{declare_lint, Db}; + +use super::context::InferContext; declare_lint! { /// ## What it does @@ -127,24 +127,23 @@ declare_lint! { } } -type AnnotationParseResult = Result, TypeCheckDiagnostics>; - /// Parses the given expression as a string annotation. 
pub(crate) fn parse_string_annotation( - db: &dyn Db, - file: File, + context: &InferContext, string_expr: &ast::ExprStringLiteral, -) -> AnnotationParseResult { +) -> Option> { + let file = context.file(); + let db = context.db(); + let _span = tracing::trace_span!("parse_string_annotation", string=?string_expr.range(), file=%file.path(db)).entered(); let source = source_text(db.upcast(), file); let node_text = &source[string_expr.range()]; - let mut diagnostics = TypeCheckDiagnosticsBuilder::new(db, file); if let [string_literal] = string_expr.value.as_slice() { let prefix = string_literal.flags.prefix(); if prefix.is_raw() { - diagnostics.add_lint( + context.report_lint( &RAW_STRING_TYPE_ANNOTATION, string_literal.into(), format_args!("Type expressions cannot use raw string literal"), @@ -167,8 +166,8 @@ pub(crate) fn parse_string_annotation( // """ = 1 // ``` match parse_expression_range(source.as_str(), range_excluding_quotes) { - Ok(parsed) => return Ok(parsed), - Err(parse_error) => diagnostics.add_lint( + Ok(parsed) => return Some(parsed), + Err(parse_error) => context.report_lint( &INVALID_SYNTAX_IN_FORWARD_ANNOTATION, string_literal.into(), format_args!("Syntax error in forward annotation: {}", parse_error.error), @@ -177,7 +176,7 @@ pub(crate) fn parse_string_annotation( } else { // The raw contents of the string doesn't match the parsed content. This could be the // case for annotations that contain escape sequences. - diagnostics.add_lint( + context.report_lint( &ESCAPE_CHARACTER_IN_FORWARD_ANNOTATION, string_expr.into(), format_args!("Type expressions cannot contain escape characters"), @@ -185,12 +184,12 @@ pub(crate) fn parse_string_annotation( } } else { // String is implicitly concatenated. 
- diagnostics.add_lint( + context.report_lint( &IMPLICIT_CONCATENATED_STRING_TYPE_ANNOTATION, string_expr.into(), format_args!("Type expressions cannot span multiple string literals"), ); } - Err(diagnostics.finish()) + None } diff --git a/crates/red_knot_python_semantic/src/types/unpacker.rs b/crates/red_knot_python_semantic/src/types/unpacker.rs index 99d45ecd3e..aa1820357a 100644 --- a/crates/red_knot_python_semantic/src/types/unpacker.rs +++ b/crates/red_knot_python_semantic/src/types/unpacker.rs @@ -6,30 +6,34 @@ use rustc_hash::FxHashMap; use crate::semantic_index::ast_ids::{HasScopedExpressionId, ScopedExpressionId}; use crate::semantic_index::symbol::ScopeId; -use crate::types::{todo_type, Type, TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder}; +use crate::types::{todo_type, Type, TypeCheckDiagnostics}; use crate::Db; +use super::context::{InferContext, WithDiagnostics}; + /// Unpacks the value expression type to their respective targets. pub(crate) struct Unpacker<'db> { - db: &'db dyn Db, + context: InferContext<'db>, targets: FxHashMap>, - diagnostics: TypeCheckDiagnosticsBuilder<'db>, } impl<'db> Unpacker<'db> { pub(crate) fn new(db: &'db dyn Db, file: File) -> Self { Self { - db, + context: InferContext::new(db, file), targets: FxHashMap::default(), - diagnostics: TypeCheckDiagnosticsBuilder::new(db, file), } } + fn db(&self) -> &'db dyn Db { + self.context.db() + } + pub(crate) fn unpack(&mut self, target: &ast::Expr, value_ty: Type<'db>, scope: ScopeId<'db>) { match target { ast::Expr::Name(target_name) => { self.targets - .insert(target_name.scoped_expression_id(self.db, scope), value_ty); + .insert(target_name.scoped_expression_id(self.db(), scope), value_ty); } ast::Expr::Starred(ast::ExprStarred { value, .. 
}) => { self.unpack(value, value_ty, scope); @@ -40,11 +44,11 @@ impl<'db> Unpacker<'db> { let starred_index = elts.iter().position(ast::Expr::is_starred_expr); let element_types = if let Some(starred_index) = starred_index { - if tuple_ty.len(self.db) >= elts.len() - 1 { + if tuple_ty.len(self.db()) >= elts.len() - 1 { let mut element_types = Vec::with_capacity(elts.len()); element_types.extend_from_slice( // SAFETY: Safe because of the length check above. - &tuple_ty.elements(self.db)[..starred_index], + &tuple_ty.elements(self.db())[..starred_index], ); // E.g., in `(a, *b, c, d) = ...`, the index of starred element `b` @@ -52,10 +56,10 @@ impl<'db> Unpacker<'db> { let remaining = elts.len() - (starred_index + 1); // This index represents the type of the last element that belongs // to the starred expression, in an exclusive manner. - let starred_end_index = tuple_ty.len(self.db) - remaining; + let starred_end_index = tuple_ty.len(self.db()) - remaining; // SAFETY: Safe because of the length check above. let _starred_element_types = - &tuple_ty.elements(self.db)[starred_index..starred_end_index]; + &tuple_ty.elements(self.db())[starred_index..starred_end_index]; // TODO: Combine the types into a list type. If the // starred_element_types is empty, then it should be `List[Any]`. // combine_types(starred_element_types); @@ -63,11 +67,11 @@ impl<'db> Unpacker<'db> { element_types.extend_from_slice( // SAFETY: Safe because of the length check above. - &tuple_ty.elements(self.db)[starred_end_index..], + &tuple_ty.elements(self.db())[starred_end_index..], ); Cow::Owned(element_types) } else { - let mut element_types = tuple_ty.elements(self.db).to_vec(); + let mut element_types = tuple_ty.elements(self.db()).to_vec(); // Subtract 1 to insert the starred expression type at the correct // index. 
element_types.resize(elts.len() - 1, Type::Unknown); @@ -76,7 +80,7 @@ impl<'db> Unpacker<'db> { Cow::Owned(element_types) } } else { - Cow::Borrowed(tuple_ty.elements(self.db).as_ref()) + Cow::Borrowed(tuple_ty.elements(self.db()).as_ref()) }; for (index, element) in elts.iter().enumerate() { @@ -94,9 +98,9 @@ impl<'db> Unpacker<'db> { // individual character, instead of just an array of `LiteralString`, but // there would be a cost and it's not clear that it's worth it. let value_ty = Type::tuple( - self.db, + self.db(), std::iter::repeat(Type::LiteralString) - .take(string_literal_ty.python_len(self.db)), + .take(string_literal_ty.python_len(self.db())), ); self.unpack(target, value_ty, scope); } @@ -105,8 +109,8 @@ impl<'db> Unpacker<'db> { Type::LiteralString } else { value_ty - .iterate(self.db) - .unwrap_with_diagnostic(AnyNodeRef::from(target), &mut self.diagnostics) + .iterate(self.db()) + .unwrap_with_diagnostic(&self.context, AnyNodeRef::from(target)) }; for element in elts { self.unpack(element, value_ty, scope); @@ -120,7 +124,7 @@ impl<'db> Unpacker<'db> { pub(crate) fn finish(mut self) -> UnpackResult<'db> { self.targets.shrink_to_fit(); UnpackResult { - diagnostics: self.diagnostics.finish(), + diagnostics: self.context.finish(), targets: self.targets, } } @@ -136,8 +140,10 @@ impl<'db> UnpackResult<'db> { pub(crate) fn get(&self, expr_id: ScopedExpressionId) -> Option> { self.targets.get(&expr_id).copied() } +} - pub(crate) fn diagnostics(&self) -> &TypeCheckDiagnostics { +impl WithDiagnostics for UnpackResult<'_> { + fn diagnostics(&self) -> &TypeCheckDiagnostics { &self.diagnostics } }