Compare commits

...

6 Commits

Author SHA1 Message Date
Micha Reiser
68c8cb93d6 Use internal Salsa APIs to make it compile 2025-12-26 11:20:00 +01:00
Micha Reiser
4bd405e1c5 [ty] Test performance and memory impact of shrinking Salsa's DB key 2025-12-26 11:09:33 +01:00
Micha Reiser
9693375e10 [ty] Reduce monomorphization (#22195) 2025-12-26 10:02:20 +01:00
Matthew Mckee
1ec3503cc3 [ty] Fix playground inlay hint location (#22200) 2025-12-26 09:20:57 +01:00
Alex Waygood
19b10993e1 [ty] Automatically re-run ecosystem-analyzer workflow on subsequent pushes to a PR, if the PR has the ecosystem-analyzer label (#22179)
## Summary

This PR reworks our ecosystem-analyzer workflow so that it automatically
reruns if a PR with the `ecosystem-analyzer` label has new commits
pushed to it, or is reopened after previously being closed. It's
currently easy to forget that you need to remove and re-add the label to
trigger a fresh workflow run, which can then mean that there are stale
(misleading) results in the PR comment posted by the bot. It also means
that it takes longer for CI to finish than it would otherwise, because
it might be a few minutes after pushing new commits to the PR before you
remember that you also need to remove and re-add the label.

To write this PR, I consulted:
- The GitHub workflow trigger documentation:
https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#pull_request
- This Stack Overflow answer:
https://stackoverflow.com/a/59588725/13990016

## Test Plan

I experimented with pushing commits to this PR and closing/reopening it,
and both of these actions triggered fresh runs of the ecosystem-analyzer
workflow when the label was present on the PR. However, removing the
label again meant that the workflow was no longer triggered by these
actions.
2025-12-25 17:41:19 +00:00
Micha Reiser
014abe1ee1 [ty] Fix completion in decorators with missing declaration (#22177) 2025-12-25 15:05:47 +00:00
19 changed files with 429 additions and 173 deletions

View File

@@ -4,7 +4,13 @@ permissions: {}
on:
pull_request:
types: [labeled]
# The default for `pull_request` is to trigger on `synchronize`, `opened` and `reopened`.
# We also add `labeled` here so that the workflow triggers when a label is initially added.
types:
- labeled
- synchronize
- opened
- reopened
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
@@ -23,7 +29,7 @@ jobs:
name: Compute diagnostic diff
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
if: contains(github.event.label.name, 'ecosystem-analyzer')
if: contains( github.event.pull_request.labels.*.name, 'ecosystem-analyzer')
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:

View File

@@ -1,3 +1,7 @@
# This workflow is a cron job that generates a report describing
# all diagnostics ty emits across the whole ecosystem. The report
# is uploaded to https://ty-ecosystem-ext.pages.dev/ on a weekly basis.
name: ty ecosystem-report
permissions: {}

6
Cargo.lock generated
View File

@@ -3621,7 +3621,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=0298d2951e45ccf1450f1a09fb379dc7e48ddee9#0298d2951e45ccf1450f1a09fb379dc7e48ddee9"
dependencies = [
"boxcar",
"compact_str",
@@ -3646,12 +3646,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=0298d2951e45ccf1450f1a09fb379dc7e48ddee9#0298d2951e45ccf1450f1a09fb379dc7e48ddee9"
[[package]]
name = "salsa-macros"
version = "0.25.2"
source = "git+https://github.com/salsa-rs/salsa.git?rev=ce80691fa0b87dc2fd2235a26544e63e5e43d8d3#ce80691fa0b87dc2fd2235a26544e63e5e43d8d3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=0298d2951e45ccf1450f1a09fb379dc7e48ddee9#0298d2951e45ccf1450f1a09fb379dc7e48ddee9"
dependencies = [
"proc-macro2",
"quote",

View File

@@ -147,7 +147,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "ce80691fa0b87dc2fd2235a26544e63e5e43d8d3", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "0298d2951e45ccf1450f1a09fb379dc7e48ddee9", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",

View File

@@ -37,7 +37,7 @@ pub fn assert_const_function_query_was_not_run<Db, Q, QDb, R>(
let event = events.iter().find(|event| {
if let salsa::EventKind::WillExecute { database_key } = event.kind {
db.ingredient_debug_name(database_key.ingredient_index()) == query_name
db.ingredient_debug_name(database_key.ingredient_index(db.zalsa())) == query_name
} else {
false
}
@@ -89,7 +89,7 @@ where
let event = events.iter().find(|event| {
if let salsa::EventKind::WillExecute { database_key } = event.kind {
db.ingredient_debug_name(database_key.ingredient_index()) == query_name
db.ingredient_debug_name(database_key.ingredient_index(db.zalsa())) == query_name
&& database_key.key_index() == input.as_id()
} else {
false

View File

@@ -3,3 +3,6 @@
def foo(): ...
@@
def foo(): ...
@test
@
class Test

View File

@@ -477,6 +477,17 @@ impl<'src> Parser<'src> {
}
}
pub(super) fn parse_missing_name(&mut self) -> ast::ExprName {
let identifier = self.parse_missing_identifier();
ast::ExprName {
range: identifier.range,
id: identifier.id,
ctx: ExprContext::Invalid,
node_index: AtomicNodeIndex::NONE,
}
}
/// Parses an identifier.
///
/// For an invalid identifier, the `id` field will be an empty string.
@@ -524,16 +535,20 @@ impl<'src> Parser<'src> {
node_index: AtomicNodeIndex::NONE,
}
} else {
self.add_error(
ParseErrorType::OtherError("Expected an identifier".into()),
range,
);
self.parse_missing_identifier()
}
}
ast::Identifier {
id: Name::empty(),
range: self.missing_node_range(),
node_index: AtomicNodeIndex::NONE,
}
fn parse_missing_identifier(&mut self) -> ast::Identifier {
self.add_error(
ParseErrorType::OtherError("Expected an identifier".into()),
self.current_token_range(),
);
ast::Identifier {
id: Name::empty(),
range: self.missing_node_range(),
node_index: AtomicNodeIndex::NONE,
}
}

View File

@@ -2782,13 +2782,20 @@ impl<'src> Parser<'src> {
// def foo(): ...
// @@
// def foo(): ...
// @test
// @
// class Test
while self.at(TokenKind::At) {
progress.assert_progressing(self);
let decorator_start = self.node_start();
self.bump(TokenKind::At);
let parsed_expr = self.parse_named_expression_or_higher(ExpressionContext::default());
let parsed_expr = if self.at(TokenKind::Def) || self.at(TokenKind::Class) {
Expr::Name(self.parse_missing_name()).into()
} else {
self.parse_named_expression_or_higher(ExpressionContext::default())
};
if self.options.target_version < PythonVersion::PY39 {
// test_ok decorator_expression_dotted_ident_py38
@@ -2914,21 +2921,27 @@ impl<'src> Parser<'src> {
self.current_token_range(),
);
// TODO(dhruvmanila): It seems that this recovery drops all the parsed
// decorators. Maybe we could convert them into statement expression
// with a flag indicating that this expression is part of a decorator.
// It's only possible to keep them if it's a function or class definition.
// We could possibly keep them if there's indentation error:
//
// ```python
// @decorator
// @decorator
// def foo(): ...
// ```
//
// Or, parse it as a binary expression where the left side is missing.
// We would need to convert each decorator into a binary expression.
self.parse_statement()
let range = self.node_range(start);
ast::StmtFunctionDef {
node_index: AtomicNodeIndex::default(),
range,
is_async: false,
decorator_list: decorators,
name: ast::Identifier {
id: Name::empty(),
range: self.missing_node_range(),
node_index: AtomicNodeIndex::NONE,
},
type_params: None,
parameters: Box::new(ast::Parameters {
range: self.missing_node_range(),
..ast::Parameters::default()
}),
returns: None,
body: vec![],
}
.into()
}
}
}

View File

@@ -1,6 +1,5 @@
---
source: crates/ruff_python_parser/tests/fixtures.rs
input_file: crates/ruff_python_parser/resources/inline/err/decorator_missing_expression.py
---
## AST
@@ -8,40 +7,57 @@ input_file: crates/ruff_python_parser/resources/inline/err/decorator_missing_exp
Module(
ModModule {
node_index: NodeIndex(None),
range: 0..51,
range: 0..70,
body: [
AnnAssign(
StmtAnnAssign {
FunctionDef(
StmtFunctionDef {
node_index: NodeIndex(None),
range: 5..15,
target: Call(
ExprCall {
range: 0..15,
is_async: false,
decorator_list: [
Decorator {
range: 0..1,
node_index: NodeIndex(None),
range: 5..10,
func: Name(
expression: Name(
ExprName {
node_index: NodeIndex(None),
range: 5..8,
id: Name("foo"),
ctx: Load,
range: 1..1,
id: Name(""),
ctx: Invalid,
},
),
arguments: Arguments {
range: 8..10,
},
],
name: Identifier {
id: Name("foo"),
range: 5..8,
node_index: NodeIndex(None),
},
type_params: None,
parameters: Parameters {
range: 8..10,
node_index: NodeIndex(None),
posonlyargs: [],
args: [],
vararg: None,
kwonlyargs: [],
kwarg: None,
},
returns: None,
body: [
Expr(
StmtExpr {
node_index: NodeIndex(None),
args: [],
keywords: [],
range: 12..15,
value: EllipsisLiteral(
ExprEllipsisLiteral {
node_index: NodeIndex(None),
range: 12..15,
},
),
},
},
),
annotation: EllipsisLiteral(
ExprEllipsisLiteral {
node_index: NodeIndex(None),
range: 12..15,
},
),
value: None,
simple: false,
),
],
},
),
FunctionDef(
@@ -161,6 +177,46 @@ Module(
],
},
),
ClassDef(
StmtClassDef {
node_index: NodeIndex(None),
range: 51..69,
decorator_list: [
Decorator {
range: 51..56,
node_index: NodeIndex(None),
expression: Name(
ExprName {
node_index: NodeIndex(None),
range: 52..56,
id: Name("test"),
ctx: Load,
},
),
},
Decorator {
range: 57..58,
node_index: NodeIndex(None),
expression: Name(
ExprName {
node_index: NodeIndex(None),
range: 58..58,
id: Name(""),
ctx: Invalid,
},
),
},
],
name: Identifier {
id: Name("Test"),
range: 65..69,
node_index: NodeIndex(None),
},
type_params: None,
arguments: None,
body: [],
},
),
],
},
)
@@ -169,15 +225,7 @@ Module(
|
1 | @def foo(): ...
| ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here
2 | @
3 | def foo(): ...
|
|
1 | @def foo(): ...
| ^^^ Syntax Error: Expected newline, found name
| ^^^ Syntax Error: Expected an identifier
2 | @
3 | def foo(): ...
|
@@ -199,6 +247,7 @@ Module(
4 | @@
| ^ Syntax Error: Expected an expression
5 | def foo(): ...
6 | @test
|
@@ -208,4 +257,23 @@ Module(
4 | @@
| ^ Syntax Error: Expected an expression
5 | def foo(): ...
6 | @test
7 | @
|
|
5 | def foo(): ...
6 | @test
7 | @
| ^ Syntax Error: Expected an expression
8 | class Test
|
|
6 | @test
7 | @
8 | class Test
| ^ Syntax Error: Expected `:`, found newline
|

View File

@@ -1,6 +1,5 @@
---
source: crates/ruff_python_parser/tests/fixtures.rs
input_file: crates/ruff_python_parser/resources/inline/err/decorator_unexpected_token.py
---
## AST
@@ -10,6 +9,44 @@ Module(
node_index: NodeIndex(None),
range: 0..34,
body: [
FunctionDef(
StmtFunctionDef {
node_index: NodeIndex(None),
range: 0..4,
is_async: false,
decorator_list: [
Decorator {
range: 0..4,
node_index: NodeIndex(None),
expression: Name(
ExprName {
node_index: NodeIndex(None),
range: 1..4,
id: Name("foo"),
ctx: Load,
},
),
},
],
name: Identifier {
id: Name(""),
range: 4..4,
node_index: NodeIndex(None),
},
type_params: None,
parameters: Parameters {
range: 4..4,
node_index: NodeIndex(None),
posonlyargs: [],
args: [],
vararg: None,
kwonlyargs: [],
kwarg: None,
},
returns: None,
body: [],
},
),
With(
StmtWith {
node_index: NodeIndex(None),
@@ -46,6 +83,44 @@ Module(
],
},
),
FunctionDef(
StmtFunctionDef {
node_index: NodeIndex(None),
range: 23..27,
is_async: false,
decorator_list: [
Decorator {
range: 23..27,
node_index: NodeIndex(None),
expression: Name(
ExprName {
node_index: NodeIndex(None),
range: 24..27,
id: Name("foo"),
ctx: Load,
},
),
},
],
name: Identifier {
id: Name(""),
range: 27..27,
node_index: NodeIndex(None),
},
type_params: None,
parameters: Parameters {
range: 27..27,
node_index: NodeIndex(None),
posonlyargs: [],
args: [],
vararg: None,
kwonlyargs: [],
kwarg: None,
},
returns: None,
body: [],
},
),
Assign(
StmtAssign {
node_index: NodeIndex(None),

View File

@@ -6686,6 +6686,19 @@ def func():
.not_contains("False");
}
#[test]
fn decorator_without_class_or_function() {
completion_test_builder(
"\
from dataclasses import dataclass
@dataclass(froz<CURSOR>
",
)
.build()
.contains("frozen");
}
#[test]
fn statement_keywords_in_if_body() {
completion_test_builder(

View File

@@ -10889,25 +10889,37 @@ impl<'db> UnionTypeInstance<'db> {
scope_id: ScopeId<'db>,
typevar_binding_context: Option<Definition<'db>>,
) -> Type<'db> {
let value_expr_types = value_expr_types.into_iter().collect::<Box<_>>();
let mut builder = UnionBuilder::new(db);
for ty in &value_expr_types {
match ty.in_type_expression(db, scope_id, typevar_binding_context) {
Ok(ty) => builder.add_in_place(ty),
Err(error) => {
return Type::KnownInstance(KnownInstanceType::UnionType(
UnionTypeInstance::new(db, Some(value_expr_types), Err(error)),
));
fn from_value_expression_types_impl<'db>(
db: &'db dyn Db,
value_expr_types: Box<[Type<'db>]>,
scope_id: ScopeId<'db>,
typevar_binding_context: Option<Definition<'db>>,
) -> Type<'db> {
let mut builder = UnionBuilder::new(db);
for ty in &value_expr_types {
match ty.in_type_expression(db, scope_id, typevar_binding_context) {
Ok(ty) => builder.add_in_place(ty),
Err(error) => {
return Type::KnownInstance(KnownInstanceType::UnionType(
UnionTypeInstance::new(db, Some(value_expr_types), Err(error)),
));
}
}
}
Type::KnownInstance(KnownInstanceType::UnionType(UnionTypeInstance::new(
db,
Some(value_expr_types),
Ok(builder.build()),
)))
}
Type::KnownInstance(KnownInstanceType::UnionType(UnionTypeInstance::new(
from_value_expression_types_impl(
db,
Some(value_expr_types),
Ok(builder.build()),
)))
value_expr_types.into_iter().collect(),
scope_id,
typevar_binding_context,
)
}
/// Get the types of the elements of this union as they would appear in a value

View File

@@ -5124,27 +5124,46 @@ impl KnownClass {
db: &'db dyn Db,
specialization: impl IntoIterator<Item = Type<'db>>,
) -> Option<ClassType<'db>> {
fn to_specialized_class_type_impl<'db>(
db: &'db dyn Db,
class: KnownClass,
class_literal: ClassLiteral<'db>,
specialization: Box<[Type<'db>]>,
generic_context: GenericContext<'db>,
) -> ClassType<'db> {
if specialization.len() != generic_context.len(db) {
// a cache of the `KnownClass`es that we have already seen mismatched-arity
// specializations for (and therefore that we've already logged a warning for)
static MESSAGES: LazyLock<Mutex<FxHashSet<KnownClass>>> =
LazyLock::new(Mutex::default);
if MESSAGES.lock().unwrap().insert(class) {
tracing::info!(
"Wrong number of types when specializing {}. \
Falling back to default specialization for the symbol instead.",
class.display(db)
);
}
return class_literal.default_specialization(db);
}
class_literal
.apply_specialization(db, |_| generic_context.specialize(db, specialization))
}
let Type::ClassLiteral(class_literal) = self.to_class_literal(db) else {
return None;
};
let generic_context = class_literal.generic_context(db)?;
let types = specialization.into_iter().collect::<Box<[_]>>();
if types.len() != generic_context.len(db) {
// a cache of the `KnownClass`es that we have already seen mismatched-arity
// specializations for (and therefore that we've already logged a warning for)
static MESSAGES: LazyLock<Mutex<FxHashSet<KnownClass>>> = LazyLock::new(Mutex::default);
if MESSAGES.lock().unwrap().insert(self) {
tracing::info!(
"Wrong number of types when specializing {}. \
Falling back to default specialization for the symbol instead.",
self.display(db)
);
}
return Some(class_literal.default_specialization(db));
}
Some(class_literal.apply_specialization(db, |_| generic_context.specialize(db, types)))
Some(to_specialized_class_type_impl(
db,
self,
class_literal,
types,
generic_context,
))
}
/// Lookup a [`KnownClass`] in typeshed and return a [`Type`]

View File

@@ -3774,10 +3774,7 @@ impl<'db> BoundTypeVarInstance<'db> {
/// specifies the required specializations, and the iterator will be empty. For a constrained
/// typevar, the primary result will include the fully static constraints, and the iterator
/// will include an entry for each non-fully-static constraint.
fn required_specializations(
self,
db: &'db dyn Db,
) -> (Node<'db>, impl IntoIterator<Item = Node<'db>>) {
fn required_specializations(self, db: &'db dyn Db) -> (Node<'db>, Vec<Node<'db>>) {
// For upper bounds and constraints, we are free to choose any materialization that makes
// the check succeed. In non-inferable positions, it is most helpful to choose a
// materialization that is as restrictive as possible, since that minimizes the number of

View File

@@ -93,25 +93,31 @@ impl<'db> DisplaySettings<'db> {
I: IntoIterator<Item = T>,
T: Into<Type<'db>>,
{
fn build_display_settings<'db>(
collector: &AmbiguousClassCollector<'db>,
) -> DisplaySettings<'db> {
DisplaySettings {
qualified: Rc::new(
collector
.class_names
.borrow()
.iter()
.filter_map(|(name, ambiguity)| {
Some((*name, QualificationLevel::from_ambiguity_state(ambiguity)?))
})
.collect(),
),
..DisplaySettings::default()
}
}
let collector = AmbiguousClassCollector::default();
for ty in types {
collector.visit_type(db, ty.into());
}
Self {
qualified: Rc::new(
collector
.class_names
.borrow()
.iter()
.filter_map(|(name, ambiguity)| {
Some((*name, QualificationLevel::from_ambiguity_state(ambiguity)?))
})
.collect(),
),
..Self::default()
}
build_display_settings(&collector)
}
}

View File

@@ -563,38 +563,46 @@ impl<'db> GenericContext<'db> {
I: IntoIterator<Item = Option<Type<'db>>>,
I::IntoIter: ExactSizeIterator,
{
let mut types = self.fill_in_defaults(db, types);
let len = types.len();
loop {
let mut any_changed = false;
for i in 0..len {
let partial = PartialSpecialization {
generic_context: self,
types: &types,
// Don't recursively substitute type[i] in itself. Ideally, we could instead
// check if the result is self-referential after we're done applying the
// partial specialization. But when we apply a paramspec, we don't use the
// callable that it maps to directly; we create a new callable that reuses
// parts of it. That means we can't look for the previous type directly.
// Instead we use this to skip specializing the type in itself in the first
// place.
skip: Some(i),
};
let updated = types[i].apply_type_mapping(
db,
&TypeMapping::PartialSpecialization(partial),
TypeContext::default(),
);
if updated != types[i] {
types[i] = updated;
any_changed = true;
fn specialize_recursive_impl<'db>(
db: &'db dyn Db,
context: GenericContext<'db>,
mut types: Box<[Type<'db>]>,
) -> Specialization<'db> {
let len = types.len();
loop {
let mut any_changed = false;
for i in 0..len {
let partial = PartialSpecialization {
generic_context: context,
types: &types,
// Don't recursively substitute type[i] in itself. Ideally, we could instead
// check if the result is self-referential after we're done applying the
// partial specialization. But when we apply a paramspec, we don't use the
// callable that it maps to directly; we create a new callable that reuses
// parts of it. That means we can't look for the previous type directly.
// Instead we use this to skip specializing the type in itself in the first
// place.
skip: Some(i),
};
let updated = types[i].apply_type_mapping(
db,
&TypeMapping::PartialSpecialization(partial),
TypeContext::default(),
);
if updated != types[i] {
types[i] = updated;
any_changed = true;
}
}
if !any_changed {
return Specialization::new(db, context, types, None, None);
}
}
if !any_changed {
return Specialization::new(db, self, types, None, None);
}
}
let types = self.fill_in_defaults(db, types);
specialize_recursive_impl(db, self, types)
}
/// Creates a specialization of this generic context for the `tuple` class.
@@ -614,7 +622,7 @@ impl<'db> GenericContext<'db> {
{
let types = types.into_iter();
let variables = self.variables(db);
assert!(self.len(db) == types.len());
assert_eq!(self.len(db), types.len());
// Typevars can have other typevars as their default values, e.g.
//

View File

@@ -514,7 +514,7 @@ pub fn call_signature_details<'db>(
// Extract signature details from all callable bindings
bindings
.into_iter()
.flat_map(std::iter::IntoIterator::into_iter)
.flatten()
.map(|binding| {
let argument_to_parameter_mapping = binding.argument_matches().to_vec();
let signature = binding.signature;
@@ -623,7 +623,7 @@ pub fn definitions_for_bin_op<'db>(
let definitions: Vec<_> = bindings
.into_iter()
.flat_map(std::iter::IntoIterator::into_iter)
.flatten()
.filter_map(|binding| {
Some(ResolvedDefinition::Definition(
binding.signature.definition?,
@@ -681,7 +681,7 @@ pub fn definitions_for_unary_op<'db>(
let definitions = bindings
.into_iter()
.flat_map(std::iter::IntoIterator::into_iter)
.flatten()
.filter_map(|binding| {
Some(ResolvedDefinition::Definition(
binding.signature.definition?,

View File

@@ -1841,32 +1841,37 @@ impl<'db> Parameters<'db> {
db: &'db dyn Db,
parameters: impl IntoIterator<Item = Parameter<'db>>,
) -> Self {
let value: Vec<Parameter<'db>> = parameters.into_iter().collect();
let mut kind = ParametersKind::Standard;
if let [p1, p2] = value.as_slice()
&& p1.is_variadic()
&& p2.is_keyword_variadic()
{
match (p1.annotated_type(), p2.annotated_type()) {
(None | Some(Type::Dynamic(_)), None | Some(Type::Dynamic(_))) => {
kind = ParametersKind::Gradual;
}
(Some(Type::TypeVar(args_typevar)), Some(Type::TypeVar(kwargs_typevar))) => {
if let (Some(ParamSpecAttrKind::Args), Some(ParamSpecAttrKind::Kwargs)) = (
args_typevar.paramspec_attr(db),
kwargs_typevar.paramspec_attr(db),
) {
let typevar = args_typevar.without_paramspec_attr(db);
if typevar.is_same_typevar_as(db, kwargs_typevar.without_paramspec_attr(db))
{
kind = ParametersKind::ParamSpec(typevar);
fn new_impl<'db>(db: &'db dyn Db, value: Vec<Parameter<'db>>) -> Parameters<'db> {
let mut kind = ParametersKind::Standard;
if let [p1, p2] = value.as_slice()
&& p1.is_variadic()
&& p2.is_keyword_variadic()
{
match (p1.annotated_type(), p2.annotated_type()) {
(None | Some(Type::Dynamic(_)), None | Some(Type::Dynamic(_))) => {
kind = ParametersKind::Gradual;
}
(Some(Type::TypeVar(args_typevar)), Some(Type::TypeVar(kwargs_typevar))) => {
if let (Some(ParamSpecAttrKind::Args), Some(ParamSpecAttrKind::Kwargs)) = (
args_typevar.paramspec_attr(db),
kwargs_typevar.paramspec_attr(db),
) {
let typevar = args_typevar.without_paramspec_attr(db);
if typevar
.is_same_typevar_as(db, kwargs_typevar.without_paramspec_attr(db))
{
kind = ParametersKind::ParamSpec(typevar);
}
}
}
_ => {}
}
_ => {}
}
Parameters { value, kind }
}
Self { value, kind }
let value: Vec<Parameter<'db>> = parameters.into_iter().collect();
new_impl(db, value)
}
/// Create an empty parameter list.

View File

@@ -454,10 +454,22 @@ class PlaygroundServer
return {
dispose: () => {},
hints: inlayHints.map((hint) => ({
label: hint.label.map((part) => ({
label: part.label,
// As of 2025-09-23, location isn't supported by Monaco which is why we don't set it
})),
label: hint.label.map((part) => {
const locationLink = part.location
? this.mapNavigationTarget(part.location)
: undefined;
return {
label: part.label,
// Range cannot be `undefined`.
location: locationLink?.targetSelectionRange
? {
uri: locationLink.uri,
range: locationLink.targetSelectionRange,
}
: undefined,
};
}),
position: {
lineNumber: hint.position.line,
column: hint.position.column,