Compare commits

4 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 56edbef069 | |
| | ab9b01b0c4 | |
| | 678f8732e4 | |
| | 811dd2bb68 | |

Cargo.lock (generated, 15 changed lines)
@@ -2130,6 +2130,13 @@ dependencies = [
"wild",
]

[[package]]
name = "ruff_allocator"
version = "0.0.0"
dependencies = [
"bumpalo",
]

[[package]]
name = "ruff_benchmark"
version = "0.0.0"
@@ -2138,10 +2145,9 @@ dependencies = [
"mimalloc",
"once_cell",
"red_knot",
"ruff_allocator",
"ruff_db",
"ruff_linter",
"ruff_python_ast",
"ruff_python_formatter",
"ruff_python_parser",
"ruff_python_trivia",
"serde",
@@ -2176,6 +2182,7 @@ dependencies = [
"insta",
"matchit",
"path-slash",
"ruff_allocator",
"ruff_cache",
"ruff_notebook",
"ruff_python_ast",
@@ -2294,6 +2301,7 @@ dependencies = [
"pyproject-toml",
"quick-junit",
"regex",
"ruff_allocator",
"ruff_cache",
"ruff_diagnostics",
"ruff_macros",
@@ -2366,6 +2374,7 @@ dependencies = [
"is-macro",
"itertools 0.13.0",
"once_cell",
"ruff_allocator",
"ruff_cache",
"ruff_macros",
"ruff_python_trivia",
@@ -2392,6 +2401,7 @@ name = "ruff_python_codegen"
version = "0.0.0"
dependencies = [
"once_cell",
"ruff_allocator",
"ruff_python_ast",
"ruff_python_literal",
"ruff_python_parser",
@@ -2462,6 +2472,7 @@ dependencies = [
"compact_str",
"insta",
"memchr",
"ruff_allocator",
"ruff_python_ast",
"ruff_python_trivia",
"ruff_source_file",
@@ -13,6 +13,7 @@ license = "MIT"

[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_allocator = { path = "crates/ruff_allocator" }
ruff_cache = { path = "crates/ruff_cache" }
ruff_db = { path = "crates/ruff_db" }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
@@ -46,6 +47,7 @@ argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
bstr = { version = "1.9.1" }
bumpalo = { version = "3.16.0" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
@@ -38,7 +38,7 @@ pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
let source = source_text(db.upcast(), file_id);
lint_lines(&source, &mut diagnostics);

let parsed = parsed_module(db.upcast(), file_id);
let parsed = parsed_module(db.upcast(), file_id).parsed();

if parsed.errors().is_empty() {
let ast = parsed.syntax();
@@ -82,7 +82,7 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
let parsed = parsed_module(db.upcast(), file_id);
let semantic = SemanticModel::new(db.upcast(), file_id);

if !parsed.is_valid() {
if !parsed.parsed().is_valid() {
return Diagnostics::Empty;
}

@@ -93,7 +93,7 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
diagnostics: RefCell::new(Vec::new()),
};

SemanticVisitor { context: &context }.visit_body(parsed.suite());
SemanticVisitor { context: &context }.visit_body(parsed.parsed().suite());

Diagnostics::from(context.diagnostics.take())
}
@@ -201,7 +201,7 @@ impl<'db> SemanticLintContext<'db> {

#[allow(unused)]
pub(crate) fn ast(&self) -> &'db ast::ModModule {
self.parsed.syntax()
self.parsed.parsed().syntax()
}

pub(crate) fn push_diagnostic(&self, diagnostic: String) {
@@ -220,7 +220,7 @@ struct SyntaxLintVisitor<'a> {
source: &'a str,
}

impl Visitor<'_> for SyntaxLintVisitor<'_> {
impl Visitor<'_, '_> for SyntaxLintVisitor<'_> {
fn visit_string_literal(&mut self, string_literal: &'_ ast::StringLiteral) {
// A very naive implementation of use double quotes
let text = &self.source[string_literal.range];
@@ -236,7 +236,7 @@ struct SemanticVisitor<'a> {
context: &'a SemanticLintContext<'a>,
}

impl Visitor<'_> for SemanticVisitor<'_> {
impl Visitor<'_, '_> for SemanticVisitor<'_> {
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
match stmt {
ast::Stmt::ClassDef(class) => {
@@ -299,9 +299,9 @@ impl From<Vec<String>> for Diagnostics {
}

#[derive(Copy, Clone, Debug)]
enum AnyImportRef<'a> {
Import(&'a ast::StmtImport),
ImportFrom(&'a ast::StmtImportFrom),
enum AnyImportRef<'a, 'ast> {
Import(&'a ast::StmtImport<'ast>),
ImportFrom(&'a ast::StmtImportFrom<'ast>),
}

#[cfg(test)]
@@ -13,7 +13,6 @@ use ruff_db::parsed::ParsedModule;
///
/// ## Equality
/// Two `AstNodeRef` are considered equal if their wrapped nodes are equal.
#[derive(Clone)]
pub struct AstNodeRef<T> {
/// Owned reference to the node's [`ParsedModule`].
///
@@ -50,6 +49,15 @@ impl<T> AstNodeRef<T> {
}
}

impl<T> Clone for AstNodeRef<T> {
fn clone(&self) -> Self {
Self {
_parsed: self._parsed.clone(),
node: self.node,
}
}
}

impl<T> Deref for AstNodeRef<T> {
type Target = T;
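Note: the `#[derive(Clone)]` removed above is replaced by a hand-written `Clone` impl, presumably to avoid the `T: Clone` bound that a derived impl would add. A minimal sketch of that pattern with hypothetical names (not the real `ruff_db` fields):

// Sketch only: an owning handle plus a pointer into data it keeps alive.
// Cloning the wrapper clones the handle and copies the pointer, so no
// `T: Clone` bound is needed.
use std::sync::Arc;

struct NodeRef<T> {
    owner: Arc<String>, // stands in for the owned `ParsedModule`
    node: *const T,     // borrowed from data kept alive by `owner`
}

impl<T> Clone for NodeRef<T> {
    fn clone(&self) -> Self {
        Self {
            owner: self.owner.clone(),
            node: self.node,
        }
    }
}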
@@ -11,9 +11,10 @@ pub(super) struct NodeKey {
}

impl NodeKey {
pub(super) fn from_node<'a, N>(node: N) -> Self
pub(super) fn from_node<'a, 'ast, N>(node: N) -> Self
where
N: Into<AnyNodeRef<'a>>,
N: Into<AnyNodeRef<'a, 'ast>>,
'ast: 'a,
{
let node = node.into();
NodeKey {
@@ -58,7 +58,7 @@ pub trait HasScopedUseId {
#[newtype_index]
pub struct ScopedUseId;

impl HasScopedUseId for ast::ExprName {
impl HasScopedUseId for ast::ExprName<'_> {
type Id = ScopedUseId;

fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
@@ -67,7 +67,7 @@ impl HasScopedUseId for ast::ExprName {
}
}

impl HasScopedUseId for ast::ExpressionRef<'_> {
impl HasScopedUseId for ast::ExpressionRef<'_, '_> {
type Id = ScopedUseId;

fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
@@ -101,41 +101,41 @@ macro_rules! impl_has_scoped_expression_id {
};
}

impl_has_scoped_expression_id!(ast::ExprBoolOp);
impl_has_scoped_expression_id!(ast::ExprName);
impl_has_scoped_expression_id!(ast::ExprBinOp);
impl_has_scoped_expression_id!(ast::ExprUnaryOp);
impl_has_scoped_expression_id!(ast::ExprLambda);
impl_has_scoped_expression_id!(ast::ExprIf);
impl_has_scoped_expression_id!(ast::ExprDict);
impl_has_scoped_expression_id!(ast::ExprSet);
impl_has_scoped_expression_id!(ast::ExprListComp);
impl_has_scoped_expression_id!(ast::ExprSetComp);
impl_has_scoped_expression_id!(ast::ExprDictComp);
impl_has_scoped_expression_id!(ast::ExprGenerator);
impl_has_scoped_expression_id!(ast::ExprAwait);
impl_has_scoped_expression_id!(ast::ExprYield);
impl_has_scoped_expression_id!(ast::ExprYieldFrom);
impl_has_scoped_expression_id!(ast::ExprCompare);
impl_has_scoped_expression_id!(ast::ExprCall);
impl_has_scoped_expression_id!(ast::ExprFString);
impl_has_scoped_expression_id!(ast::ExprStringLiteral);
impl_has_scoped_expression_id!(ast::ExprBytesLiteral);
impl_has_scoped_expression_id!(ast::ExprNumberLiteral);
impl_has_scoped_expression_id!(ast::ExprBoolOp<'_>);
impl_has_scoped_expression_id!(ast::ExprName<'_>);
impl_has_scoped_expression_id!(ast::ExprBinOp<'_>);
impl_has_scoped_expression_id!(ast::ExprUnaryOp<'_>);
impl_has_scoped_expression_id!(ast::ExprLambda<'_>);
impl_has_scoped_expression_id!(ast::ExprIf<'_>);
impl_has_scoped_expression_id!(ast::ExprDict<'_>);
impl_has_scoped_expression_id!(ast::ExprSet<'_>);
impl_has_scoped_expression_id!(ast::ExprListComp<'_>);
impl_has_scoped_expression_id!(ast::ExprSetComp<'_>);
impl_has_scoped_expression_id!(ast::ExprDictComp<'_>);
impl_has_scoped_expression_id!(ast::ExprGenerator<'_>);
impl_has_scoped_expression_id!(ast::ExprAwait<'_>);
impl_has_scoped_expression_id!(ast::ExprYield<'_>);
impl_has_scoped_expression_id!(ast::ExprYieldFrom<'_>);
impl_has_scoped_expression_id!(ast::ExprCompare<'_>);
impl_has_scoped_expression_id!(ast::ExprCall<'_>);
impl_has_scoped_expression_id!(ast::ExprFString<'_>);
impl_has_scoped_expression_id!(ast::ExprStringLiteral<'_>);
impl_has_scoped_expression_id!(ast::ExprBytesLiteral<'_>);
impl_has_scoped_expression_id!(ast::ExprNumberLiteral<'_>);
impl_has_scoped_expression_id!(ast::ExprBooleanLiteral);
impl_has_scoped_expression_id!(ast::ExprNoneLiteral);
impl_has_scoped_expression_id!(ast::ExprEllipsisLiteral);
impl_has_scoped_expression_id!(ast::ExprAttribute);
impl_has_scoped_expression_id!(ast::ExprSubscript);
impl_has_scoped_expression_id!(ast::ExprStarred);
impl_has_scoped_expression_id!(ast::ExprNamed);
impl_has_scoped_expression_id!(ast::ExprList);
impl_has_scoped_expression_id!(ast::ExprTuple);
impl_has_scoped_expression_id!(ast::ExprSlice);
impl_has_scoped_expression_id!(ast::ExprIpyEscapeCommand);
impl_has_scoped_expression_id!(ast::Expr);
impl_has_scoped_expression_id!(ast::ExprAttribute<'_>);
impl_has_scoped_expression_id!(ast::ExprSubscript<'_>);
impl_has_scoped_expression_id!(ast::ExprStarred<'_>);
impl_has_scoped_expression_id!(ast::ExprNamed<'_>);
impl_has_scoped_expression_id!(ast::ExprList<'_>);
impl_has_scoped_expression_id!(ast::ExprTuple<'_>);
impl_has_scoped_expression_id!(ast::ExprSlice<'_>);
impl_has_scoped_expression_id!(ast::ExprIpyEscapeCommand<'_>);
impl_has_scoped_expression_id!(ast::Expr<'_>);

impl HasScopedAstId for ast::ExpressionRef<'_> {
impl HasScopedAstId for ast::ExpressionRef<'_, '_> {
type Id = ScopedExpressionId;

fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
@@ -196,13 +196,13 @@ pub(crate) mod node_key {
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(crate) struct ExpressionNodeKey(NodeKey);

impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
fn from(value: ast::ExpressionRef<'_>) -> Self {
impl From<ast::ExpressionRef<'_, '_>> for ExpressionNodeKey {
fn from(value: ast::ExpressionRef<'_, '_>) -> Self {
Self(NodeKey::from_node(value))
}
}

impl From<&ast::Expr> for ExpressionNodeKey {
impl From<&ast::Expr<'_>> for ExpressionNodeKey {
fn from(value: &ast::Expr) -> Self {
Self(NodeKey::from_node(value))
}
@@ -32,7 +32,7 @@ pub(super) struct SemanticIndexBuilder<'db> {
module: &'db ParsedModule,
scope_stack: Vec<FileScopeId>,
/// The assignment we're currently visiting.
current_assignment: Option<CurrentAssignment<'db>>,
current_assignment: Option<CurrentAssignment<'db, 'db>>,
/// Flow states at each `break` in the current loop.
loop_break_states: Vec<FlowSnapshot>,

@@ -82,12 +82,16 @@ impl<'db> SemanticIndexBuilder<'db> {
.expect("Always to have a root scope")
}

fn push_scope(&mut self, node: NodeWithScopeRef) {
fn push_scope(&mut self, node: NodeWithScopeRef<'_, 'db>) {
let parent = self.current_scope();
self.push_scope_with_parent(node, Some(parent));
}

fn push_scope_with_parent(&mut self, node: NodeWithScopeRef, parent: Option<FileScopeId>) {
fn push_scope_with_parent(
&mut self,
node: NodeWithScopeRef<'_, 'db>,
parent: Option<FileScopeId>,
) {
let children_start = self.scopes.next_index() + 1;

let scope = Scope {
@@ -172,8 +176,11 @@ impl<'db> SemanticIndexBuilder<'db> {
fn add_definition<'a>(
&mut self,
symbol: ScopedSymbolId,
definition_node: impl Into<DefinitionNodeRef<'a>>,
) -> Definition<'db> {
definition_node: impl Into<DefinitionNodeRef<'a, 'db>>,
) -> Definition<'db>
where
'db: 'a,
{
let definition_node = definition_node.into();
let definition = Definition::new(
self.db,
@@ -197,7 +204,7 @@ impl<'db> SemanticIndexBuilder<'db> {

/// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
/// standalone (type narrowing tests, RHS of an assignment.)
fn add_standalone_expression(&mut self, expression_node: &ast::Expr) {
fn add_standalone_expression(&mut self, expression_node: &ast::Expr<'db>) {
let expression = Expression::new(
self.db,
self.file,
@@ -214,8 +221,8 @@ impl<'db> SemanticIndexBuilder<'db> {

fn with_type_params(
&mut self,
with_scope: NodeWithScopeRef,
type_params: Option<&'db ast::TypeParams>,
with_scope: NodeWithScopeRef<'_, 'db>,
type_params: Option<&'db ast::TypeParams<'db>>,
nested: impl FnOnce(&mut Self) -> FileScopeId,
) -> FileScopeId {
if let Some(type_params) = type_params {
@@ -239,7 +246,7 @@ impl<'db> SemanticIndexBuilder<'db> {
}) => (name, &None, default),
};
// TODO create Definition for typevars
self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
self.add_or_update_symbol(Name::new(name.id), SymbolFlags::IS_DEFINED);
if let Some(bound) = bound {
self.visit_expr(bound);
}
@@ -259,7 +266,7 @@ impl<'db> SemanticIndexBuilder<'db> {
}

pub(super) fn build(mut self) -> SemanticIndex<'db> {
let module = self.module;
let module = self.module.parsed();
self.visit_body(module.suite());

// Pop the root scope
@@ -310,11 +317,8 @@ impl<'db> SemanticIndexBuilder<'db> {
}
}

impl<'db, 'ast> Visitor<'ast> for SemanticIndexBuilder<'db>
where
'ast: 'db,
{
fn visit_stmt(&mut self, stmt: &'ast ast::Stmt) {
impl<'db> Visitor<'db, 'db> for SemanticIndexBuilder<'db> {
fn visit_stmt(&mut self, stmt: &'db ast::Stmt<'db>) {
match stmt {
ast::Stmt::FunctionDef(function_def) => {
for decorator in &function_def.decorator_list {
@@ -322,7 +326,7 @@ where
}

let symbol = self
.add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
.add_or_update_symbol(Name::new(function_def.name.id), SymbolFlags::IS_DEFINED);
self.add_definition(symbol, function_def);

self.with_type_params(
@@ -346,7 +350,7 @@ where
}

let symbol =
self.add_or_update_symbol(class.name.id.clone(), SymbolFlags::IS_DEFINED);
self.add_or_update_symbol(Name::new(class.name.id), SymbolFlags::IS_DEFINED);
self.add_definition(symbol, class);

self.with_type_params(
@@ -367,7 +371,7 @@ where
ast::Stmt::Import(node) => {
for alias in &node.names {
let symbol_name = if let Some(asname) = &alias.asname {
asname.id.clone()
Name::new(asname.id)
} else {
Name::new(alias.name.id.split('.').next().unwrap())
};
@@ -385,7 +389,7 @@ where
};

let symbol =
self.add_or_update_symbol(symbol_name.clone(), SymbolFlags::IS_DEFINED);
self.add_or_update_symbol(Name::new(symbol_name), SymbolFlags::IS_DEFINED);
self.add_definition(symbol, ImportFromDefinitionNodeRef { node, alias_index });
}
}
@@ -470,7 +474,7 @@ where
}
}

fn visit_expr(&mut self, expr: &'ast ast::Expr) {
fn visit_expr(&mut self, expr: &'db ast::Expr<'db>) {
self.scopes_by_expression
.insert(expr.into(), self.current_scope());
self.current_ast_ids().record_expression(expr);
@@ -484,7 +488,7 @@ where
ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
ast::ExprContext::Invalid => SymbolFlags::empty(),
};
let symbol = self.add_or_update_symbol(id.clone(), flags);
let symbol = self.add_or_update_symbol(Name::new(id), flags);
if flags.contains(SymbolFlags::IS_DEFINED) {
match self.current_assignment {
Some(CurrentAssignment::Assign(assignment)) => {
@@ -551,26 +555,26 @@ where
}

#[derive(Copy, Clone, Debug)]
enum CurrentAssignment<'a> {
Assign(&'a ast::StmtAssign),
AnnAssign(&'a ast::StmtAnnAssign),
Named(&'a ast::ExprNamed),
enum CurrentAssignment<'a, 'ast> {
Assign(&'a ast::StmtAssign<'ast>),
AnnAssign(&'a ast::StmtAnnAssign<'ast>),
Named(&'a ast::ExprNamed<'ast>),
}

impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
fn from(value: &'a ast::StmtAssign) -> Self {
impl<'a, 'ast> From<&'a ast::StmtAssign<'ast>> for CurrentAssignment<'a, 'ast> {
fn from(value: &'a ast::StmtAssign<'ast>) -> Self {
Self::Assign(value)
}
}

impl<'a> From<&'a ast::StmtAnnAssign> for CurrentAssignment<'a> {
fn from(value: &'a ast::StmtAnnAssign) -> Self {
impl<'a, 'ast> From<&'a ast::StmtAnnAssign<'ast>> for CurrentAssignment<'a, 'ast> {
fn from(value: &'a ast::StmtAnnAssign<'ast>) -> Self {
Self::AnnAssign(value)
}
}

impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
fn from(value: &'a ast::ExprNamed) -> Self {
impl<'a, 'ast> From<&'a ast::ExprNamed<'ast>> for CurrentAssignment<'a, 'ast> {
fn from(value: &'a ast::ExprNamed<'ast>) -> Self {
Self::Named(value)
}
}
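A pattern that recurs throughout this diff is splitting one lifetime into two: `'a` for a short borrow of a node and `'ast` for the arena-allocated AST it points into, with `'ast: 'a` where the bound is needed. A minimal sketch of that shape, using hypothetical names rather than the real ruff types:

// Sketch only: `'ast` outlives `'a`, so a reference that lives for `'a`
// can point at a node whose own string data lives for `'ast`.
struct Stmt<'ast> {
    name: &'ast str, // data owned by the allocator
}

enum AnyRef<'a, 'ast> {
    Stmt(&'a Stmt<'ast>),
}

impl<'a, 'ast> From<&'a Stmt<'ast>> for AnyRef<'a, 'ast> {
    fn from(value: &'a Stmt<'ast>) -> Self {
        AnyRef::Stmt(value)
    }
}

fn key_of<'a, 'ast: 'a>(node: impl Into<AnyRef<'a, 'ast>>) -> usize {
    match node.into() {
        AnyRef::Stmt(stmt) => stmt.name.len(),
    }
}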
@@ -23,7 +23,7 @@ pub struct Definition<'db> {

#[no_eq]
#[return_ref]
pub(crate) node: DefinitionKind,
pub(crate) node: DefinitionKind<'db>,

#[no_eq]
count: countme::Count<Definition<'static>>,
@@ -36,73 +36,73 @@ impl<'db> Definition<'db> {
}

#[derive(Copy, Clone, Debug)]
pub(crate) enum DefinitionNodeRef<'a> {
Import(&'a ast::Alias),
ImportFrom(ImportFromDefinitionNodeRef<'a>),
Function(&'a ast::StmtFunctionDef),
Class(&'a ast::StmtClassDef),
NamedExpression(&'a ast::ExprNamed),
Assignment(AssignmentDefinitionNodeRef<'a>),
AnnotatedAssignment(&'a ast::StmtAnnAssign),
pub(crate) enum DefinitionNodeRef<'a, 'ast> {
Import(&'a ast::Alias<'ast>),
ImportFrom(ImportFromDefinitionNodeRef<'a, 'ast>),
Function(&'a ast::StmtFunctionDef<'ast>),
Class(&'a ast::StmtClassDef<'ast>),
NamedExpression(&'a ast::ExprNamed<'ast>),
Assignment(AssignmentDefinitionNodeRef<'a, 'ast>),
AnnotatedAssignment(&'a ast::StmtAnnAssign<'ast>),
}

impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtFunctionDef) -> Self {
impl<'a, 'ast> From<&'a ast::StmtFunctionDef<'ast>> for DefinitionNodeRef<'a, 'ast> {
fn from(node: &'a ast::StmtFunctionDef<'ast>) -> Self {
Self::Function(node)
}
}

impl<'a> From<&'a ast::StmtClassDef> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtClassDef) -> Self {
impl<'a, 'ast> From<&'a ast::StmtClassDef<'ast>> for DefinitionNodeRef<'a, 'ast> {
fn from(node: &'a ast::StmtClassDef<'ast>) -> Self {
Self::Class(node)
}
}

impl<'a> From<&'a ast::ExprNamed> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::ExprNamed) -> Self {
impl<'a, 'ast> From<&'a ast::ExprNamed<'ast>> for DefinitionNodeRef<'a, 'ast> {
fn from(node: &'a ast::ExprNamed<'ast>) -> Self {
Self::NamedExpression(node)
}
}

impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtAnnAssign) -> Self {
impl<'a, 'ast> From<&'a ast::StmtAnnAssign<'ast>> for DefinitionNodeRef<'a, 'ast> {
fn from(node: &'a ast::StmtAnnAssign<'ast>) -> Self {
Self::AnnotatedAssignment(node)
}
}

impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
fn from(node_ref: &'a ast::Alias) -> Self {
impl<'a, 'ast> From<&'a ast::Alias<'ast>> for DefinitionNodeRef<'a, 'ast> {
fn from(node_ref: &'a ast::Alias<'ast>) -> Self {
Self::Import(node_ref)
}
}

impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: ImportFromDefinitionNodeRef<'a>) -> Self {
impl<'a, 'ast> From<ImportFromDefinitionNodeRef<'a, 'ast>> for DefinitionNodeRef<'a, 'ast> {
fn from(node_ref: ImportFromDefinitionNodeRef<'a, 'ast>) -> Self {
Self::ImportFrom(node_ref)
}
}

impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
impl<'a, 'ast> From<AssignmentDefinitionNodeRef<'a, 'ast>> for DefinitionNodeRef<'a, 'ast> {
fn from(node_ref: AssignmentDefinitionNodeRef<'a, 'ast>) -> Self {
Self::Assignment(node_ref)
}
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::StmtImportFrom,
pub(crate) struct ImportFromDefinitionNodeRef<'a, 'ast> {
pub(crate) node: &'a ast::StmtImportFrom<'ast>,
pub(crate) alias_index: usize,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct AssignmentDefinitionNodeRef<'a> {
pub(crate) assignment: &'a ast::StmtAssign,
pub(crate) target: &'a ast::ExprName,
pub(crate) struct AssignmentDefinitionNodeRef<'a, 'ast> {
pub(crate) assignment: &'a ast::StmtAssign<'ast>,
pub(crate) target: &'a ast::ExprName<'ast>,
}

impl DefinitionNodeRef<'_> {
impl<'ast> DefinitionNodeRef<'_, 'ast> {
#[allow(unsafe_code)]
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind<'ast> {
match self {
DefinitionNodeRef::Import(alias) => {
DefinitionKind::Import(AstNodeRef::new(parsed, alias))
@@ -153,41 +153,41 @@ impl DefinitionNodeRef<'_> {
}

#[derive(Clone, Debug)]
pub enum DefinitionKind {
Import(AstNodeRef<ast::Alias>),
ImportFrom(ImportFromDefinitionKind),
Function(AstNodeRef<ast::StmtFunctionDef>),
Class(AstNodeRef<ast::StmtClassDef>),
NamedExpression(AstNodeRef<ast::ExprNamed>),
Assignment(AssignmentDefinitionKind),
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
pub enum DefinitionKind<'ast> {
Import(AstNodeRef<ast::Alias<'ast>>),
ImportFrom(ImportFromDefinitionKind<'ast>),
Function(AstNodeRef<ast::StmtFunctionDef<'ast>>),
Class(AstNodeRef<ast::StmtClassDef<'ast>>),
NamedExpression(AstNodeRef<ast::ExprNamed<'ast>>),
Assignment(AssignmentDefinitionKind<'ast>),
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign<'ast>>),
}

#[derive(Clone, Debug)]
pub struct ImportFromDefinitionKind {
node: AstNodeRef<ast::StmtImportFrom>,
pub struct ImportFromDefinitionKind<'ast> {
node: AstNodeRef<ast::StmtImportFrom<'ast>>,
alias_index: usize,
}

impl ImportFromDefinitionKind {
pub(crate) fn import(&self) -> &ast::StmtImportFrom {
impl<'ast> ImportFromDefinitionKind<'ast> {
pub(crate) fn import(&self) -> &ast::StmtImportFrom<'ast> {
self.node.node()
}

pub(crate) fn alias(&self) -> &ast::Alias {
pub(crate) fn alias(&self) -> &ast::Alias<'ast> {
&self.node.node().names[self.alias_index]
}
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct AssignmentDefinitionKind {
assignment: AstNodeRef<ast::StmtAssign>,
target: AstNodeRef<ast::ExprName>,
pub struct AssignmentDefinitionKind<'ast> {
assignment: AstNodeRef<ast::StmtAssign<'ast>>,
target: AstNodeRef<ast::ExprName<'ast>>,
}

impl AssignmentDefinitionKind {
pub(crate) fn assignment(&self) -> &ast::StmtAssign {
impl<'ast> AssignmentDefinitionKind<'ast> {
pub(crate) fn assignment(&self) -> &ast::StmtAssign<'ast> {
self.assignment.node()
}
}
@@ -195,37 +195,37 @@ impl AssignmentDefinitionKind {
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(crate) struct DefinitionNodeKey(NodeKey);

impl From<&ast::Alias> for DefinitionNodeKey {
impl From<&ast::Alias<'_>> for DefinitionNodeKey {
fn from(node: &ast::Alias) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::StmtFunctionDef> for DefinitionNodeKey {
impl From<&ast::StmtFunctionDef<'_>> for DefinitionNodeKey {
fn from(node: &ast::StmtFunctionDef) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::StmtClassDef> for DefinitionNodeKey {
impl From<&ast::StmtClassDef<'_>> for DefinitionNodeKey {
fn from(node: &ast::StmtClassDef) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::ExprName> for DefinitionNodeKey {
impl From<&ast::ExprName<'_>> for DefinitionNodeKey {
fn from(node: &ast::ExprName) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::ExprNamed> for DefinitionNodeKey {
impl From<&ast::ExprNamed<'_>> for DefinitionNodeKey {
fn from(node: &ast::ExprNamed) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
impl From<&ast::StmtAnnAssign<'_>> for DefinitionNodeKey {
fn from(node: &ast::StmtAnnAssign) -> Self {
Self(NodeKey::from_node(node))
}
@@ -21,7 +21,7 @@ pub(crate) struct Expression<'db> {
/// The expression node.
#[no_eq]
#[return_ref]
pub(crate) node: AstNodeRef<ast::Expr>,
pub(crate) node: AstNodeRef<ast::Expr<'db>>,

#[no_eq]
count: countme::Count<Expression<'static>>,

@@ -99,7 +99,7 @@ pub struct ScopeId<'db> {
/// The node that introduces this scope.
#[no_eq]
#[return_ref]
pub node: NodeWithScopeKind,
pub node: NodeWithScopeKind<'db>,

#[no_eq]
count: countme::Count<ScopeId<'static>>,
@@ -293,22 +293,22 @@ impl SymbolTableBuilder {

/// Reference to a node that introduces a new scope.
#[derive(Copy, Clone, Debug)]
pub(crate) enum NodeWithScopeRef<'a> {
pub(crate) enum NodeWithScopeRef<'a, 'ast> {
Module,
Class(&'a ast::StmtClassDef),
Function(&'a ast::StmtFunctionDef),
Lambda(&'a ast::ExprLambda),
FunctionTypeParameters(&'a ast::StmtFunctionDef),
ClassTypeParameters(&'a ast::StmtClassDef),
Class(&'a ast::StmtClassDef<'ast>),
Function(&'a ast::StmtFunctionDef<'ast>),
Lambda(&'a ast::ExprLambda<'ast>),
FunctionTypeParameters(&'a ast::StmtFunctionDef<'ast>),
ClassTypeParameters(&'a ast::StmtClassDef<'ast>),
}

impl NodeWithScopeRef<'_> {
impl<'ast> NodeWithScopeRef<'_, 'ast> {
/// Converts the unowned reference to an owned [`NodeWithScopeKind`].
///
/// # Safety
/// The node wrapped by `self` must be a child of `module`.
#[allow(unsafe_code)]
pub(super) unsafe fn to_kind(self, module: ParsedModule) -> NodeWithScopeKind {
pub(super) unsafe fn to_kind(self, module: ParsedModule) -> NodeWithScopeKind<'ast> {
match self {
NodeWithScopeRef::Module => NodeWithScopeKind::Module,
NodeWithScopeRef::Class(class) => {
@@ -362,13 +362,13 @@ impl NodeWithScopeRef<'_> {

/// Node that introduces a new scope.
#[derive(Clone, Debug)]
pub enum NodeWithScopeKind {
pub enum NodeWithScopeKind<'ast> {
Module,
Class(AstNodeRef<ast::StmtClassDef>),
ClassTypeParameters(AstNodeRef<ast::StmtClassDef>),
Function(AstNodeRef<ast::StmtFunctionDef>),
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
Lambda(AstNodeRef<ast::ExprLambda>),
Class(AstNodeRef<ast::StmtClassDef<'ast>>),
ClassTypeParameters(AstNodeRef<ast::StmtClassDef<'ast>>),
Function(AstNodeRef<ast::StmtFunctionDef<'ast>>),
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef<'ast>>),
Lambda(AstNodeRef<ast::ExprLambda<'ast>>),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -41,7 +41,7 @@ pub trait HasTy {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db>;
}

impl HasTy for ast::ExpressionRef<'_> {
impl HasTy for ast::ExpressionRef<'_, '_> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let file_scope = index.expression_scope_id(*self);
@@ -64,40 +64,40 @@ macro_rules! impl_expression_has_ty {
};
}

impl_expression_has_ty!(ast::ExprBoolOp);
impl_expression_has_ty!(ast::ExprNamed);
impl_expression_has_ty!(ast::ExprBinOp);
impl_expression_has_ty!(ast::ExprUnaryOp);
impl_expression_has_ty!(ast::ExprLambda);
impl_expression_has_ty!(ast::ExprIf);
impl_expression_has_ty!(ast::ExprDict);
impl_expression_has_ty!(ast::ExprSet);
impl_expression_has_ty!(ast::ExprListComp);
impl_expression_has_ty!(ast::ExprSetComp);
impl_expression_has_ty!(ast::ExprDictComp);
impl_expression_has_ty!(ast::ExprGenerator);
impl_expression_has_ty!(ast::ExprAwait);
impl_expression_has_ty!(ast::ExprYield);
impl_expression_has_ty!(ast::ExprYieldFrom);
impl_expression_has_ty!(ast::ExprCompare);
impl_expression_has_ty!(ast::ExprCall);
impl_expression_has_ty!(ast::ExprFString);
impl_expression_has_ty!(ast::ExprStringLiteral);
impl_expression_has_ty!(ast::ExprBytesLiteral);
impl_expression_has_ty!(ast::ExprNumberLiteral);
impl_expression_has_ty!(ast::ExprBoolOp<'_>);
impl_expression_has_ty!(ast::ExprNamed<'_>);
impl_expression_has_ty!(ast::ExprBinOp<'_>);
impl_expression_has_ty!(ast::ExprUnaryOp<'_>);
impl_expression_has_ty!(ast::ExprLambda<'_>);
impl_expression_has_ty!(ast::ExprIf<'_>);
impl_expression_has_ty!(ast::ExprDict<'_>);
impl_expression_has_ty!(ast::ExprSet<'_>);
impl_expression_has_ty!(ast::ExprListComp<'_>);
impl_expression_has_ty!(ast::ExprSetComp<'_>);
impl_expression_has_ty!(ast::ExprDictComp<'_>);
impl_expression_has_ty!(ast::ExprGenerator<'_>);
impl_expression_has_ty!(ast::ExprAwait<'_>);
impl_expression_has_ty!(ast::ExprYield<'_>);
impl_expression_has_ty!(ast::ExprYieldFrom<'_>);
impl_expression_has_ty!(ast::ExprCompare<'_>);
impl_expression_has_ty!(ast::ExprCall<'_>);
impl_expression_has_ty!(ast::ExprFString<'_>);
impl_expression_has_ty!(ast::ExprStringLiteral<'_>);
impl_expression_has_ty!(ast::ExprBytesLiteral<'_>);
impl_expression_has_ty!(ast::ExprNumberLiteral<'_>);
impl_expression_has_ty!(ast::ExprBooleanLiteral);
impl_expression_has_ty!(ast::ExprNoneLiteral);
impl_expression_has_ty!(ast::ExprEllipsisLiteral);
impl_expression_has_ty!(ast::ExprAttribute);
impl_expression_has_ty!(ast::ExprSubscript);
impl_expression_has_ty!(ast::ExprStarred);
impl_expression_has_ty!(ast::ExprName);
impl_expression_has_ty!(ast::ExprList);
impl_expression_has_ty!(ast::ExprTuple);
impl_expression_has_ty!(ast::ExprSlice);
impl_expression_has_ty!(ast::ExprIpyEscapeCommand);
impl_expression_has_ty!(ast::ExprAttribute<'_>);
impl_expression_has_ty!(ast::ExprSubscript<'_>);
impl_expression_has_ty!(ast::ExprStarred<'_>);
impl_expression_has_ty!(ast::ExprName<'_>);
impl_expression_has_ty!(ast::ExprList<'_>);
impl_expression_has_ty!(ast::ExprTuple<'_>);
impl_expression_has_ty!(ast::ExprSlice<'_>);
impl_expression_has_ty!(ast::ExprIpyEscapeCommand<'_>);

impl HasTy for ast::Expr {
impl HasTy for ast::Expr<'_> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
match self {
Expr::BoolOp(inner) => inner.ty(model),
@@ -136,7 +136,7 @@ impl HasTy for ast::Expr {
}
}

impl HasTy for ast::StmtFunctionDef {
impl HasTy for ast::StmtFunctionDef<'_> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let definition = index.definition(self);
@@ -144,7 +144,7 @@ impl HasTy for ast::StmtFunctionDef {
}
}

impl HasTy for StmtClassDef {
impl HasTy for StmtClassDef<'_> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let definition = index.definition(self);
@@ -152,7 +152,7 @@ impl HasTy for StmtClassDef {
}
}

impl HasTy for ast::Alias {
impl HasTy for ast::Alias<'_> {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let definition = index.definition(self);

@@ -248,7 +248,7 @@ impl<'db> TypeInferenceBuilder<'db> {
match node {
NodeWithScopeKind::Module => {
let parsed = parsed_module(self.db.upcast(), self.file);
self.infer_module(parsed.syntax());
self.infer_module(parsed.parsed().syntax());
}
NodeWithScopeKind::Function(function) => self.infer_function_body(function.node()),
NodeWithScopeKind::Lambda(lambda) => self.infer_lambda_body(lambda.node()),
@@ -407,8 +407,11 @@ impl<'db> TypeInferenceBuilder<'db> {
self.infer_optional_expression(returns.as_deref());
}

let function_ty =
Type::Function(FunctionType::new(self.db, name.id.clone(), decorator_tys));
let function_ty = Type::Function(FunctionType::new(
self.db,
Name::new(name.id),
decorator_tys,
));

self.types.definitions.insert(definition, function_ty);
}
@@ -483,7 +486,12 @@ impl<'db> TypeInferenceBuilder<'db> {
.node_scope(NodeWithScopeRef::Class(class))
.to_scope_id(self.db, self.file);

let class_ty = Type::Class(ClassType::new(self.db, name.id.clone(), bases, body_scope));
let class_ty = Type::Class(ClassType::new(
self.db,
Name::new(name.id),
bases,
body_scope,
));

self.types.definitions.insert(definition, class_ty);
}
crates/ruff_allocator/Cargo.toml (new file, 17 lines)

@@ -0,0 +1,17 @@
[package]
name = "ruff_allocator"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
bumpalo = { workspace = true, features = ["boxed", "collections"] }

[lints]
workspace = true
crates/ruff_allocator/src/lib.rs (new file, 137 lines)

@@ -0,0 +1,137 @@
use std::fmt::{Display, Formatter};
use std::iter::FusedIterator;
use std::ops::{Deref, DerefMut};
use std::{borrow, fmt};

pub type Allocator = bumpalo::Bump;

pub type String<'allocator> = bumpalo::collections::String<'allocator>;
pub type Vec<'allocator, T> = bumpalo::collections::Vec<'allocator, T>;

#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Box<'allocator, T: ?Sized>(bumpalo::boxed::Box<'allocator, T>);

impl<'allocator, T> Box<'allocator, T> {
pub fn new_in(value: T, arena: &'allocator Allocator) -> Self {
Self(bumpalo::boxed::Box::new_in(value, arena))
}

pub fn into_inner(self) -> T {
bumpalo::boxed::Box::into_inner(self.0)
}
}

impl<'ast, T> CloneIn<'ast> for Box<'ast, T>
where
T: CloneIn<'ast>,
{
fn clone_in(&self, allocator: &'ast Allocator) -> Self {
Self(bumpalo::boxed::Box::new_in(
self.0.as_ref().clone_in(allocator),
allocator,
))
}
}

impl<T> AsRef<T> for Box<'_, T> {
fn as_ref(&self) -> &T {
self.0.as_ref()
}
}

impl<T> std::fmt::Display for Box<'_, T>
where
T: Display,
{
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}

impl<'a, T: fmt::Debug + ?Sized> fmt::Debug for Box<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.0, f)
}
}

impl<'a, T: ?Sized> borrow::Borrow<T> for Box<'a, T> {
fn borrow(&self) -> &T {
&self.0
}
}

impl<'a, T: ?Sized> borrow::BorrowMut<T> for Box<'a, T> {
fn borrow_mut(&mut self) -> &mut T {
&mut self.0
}
}

impl<'a, T: ?Sized> AsMut<T> for Box<'a, T> {
fn as_mut(&mut self) -> &mut T {
&mut self.0
}
}

impl<'a, T: ?Sized> Deref for Box<'a, T> {
type Target = T;

fn deref(&self) -> &T {
&self.0
}
}

impl<'a, T: ?Sized> DerefMut for Box<'a, T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.0
}
}

impl<'a, I: Iterator + ?Sized> Iterator for Box<'a, I> {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> {
self.0.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
fn nth(&mut self, n: usize) -> Option<I::Item> {
self.0.nth(n)
}
fn last(self) -> Option<I::Item> {
#[inline]
fn some<T>(_: Option<T>, x: T) -> Option<T> {
Some(x)
}
self.fold(None, some)
}
}

impl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<'a, I> {
fn next_back(&mut self) -> Option<I::Item> {
self.0.next_back()
}
fn nth_back(&mut self, n: usize) -> Option<I::Item> {
self.0.nth_back(n)
}
}
impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<'a, I> {
fn len(&self) -> usize {
self.0.len()
}
}

impl<'a, I: FusedIterator + ?Sized> FusedIterator for Box<'a, I> {}

pub trait CloneIn<'a> {
#[must_use]
fn clone_in(&self, allocator: &'a Allocator) -> Self;
}

impl<T> CloneIn<'_> for T
where
T: Clone,
{
fn clone_in(&self, _: &'_ Allocator) -> Self {
self.clone()
}
}
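A small usage sketch of the new crate's API, based only on the definitions shown above (actual call sites in ruff may look different):

// Sketch: one bump arena owns the allocations; everything is freed at once
// when the `Allocator` is dropped.
use ruff_allocator::{Allocator, Box, CloneIn, Vec};

fn demo() {
    let arena = Allocator::new();

    // Arena-backed Box and Vec instead of std's heap-allocated ones.
    let boxed = Box::new_in(42u32, &arena);
    let mut numbers = Vec::new_in(&arena);
    numbers.push(*boxed);

    // `CloneIn` clones a value into a given arena; the blanket impl above
    // makes any ordinary `Clone` type usable with it.
    let n: u32 = numbers[0].clone_in(&arena);
    assert_eq!(n, 42);
}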
@@ -15,9 +15,9 @@ license = { workspace = true }
bench = false
doctest = false

[[bench]]
name = "linter"
harness = false
#[[bench]]
#name = "linter"
#harness = false

[[bench]]
name = "lexer"
@@ -27,9 +27,9 @@ harness = false
name = "parser"
harness = false

[[bench]]
name = "formatter"
harness = false
#[[bench]]
#name = "formatter"
#harness = false

[[bench]]
name = "red_knot"
@@ -44,10 +44,11 @@ ureq = { workspace = true }
codspeed-criterion-compat = { workspace = true, default-features = false }

[dev-dependencies]
ruff_allocator = { workspace = true }
ruff_db = { workspace = true }
ruff_linter = { workspace = true }
#ruff_linter = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_formatter = { workspace = true }
#ruff_python_formatter = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
red_knot = { workspace = true }
@@ -1,78 +1,78 @@
use std::path::Path;

use codspeed_criterion_compat::{
criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
};

use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
use ruff_python_parser::{parse, Mode};
use ruff_python_trivia::CommentRanges;

#[cfg(target_os = "windows")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

#[cfg(all(
not(target_os = "windows"),
not(target_os = "openbsd"),
any(
target_arch = "x86_64",
target_arch = "aarch64",
target_arch = "powerpc64"
)
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
Ok(vec![
TestCase::fast(TestFile::try_download("numpy/globals.py", "https://raw.githubusercontent.com/numpy/numpy/89d64415e349ca75a25250f22b874aa16e5c0973/numpy/_globals.py")?),
TestCase::fast(TestFile::try_download("unicode/pypinyin.py", "https://raw.githubusercontent.com/mozillazg/python-pinyin/9521e47d96e3583a5477f5e43a2e82d513f27a3f/pypinyin/standard.py")?),
TestCase::normal(TestFile::try_download(
"pydantic/types.py",
"https://raw.githubusercontent.com/pydantic/pydantic/83b3c49e99ceb4599d9286a3d793cea44ac36d4b/pydantic/types.py",
)?),
TestCase::normal(TestFile::try_download("numpy/ctypeslib.py", "https://raw.githubusercontent.com/numpy/numpy/e42c9503a14d66adfd41356ef5640c6975c45218/numpy/ctypeslib.py")?),
TestCase::slow(TestFile::try_download(
"large/dataset.py",
"https://raw.githubusercontent.com/DHI/mikeio/b7d26418f4db2909b0aa965253dbe83194d7bb5b/tests/test_dataset.py",
)?),
])
}

fn benchmark_formatter(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("formatter");
let test_cases = create_test_cases().unwrap();

for case in test_cases {
group.throughput(Throughput::Bytes(case.code().len() as u64));

group.bench_with_input(
BenchmarkId::from_parameter(case.name()),
&case,
|b, case| {
// Parse the source.
let parsed =
parse(case.code(), Mode::Module).expect("Input should be a valid Python code");

let comment_ranges = CommentRanges::from(parsed.tokens());

b.iter(|| {
let options = PyFormatOptions::from_extension(Path::new(case.name()))
.with_preview(PreviewMode::Enabled);
let formatted =
format_module_ast(&parsed, &comment_ranges, case.code(), options)
.expect("Formatting to succeed");

formatted.print().expect("Printing to succeed")
});
},
);
}

group.finish();
}

criterion_group!(formatter, benchmark_formatter);
criterion_main!(formatter);
// use std::path::Path;
//
// use codspeed_criterion_compat::{
// criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
// };
//
// use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
// use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
// use ruff_python_parser::{parse, Mode};
// use ruff_python_trivia::CommentRanges;
//
// #[cfg(target_os = "windows")]
// #[global_allocator]
// static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
//
// #[cfg(all(
// not(target_os = "windows"),
// not(target_os = "openbsd"),
// any(
// target_arch = "x86_64",
// target_arch = "aarch64",
// target_arch = "powerpc64"
// )
// ))]
// #[global_allocator]
// static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
//
// fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
// Ok(vec![
// TestCase::fast(TestFile::try_download("numpy/globals.py", "https://raw.githubusercontent.com/numpy/numpy/89d64415e349ca75a25250f22b874aa16e5c0973/numpy/_globals.py")?),
// TestCase::fast(TestFile::try_download("unicode/pypinyin.py", "https://raw.githubusercontent.com/mozillazg/python-pinyin/9521e47d96e3583a5477f5e43a2e82d513f27a3f/pypinyin/standard.py")?),
// TestCase::normal(TestFile::try_download(
// "pydantic/types.py",
// "https://raw.githubusercontent.com/pydantic/pydantic/83b3c49e99ceb4599d9286a3d793cea44ac36d4b/pydantic/types.py",
// )?),
// TestCase::normal(TestFile::try_download("numpy/ctypeslib.py", "https://raw.githubusercontent.com/numpy/numpy/e42c9503a14d66adfd41356ef5640c6975c45218/numpy/ctypeslib.py")?),
// TestCase::slow(TestFile::try_download(
// "large/dataset.py",
// "https://raw.githubusercontent.com/DHI/mikeio/b7d26418f4db2909b0aa965253dbe83194d7bb5b/tests/test_dataset.py",
// )?),
// ])
// }
//
// fn benchmark_formatter(criterion: &mut Criterion) {
// let mut group = criterion.benchmark_group("formatter");
// let test_cases = create_test_cases().unwrap();
//
// for case in test_cases {
// group.throughput(Throughput::Bytes(case.code().len() as u64));
//
// group.bench_with_input(
// BenchmarkId::from_parameter(case.name()),
// &case,
// |b, case| {
// // Parse the source.
// let parsed =
// parse(case.code(), Mode::Module).expect("Input should be a valid Python code");
//
// let comment_ranges = CommentRanges::from(parsed.tokens());
//
// b.iter(|| {
// let options = PyFormatOptions::from_extension(Path::new(case.name()))
// .with_preview(PreviewMode::Enabled);
// let formatted =
// format_module_ast(&parsed, &comment_ranges, case.code(), options)
// .expect("Formatting to succeed");
//
// formatted.print().expect("Printing to succeed")
// });
// },
// );
// }
//
// group.finish();
// }
//
// criterion_group!(formatter, benchmark_formatter);
// criterion_main!(formatter);
@@ -2,6 +2,7 @@ use codspeed_criterion_compat::{
criterion_group, criterion_main, measurement::WallTime, BenchmarkId, Criterion, Throughput,
};

use ruff_allocator::Allocator;
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_python_parser::{lexer, Mode, TokenKind};

@@ -48,7 +49,8 @@ fn benchmark_lexer(criterion: &mut Criterion<WallTime>) {
&case,
|b, case| {
b.iter(|| {
let mut lexer = lexer::lex(case.code(), Mode::Module);
let allocator = Allocator::new();
let mut lexer = lexer::lex(case.code(), Mode::Module, &allocator);
loop {
let token = lexer.next_token();
match token {
@@ -1,137 +1,137 @@
use codspeed_criterion_compat::{
self as criterion, criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion,
Throughput,
};
use criterion::measurement;
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_linter::linter::{lint_only, ParseSource};
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::settings::rule_table::RuleTable;
use ruff_linter::settings::types::PreviewMode;
use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{registry::Rule, RuleSelector};
use ruff_python_ast::PySourceType;
use ruff_python_parser::parse_module;

#[cfg(target_os = "windows")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

#[cfg(all(
not(target_os = "windows"),
not(target_os = "openbsd"),
any(
target_arch = "x86_64",
target_arch = "aarch64",
target_arch = "powerpc64"
)
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
Ok(vec![
TestCase::fast(TestFile::try_download("numpy/globals.py", "https://raw.githubusercontent.com/numpy/numpy/89d64415e349ca75a25250f22b874aa16e5c0973/numpy/_globals.py")?),
TestCase::fast(TestFile::try_download("unicode/pypinyin.py", "https://raw.githubusercontent.com/mozillazg/python-pinyin/9521e47d96e3583a5477f5e43a2e82d513f27a3f/pypinyin/standard.py")?),
TestCase::normal(TestFile::try_download(
"pydantic/types.py",
"https://raw.githubusercontent.com/pydantic/pydantic/83b3c49e99ceb4599d9286a3d793cea44ac36d4b/pydantic/types.py",
)?),
TestCase::normal(TestFile::try_download("numpy/ctypeslib.py", "https://raw.githubusercontent.com/numpy/numpy/e42c9503a14d66adfd41356ef5640c6975c45218/numpy/ctypeslib.py")?),
TestCase::slow(TestFile::try_download(
"large/dataset.py",
"https://raw.githubusercontent.com/DHI/mikeio/b7d26418f4db2909b0aa965253dbe83194d7bb5b/tests/test_dataset.py",
)?),
])
}

fn benchmark_linter(mut group: BenchmarkGroup<measurement::WallTime>, settings: &LinterSettings) {
let test_cases = create_test_cases().unwrap();

for case in test_cases {
group.throughput(Throughput::Bytes(case.code().len() as u64));

group.bench_with_input(
BenchmarkId::from_parameter(case.name()),
&case,
|b, case| {
// Parse the source.
let parsed =
parse_module(case.code()).expect("Input should be a valid Python code");

b.iter_batched(
|| parsed.clone(),
|parsed| {
let path = case.path();
let result = lint_only(
&path,
None,
settings,
flags::Noqa::Enabled,
&SourceKind::Python(case.code().to_string()),
PySourceType::from(path.as_path()),
ParseSource::Precomputed(parsed),
);

// Assert that file contains no parse errors
assert!(!result.has_syntax_error);
},
criterion::BatchSize::SmallInput,
);
},
);
}

group.finish();
}

fn benchmark_default_rules(criterion: &mut Criterion) {
let group = criterion.benchmark_group("linter/default-rules");
benchmark_linter(group, &LinterSettings::default());
}

/// Disables IO based rules because they are a source of flakiness
fn disable_io_rules(rules: &mut RuleTable) {
rules.disable(Rule::ShebangMissingExecutableFile);
rules.disable(Rule::ShebangNotExecutable);
}

fn benchmark_all_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All
.rules(&PreviewOptions {
mode: PreviewMode::Disabled,
require_explicit: false,
})
.collect();

disable_io_rules(&mut rules);

let settings = LinterSettings {
rules,
..LinterSettings::default()
};

let group = criterion.benchmark_group("linter/all-rules");
benchmark_linter(group, &settings);
}

fn benchmark_preview_rules(criterion: &mut Criterion) {
let mut rules: RuleTable = RuleSelector::All.all_rules().collect();

disable_io_rules(&mut rules);

let settings = LinterSettings {
rules,
preview: PreviewMode::Enabled,
..LinterSettings::default()
};

let group = criterion.benchmark_group("linter/all-with-preview-rules");
benchmark_linter(group, &settings);
}

criterion_group!(default_rules, benchmark_default_rules);
criterion_group!(all_rules, benchmark_all_rules);
criterion_group!(preview_rules, benchmark_preview_rules);
criterion_main!(default_rules, all_rules, preview_rules);
// use codspeed_criterion_compat::{
// self as criterion, criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion,
// Throughput,
// };
// use criterion::measurement;
// use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
// use ruff_linter::linter::{lint_only, ParseSource};
// use ruff_linter::rule_selector::PreviewOptions;
// use ruff_linter::settings::rule_table::RuleTable;
// use ruff_linter::settings::types::PreviewMode;
// use ruff_linter::settings::{flags, LinterSettings};
// use ruff_linter::source_kind::SourceKind;
// use ruff_linter::{registry::Rule, RuleSelector};
// use ruff_python_ast::PySourceType;
// use ruff_python_parser::parse_module;
//
// #[cfg(target_os = "windows")]
// #[global_allocator]
// static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
//
// #[cfg(all(
// not(target_os = "windows"),
// not(target_os = "openbsd"),
// any(
// target_arch = "x86_64",
// target_arch = "aarch64",
// target_arch = "powerpc64"
// )
// ))]
// #[global_allocator]
// static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
//
// fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
// Ok(vec![
// TestCase::fast(TestFile::try_download("numpy/globals.py", "https://raw.githubusercontent.com/numpy/numpy/89d64415e349ca75a25250f22b874aa16e5c0973/numpy/_globals.py")?),
// TestCase::fast(TestFile::try_download("unicode/pypinyin.py", "https://raw.githubusercontent.com/mozillazg/python-pinyin/9521e47d96e3583a5477f5e43a2e82d513f27a3f/pypinyin/standard.py")?),
// TestCase::normal(TestFile::try_download(
// "pydantic/types.py",
// "https://raw.githubusercontent.com/pydantic/pydantic/83b3c49e99ceb4599d9286a3d793cea44ac36d4b/pydantic/types.py",
// )?),
// TestCase::normal(TestFile::try_download("numpy/ctypeslib.py", "https://raw.githubusercontent.com/numpy/numpy/e42c9503a14d66adfd41356ef5640c6975c45218/numpy/ctypeslib.py")?),
// TestCase::slow(TestFile::try_download(
// "large/dataset.py",
// "https://raw.githubusercontent.com/DHI/mikeio/b7d26418f4db2909b0aa965253dbe83194d7bb5b/tests/test_dataset.py",
// )?),
// ])
// }
//
// fn benchmark_linter(mut group: BenchmarkGroup<measurement::WallTime>, settings: &LinterSettings) {
// let test_cases = create_test_cases().unwrap();
//
// for case in test_cases {
// group.throughput(Throughput::Bytes(case.code().len() as u64));
//
// group.bench_with_input(
// BenchmarkId::from_parameter(case.name()),
// &case,
// |b, case| {
// // Parse the source.
// let parsed =
// parse_module(case.code()).expect("Input should be a valid Python code");
//
// b.iter_batched(
// || parsed.clone(),
// |parsed| {
// let path = case.path();
// let result = lint_only(
// &path,
// None,
// settings,
// flags::Noqa::Enabled,
// &SourceKind::Python(case.code().to_string()),
// PySourceType::from(path.as_path()),
// ParseSource::Precomputed(parsed),
// );
//
// // Assert that file contains no parse errors
// assert!(!result.has_syntax_error);
// },
// criterion::BatchSize::SmallInput,
// );
// },
// );
// }
//
// group.finish();
// }
//
// fn benchmark_default_rules(criterion: &mut Criterion) {
// let group = criterion.benchmark_group("linter/default-rules");
// benchmark_linter(group, &LinterSettings::default());
// }
//
// /// Disables IO based rules because they are a source of flakiness
// fn disable_io_rules(rules: &mut RuleTable) {
// rules.disable(Rule::ShebangMissingExecutableFile);
// rules.disable(Rule::ShebangNotExecutable);
// }
//
// fn benchmark_all_rules(criterion: &mut Criterion) {
// let mut rules: RuleTable = RuleSelector::All
// .rules(&PreviewOptions {
// mode: PreviewMode::Disabled,
// require_explicit: false,
// })
// .collect();
//
// disable_io_rules(&mut rules);
//
// let settings = LinterSettings {
// rules,
// ..LinterSettings::default()
// };
//
// let group = criterion.benchmark_group("linter/all-rules");
// benchmark_linter(group, &settings);
// }
//
// fn benchmark_preview_rules(criterion: &mut Criterion) {
// let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
//
// disable_io_rules(&mut rules);
//
// let settings = LinterSettings {
// rules,
// preview: PreviewMode::Enabled,
// ..LinterSettings::default()
// };
//
// let group = criterion.benchmark_group("linter/all-with-preview-rules");
// benchmark_linter(group, &settings);
// }
//
// criterion_group!(default_rules, benchmark_default_rules);
// criterion_group!(all_rules, benchmark_all_rules);
// criterion_group!(preview_rules, benchmark_preview_rules);
// criterion_main!(default_rules, all_rules, preview_rules);
@@ -2,6 +2,7 @@ use codspeed_criterion_compat::{
    criterion_group, criterion_main, measurement::WallTime, BenchmarkId, Criterion, Throughput,
};

use ruff_allocator::Allocator;
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
use ruff_python_ast::Stmt;
@@ -43,8 +44,8 @@ struct CountVisitor {
    count: usize,
}

impl<'a> StatementVisitor<'a> for CountVisitor {
    fn visit_stmt(&mut self, stmt: &'a Stmt) {
impl<'a, 'ast> StatementVisitor<'a, 'ast> for CountVisitor {
    fn visit_stmt(&mut self, stmt: &'a Stmt<'ast>) {
        walk_stmt(self, stmt);
        self.count += 1;
    }
@@ -61,7 +62,8 @@ fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
            &case,
            |b, case| {
                b.iter(|| {
                    let parsed = parse_module(case.code())
                    let allocator = Allocator::new();
                    let parsed = parse_module(case.code(), &allocator)
                        .expect("Input should be a valid Python code")
                        .into_suite();
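Note on the hunk above: the new parsing contract is that the caller creates an Allocator, passes it by reference, and every AST allocation for that parse lands in it. A minimal sketch of what such a wrapper presumably looks like, assuming ruff_allocator is a thin layer over bumpalo (which this branch adds to the workspace dependencies); the method set shown is inferred from calls elsewhere in this diff, not from the crate's documented API.

use bumpalo::Bump;

pub struct Allocator {
    bump: Bump,
}

impl Allocator {
    pub fn new() -> Self {
        Self { bump: Bump::new() }
    }

    /// Move a value into the arena; the reference lives as long as the arena.
    pub fn alloc<T>(&self, value: T) -> &mut T {
        self.bump.alloc(value)
    }

    /// Copy a string slice into the arena.
    pub fn alloc_str(&self, value: &str) -> &str {
        self.bump.alloc_str(value)
    }
}

// Usage mirroring the benchmark body above: the returned AST borrows from
// `allocator`, so dropping the allocator frees the whole tree at once.
// let allocator = Allocator::new();
// let parsed = parse_module(case.code(), &allocator);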
@@ -11,6 +11,7 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_allocator = { workspace = true }
ruff_cache = { workspace = true, optional = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true }

@@ -1,5 +1,4 @@
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

use ruff_python_ast::{ModModule, PySourceType};
@@ -20,7 +19,7 @@ use crate::Db;
/// reflected in the changed AST offsets.
/// The other reason is that Ruff's AST doesn't implement `Eq`, which Salsa requires
/// for determining if a query result is unchanged.
#[salsa::tracked(return_ref, no_eq)]
#[salsa::tracked(no_eq, return_ref)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
    let _span = tracing::trace_span!("parse_module", file = ?file.path(db)).entered();

@@ -37,39 +36,48 @@ pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
            .map_or(PySourceType::Python, PySourceType::from_extension),
    };

    ParsedModule::new(parse_unchecked_source(&source, ty))
    ParsedModule::parse(&source, ty)
}

/// Cheap cloneable wrapper around the parsed module.
#[derive(Clone)]
pub struct ParsedModule {
    inner: Arc<Parsed<ModModule>>,
    inner: Arc<ParsedInner>,
}

struct ParsedInner {
    parsed: Parsed<ModModule<'static>>,

    // It's important that allocator comes **after** parsed
    // so that it gets dropped **after** parsed.
    allocator: Box<std::sync::Mutex<ruff_allocator::Allocator>>,
}

impl ParsedModule {
    pub fn new(parsed: Parsed<ModModule>) -> Self {
    pub fn parse(source: &str, ty: PySourceType) -> Self {
        let allocator = Box::new(std::sync::Mutex::new(ruff_allocator::Allocator::new()));

        let parsed: Parsed<ModModule<'static>> = {
            let allocator = allocator.lock().unwrap();
            let parsed = parse_unchecked_source(&source, ty, &allocator);
            unsafe { std::mem::transmute(parsed) }
        };

        Self {
            inner: Arc::new(parsed),
            inner: Arc::new(ParsedInner { parsed, allocator }),
        }
    }

    /// Consumes `self` and returns the Arc storing the parsed module.
    pub fn into_arc(self) -> Arc<Parsed<ModModule>> {
        self.inner
    }
}

impl Deref for ParsedModule {
    type Target = Parsed<ModModule>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    pub fn parsed<'a>(&'a self) -> &'a Parsed<ModModule<'a>> {
        unsafe { std::mem::transmute(&self.inner.parsed) }
    }
}

impl std::fmt::Debug for ParsedModule {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("ParsedModule").field(&self.inner).finish()
        f.debug_tuple("ParsedModule")
            .field(&self.inner.parsed)
            .finish()
    }
}
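The ParsedInner layout above is the usual trick for storing an arena and data borrowed from it inside one reference-counted value: the parse result is transmuted to 'static only so the two can sit side by side, the field order guarantees the AST drops before the arena, and parsed() immediately narrows the lifetime back to a borrow of self so the 'static never leaks to callers. A stripped-down sketch of the same shape, using bumpalo directly and placeholder types rather than the crate's real definitions:

use std::sync::Arc;

use bumpalo::Bump;

// Stand-in for `Parsed<ModModule<'_>>`.
struct Ast<'a> {
    source_name: &'a str,
}

struct Inner {
    // Declared first, so it is dropped before the arena below.
    ast: Ast<'static>,
    // Declared last, so it outlives the borrowing field above.
    arena: Box<Bump>,
}

#[derive(Clone)]
struct Module {
    inner: Arc<Inner>,
}

impl Module {
    fn parse(source_name: &str) -> Self {
        let arena = Box::new(Bump::new());
        let ast: Ast<'static> = {
            let name = arena.alloc_str(source_name);
            // SAFETY: the arena lives in the same struct and drops last,
            // and `ast()` below never hands out the 'static lifetime.
            unsafe { std::mem::transmute(Ast { source_name: name }) }
        };
        Self {
            inner: Arc::new(Inner { ast, arena }),
        }
    }

    /// Re-borrow the AST at a lifetime tied to `self`.
    fn ast<'a>(&'a self) -> &'a Ast<'a> {
        unsafe { std::mem::transmute(&self.inner.ast) }
    }
}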
@@ -13,6 +13,7 @@ license = { workspace = true }
[lib]

[dependencies]
ruff_allocator = { workspace = true }
ruff_cache = { workspace = true }
ruff_diagnostics = { workspace = true, features = ["serde"] }
ruff_notebook = { workspace = true }

@@ -6,13 +6,13 @@ use ruff_python_semantic::{ScopeId, Snapshot};
/// visited.
#[derive(Debug, Default)]
pub(crate) struct Visit<'a> {
    pub(crate) string_type_definitions: Vec<(&'a ExprStringLiteral, Snapshot)>,
    pub(crate) future_type_definitions: Vec<(&'a Expr, Snapshot)>,
    pub(crate) type_param_definitions: Vec<(&'a Expr, Snapshot)>,
    pub(crate) string_type_definitions: Vec<(&'a ExprStringLiteral<'a>, Snapshot)>,
    pub(crate) future_type_definitions: Vec<(&'a Expr<'a>, Snapshot)>,
    pub(crate) type_param_definitions: Vec<(&'a Expr<'a>, Snapshot)>,
    pub(crate) functions: Vec<Snapshot>,
    pub(crate) lambdas: Vec<Snapshot>,
    /// N.B. This field should always be empty unless it's a stub file
    pub(crate) class_bases: Vec<(&'a Expr, Snapshot)>,
    pub(crate) class_bases: Vec<(&'a Expr<'a>, Snapshot)>,
}

impl Visit<'_> {
@@ -30,7 +30,7 @@ use std::path::Path;

use itertools::Itertools;
use log::debug;

use ruff_allocator::Allocator;
use ruff_diagnostics::{Diagnostic, IsolationLevel};
use ruff_notebook::{CellOffsets, NotebookIndex};
use ruff_python_ast::helpers::{collect_import_from_member, is_docstring_stmt, to_module_path};
@@ -175,10 +175,12 @@ impl ExpectedDocstringKind {
}

pub(crate) struct Checker<'a> {
    allocator: &'a Allocator,

    /// The [`Parsed`] output for the source code.
    parsed: &'a Parsed<ModModule>,
    parsed: &'a Parsed<ModModule<'a>>,
    /// The [`Parsed`] output for the type annotation the checker is currently in.
    parsed_type_annotation: Option<&'a Parsed<ModExpression>>,
    parsed_type_annotation: Option<&'a Parsed<ModExpression<'a>>>,
    /// The [`Path`] to the file under analysis.
    path: &'a Path,
    /// The [`Path`] to the package containing the current file.
@@ -208,7 +210,7 @@ pub(crate) struct Checker<'a> {
    /// The [`Indexer`] for the current file, which contains the offsets of all comments and more.
    indexer: &'a Indexer,
    /// The [`Importer`] for the current file, which enables importing of other modules.
    importer: Importer<'a>,
    importer: Importer<'a, 'a>,
    /// The [`SemanticModel`], built up over the course of the AST traversal.
    semantic: SemanticModel<'a>,
    /// A set of deferred nodes to be visited after the current traversal (e.g., function bodies).
@@ -228,7 +230,8 @@ pub(crate) struct Checker<'a> {
impl<'a> Checker<'a> {
    #[allow(clippy::too_many_arguments)]
    pub(crate) fn new(
        parsed: &'a Parsed<ModModule>,
        allocator: &'a Allocator,
        parsed: &'a Parsed<ModModule<'a>>,
        settings: &'a LinterSettings,
        noqa_line_for: &'a NoqaMapping,
        noqa: flags::Noqa,
@@ -243,6 +246,7 @@ impl<'a> Checker<'a> {
        notebook_index: Option<&'a NotebookIndex>,
    ) -> Checker<'a> {
        Checker {
            allocator,
            parsed,
            parsed_type_annotation: None,
            settings,
@@ -294,6 +298,10 @@ impl<'a> Checker<'a> {
        )
    }

    pub(crate) fn allocator(&self) -> &'a Allocator {
        self.allocator
    }

    /// Returns the appropriate quoting for f-string by reversing the one used outside of
    /// the f-string.
    ///
@@ -358,7 +366,7 @@ impl<'a> Checker<'a> {
    }

    /// The [`Importer`] for the current file, which enables importing of other modules.
    pub(crate) const fn importer(&self) -> &Importer<'a> {
    pub(crate) const fn importer(&self) -> &Importer<'a, 'a> {
        &self.importer
    }

@@ -407,8 +415,8 @@ impl<'a> Checker<'a> {
    }
}

impl<'a> Visitor<'a> for Checker<'a> {
    fn visit_stmt(&mut self, stmt: &'a Stmt) {
impl<'a> Visitor<'a, 'a> for Checker<'a> {
    fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
        // Step 0: Pre-processing
        self.semantic.push_node(stmt);
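The blanket Visitor<'a> to Visitor<'a, 'a> edits that follow all stem from the visitor trait growing a second lifetime: one for the arena the AST nodes live in ('ast) and one for the borrow the visitor holds ('a). A minimal sketch of that trait shape, with simplified stand-in node types (the real trait and walk_* helpers are much larger):

// 'a: how long this borrow of the AST lasts; 'ast: how long the arena-backed AST itself lives.
trait Visitor<'a, 'ast> {
    fn visit_stmt(&mut self, stmt: &'a Stmt<'ast>) {
        walk_stmt(self, stmt);
    }

    fn visit_expr(&mut self, _expr: &'a Expr<'ast>) {}
}

// Simplified stand-in nodes; the real AST has many more variants and fields.
struct Expr<'ast> {
    id: &'ast str,
}

enum Stmt<'ast> {
    Expr(Expr<'ast>),
    Pass,
}

fn walk_stmt<'a, 'ast, V: Visitor<'a, 'ast> + ?Sized>(visitor: &mut V, stmt: &'a Stmt<'ast>) {
    if let Stmt::Expr(expr) = stmt {
        visitor.visit_expr(expr);
    }
}

// A checker that borrows the AST for exactly as long as the arena lives can
// implement `Visitor<'a, 'a>`, collapsing the two parameters, which is what
// `Checker` does above.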
@@ -997,14 +1005,14 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
self.last_stmt_end = stmt.end();
|
||||
}
|
||||
|
||||
fn visit_annotation(&mut self, expr: &'a Expr) {
|
||||
fn visit_annotation(&mut self, expr: &'a Expr<'a>) {
|
||||
let flags_snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::TYPING_ONLY_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
self.semantic.flags = flags_snapshot;
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
// Step 0: Pre-processing
|
||||
if self.source_type.is_stub()
|
||||
&& self.semantic.in_class_base()
|
||||
@@ -1064,7 +1072,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
range: _,
|
||||
}) => {
|
||||
if let Expr::Name(ast::ExprName { id, ctx, range: _ }) = func.as_ref() {
|
||||
if id == "locals" && ctx.is_load() {
|
||||
if *id == "locals" && ctx.is_load() {
|
||||
let scope = self.semantic.current_scope_mut();
|
||||
scope.set_uses_locals();
|
||||
}
|
||||
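The switch from id == "locals" to *id == "locals" here (and for the __all__ checks below) suggests ExprName::id is now a plain &str borrowed from the arena rather than an owned name type, so a pattern that binds id by reference needs one extra dereference before comparing. In miniature:

struct ExprName<'a> {
    id: &'a str,
}

fn uses_locals(expr: &ExprName) -> bool {
    // Match ergonomics bind `id` as `&&str` here, hence the deref.
    let ExprName { id } = expr;
    *id == "locals"
}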
@@ -1497,7 +1505,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
self.semantic.pop_node();
|
||||
}
|
||||
|
||||
fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
|
||||
fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler<'a>) {
|
||||
let flags_snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::EXCEPTION_HANDLER;
|
||||
|
||||
@@ -1559,7 +1567,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
analyze::parameters(parameters, self);
|
||||
}
|
||||
|
||||
fn visit_parameter(&mut self, parameter: &'a Parameter) {
|
||||
fn visit_parameter(&mut self, parameter: &'a Parameter<'a>) {
|
||||
// Step 1: Binding.
|
||||
// Bind, but intentionally avoid walking the annotation, as we handle it
|
||||
// upstream.
|
||||
@@ -1574,7 +1582,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
analyze::parameter(parameter, self);
|
||||
}
|
||||
|
||||
fn visit_pattern(&mut self, pattern: &'a Pattern) {
|
||||
fn visit_pattern(&mut self, pattern: &'a Pattern<'a>) {
|
||||
// Step 1: Binding
|
||||
if let Pattern::MatchAs(ast::PatternMatchAs {
|
||||
name: Some(name), ..
|
||||
@@ -1599,7 +1607,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
walk_pattern(self, pattern);
|
||||
}
|
||||
|
||||
fn visit_body(&mut self, body: &'a [Stmt]) {
|
||||
fn visit_body(&mut self, body: &'a [Stmt<'a>]) {
|
||||
// Step 4: Analysis
|
||||
analyze::suite(body, self);
|
||||
|
||||
@@ -1609,7 +1617,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_match_case(&mut self, match_case: &'a MatchCase) {
|
||||
fn visit_match_case(&mut self, match_case: &'a MatchCase<'a>) {
|
||||
self.visit_pattern(&match_case.pattern);
|
||||
if let Some(expr) = &match_case.guard {
|
||||
self.visit_boolean_test(expr);
|
||||
@@ -1620,7 +1628,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
self.semantic.pop_branch();
|
||||
}
|
||||
|
||||
fn visit_type_param(&mut self, type_param: &'a ast::TypeParam) {
|
||||
fn visit_type_param(&mut self, type_param: &'a ast::TypeParam<'a>) {
|
||||
// Step 1: Binding
|
||||
match type_param {
|
||||
ast::TypeParam::TypeVar(ast::TypeParamTypeVar { name, range, .. })
|
||||
@@ -1678,7 +1686,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_f_string_element(&mut self, f_string_element: &'a FStringElement) {
|
||||
fn visit_f_string_element(&mut self, f_string_element: &'a FStringElement<'a>) {
|
||||
let snapshot = self.semantic.flags;
|
||||
if f_string_element.is_expression() {
|
||||
self.semantic.flags |= SemanticModelFlags::F_STRING_REPLACEMENT_FIELD;
|
||||
@@ -1690,13 +1698,13 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
|
||||
impl<'a> Checker<'a> {
|
||||
/// Visit a [`Module`]. Returns `true` if the module contains a module-level docstring.
|
||||
fn visit_module(&mut self, python_ast: &'a Suite) {
|
||||
fn visit_module(&mut self, python_ast: &'a Suite<'a>) {
|
||||
analyze::module(python_ast, self);
|
||||
}
|
||||
|
||||
/// Visit a list of [`Comprehension`] nodes, assumed to be the comprehensions that compose a
|
||||
/// generator expression, like a list or set comprehension.
|
||||
fn visit_generators(&mut self, generators: &'a [Comprehension]) {
|
||||
fn visit_generators(&mut self, generators: &'a [Comprehension<'a>]) {
|
||||
let mut iterator = generators.iter();
|
||||
|
||||
let Some(generator) = iterator.next() else {
|
||||
@@ -1762,7 +1770,7 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
/// Visit a body of [`Stmt`] nodes within a type-checking block.
|
||||
fn visit_type_checking_block(&mut self, body: &'a [Stmt]) {
|
||||
fn visit_type_checking_block(&mut self, body: &'a [Stmt<'a>]) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::TYPE_CHECKING_BLOCK;
|
||||
self.visit_body(body);
|
||||
@@ -1770,7 +1778,7 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a runtime-evaluated type annotation.
|
||||
fn visit_runtime_evaluated_annotation(&mut self, expr: &'a Expr) {
|
||||
fn visit_runtime_evaluated_annotation(&mut self, expr: &'a Expr<'a>) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_EVALUATED_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
@@ -1778,7 +1786,7 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a runtime-required type annotation.
|
||||
fn visit_runtime_required_annotation(&mut self, expr: &'a Expr) {
|
||||
fn visit_runtime_required_annotation(&mut self, expr: &'a Expr<'a>) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_REQUIRED_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
@@ -1786,7 +1794,7 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a type definition.
|
||||
fn visit_type_definition(&mut self, expr: &'a Expr) {
|
||||
fn visit_type_definition(&mut self, expr: &'a Expr<'a>) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::TYPE_DEFINITION;
|
||||
self.visit_expr(expr);
|
||||
@@ -1794,7 +1802,7 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as _not_ a type definition.
|
||||
fn visit_non_type_definition(&mut self, expr: &'a Expr) {
|
||||
fn visit_non_type_definition(&mut self, expr: &'a Expr<'a>) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags -= SemanticModelFlags::TYPE_DEFINITION;
|
||||
self.visit_expr(expr);
|
||||
@@ -1804,7 +1812,7 @@ impl<'a> Checker<'a> {
|
||||
/// Visit an [`Expr`], and treat it as a boolean test. This is useful for detecting whether an
|
||||
/// expression's return value is significant, or whether the calling context only relies on
|
||||
/// its truthiness.
|
||||
fn visit_boolean_test(&mut self, expr: &'a Expr) {
|
||||
fn visit_boolean_test(&mut self, expr: &'a Expr<'a>) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::BOOLEAN_TEST;
|
||||
self.visit_expr(expr);
|
||||
@@ -1812,7 +1820,7 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
/// Visit an [`ElifElseClause`]
|
||||
fn visit_elif_else_clause(&mut self, clause: &'a ElifElseClause) {
|
||||
fn visit_elif_else_clause(&mut self, clause: &'a ElifElseClause<'a>) {
|
||||
if let Some(test) = &clause.test {
|
||||
self.visit_boolean_test(test);
|
||||
}
|
||||
@@ -1927,7 +1935,7 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_node_load(&mut self, expr: &Expr) {
|
||||
fn handle_node_load(&mut self, expr: &Expr<'a>) {
|
||||
let Expr::Name(expr) = expr else {
|
||||
return;
|
||||
};
|
||||
@@ -1948,21 +1956,21 @@ impl<'a> Checker<'a> {
|
||||
&& match parent {
|
||||
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
|
||||
if let Some(Expr::Name(ast::ExprName { id, .. })) = targets.first() {
|
||||
id == "__all__"
|
||||
*id == "__all__"
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
Stmt::AugAssign(ast::StmtAugAssign { target, .. }) => {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
|
||||
id == "__all__"
|
||||
*id == "__all__"
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
|
||||
id == "__all__"
|
||||
*id == "__all__"
|
||||
} else {
|
||||
false
|
||||
}
|
||||
@@ -2036,7 +2044,7 @@ impl<'a> Checker<'a> {
|
||||
self.add_binding(id, expr.range(), BindingKind::Assignment, flags);
|
||||
}
|
||||
|
||||
fn handle_node_delete(&mut self, expr: &'a Expr) {
|
||||
fn handle_node_delete(&mut self, expr: &'a Expr<'a>) {
|
||||
let Expr::Name(ast::ExprName { id, .. }) = expr else {
|
||||
return;
|
||||
};
|
||||
@@ -2172,7 +2180,7 @@ impl<'a> Checker<'a> {
|
||||
let type_definitions = std::mem::take(&mut self.visit.string_type_definitions);
|
||||
for (string_expr, snapshot) in type_definitions {
|
||||
if let Ok((parsed_annotation, kind)) =
|
||||
parse_type_annotation(string_expr, self.locator.contents())
|
||||
parse_type_annotation(string_expr, self.locator.contents(), self.allocator)
|
||||
{
|
||||
let parsed_annotation = allocator.alloc(parsed_annotation);
|
||||
self.parsed_type_annotation = Some(parsed_annotation);
|
||||
@@ -2354,8 +2362,9 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) fn check_ast(
|
||||
parsed: &Parsed<ModModule>,
|
||||
pub(crate) fn check_ast<'a>(
|
||||
allocator: &'a Allocator,
|
||||
parsed: &Parsed<ModModule<'a>>,
|
||||
locator: &Locator,
|
||||
stylist: &Stylist,
|
||||
indexer: &Indexer,
|
||||
@@ -2389,6 +2398,7 @@ pub(crate) fn check_ast(
|
||||
};
|
||||
|
||||
let mut checker = Checker::new(
|
||||
allocator,
|
||||
parsed,
|
||||
settings,
|
||||
noqa_line_for,
|
||||
|
||||
@@ -89,6 +89,7 @@ pub(crate) fn check_physical_lines(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_allocator::Allocator;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::parse_module;
|
||||
@@ -105,7 +106,8 @@ mod tests {
|
||||
fn e501_non_ascii_char() {
|
||||
let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.
|
||||
let locator = Locator::new(line);
|
||||
let parsed = parse_module(line).unwrap();
|
||||
let allocator = Allocator::new();
|
||||
let parsed = parse_module(line, &allocator).unwrap();
|
||||
let indexer = Indexer::from_tokens(parsed.tokens(), &locator);
|
||||
let stylist = Stylist::from_tokens(parsed.tokens(), &locator);
|
||||
|
||||
|
||||
@@ -373,6 +373,7 @@ impl TodoDirectiveKind {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_allocator::Allocator;
|
||||
use ruff_python_parser::parse_module;
|
||||
use ruff_python_trivia::CommentRanges;
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
@@ -388,7 +389,8 @@ mod tests {
|
||||
use super::IsortDirectives;
|
||||
|
||||
fn noqa_mappings(contents: &str) -> NoqaMapping {
|
||||
let parsed = parse_module(contents).unwrap();
|
||||
let allocator = Allocator::new();
|
||||
let parsed = parse_module(contents, &allocator).unwrap();
|
||||
let locator = Locator::new(contents);
|
||||
let indexer = Indexer::from_tokens(parsed.tokens(), &locator);
|
||||
|
||||
@@ -563,7 +565,8 @@ assert foo, \
|
||||
}
|
||||
|
||||
fn isort_directives(contents: &str) -> IsortDirectives {
|
||||
let parsed = parse_module(contents).unwrap();
|
||||
let allocator = Allocator::new();
|
||||
let parsed = parse_module(contents, &allocator).unwrap();
|
||||
let locator = Locator::new(contents);
|
||||
let comment_ranges = CommentRanges::from(parsed.tokens());
|
||||
extract_isort_directives(&locator, &comment_ranges)
|
||||
|
||||
@@ -4,7 +4,9 @@ use ruff_python_ast::{self as ast, Stmt};
|
||||
use ruff_python_semantic::{Definition, DefinitionId, Definitions, Member, MemberKind};
|
||||
|
||||
/// Extract a docstring from a function or class body.
|
||||
pub(crate) fn docstring_from(suite: &[Stmt]) -> Option<&ast::ExprStringLiteral> {
|
||||
pub(crate) fn docstring_from<'a, 'ast>(
|
||||
suite: &'a [Stmt<'ast>],
|
||||
) -> Option<&'a ast::ExprStringLiteral<'ast>> {
|
||||
let stmt = suite.first()?;
|
||||
// Require the docstring to be a standalone expression.
|
||||
let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt else {
|
||||
@@ -26,8 +28,8 @@ pub(crate) fn extract_docstring<'a>(
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub(crate) enum ExtractionTarget<'a> {
|
||||
Class(&'a ast::StmtClassDef),
|
||||
Function(&'a ast::StmtFunctionDef),
|
||||
Class(&'a ast::StmtClassDef<'a>),
|
||||
Function(&'a ast::StmtFunctionDef<'a>),
|
||||
}
|
||||
|
||||
/// Extract a `Definition` from the AST node defined by a `Stmt`.
|
||||
|
||||
@@ -15,7 +15,7 @@ pub(crate) mod styles;
|
||||
pub(crate) struct Docstring<'a> {
|
||||
pub(crate) definition: &'a Definition<'a>,
|
||||
/// The literal AST node representing the docstring.
|
||||
pub(crate) expr: &'a ExprStringLiteral,
|
||||
pub(crate) expr: &'a ExprStringLiteral<'a>,
|
||||
/// The content of the docstring, including the leading and trailing quotes.
|
||||
pub(crate) contents: &'a str,
|
||||
/// The range of the docstring body (without the quotes). The range is relative to [`Self::contents`].
|
||||
|
||||
@@ -234,7 +234,7 @@ impl<'a> SectionContexts<'a> {
|
||||
self.contexts.len()
|
||||
}
|
||||
|
||||
pub(crate) fn iter(&self) -> SectionContextsIter {
|
||||
pub(crate) fn iter(&self) -> SectionContextsIter<'_, 'a> {
|
||||
SectionContextsIter {
|
||||
docstring_body: self.docstring.body(),
|
||||
inner: self.contexts.iter(),
|
||||
@@ -242,9 +242,9 @@ impl<'a> SectionContexts<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a SectionContexts<'a> {
|
||||
impl<'sections, 'a> IntoIterator for &'sections SectionContexts<'a> {
|
||||
type Item = SectionContext<'a>;
|
||||
type IntoIter = SectionContextsIter<'a>;
|
||||
type IntoIter = SectionContextsIter<'sections, 'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
@@ -257,12 +257,12 @@ impl Debug for SectionContexts<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct SectionContextsIter<'a> {
|
||||
pub(crate) struct SectionContextsIter<'sections, 'a> {
|
||||
docstring_body: DocstringBody<'a>,
|
||||
inner: std::slice::Iter<'a, SectionContextData>,
|
||||
inner: std::slice::Iter<'sections, SectionContextData>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for SectionContextsIter<'a> {
|
||||
impl<'a> Iterator for SectionContextsIter<'_, 'a> {
|
||||
type Item = SectionContext<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
@@ -279,7 +279,7 @@ impl<'a> Iterator for SectionContextsIter<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DoubleEndedIterator for SectionContextsIter<'a> {
|
||||
impl DoubleEndedIterator for SectionContextsIter<'_, '_> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
let back = self.inner.next_back()?;
|
||||
Some(SectionContext {
|
||||
@@ -289,8 +289,8 @@ impl<'a> DoubleEndedIterator for SectionContextsIter<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl FusedIterator for SectionContextsIter<'_> {}
|
||||
impl ExactSizeIterator for SectionContextsIter<'_> {}
|
||||
impl FusedIterator for SectionContextsIter<'_, '_> {}
|
||||
impl ExactSizeIterator for SectionContextsIter<'_, '_> {}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct SectionContextData {
|
||||
|
||||
@@ -26,9 +26,9 @@ use crate::importer::insertion::Insertion;
|
||||
|
||||
mod insertion;
|
||||
|
||||
pub(crate) struct Importer<'a> {
|
||||
pub(crate) struct Importer<'a, 'ast> {
|
||||
/// The Python AST to which we are adding imports.
|
||||
python_ast: &'a [Stmt],
|
||||
python_ast: &'a [Stmt<'ast>],
|
||||
/// The tokens representing the Python AST.
|
||||
tokens: &'a Tokens,
|
||||
/// The [`Locator`] for the Python AST.
|
||||
@@ -36,14 +36,14 @@ pub(crate) struct Importer<'a> {
|
||||
/// The [`Stylist`] for the Python AST.
|
||||
stylist: &'a Stylist<'a>,
|
||||
/// The list of visited, top-level runtime imports in the Python AST.
|
||||
runtime_imports: Vec<&'a Stmt>,
|
||||
runtime_imports: Vec<&'a Stmt<'ast>>,
|
||||
/// The list of visited, top-level `if TYPE_CHECKING:` blocks in the Python AST.
|
||||
type_checking_blocks: Vec<&'a Stmt>,
|
||||
type_checking_blocks: Vec<&'a Stmt<'ast>>,
|
||||
}
|
||||
|
||||
impl<'a> Importer<'a> {
|
||||
impl<'a, 'ast> Importer<'a, 'ast> {
|
||||
pub(crate) fn new(
|
||||
parsed: &'a Parsed<ModModule>,
|
||||
parsed: &'a Parsed<ModModule<'ast>>,
|
||||
locator: &'a Locator<'a>,
|
||||
stylist: &'a Stylist<'a>,
|
||||
) -> Self {
|
||||
@@ -58,12 +58,12 @@ impl<'a> Importer<'a> {
|
||||
}
|
||||
|
||||
/// Visit a top-level import statement.
|
||||
pub(crate) fn visit_import(&mut self, import: &'a Stmt) {
|
||||
pub(crate) fn visit_import(&mut self, import: &'a Stmt<'ast>) {
|
||||
self.runtime_imports.push(import);
|
||||
}
|
||||
|
||||
/// Visit a top-level type-checking block.
|
||||
pub(crate) fn visit_type_checking_block(&mut self, type_checking_block: &'a Stmt) {
|
||||
pub(crate) fn visit_type_checking_block(&mut self, type_checking_block: &'a Stmt<'ast>) {
|
||||
self.type_checking_blocks.push(type_checking_block);
|
||||
}
|
||||
|
||||
@@ -468,7 +468,7 @@ impl<'a> Importer<'a> {
|
||||
}
|
||||
|
||||
/// Return the import statement that precedes the given position, if any.
|
||||
fn preceding_import(&self, at: TextSize) -> Option<&'a Stmt> {
|
||||
fn preceding_import(&self, at: TextSize) -> Option<&'a Stmt<'a>> {
|
||||
self.runtime_imports
|
||||
.partition_point(|stmt| stmt.start() < at)
|
||||
.checked_sub(1)
|
||||
@@ -476,7 +476,7 @@ impl<'a> Importer<'a> {
|
||||
}
|
||||
|
||||
/// Return the `TYPE_CHECKING` block that precedes the given position, if any.
|
||||
fn preceding_type_checking_block(&self, at: TextSize) -> Option<&'a Stmt> {
|
||||
fn preceding_type_checking_block(&self, at: TextSize) -> Option<&'a Stmt<'a>> {
|
||||
let block = self.type_checking_blocks.first()?;
|
||||
if block.start() <= at {
|
||||
Some(block)
|
||||
@@ -562,7 +562,7 @@ impl<'a> ImportRequest<'a> {
|
||||
/// An existing list of module or member imports, located within an import statement.
|
||||
pub(crate) struct ImportedMembers<'a> {
|
||||
/// The import statement.
|
||||
pub(crate) statement: &'a Stmt,
|
||||
pub(crate) statement: &'a Stmt<'a>,
|
||||
/// The "names" of the imported members.
|
||||
pub(crate) names: Vec<&'a str>,
|
||||
}
|
||||
|
||||
@@ -663,17 +663,22 @@ This indicates a bug in Ruff. If you could open an issue at:
}

#[derive(Debug, Clone)]
pub enum ParseSource {
pub enum ParseSource<'a> {
    /// Parse the [`Parsed`] from the given source code.
    None,
    /// Use the precomputed [`Parsed`].
    Precomputed(Parsed<ModModule>),
    Precomputed(Parsed<ModModule<'a>>),
}

impl ParseSource {
impl<'a> ParseSource<'a> {
    /// Consumes the [`ParseSource`] and returns the parsed [`Parsed`], parsing the source code if
    /// necessary.
    fn into_parsed(self, source_kind: &SourceKind, source_type: PySourceType) -> Parsed<ModModule> {
    fn into_parsed(
        self,
        source_kind: &SourceKind,
        source_type: PySourceType,
        allocator: &'a ruff_allocator::Allocator,
    ) -> Parsed<ModModule<'a>> {
        match self {
            ParseSource::None => {
                ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type)
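The rewritten into_parsed above is the general shape of the refactor in this branch: a function that used to hand back an owned AST now takes a caller-supplied &'a Allocator and returns a value that borrows from it, so the caller controls how long the arena, and with it the AST, stays alive. A hedged sketch of that signature shift with placeholder types, using bumpalo directly:

use bumpalo::Bump;

struct ModModule<'a> {
    body: &'a [&'a str],
}

// Before (sketch): the returned AST owned its allocations.
// fn parse(source: &str) -> ModModule { ... }

// After (sketch): the returned AST borrows from an arena the caller owns.
fn parse<'a>(source: &str, arena: &'a Bump) -> ModModule<'a> {
    let first_line = arena.alloc_str(source.lines().next().unwrap_or(""));
    ModModule {
        body: arena.alloc_slice_copy(&[&*first_line]),
    }
}

fn main() {
    let arena = Bump::new();
    // `module` cannot outlive `arena`; dropping the arena frees everything at once.
    let module = parse("x = 1\ny = 2", &arena);
    assert_eq!(module.body, ["x = 1"]);
}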
@@ -305,13 +305,13 @@ impl<'a> EmitterContext<'a> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_allocator::Allocator;
|
||||
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Edit, Fix};
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_python_parser::{parse_unchecked, Mode};
|
||||
use ruff_source_file::{Locator, OneIndexed, SourceFileBuilder};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::message::{Emitter, EmitterContext, Message};
|
||||
|
||||
@@ -324,7 +324,8 @@ if call(foo
|
||||
";
|
||||
let locator = Locator::new(source);
|
||||
let source_file = SourceFileBuilder::new("syntax_errors.py", source).finish();
|
||||
parse_unchecked(source, Mode::Module)
|
||||
let allocator = Allocator::new();
|
||||
parse_unchecked(source, Mode::Module, &allocator)
|
||||
.errors()
|
||||
.iter()
|
||||
.map(|parse_error| {
|
||||
|
||||
@@ -102,11 +102,11 @@ pub(crate) fn fastapi_redundant_response_model(
|
||||
}
|
||||
}
|
||||
|
||||
fn check_decorator<'a>(
|
||||
fn check_decorator<'a, 'ast>(
|
||||
function_def: &StmtFunctionDef,
|
||||
decorator: &'a Decorator,
|
||||
decorator: &'a Decorator<'ast>,
|
||||
semantic: &'a SemanticModel,
|
||||
) -> Option<(&'a ExprCall, &'a Keyword)> {
|
||||
) -> Option<(&'a ExprCall<'ast>, &'a Keyword<'ast>)> {
|
||||
let call = is_fastapi_route_decorator(decorator, semantic)?;
|
||||
let response_model_arg = call.arguments.find_keyword("response_model")?;
|
||||
let return_value = function_def.returns.as_ref()?;
|
||||
|
||||
@@ -17,10 +17,10 @@ pub(crate) fn is_fastapi_route(function_def: &StmtFunctionDef, semantic: &Semant
|
||||
}
|
||||
|
||||
/// Returns `true` if the decorator is indicative of a FastAPI route.
|
||||
pub(crate) fn is_fastapi_route_decorator<'a>(
|
||||
decorator: &'a Decorator,
|
||||
semantic: &'a SemanticModel,
|
||||
) -> Option<&'a ExprCall> {
|
||||
pub(crate) fn is_fastapi_route_decorator<'a, 'ast>(
|
||||
decorator: &'a Decorator<'ast>,
|
||||
semantic: &SemanticModel,
|
||||
) -> Option<&'a ExprCall<'ast>> {
|
||||
let call = decorator.expression.as_call_expr()?;
|
||||
let decorator_method = call.func.as_attribute_expr()?;
|
||||
let method_name = &decorator_method.attr;
|
||||
|
||||
@@ -210,25 +210,25 @@ impl AutoPythonType {
|
||||
}
|
||||
|
||||
/// Given a [`PythonType`], return an [`Expr`] that resolves to that type.
|
||||
fn type_expr(python_type: PythonType) -> Option<Expr> {
|
||||
fn name(name: &str) -> Expr {
|
||||
fn type_expr(python_type: PythonType, allocator: &ruff_allocator::Allocator) -> Option<Expr> {
|
||||
fn name<'a>(name: &str, allocator: &'a ruff_allocator::Allocator) -> Expr<'a> {
|
||||
Expr::Name(ast::ExprName {
|
||||
id: name.into(),
|
||||
id: allocator.alloc_str(name),
|
||||
range: TextRange::default(),
|
||||
ctx: ExprContext::Load,
|
||||
})
|
||||
}
|
||||
|
||||
match python_type {
|
||||
PythonType::String => Some(name("str")),
|
||||
PythonType::Bytes => Some(name("bytes")),
|
||||
PythonType::String => Some(name("str", allocator)),
|
||||
PythonType::Bytes => Some(name("bytes", allocator)),
|
||||
PythonType::Number(number) => match number {
|
||||
NumberLike::Integer => Some(name("int")),
|
||||
NumberLike::Float => Some(name("float")),
|
||||
NumberLike::Complex => Some(name("complex")),
|
||||
NumberLike::Bool => Some(name("bool")),
|
||||
NumberLike::Integer => Some(name("int", allocator)),
|
||||
NumberLike::Float => Some(name("float", allocator)),
|
||||
NumberLike::Complex => Some(name("complex", allocator)),
|
||||
NumberLike::Bool => Some(name("bool", allocator)),
|
||||
},
|
||||
PythonType::None => Some(name("None")),
|
||||
PythonType::None => Some(name("None", allocator)),
|
||||
PythonType::Ellipsis => None,
|
||||
PythonType::Dict => None,
|
||||
PythonType::List => None,
|
||||
|
||||
@@ -8,7 +8,7 @@ static PASSWORD_CANDIDATE_REGEX: Lazy<Regex> = Lazy::new(|| {
|
||||
Regex::new(r"(^|_)(?i)(pas+wo?r?d|pass(phrase)?|pwd|token|secrete?)($|_)").unwrap()
|
||||
});
|
||||
|
||||
pub(super) fn string_literal(expr: &Expr) -> Option<&str> {
|
||||
pub(super) fn string_literal<'a>(expr: &'a Expr) -> Option<&'a str> {
|
||||
match expr {
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => Some(value.to_str()),
|
||||
_ => None,
|
||||
|
||||
@@ -49,7 +49,7 @@ impl Violation for HardcodedPasswordString {
|
||||
}
|
||||
}
|
||||
|
||||
fn password_target(target: &Expr) -> Option<&str> {
|
||||
fn password_target<'a>(target: &'a Expr<'a>) -> Option<&'a str> {
|
||||
let target_name = match target {
|
||||
// variable = "s3cr3t"
|
||||
Expr::Name(ast::ExprName { id, .. }) => id.as_str(),
|
||||
|
||||
@@ -464,7 +464,7 @@ enum Safety {
|
||||
Unknown,
|
||||
}
|
||||
|
||||
impl From<&Expr> for Safety {
|
||||
impl From<&Expr<'_>> for Safety {
|
||||
/// Return the [`Safety`] level for the [`Expr`]. This is based on Bandit's definition: string
|
||||
/// literals are considered okay, but dynamically-computed values are not.
|
||||
fn from(expr: &Expr) -> Self {
|
||||
|
||||
@@ -847,7 +847,7 @@ pub(crate) fn suspicious_function_call(checker: &mut Checker, call: &ExprCall) {
|
||||
|
||||
/// Return the leading characters for an expression, if it's a string literal, f-string, or
|
||||
/// string concatenation.
|
||||
fn leading_chars(expr: &Expr) -> Option<impl Iterator<Item = char> + Clone + '_> {
|
||||
fn leading_chars<'a>(expr: &Expr<'a>) -> Option<impl Iterator<Item = char> + Clone + 'a> {
|
||||
match expr {
|
||||
// Ex) `"foo"`
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr, ExprContext, Stmt};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use ruff_allocator::CloneIn;
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::is_const_false;
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr, ExprContext, Stmt};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
use std::alloc::alloc;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
@@ -48,22 +49,28 @@ impl AlwaysFixableViolation for AssertFalse {
|
||||
}
|
||||
}
|
||||
|
||||
fn assertion_error(msg: Option<&Expr>) -> Stmt {
|
||||
fn assertion_error<'ast>(
|
||||
msg: Option<&Expr<'ast>>,
|
||||
allocator: &'ast ruff_allocator::Allocator,
|
||||
) -> Stmt<'ast> {
|
||||
Stmt::Raise(ast::StmtRaise {
|
||||
range: TextRange::default(),
|
||||
exc: Some(Box::new(Expr::Call(ast::ExprCall {
|
||||
func: Box::new(Expr::Name(ast::ExprName {
|
||||
id: "AssertionError".into(),
|
||||
ctx: ExprContext::Load,
|
||||
range: TextRange::default(),
|
||||
})),
|
||||
exc: Some(ruff_allocator::Box::new_in(Expr::Call(ast::ExprCall {
|
||||
func: ruff_allocator::Box::new_in(
|
||||
Expr::Name(ast::ExprName {
|
||||
id: "AssertionError".into(),
|
||||
ctx: ExprContext::Load,
|
||||
range: TextRange::default(),
|
||||
}),
|
||||
allocator,
|
||||
),
|
||||
arguments: Arguments {
|
||||
args: if let Some(msg) = msg {
|
||||
Box::from([msg.clone()])
|
||||
allocator.alloc_slice_fill_iter([msg.clone_in(allocator)])
|
||||
} else {
|
||||
Box::from([])
|
||||
&mut []
|
||||
},
|
||||
keywords: Box::from([]),
|
||||
keywords: &mut [],
|
||||
range: TextRange::default(),
|
||||
},
|
||||
range: TextRange::default(),
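The rewritten assertion_error shows how synthesized nodes for autofixes are built once the AST's boxes and argument slices are arena-backed: child expressions go through an arena-aware Box::new_in and argument lists through a slice-fill helper instead of heap Boxes. A rough sketch of the same construction with bumpalo's own types, assuming ruff_allocator::Box wraps bumpalo::boxed::Box (which needs bumpalo's "boxed" feature); that mapping is a guess, not confirmed by this diff:

use bumpalo::{boxed::Box as ArenaBox, Bump};

enum Expr<'a> {
    Name(&'a str),
    Call {
        func: ArenaBox<'a, Expr<'a>>,
        args: &'a [Expr<'a>],
    },
}

// Synthesize `AssertionError(<msg>)` entirely inside the arena.
fn assertion_error<'a>(msg: Option<Expr<'a>>, arena: &'a Bump) -> Expr<'a> {
    Expr::Call {
        func: ArenaBox::new_in(Expr::Name(arena.alloc_str("AssertionError")), arena),
        args: match msg {
            Some(msg) => arena.alloc_slice_fill_iter([msg]),
            None => arena.alloc_slice_fill_iter(std::iter::empty()),
        },
    }
}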
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
use ruff_allocator::{Allocator, CloneIn};
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Violation};
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::name::UnqualifiedName;
|
||||
use ruff_python_ast::{self as ast, ExceptHandler, Expr, ExprContext};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::fix::edits::pad;
|
||||
@@ -103,9 +103,12 @@ impl AlwaysFixableViolation for DuplicateHandlerException {
|
||||
}
|
||||
}
|
||||
|
||||
fn type_pattern(elts: Vec<&Expr>) -> Expr {
|
||||
fn type_pattern<'ast>(elts: Vec<&Expr>, allocator: &'ast Allocator) -> Expr<'ast> {
|
||||
ast::ExprTuple {
|
||||
elts: elts.into_iter().cloned().collect(),
|
||||
elts: elts
|
||||
.into_iter()
|
||||
.map(|elt| elt.clone_in(allocator))
|
||||
.collect(),
|
||||
ctx: ExprContext::Load,
|
||||
range: TextRange::default(),
|
||||
parenthesized: true,
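elts.into_iter().cloned().collect() stops working once Expr borrows from an arena, because a clone has to say which arena its own allocations go into; hence the clone_in(allocator) calls and the CloneIn import above. The trait's definition isn't part of this diff, but a plausible minimal shape, assuming it deep-clones a node into a destination arena, is:

use bumpalo::Bump;

// Assumed shape: clone `self`, placing the clone's allocations in `allocator`,
// so the result lives exactly as long as that arena.
trait CloneIn<'dst> {
    type Cloned;

    fn clone_in(&self, allocator: &'dst Bump) -> Self::Cloned;
}

struct ExprName<'a> {
    id: &'a str,
}

impl<'dst> CloneIn<'dst> for ExprName<'_> {
    type Cloned = ExprName<'dst>;

    fn clone_in(&self, allocator: &'dst Bump) -> ExprName<'dst> {
        ExprName {
            id: allocator.alloc_str(self.id),
        }
    }
}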
|
||||
@@ -116,9 +119,9 @@ fn type_pattern(elts: Vec<&Expr>) -> Expr {
|
||||
/// B014
|
||||
fn duplicate_handler_exceptions<'a>(
|
||||
checker: &mut Checker,
|
||||
expr: &'a Expr,
|
||||
elts: &'a [Expr],
|
||||
) -> FxHashMap<UnqualifiedName<'a>, &'a Expr> {
|
||||
expr: &'a Expr<'a>,
|
||||
elts: &'a [Expr<'a>],
|
||||
) -> FxHashMap<UnqualifiedName<'a>, &'a Expr<'a>> {
|
||||
let mut seen: FxHashMap<UnqualifiedName, &Expr> = FxHashMap::default();
|
||||
let mut duplicates: FxHashSet<UnqualifiedName> = FxHashSet::default();
|
||||
let mut unique_elts: Vec<&Expr> = Vec::default();
|
||||
|
||||
@@ -71,7 +71,7 @@ pub(crate) fn except_with_non_exception_classes(
|
||||
///
|
||||
/// This should leave any unstarred iterables alone (subsequently raising a
|
||||
/// warning for B029).
|
||||
fn flatten_iterables(expr: &Expr) -> Vec<&Expr> {
|
||||
fn flatten_iterables<'a>(expr: &'a Expr<'a>) -> Vec<&'a Expr<'a>> {
|
||||
// Unpack the top-level Tuple into queue, otherwise add as-is.
|
||||
let mut exprs_to_process: VecDeque<&Expr> = match expr {
|
||||
Expr::Tuple(ast::ExprTuple { elts, .. }) => elts.iter().collect(),
|
||||
|
||||
@@ -98,7 +98,7 @@ impl<'a, 'b> ArgumentDefaultVisitor<'a, 'b> {
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitor<'_> for ArgumentDefaultVisitor<'_, '_> {
|
||||
impl Visitor<'_, '_> for ArgumentDefaultVisitor<'_, '_> {
|
||||
fn visit_expr(&mut self, expr: &Expr) {
|
||||
match expr {
|
||||
Expr::Call(ast::ExprCall { func, .. }) => {
|
||||
|
||||
@@ -56,13 +56,13 @@ impl Violation for FunctionUsesLoopVariable {
|
||||
|
||||
#[derive(Default)]
|
||||
struct LoadedNamesVisitor<'a> {
|
||||
loaded: Vec<&'a ast::ExprName>,
|
||||
stored: Vec<&'a ast::ExprName>,
|
||||
loaded: Vec<&'a ast::ExprName<'a>>,
|
||||
stored: Vec<&'a ast::ExprName<'a>>,
|
||||
}
|
||||
|
||||
/// `Visitor` to collect all used identifiers in a statement.
|
||||
impl<'a> Visitor<'a> for LoadedNamesVisitor<'a> {
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
impl<'a> Visitor<'a, 'a> for LoadedNamesVisitor<'a> {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
match expr {
|
||||
Expr::Name(name) => match &name.ctx {
|
||||
ExprContext::Load => self.loaded.push(name),
|
||||
@@ -76,14 +76,14 @@ impl<'a> Visitor<'a> for LoadedNamesVisitor<'a> {
|
||||
|
||||
#[derive(Default)]
|
||||
struct SuspiciousVariablesVisitor<'a> {
|
||||
names: Vec<&'a ast::ExprName>,
|
||||
safe_functions: Vec<&'a Expr>,
|
||||
names: Vec<&'a ast::ExprName<'a>>,
|
||||
safe_functions: Vec<&'a Expr<'a>>,
|
||||
}
|
||||
|
||||
/// `Visitor` to collect all suspicious variables (those referenced in
|
||||
/// functions, but not bound as arguments).
|
||||
impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
impl<'a> Visitor<'a, 'a> for SuspiciousVariablesVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
|
||||
match stmt {
|
||||
Stmt::FunctionDef(ast::StmtFunctionDef {
|
||||
parameters, body, ..
|
||||
@@ -122,7 +122,7 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
|
||||
visitor::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
match expr {
|
||||
Expr::Call(ast::ExprCall {
|
||||
func,
|
||||
@@ -205,11 +205,11 @@ struct NamesFromAssignmentsVisitor<'a> {
|
||||
}
|
||||
|
||||
/// `Visitor` to collect all names used in an assignment expression.
|
||||
impl<'a> Visitor<'a> for NamesFromAssignmentsVisitor<'a> {
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
impl<'a> Visitor<'a, 'a> for NamesFromAssignmentsVisitor<'a> {
|
||||
fn visit_expr(&mut self, expr: &Expr<'a>) {
|
||||
match expr {
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
self.names.push(id.as_str());
|
||||
self.names.push(id);
|
||||
}
|
||||
Expr::Starred(ast::ExprStarred { value, .. }) => {
|
||||
self.visit_expr(value);
|
||||
@@ -230,8 +230,8 @@ struct AssignedNamesVisitor<'a> {
|
||||
}
|
||||
|
||||
/// `Visitor` to collect all used identifiers in a statement.
|
||||
impl<'a> Visitor<'a> for AssignedNamesVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
impl<'a> Visitor<'a, 'a> for AssignedNamesVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
|
||||
if stmt.is_function_def_stmt() {
|
||||
// Don't recurse.
|
||||
return;
|
||||
@@ -258,7 +258,7 @@ impl<'a> Visitor<'a> for AssignedNamesVisitor<'a> {
|
||||
visitor::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
if expr.is_lambda_expr() {
|
||||
// Don't recurse.
|
||||
return;
|
||||
@@ -267,7 +267,7 @@ impl<'a> Visitor<'a> for AssignedNamesVisitor<'a> {
|
||||
visitor::walk_expr(self, expr);
|
||||
}
|
||||
|
||||
fn visit_comprehension(&mut self, comprehension: &'a Comprehension) {
|
||||
fn visit_comprehension(&mut self, comprehension: &'a Comprehension<'a>) {
|
||||
let mut visitor = NamesFromAssignmentsVisitor::default();
|
||||
visitor.visit_expr(&comprehension.target);
|
||||
self.names.extend(visitor.names);
|
||||
@@ -303,7 +303,7 @@ pub(crate) fn function_uses_loop_variable(checker: &mut Checker, node: &Node) {
|
||||
// If a variable was used in a function or lambda body, and assigned in the
|
||||
// loop, flag it.
|
||||
for name in suspicious_variables {
|
||||
if reassigned_in_loop.contains(&name.id.as_str()) {
|
||||
if reassigned_in_loop.contains(&name.id) {
|
||||
if !checker.flake8_bugbear_seen.contains(&name.range()) {
|
||||
checker.flake8_bugbear_seen.push(name.range());
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
|
||||
@@ -143,9 +143,9 @@ fn is_mutating_function(function_name: &str) -> bool {
|
||||
/// A visitor to collect mutations to a variable in a loop.
|
||||
#[derive(Debug, Clone)]
|
||||
struct LoopMutationsVisitor<'a> {
|
||||
iter: &'a Expr,
|
||||
target: &'a Expr,
|
||||
index: &'a Expr,
|
||||
iter: &'a Expr<'a>,
|
||||
target: &'a Expr<'a>,
|
||||
index: &'a Expr<'a>,
|
||||
mutations: HashMap<u32, Vec<TextRange>>,
|
||||
branches: Vec<u32>,
|
||||
branch: u32,
|
||||
@@ -153,7 +153,7 @@ struct LoopMutationsVisitor<'a> {
|
||||
|
||||
impl<'a> LoopMutationsVisitor<'a> {
|
||||
/// Initialize the visitor.
|
||||
fn new(iter: &'a Expr, target: &'a Expr, index: &'a Expr) -> Self {
|
||||
fn new(iter: &'a Expr<'a>, target: &'a Expr<'a>, index: &'a Expr<'a>) -> Self {
|
||||
Self {
|
||||
iter,
|
||||
target,
|
||||
@@ -237,8 +237,8 @@ impl<'a> LoopMutationsVisitor<'a> {
|
||||
}
|
||||
|
||||
/// `Visitor` to collect all used identifiers in a statement.
|
||||
impl<'a> Visitor<'a> for LoopMutationsVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
impl<'a> Visitor<'a, 'a> for LoopMutationsVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
|
||||
match stmt {
|
||||
// Ex) `del items[0]`
|
||||
Stmt::Delete(StmtDelete { range, targets }) => {
|
||||
@@ -302,7 +302,7 @@ impl<'a> Visitor<'a> for LoopMutationsVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
// Ex) `items.append(1)`
|
||||
if let Expr::Call(ExprCall { func, .. }) = expr {
|
||||
self.handle_call(func);
|
||||
|
||||
@@ -76,11 +76,11 @@ pub(crate) fn loop_variable_overrides_iterator(checker: &mut Checker, target: &E
|
||||
|
||||
#[derive(Default)]
|
||||
struct NameFinder<'a> {
|
||||
names: FxHashMap<&'a str, &'a Expr>,
|
||||
names: FxHashMap<&'a str, &'a Expr<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for NameFinder<'a> {
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
impl<'a> Visitor<'a, 'a> for NameFinder<'a> {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
match expr {
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
self.names.insert(id, expr);
|
||||
|
||||
@@ -113,7 +113,7 @@ struct ReturnInGeneratorVisitor {
|
||||
has_yield: bool,
|
||||
}
|
||||
|
||||
impl StatementVisitor<'_> for ReturnInGeneratorVisitor {
|
||||
impl StatementVisitor<'_, '_> for ReturnInGeneratorVisitor {
|
||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
||||
match stmt {
|
||||
Stmt::Expr(ast::StmtExpr { value, .. }) => match **value {
|
||||
|
||||
@@ -66,7 +66,7 @@ struct GroupNameFinder<'a> {
|
||||
/// branch order.
|
||||
counter_stack: Vec<Vec<u32>>,
|
||||
/// A list of reused expressions.
|
||||
exprs: Vec<&'a Expr>,
|
||||
exprs: Vec<&'a Expr<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> GroupNameFinder<'a> {
|
||||
@@ -112,8 +112,8 @@ impl<'a> GroupNameFinder<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for GroupNameFinder<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
impl<'a> Visitor<'a, 'a> for GroupNameFinder<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
|
||||
if self.overridden {
|
||||
return;
|
||||
}
|
||||
@@ -220,7 +220,7 @@ impl<'a> Visitor<'a> for GroupNameFinder<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_comprehension(&mut self, comprehension: &'a Comprehension) {
|
||||
fn visit_comprehension(&mut self, comprehension: &'a Comprehension<'a>) {
|
||||
if self.name_matches(&comprehension.target) {
|
||||
self.overridden = true;
|
||||
}
|
||||
@@ -235,7 +235,7 @@ impl<'a> Visitor<'a> for GroupNameFinder<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
if let Expr::Named(ast::ExprNamed { target, .. }) = expr {
|
||||
if self.name_matches(target) {
|
||||
self.overridden = true;
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use ruff_python_ast::{Expr, Keyword};
|
||||
|
||||
pub(super) fn exactly_one_argument_with_matching_function<'a>(
|
||||
pub(super) fn exactly_one_argument_with_matching_function<'a, 'ast>(
|
||||
name: &str,
|
||||
func: &Expr,
|
||||
args: &'a [Expr],
|
||||
args: &'a [Expr<'ast>],
|
||||
keywords: &[Keyword],
|
||||
) -> Option<&'a Expr> {
|
||||
) -> Option<&'a Expr<'ast>> {
|
||||
let [arg] = args else {
|
||||
return None;
|
||||
};
|
||||
@@ -19,11 +19,11 @@ pub(super) fn exactly_one_argument_with_matching_function<'a>(
|
||||
Some(arg)
|
||||
}
|
||||
|
||||
pub(super) fn first_argument_with_matching_function<'a>(
|
||||
pub(super) fn first_argument_with_matching_function<'a, 'ast>(
|
||||
name: &str,
|
||||
func: &Expr,
|
||||
args: &'a [Expr],
|
||||
) -> Option<&'a Expr> {
|
||||
args: &'a [Expr<'ast>],
|
||||
) -> Option<&'a Expr<'ast>> {
|
||||
if func.as_name_expr().is_some_and(|func| func.id == name) {
|
||||
args.first()
|
||||
} else {
|
||||
|
||||
@@ -270,9 +270,9 @@ fn late_binding(parameters: &Parameters, body: &Expr) -> bool {
|
||||
#[derive(Debug)]
|
||||
struct LateBindingVisitor<'a> {
|
||||
/// The arguments to the current lambda.
|
||||
parameters: &'a Parameters,
|
||||
parameters: &'a Parameters<'a>,
|
||||
/// The arguments to any lambdas within the current lambda body.
|
||||
lambdas: Vec<Option<&'a Parameters>>,
|
||||
lambdas: Vec<Option<&'a Parameters<'a>>>,
|
||||
/// Whether any names within the current lambda body are late-bound within nested lambdas.
|
||||
late_bound: bool,
|
||||
}
|
||||
@@ -287,10 +287,10 @@ impl<'a> LateBindingVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for LateBindingVisitor<'a> {
|
||||
fn visit_stmt(&mut self, _stmt: &'a Stmt) {}
|
||||
impl<'a> Visitor<'a, 'a> for LateBindingVisitor<'a> {
|
||||
fn visit_stmt(&mut self, _stmt: &'a Stmt<'a>) {}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
match expr {
|
||||
Expr::Lambda(ast::ExprLambda { parameters, .. }) => {
|
||||
self.lambdas.push(parameters.as_deref());
|
||||
@@ -322,5 +322,5 @@ impl<'a> Visitor<'a> for LateBindingVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_body(&mut self, _body: &'a [Stmt]) {}
|
||||
fn visit_body(&mut self, _body: &'a [Stmt<'a>]) {}
|
||||
}
|
||||
|
||||
@@ -147,7 +147,7 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, call: &ast::ExprCal
|
||||
|
||||
/// Determine the set of keywords that appear in multiple positions (either directly, as in
|
||||
/// `func(x=1)`, or indirectly, as in `func(**{"x": 1})`).
|
||||
fn duplicates(call: &ast::ExprCall) -> FxHashSet<&str> {
|
||||
fn duplicates<'a>(call: &'a ast::ExprCall<'a>) -> FxHashSet<&'a str> {
|
||||
let mut seen =
|
||||
FxHashSet::with_capacity_and_hasher(call.arguments.keywords.len(), FxBuildHasher);
|
||||
let mut duplicates =
|
||||
@@ -171,7 +171,7 @@ fn duplicates(call: &ast::ExprCall) -> FxHashSet<&str> {
|
||||
}
|
||||
|
||||
/// Return `Some` if a key is a valid keyword argument name, or `None` otherwise.
|
||||
fn as_kwarg(key: &Expr) -> Option<&str> {
|
||||
fn as_kwarg<'a>(key: &'a Expr<'a>) -> Option<&'a str> {
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = key {
|
||||
if is_identifier(value.to_str()) {
|
||||
return Some(value.to_str());
|
||||
|
||||
@@ -118,7 +118,7 @@ pub(crate) fn bad_generator_return_type(
|
||||
}
|
||||
|
||||
let returns = match &function_def.returns {
|
||||
Some(returns) => returns.as_ref(),
|
||||
Some(returns) => &**returns,
|
||||
_ => return,
|
||||
};
|
||||
|
||||
@@ -282,7 +282,7 @@ fn generate_fix(
|
||||
|
||||
#[derive(Debug)]
|
||||
struct YieldTypeInfo<'a> {
|
||||
expr: &'a ast::Expr,
|
||||
expr: &'a ast::Expr<'a>,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
|
||||
@@ -290,9 +290,9 @@ fn check_positional_args_for_overloaded_method(
|
||||
parent_class_def: &StmtClassDef,
|
||||
parameters_range: TextRange,
|
||||
) {
|
||||
fn parameter_annotation_loosely_matches_predicate(
|
||||
parameter: &ParameterWithDefault,
|
||||
predicate: impl FnOnce(&Expr) -> bool,
|
||||
fn parameter_annotation_loosely_matches_predicate<'a, 'ast>(
|
||||
parameter: &'a ParameterWithDefault<'ast>,
|
||||
predicate: impl FnOnce(&'a Expr<'ast>) -> bool,
|
||||
semantic: &SemanticModel,
|
||||
) -> bool {
|
||||
parameter
|
||||
@@ -424,10 +424,10 @@ fn check_positional_args_for_overloaded_method(
|
||||
}
|
||||
|
||||
/// Return the non-`None` annotation element of a PEP 604-style union or `Optional` annotation.
|
||||
fn non_none_annotation_element<'a>(
|
||||
annotation: &'a Expr,
|
||||
fn non_none_annotation_element<'a, 'ast>(
|
||||
annotation: &'a Expr<'ast>,
|
||||
semantic: &SemanticModel,
|
||||
) -> Option<&'a Expr> {
|
||||
) -> Option<&'a Expr<'ast>> {
|
||||
// E.g., `typing.Union` or `typing.Optional`
|
||||
if let Expr::Subscript(ExprSubscript { value, slice, .. }) = annotation {
|
||||
let qualified_name = semantic.resolve_qualified_name(value)?;
|
||||
|
||||
@@ -58,13 +58,13 @@ impl Violation for RedundantLiteralUnion {
|
||||
}
|
||||
|
||||
/// PYI051
|
||||
pub(crate) fn redundant_literal_union<'a>(checker: &mut Checker, union: &'a Expr) {
|
||||
pub(crate) fn redundant_literal_union<'a, 'ast>(checker: &mut Checker, union: &'a Expr<'ast>) {
|
||||
let mut typing_literal_exprs = Vec::new();
|
||||
let mut builtin_types_in_union = FxHashSet::default();
|
||||
|
||||
// Adds a member to `literal_exprs` for each value in a `Literal`, and any builtin types
|
||||
// to `builtin_types_in_union`.
|
||||
let mut func = |expr: &'a Expr, _parent: &'a Expr| {
|
||||
let mut func = |expr: &'a Expr<'ast>, _parent: &'a Expr<'ast>| {
|
||||
if let Expr::Subscript(ast::ExprSubscript { value, slice, .. }) = expr {
|
||||
if checker.semantic().match_typing_expr(value, "Literal") {
|
||||
if let Expr::Tuple(ast::ExprTuple { elts, .. }) = slice.as_ref() {
|
||||
|
||||
@@ -209,7 +209,7 @@ impl Violation for PytestUnittestAssertion {
|
||||
/// the exception name.
|
||||
struct ExceptionHandlerVisitor<'a> {
|
||||
exception_name: &'a str,
|
||||
current_assert: Option<&'a Stmt>,
|
||||
current_assert: Option<&'a Stmt<'a>>,
|
||||
errors: Vec<Diagnostic>,
|
||||
}
|
||||
|
||||
@@ -223,8 +223,8 @@ impl<'a> ExceptionHandlerVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for ExceptionHandlerVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
impl<'a> Visitor<'a, 'a> for ExceptionHandlerVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
|
||||
match stmt {
|
||||
Stmt::Assert(_) => {
|
||||
self.current_assert = Some(stmt);
|
||||
@@ -235,7 +235,7 @@ impl<'a> Visitor<'a> for ExceptionHandlerVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
|
||||
match expr {
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
if let Some(current_assert) = self.current_assert {
|
||||
|
||||
@@ -622,12 +622,12 @@ impl fmt::Display for Parentheses {
struct SkipFunctionsVisitor<'a> {
has_return_with_value: bool,
has_yield_from: bool,
yield_statements: Vec<&'a Expr>,
addfinalizer_call: Option<&'a Expr>,
yield_statements: Vec<&'a Expr<'a>>,
addfinalizer_call: Option<&'a Expr<'a>>,
}

impl<'a> Visitor<'a> for SkipFunctionsVisitor<'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
impl<'a> Visitor<'a, 'a> for SkipFunctionsVisitor<'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
match stmt {
Stmt::Return(ast::StmtReturn { value, range: _ }) => {
if value.is_some() {
@@ -639,7 +639,7 @@ impl<'a> Visitor<'a> for SkipFunctionsVisitor<'a> {
}
}

fn visit_expr(&mut self, expr: &'a Expr) {
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
match expr {
Expr::YieldFrom(_) => {
self.has_yield_from = true;
@@ -663,10 +663,10 @@ impl<'a> Visitor<'a> for SkipFunctionsVisitor<'a> {
}
}

fn fixture_decorator<'a>(
decorators: &'a [Decorator],
fn fixture_decorator<'a, 'ast>(
decorators: &'a [Decorator<'ast>],
semantic: &SemanticModel,
) -> Option<&'a Decorator> {
) -> Option<&'a Decorator<'ast>> {
decorators.iter().find(|decorator| {
is_pytest_fixture(decorator, semantic) || is_pytest_yield_fixture(decorator, semantic)
})

@@ -4,9 +4,9 @@ use ruff_python_ast::{self as ast, Decorator, Expr, Keyword};
use ruff_python_semantic::SemanticModel;
use ruff_python_trivia::PythonWhitespace;

pub(super) fn get_mark_decorators(
decorators: &[Decorator],
) -> impl Iterator<Item = (&Decorator, &str)> {
pub(super) fn get_mark_decorators<'a, 'ast>(
decorators: &'a [Decorator<'ast>],
) -> impl Iterator<Item = (&'a Decorator<'ast>, &'a str)> {
decorators.iter().filter_map(|decorator| {
let name = UnqualifiedName::from_expr(map_callable(&decorator.expression))?;
let ["pytest", "mark", marker] = name.segments() else {

@@ -52,12 +52,12 @@ impl Violation for PytestPatchWithLambda {
/// Visitor that checks references the argument names in the lambda body.
#[derive(Debug)]
struct LambdaBodyVisitor<'a> {
parameters: &'a Parameters,
parameters: &'a Parameters<'a>,
uses_args: bool,
}

impl<'a> Visitor<'a> for LambdaBodyVisitor<'a> {
fn visit_expr(&mut self, expr: &'a Expr) {
impl<'a> Visitor<'a, 'a> for LambdaBodyVisitor<'a> {
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
match expr {
Expr::Name(ast::ExprName { id, .. }) => {
if self.parameters.includes(id) {

@@ -1,10 +1,13 @@
use anyhow::{anyhow, bail, Result};
use itertools::all;
use rustc_hash::{FxBuildHasher, FxHashMap};

use ruff_allocator::{Allocator, CloneIn};
use ruff_python_ast::name::Name;
use ruff_python_ast::{
self as ast, Arguments, CmpOp, Expr, ExprContext, Identifier, Keyword, Stmt, UnaryOp,
};
use ruff_text_size::TextRange;
use rustc_hash::{FxBuildHasher, FxHashMap};

/// An enum to represent the different types of assertions present in the
/// `unittest` module. Note: any variants that can't be replaced with plain
@@ -161,19 +164,24 @@ impl TryFrom<&str> for UnittestAssert {
}
}

fn assert(expr: &Expr, msg: Option<&Expr>) -> Stmt {
fn assert<'ast>(expr: &Expr, msg: Option<&Expr>, allocator: &'ast Allocator) -> Stmt<'ast> {
Stmt::Assert(ast::StmtAssert {
test: Box::new(expr.clone()),
msg: msg.map(|msg| Box::new(msg.clone())),
test: ruff_allocator::Box::new_in(expr.clone_in(allocator), allocator),
msg: msg.map(|msg| ruff_allocator::Box::new_in(msg.clone_in(allocator), allocator)),
range: TextRange::default(),
})
}

fn compare(left: &Expr, cmp_op: CmpOp, right: &Expr) -> Expr {
fn compare<'ast>(
left: &Expr,
cmp_op: CmpOp,
right: &Expr,
allocator: &'ast Allocator,
) -> Expr<'ast> {
Expr::Compare(ast::ExprCompare {
left: Box::new(left.clone()),
ops: Box::from([cmp_op]),
comparators: Box::from([right.clone()]),
left: ruff_allocator::Box::new_in(left.clone_in(allocator), allocator),
ops: allocator.alloc_slice_fill_iter([cmp_op.clone_in(allocator)]),
comparators: allocator.alloc_slice_fill_iter([right.clone_in(allocator)]),
range: TextRange::default(),
})
}
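The `assert` and `compare` helpers above now take an `&'ast Allocator` and build replacement nodes with `ruff_allocator::Box::new_in`, `clone_in`, and `alloc_slice_fill_iter` instead of `Box::new` and `Box::from`. A minimal sketch of that pattern follows, assuming `ruff_allocator` is a thin wrapper over `bumpalo` (as the new `bumpalo` workspace dependency suggests) and that `CloneIn` is a trait for deep-cloning a value into the arena; the trait definition here is a hypothetical stand-in, not the real one:

    use bumpalo::Bump;

    // Hypothetical trait: clone a value so that everything it owns is
    // re-allocated inside (and borrows from) the arena.
    trait CloneIn<'ast> {
        type Output;
        fn clone_in(&self, alloc: &'ast Bump) -> Self::Output;
    }

    impl<'ast> CloneIn<'ast> for &str {
        type Output = &'ast str;
        fn clone_in(&self, alloc: &'ast Bump) -> &'ast str {
            // `alloc_str` copies the bytes into the arena and hands back a
            // reference that lives as long as the arena itself.
            alloc.alloc_str(self)
        }
    }

    fn main() {
        let arena = Bump::new();
        // Single values go into the arena via `alloc`; the `bumpalo::boxed::Box`
        // (feature "boxed") used as `ruff_allocator::Box` above is the owning variant.
        let answer: &mut i32 = arena.alloc(42);
        // Slices replace `Box<[T]>`: built in place from an exact-size iterator.
        let ops: &mut [i32] = arena.alloc_slice_fill_iter([1, 2, 3]);
        // Strings replace `Box<str>` and owned identifier names.
        let name: &str = "isinstance".clone_in(&arena);
        println!("{answer} {ops:?} {name}");
        // Everything allocated above is freed in one shot when `arena` is dropped.
    }

The payoff is that fix-generation code like `generate_assert` allocates all of its temporary AST nodes in one bump arena and releases them together, rather than box-by-box.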
@@ -276,7 +284,12 @@ impl UnittestAssert {
Ok(args_map)
}

pub(crate) fn generate_assert(self, args: &[Expr], keywords: &[Keyword]) -> Result<Stmt> {
pub(crate) fn generate_assert<'ast>(
self,
args: &[Expr],
keywords: &[Keyword],
allocator: &'ast Allocator,
) -> Result<Stmt<'ast>> {
let args = self.args_map(args, keywords)?;
match self {
UnittestAssert::True
@@ -292,13 +305,17 @@ impl UnittestAssert {
assert(
&Expr::UnaryOp(ast::ExprUnaryOp {
op: UnaryOp::Not,
operand: Box::new(expr.clone()),
operand: ruff_allocator::Box::new_in(
expr.clone_in(allocator),
allocator,
),
range: TextRange::default(),
}),
msg,
allocator,
)
} else {
assert(expr, msg)
assert(expr, msg, allocator)
},
)
}
@@ -336,8 +353,8 @@ impl UnittestAssert {
UnittestAssert::IsNot => CmpOp::IsNot,
_ => unreachable!(),
};
let expr = compare(first, cmp_op, second);
Ok(assert(&expr, msg))
let expr = compare(first, cmp_op, second, allocator);
Ok(assert(&expr, msg, allocator))
}
UnittestAssert::In | UnittestAssert::NotIn => {
let member = args
@@ -352,8 +369,8 @@ impl UnittestAssert {
} else {
CmpOp::NotIn
};
let expr = compare(member, cmp_op, container);
Ok(assert(&expr, msg))
let expr = compare(member, cmp_op, container, allocator);
Ok(assert(&expr, msg, allocator))
}
UnittestAssert::IsNone | UnittestAssert::IsNotNone => {
let expr = args
@@ -368,8 +385,8 @@ impl UnittestAssert {
let node = Expr::NoneLiteral(ast::ExprNoneLiteral {
range: TextRange::default(),
});
let expr = compare(expr, cmp_op, &node);
Ok(assert(&expr, msg))
let expr = compare(expr, cmp_op, &node, allocator);
Ok(assert(&expr, msg, allocator))
}
UnittestAssert::IsInstance | UnittestAssert::NotIsInstance => {
let obj = args
@@ -380,30 +397,33 @@ impl UnittestAssert {
.ok_or_else(|| anyhow!("Missing argument `cls`"))?;
let msg = args.get("msg").copied();
let node = ast::ExprName {
id: Name::new_static("isinstance"),
id: "isinstance",
ctx: ExprContext::Load,
range: TextRange::default(),
};
let node1 = ast::ExprCall {
func: Box::new(node.into()),
func: ruff_allocator::Box::new_in(node.into(), allocator),
arguments: Arguments {
args: Box::from([(**obj).clone(), (**cls).clone()]),
keywords: Box::from([]),
args: allocator.alloc_slice_fill_iter([
(**obj).clone_in(allocator),
(**cls).clone_in(allocator),
]),
keywords: &mut [],
range: TextRange::default(),
},
range: TextRange::default(),
};
let isinstance = node1.into();
if matches!(self, UnittestAssert::IsInstance) {
Ok(assert(&isinstance, msg))
Ok(assert(&isinstance, msg, allocator))
} else {
let node = ast::ExprUnaryOp {
op: UnaryOp::Not,
operand: Box::new(isinstance),
operand: ruff_allocator::Box::new_in(isinstance, allocator),
range: TextRange::default(),
};
let expr = node.into();
Ok(assert(&expr, msg))
Ok(assert(&expr, msg, allocator))
}
}
UnittestAssert::Regex
@@ -418,18 +438,18 @@ impl UnittestAssert {
.ok_or_else(|| anyhow!("Missing argument `regex`"))?;
let msg = args.get("msg").copied();
let node = ast::ExprName {
id: Name::new_static("re"),
id: "re",
ctx: ExprContext::Load,
range: TextRange::default(),
};
let node1 = ast::ExprAttribute {
value: Box::new(node.into()),
attr: Identifier::new("search".to_string(), TextRange::default()),
value: ruff_allocator::Box::new_in(node.into(), allocator),
attr: Identifier::new("search", TextRange::default()),
ctx: ExprContext::Load,
range: TextRange::default(),
};
let node2 = ast::ExprCall {
func: Box::new(node1.into()),
func: ruff_allocator::Box::new_in(node1.into(), allocator),
arguments: Arguments {
args: Box::from([(**regex).clone(), (**text).clone()]),
keywords: Box::from([]),
@@ -439,14 +459,14 @@ impl UnittestAssert {
};
let re_search = node2.into();
if matches!(self, UnittestAssert::Regex | UnittestAssert::RegexpMatches) {
Ok(assert(&re_search, msg))
Ok(assert(&re_search, msg, allocator))
} else {
let node = ast::ExprUnaryOp {
op: UnaryOp::Not,
operand: Box::new(re_search),
range: TextRange::default(),
};
Ok(assert(&node.into(), msg))
Ok(assert(&node.into(), msg, allocator))
}
}
_ => bail!("Cannot fix `{self}`"),

@@ -104,7 +104,7 @@ impl<'a> AvoidableEscapedQuoteChecker<'a> {
}
}

impl Visitor<'_> for AvoidableEscapedQuoteChecker<'_> {
impl Visitor<'_, '_> for AvoidableEscapedQuoteChecker<'_> {
fn visit_string_literal(&mut self, string_literal: &'_ ast::StringLiteral) {
if let Some(diagnostic) = check_string_or_bytes(
self.locator,
@@ -323,7 +323,7 @@ struct ContainsAnyString {
result: bool,
}

impl Visitor<'_> for ContainsAnyString {
impl Visitor<'_, '_> for ContainsAnyString {
fn visit_string_literal(&mut self, _: &'_ ast::StringLiteral) {
self.result = true;
}

@@ -8,9 +8,9 @@ use ruff_python_semantic::SemanticModel;
#[derive(Default)]
pub(super) struct Stack<'data> {
/// The `return` statements in the current function.
pub(super) returns: Vec<&'data ast::StmtReturn>,
pub(super) returns: Vec<&'data ast::StmtReturn<'data>>,
/// The `elif` or `else` statements in the current function.
pub(super) elifs_elses: Vec<(&'data [Stmt], &'data ElifElseClause)>,
pub(super) elifs_elses: Vec<(&'data [Stmt<'data>], &'data ElifElseClause<'data>)>,
/// The non-local variables in the current function.
pub(super) non_locals: FxHashSet<&'data str>,
/// The annotated variables in the current function.
@@ -33,8 +33,11 @@ pub(super) struct Stack<'data> {
/// The `assignment`-to-`return` statement pairs in the current function.
/// TODO(charlie): Remove the extra [`Stmt`] here, which is necessary to support statement
/// removal for the `return` statement.
pub(super) assignment_return:
Vec<(&'data ast::StmtAssign, &'data ast::StmtReturn, &'data Stmt)>,
pub(super) assignment_return: Vec<(
&'data ast::StmtAssign<'data>,
&'data ast::StmtReturn<'data>,
&'data Stmt<'data>,
)>,
}

pub(super) struct ReturnVisitor<'semantic, 'data> {
@@ -43,9 +46,9 @@ pub(super) struct ReturnVisitor<'semantic, 'data> {
/// The current stack of nodes.
pub(super) stack: Stack<'data>,
/// The preceding sibling of the current node.
sibling: Option<&'data Stmt>,
sibling: Option<&'data Stmt<'data>>,
/// The parent nodes of the current node.
parents: Vec<&'data Stmt>,
parents: Vec<&'data Stmt<'data>>,
}

impl<'semantic, 'data> ReturnVisitor<'semantic, 'data> {
@@ -59,8 +62,8 @@ impl<'semantic, 'data> ReturnVisitor<'semantic, 'data> {
}
}

impl<'semantic, 'a> Visitor<'a> for ReturnVisitor<'semantic, 'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
impl<'semantic, 'a> Visitor<'a, 'a> for ReturnVisitor<'semantic, 'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
match stmt {
Stmt::ClassDef(ast::StmtClassDef { decorator_list, .. }) => {
// Visit the decorators, etc.
@@ -105,7 +108,7 @@ impl<'semantic, 'a> Visitor<'a> for ReturnVisitor<'semantic, 'a> {
// Ex) `x: int`
if value.is_none() {
if let Expr::Name(name) = target.as_ref() {
self.stack.annotations.insert(name.id.as_str());
self.stack.annotations.insert(name.id);
}
}
}
@@ -169,7 +172,7 @@ impl<'semantic, 'a> Visitor<'a> for ReturnVisitor<'semantic, 'a> {
self.parents.pop();
}

fn visit_expr(&mut self, expr: &'a Expr) {
fn visit_expr(&mut self, expr: &'a Expr<'a>) {
match expr {
Expr::YieldFrom(_) | Expr::Yield(_) => {
self.stack.is_generator = true;
@@ -178,7 +181,7 @@ impl<'semantic, 'a> Visitor<'a> for ReturnVisitor<'semantic, 'a> {
}
}

fn visit_body(&mut self, body: &'a [Stmt]) {
fn visit_body(&mut self, body: &'a [Stmt<'a>]) {
let sibling = self.sibling;
self.sibling = None;
visitor::walk_body(self, body);

@@ -301,7 +301,7 @@ pub(crate) fn is_same_expr<'a>(a: &'a Expr, b: &'a Expr) -> Option<&'a str> {
}

/// If `call` is an `isinstance()` call, return its target.
fn isinstance_target<'a>(call: &'a Expr, semantic: &'a SemanticModel) -> Option<&'a Expr> {
fn isinstance_target<'a>(call: &Expr<'a>, semantic: &SemanticModel) -> Option<&'a Expr<'a>> {
// Verify that this is an `isinstance` call.
let ast::ExprCall {
func,
@@ -470,7 +470,7 @@ pub(crate) fn duplicate_isinstance_call(checker: &mut Checker, expr: &Expr) {
}
}

fn match_eq_target(expr: &Expr) -> Option<(&Name, &Expr)> {
fn match_eq_target<'a>(expr: &Expr<'a>) -> Option<(&'a Name, &'a Expr<'a>)> {
let Expr::Compare(ast::ExprCompare {
left,
ops,

@@ -195,7 +195,7 @@ fn check_os_environ_subscript(checker: &mut Checker, expr: &Expr) {
let Expr::Name(ast::ExprName { id, .. }) = attr_value.as_ref() else {
return;
};
if id != "os" || attr != "environ" {
if *id != "os" || attr != "environ" {
return;
}
let Expr::StringLiteral(ast::ExprStringLiteral { value: env_var, .. }) = slice.as_ref() else {

@@ -179,7 +179,7 @@ pub(crate) fn if_expr_with_true_false(
&ast::ExprCall {
func: Box::new(
ast::ExprName {
id: Name::new_static("bool"),
id: "bool",
ctx: ExprContext::Load,
range: TextRange::default(),
}

@@ -68,7 +68,7 @@ impl Violation for MultipleWithStatements {

/// Returns a boolean indicating whether it's an async with statement, the items
/// and body.
fn next_with(body: &[Stmt]) -> Option<(bool, &[WithItem], &[Stmt])> {
fn next_with<'a>(body: &'a [Stmt<'a>]) -> Option<(bool, &'a [WithItem<'a>], &'a [Stmt<'a>])> {
let [Stmt::With(ast::StmtWith {
is_async,
items,

@@ -139,12 +139,12 @@ pub(crate) fn nested_if_statements(

#[derive(Debug, Clone, Copy)]
pub(super) enum NestedIf<'a> {
If(&'a ast::StmtIf),
Elif(&'a ElifElseClause),
If(&'a ast::StmtIf<'a>),
Elif(&'a ElifElseClause<'a>),
}

impl<'a> NestedIf<'a> {
pub(super) fn body(self) -> &'a [Stmt] {
pub(super) fn body(self) -> &'a [Stmt<'a>] {
match self {
NestedIf::If(stmt_if) => &stmt_if.body,
NestedIf::Elif(clause) => &clause.body,
@@ -165,7 +165,7 @@ impl Ranged for NestedIf<'_> {
}
}

impl<'a> From<&NestedIf<'a>> for AnyNodeRef<'a> {
impl<'a> From<&NestedIf<'a>> for AnyNodeRef<'a, 'a> {
fn from(value: &NestedIf<'a>) -> Self {
match value {
NestedIf::If(stmt_if) => (*stmt_if).into(),
@@ -175,7 +175,7 @@ impl<'a> From<&NestedIf<'a>> for AnyNodeRef<'a> {
}

/// Returns the body, the range of the `if` or `elif` and whether the range is for an `if` or `elif`
fn nested_if_body(stmt_if: &ast::StmtIf) -> Option<NestedIf> {
fn nested_if_body<'a>(stmt_if: &'a ast::StmtIf<'a>) -> Option<NestedIf<'a>> {
let ast::StmtIf {
test,
body,
@@ -225,7 +225,7 @@ fn nested_if_body(stmt_if: &ast::StmtIf) -> Option<NestedIf> {
/// z = 1
/// ...
/// ```
fn find_last_nested_if(body: &[Stmt]) -> Option<&Expr> {
fn find_last_nested_if<'a>(body: &'a [Stmt<'a>]) -> Option<&'a Expr<'a>> {
let [Stmt::If(ast::StmtIf {
test,
body: inner_body,
@@ -248,7 +248,7 @@ fn is_main_check(expr: &Expr) -> bool {
}) = expr
{
if let Expr::Name(ast::ExprName { id, .. }) = left.as_ref() {
if id == "__name__" {
if *id == "__name__" {
if let [Expr::StringLiteral(ast::ExprStringLiteral { value, .. })] = &**comparators
{
if value == "__main__" {

@@ -152,7 +152,7 @@ pub(crate) fn enumerate_for_loop(checker: &mut Checker, for_stmt: &ast::StmtFor)
|
||||
|
||||
/// If the statement is an index increment statement (e.g., `i += 1`), return
|
||||
/// the name of the index variable.
|
||||
fn match_index_increment(stmt: &Stmt) -> Option<&ast::ExprName> {
|
||||
fn match_index_increment<'a, 'ast>(stmt: &'a Stmt<'ast>) -> Option<&'a ast::ExprName<'ast>> {
|
||||
let Stmt::AugAssign(ast::StmtAugAssign {
|
||||
target,
|
||||
op: Operator::Add,
|
||||
@@ -183,7 +183,7 @@ struct LoopControlFlowVisitor {
|
||||
has_continue: bool,
|
||||
}
|
||||
|
||||
impl StatementVisitor<'_> for LoopControlFlowVisitor {
|
||||
impl StatementVisitor<'_, '_> for LoopControlFlowVisitor {
|
||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
||||
match stmt {
|
||||
Stmt::Continue(_) => self.has_continue = true,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use ruff_allocator::{Allocator, CloneIn};
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, ElifElseClause, Expr, Stmt};
|
||||
@@ -154,16 +155,22 @@ pub(crate) fn if_else_block_instead_of_if_exp(checker: &mut Checker, stmt_if: &a
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
fn ternary(target_var: &Expr, body_value: &Expr, test: &Expr, orelse_value: &Expr) -> Stmt {
|
||||
fn ternary<'ast>(
|
||||
target_var: &Expr,
|
||||
body_value: &Expr,
|
||||
test: &Expr,
|
||||
orelse_value: &Expr,
|
||||
allocator: &'ast Allocator,
|
||||
) -> Stmt<'ast> {
|
||||
let node = ast::ExprIf {
|
||||
test: Box::new(test.clone()),
|
||||
body: Box::new(body_value.clone()),
|
||||
orelse: Box::new(orelse_value.clone()),
|
||||
test: ruff_allocator::Box::new_in(test.clone_in(allocator), allocator),
|
||||
body: ruff_allocator::Box::new_in(body_value.clone_in(allocator), allocator),
|
||||
orelse: ruff_allocator::Box::new_in(orelse_value.clone_in(allocator), allocator),
|
||||
range: TextRange::default(),
|
||||
};
|
||||
let node1 = ast::StmtAssign {
|
||||
targets: vec![target_var.clone()],
|
||||
value: Box::new(node.into()),
|
||||
targets: vec![target_var.clone_in(allocator)],
|
||||
value: ruff_allocator::Box::new_in(node.into(), allocator),
|
||||
range: TextRange::default(),
|
||||
};
|
||||
node1.into()
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use itertools::all;
|
||||
use ruff_allocator::{Allocator, CloneIn};
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::any_over_expr;
|
||||
@@ -58,7 +60,11 @@ impl Violation for ReimplementedBuiltin {
|
||||
}
|
||||
|
||||
/// SIM110, SIM111
|
||||
pub(crate) fn convert_for_loop_to_any_all(checker: &mut Checker, stmt: &Stmt) {
|
||||
pub(crate) fn convert_for_loop_to_any_all(
|
||||
checker: &mut Checker,
|
||||
stmt: &Stmt,
|
||||
allocator: &Allocator,
|
||||
) {
|
||||
if !checker.semantic().current_scope().kind.is_function() {
|
||||
return;
|
||||
}
|
||||
@@ -90,11 +96,12 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &mut Checker, stmt: &Stmt) {
|
||||
// Replace with `any`.
|
||||
(true, false) => {
|
||||
let contents = return_stmt(
|
||||
Name::new_static("any"),
|
||||
"any",
|
||||
loop_.test,
|
||||
loop_.target,
|
||||
loop_.iter,
|
||||
checker.generator(),
|
||||
allocator,
|
||||
);
|
||||
|
||||
// Don't flag if the resulting expression would exceed the maximum line length.
|
||||
@@ -133,7 +140,7 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &mut Checker, stmt: &Stmt) {
|
||||
range: _,
|
||||
}) = &loop_.test
|
||||
{
|
||||
*operand.clone()
|
||||
(**operand).clone_in(allocator)
|
||||
} else if let Expr::Compare(ast::ExprCompare {
|
||||
left,
|
||||
ops,
|
||||
@@ -155,35 +162,43 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &mut Checker, stmt: &Stmt) {
|
||||
CmpOp::NotIn => CmpOp::In,
|
||||
};
|
||||
let node = ast::ExprCompare {
|
||||
left: left.clone(),
|
||||
ops: Box::from([op]),
|
||||
comparators: Box::from([comparator.clone()]),
|
||||
left: left.clone_in(allocator),
|
||||
ops: allocator.alloc_slice_fill_iter([op]),
|
||||
comparators: allocator
|
||||
.alloc_slice_fill_iter([comparator.clone_in(allocator)]),
|
||||
range: TextRange::default(),
|
||||
};
|
||||
node.into()
|
||||
Expr::Compare(node)
|
||||
} else {
|
||||
let node = ast::ExprUnaryOp {
|
||||
op: UnaryOp::Not,
|
||||
operand: Box::new(loop_.test.clone()),
|
||||
operand: ruff_allocator::Box::new_in(
|
||||
loop_.test.clone_in(allocator),
|
||||
allocator,
|
||||
),
|
||||
range: TextRange::default(),
|
||||
};
|
||||
node.into()
|
||||
Expr::UnaryOp(node)
|
||||
}
|
||||
} else {
|
||||
let node = ast::ExprUnaryOp {
|
||||
op: UnaryOp::Not,
|
||||
operand: Box::new(loop_.test.clone()),
|
||||
operand: ruff_allocator::Box::new_in(
|
||||
loop_.test.clone_in(allocator),
|
||||
allocator,
|
||||
),
|
||||
range: TextRange::default(),
|
||||
};
|
||||
node.into()
|
||||
Expr::UnaryOp(node)
|
||||
}
|
||||
};
|
||||
let contents = return_stmt(
|
||||
Name::new_static("all"),
|
||||
"all",
|
||||
&test,
|
||||
loop_.target,
|
||||
loop_.iter,
|
||||
checker.generator(),
|
||||
allocator,
|
||||
);
|
||||
|
||||
// Don't flag if the resulting expression would exceed the maximum line length.
|
||||
@@ -230,11 +245,11 @@ struct Loop<'a> {
|
||||
/// The `return` value of the loop.
|
||||
return_value: bool,
|
||||
/// The test condition in the loop.
|
||||
test: &'a Expr,
|
||||
test: &'a Expr<'a>,
|
||||
/// The target of the loop.
|
||||
target: &'a Expr,
|
||||
target: &'a Expr<'a>,
|
||||
/// The iterator of the loop.
|
||||
iter: &'a Expr,
|
||||
iter: &'a Expr<'a>,
|
||||
}
|
||||
|
||||
/// Represents a `return` statement following a `for` loop, like:
|
||||
@@ -256,10 +271,10 @@ struct Loop<'a> {
|
||||
#[derive(Debug)]
|
||||
struct Terminal<'a> {
|
||||
return_value: bool,
|
||||
stmt: &'a Stmt,
|
||||
stmt: &'a Stmt<'a>,
|
||||
}
|
||||
|
||||
fn match_loop(stmt: &Stmt) -> Option<Loop> {
|
||||
fn match_loop<'a>(stmt: &Stmt<'a>) -> Option<Loop<'a>> {
|
||||
let Stmt::For(ast::StmtFor {
|
||||
body, target, iter, ..
|
||||
}) = stmt
|
||||
@@ -310,7 +325,7 @@ fn match_loop(stmt: &Stmt) -> Option<Loop> {
|
||||
/// return True
|
||||
/// return False
|
||||
/// ```
|
||||
fn match_else_return(stmt: &Stmt) -> Option<Terminal> {
|
||||
fn match_else_return<'a>(stmt: &Stmt<'a>) -> Option<Terminal<'a>> {
|
||||
let Stmt::For(ast::StmtFor { orelse, .. }) = stmt else {
|
||||
return None;
|
||||
};
|
||||
@@ -379,12 +394,19 @@ fn match_sibling_return<'a>(stmt: &'a Stmt, sibling: &'a Stmt) -> Option<Termina
|
||||
}
|
||||
|
||||
/// Generate a return statement for an `any` or `all` builtin comprehension.
|
||||
fn return_stmt(id: Name, test: &Expr, target: &Expr, iter: &Expr, generator: Generator) -> String {
|
||||
fn return_stmt(
|
||||
id: &str,
|
||||
test: &Expr,
|
||||
target: &Expr,
|
||||
iter: &Expr,
|
||||
generator: Generator,
|
||||
allocator: &Allocator,
|
||||
) -> String {
|
||||
let node = ast::ExprGenerator {
|
||||
elt: Box::new(test.clone()),
|
||||
elt: ruff_allocator::Box::new_in(test.clone_in(allocator), allocator),
|
||||
generators: vec![Comprehension {
|
||||
target: target.clone(),
|
||||
iter: iter.clone(),
|
||||
target: target.clone_in(allocator),
|
||||
iter: iter.clone_in(allocator),
|
||||
ifs: vec![],
|
||||
is_async: false,
|
||||
range: TextRange::default(),
|
||||
@@ -393,21 +415,21 @@ fn return_stmt(id: Name, test: &Expr, target: &Expr, iter: &Expr, generator: Gen
|
||||
parenthesized: false,
|
||||
};
|
||||
let node1 = ast::ExprName {
|
||||
id,
|
||||
id: allocator.alloc_str(id),
|
||||
ctx: ExprContext::Load,
|
||||
range: TextRange::default(),
|
||||
};
|
||||
let node2 = ast::ExprCall {
|
||||
func: Box::new(node1.into()),
|
||||
func: ruff_allocator::Box::new_in(node1.into(), allocator),
|
||||
arguments: Arguments {
|
||||
args: Box::from([node.into()]),
|
||||
keywords: Box::from([]),
|
||||
args: allocator.alloc_slice_fill_iter([node.into()]),
|
||||
keywords: &mut [],
|
||||
range: TextRange::default(),
|
||||
},
|
||||
range: TextRange::default(),
|
||||
};
|
||||
let node3 = ast::StmtReturn {
|
||||
value: Some(Box::new(node2.into())),
|
||||
value: Some(ruff_allocator::Box::new_in(node2.into(), allocator)),
|
||||
range: TextRange::default(),
|
||||
};
|
||||
generator.stmt(&node3.into())
|
||||
|
||||
@@ -50,7 +50,7 @@ impl Violation for ReturnInTryExceptFinally {
|
||||
}
|
||||
}
|
||||
|
||||
fn find_return(stmts: &[Stmt]) -> Option<&Stmt> {
|
||||
fn find_return<'a, 'ast>(stmts: &'a [Stmt<'ast>]) -> Option<&'a Stmt<'ast>> {
|
||||
stmts.iter().find(|stmt| stmt.is_return_stmt())
|
||||
}
|
||||
|
||||
|
||||
@@ -82,7 +82,7 @@ enum ConstantLikelihood {
|
||||
Definitely = 2,
|
||||
}
|
||||
|
||||
impl From<&Expr> for ConstantLikelihood {
|
||||
impl From<&Expr<'_>> for ConstantLikelihood {
|
||||
/// Determine the [`ConstantLikelihood`] of an expression.
|
||||
fn from(expr: &Expr) -> Self {
|
||||
match expr {
|
||||
|
||||
@@ -116,14 +116,14 @@ pub(crate) fn zip_dict_keys_and_values(checker: &mut Checker, expr: &ast::ExprCa
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
fn get_var_attr(expr: &Expr) -> Option<(&ExprName, &Identifier)> {
|
||||
fn get_var_attr<'ast>(expr: &Expr<'ast>) -> Option<(&'ast ExprName<'ast>, &'ast Identifier<'ast>)> {
|
||||
let Expr::Call(ast::ExprCall { func, .. }) = expr else {
|
||||
return None;
|
||||
};
|
||||
let Expr::Attribute(ExprAttribute { value, attr, .. }) = func.as_ref() else {
|
||||
let Expr::Attribute(ExprAttribute { value, attr, .. }) = &**func else {
|
||||
return None;
|
||||
};
|
||||
let Expr::Name(var_name) = value.as_ref() else {
|
||||
let Expr::Name(var_name) = &**value else {
|
||||
return None;
|
||||
};
|
||||
Some((var_name, attr))
|
||||
|
||||
@@ -7,7 +7,7 @@ pub(super) fn has_slots(body: &[Stmt]) -> bool {
|
||||
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
|
||||
for target in targets {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = target {
|
||||
if id.as_str() == "__slots__" {
|
||||
if *id == "__slots__" {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -15,7 +15,7 @@ pub(super) fn has_slots(body: &[Stmt]) -> bool {
|
||||
}
|
||||
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
|
||||
if id.as_str() == "__slots__" {
|
||||
if *id == "__slots__" {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,10 +94,7 @@ fn fix_banned_relative_import(
|
||||
panic!("Expected Stmt::ImportFrom");
|
||||
};
|
||||
let node = ast::StmtImportFrom {
|
||||
module: Some(Identifier::new(
|
||||
module_path.to_string(),
|
||||
TextRange::default(),
|
||||
)),
|
||||
module: Some(Identifier::new(&module_path, TextRange::default())),
|
||||
names: names.clone(),
|
||||
level: 0,
|
||||
range: TextRange::default(),
|
||||
|
||||
@@ -311,7 +311,7 @@ fn move_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -
|
||||
|
||||
// Step 1) Remove the import.
|
||||
let remove_import_edit = fix::edits::remove_unused_imports(
|
||||
member_names.iter().map(AsRef::as_ref),
|
||||
member_names.iter().map(|name| *name),
|
||||
statement,
|
||||
parent,
|
||||
checker.locator(),
|
||||
|
||||
@@ -73,7 +73,7 @@ pub(crate) fn runtime_string_union(checker: &mut Checker, expr: &Expr) {
|
||||
}
|
||||
|
||||
/// Collect all string members in possibly-nested binary `|` expressions.
|
||||
fn traverse_op<'a>(expr: &'a Expr, strings: &mut Vec<&'a Expr>) {
|
||||
fn traverse_op<'a, 'ast>(expr: &'a Expr<'ast>, strings: &mut Vec<&'a Expr<'ast>>) {
|
||||
match expr {
|
||||
Expr::StringLiteral(_) => {
|
||||
strings.push(expr);
|
||||
|
||||
@@ -287,7 +287,7 @@ fn method(
|
||||
|
||||
fn call<'a>(
|
||||
argumentable: Argumentable,
|
||||
parameters: impl Iterator<Item = &'a Parameter>,
|
||||
parameters: impl Iterator<Item = &'a Parameter<'a>>,
|
||||
scope: &Scope,
|
||||
semantic: &SemanticModel,
|
||||
dummy_variable_rgx: &Regex,
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
use ruff_allocator::{Allocator, CloneIn};
|
||||
use ruff_python_ast::{self as ast, Arguments, ConversionFlag, Expr};
|
||||
use ruff_text_size::TextRange;
|
||||
|
||||
/// Wrap an expression in a [`ast::FStringElement::Expression`] with no special formatting.
|
||||
fn to_f_string_expression_element(inner: &Expr) -> ast::FStringElement {
|
||||
fn to_f_string_expression_element<'ast>(
|
||||
inner: &Expr,
|
||||
allocator: &'ast Allocator,
|
||||
) -> ast::FStringElement<'ast> {
|
||||
ast::FStringElement::Expression(ast::FStringExpressionElement {
|
||||
expression: Box::new(inner.clone()),
|
||||
expression: ruff_allocator::Box::new_in(inner.clone_in(allocator), allocator),
|
||||
debug_text: None,
|
||||
conversion: ConversionFlag::None,
|
||||
format_spec: None,
|
||||
@@ -13,9 +17,12 @@ fn to_f_string_expression_element(inner: &Expr) -> ast::FStringElement {
|
||||
}
|
||||
|
||||
/// Convert a string to a [`ast::FStringElement::Literal`].
|
||||
pub(super) fn to_f_string_literal_element(s: &str) -> ast::FStringElement {
|
||||
pub(super) fn to_f_string_literal_element<'ast>(
|
||||
s: &str,
|
||||
allocator: &'ast Allocator,
|
||||
) -> ast::FStringElement<'ast> {
|
||||
ast::FStringElement::Literal(ast::FStringLiteralElement {
|
||||
value: s.to_string().into_boxed_str(),
|
||||
value: allocator.alloc_str(s),
|
||||
range: TextRange::default(),
|
||||
})
|
||||
}
|
||||
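In `to_f_string_literal_element` above, the owned `Box<str>` payload is replaced by `allocator.alloc_str(s)`: the string bytes are copied into the arena and the node keeps only a `&'ast str`. A small sketch of that substitution, again assuming a bumpalo-style arena behind `ruff_allocator::Allocator`; the literal structs here are simplified stand-ins, not the real ruff types:

    use bumpalo::Bump;

    // Before: the node owns its text on the heap.
    struct OwnedLiteral {
        value: Box<str>,
    }

    // After: the node borrows text that was copied into the arena.
    struct ArenaLiteral<'ast> {
        value: &'ast str,
    }

    fn to_owned_literal(s: &str) -> OwnedLiteral {
        OwnedLiteral { value: s.to_string().into_boxed_str() }
    }

    fn to_arena_literal<'ast>(s: &str, arena: &'ast Bump) -> ArenaLiteral<'ast> {
        // One copy into the bump arena; no per-node heap allocation or Drop.
        ArenaLiteral { value: arena.alloc_str(s) }
    }

    fn main() {
        let arena = Bump::new();
        let owned = to_owned_literal("hello");
        let borrowed = to_arena_literal("hello", &arena);
        assert_eq!(&*owned.value, borrowed.value);
    }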
@@ -49,19 +56,24 @@ fn is_simple_callee(func: &Expr) -> bool {
|
||||
}
|
||||
|
||||
/// Convert an expression to a f-string element (if it looks like a good idea).
|
||||
pub(super) fn to_f_string_element(expr: &Expr) -> Option<ast::FStringElement> {
|
||||
pub(super) fn to_f_string_element<'ast>(
|
||||
expr: &Expr,
|
||||
allocator: &'ast Allocator,
|
||||
) -> Option<ast::FStringElement<'ast>> {
|
||||
match expr {
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, range }) => {
|
||||
Some(ast::FStringElement::Literal(ast::FStringLiteralElement {
|
||||
value: value.to_string().into_boxed_str(),
|
||||
value: allocator.alloc_str(value.to_str()),
|
||||
range: *range,
|
||||
}))
|
||||
}
|
||||
// These should be pretty safe to wrap in a formatted value.
|
||||
Expr::NumberLiteral(_) | Expr::BooleanLiteral(_) | Expr::Name(_) | Expr::Attribute(_) => {
|
||||
Some(to_f_string_expression_element(expr))
|
||||
Some(to_f_string_expression_element(expr, allocator))
|
||||
}
|
||||
Expr::Call(_) if is_simple_call(expr) => {
|
||||
Some(to_f_string_expression_element(expr, allocator))
|
||||
}
|
||||
Expr::Call(_) if is_simple_call(expr) => Some(to_f_string_expression_element(expr)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::fix::edits::pad;
|
||||
use ast::FStringFlags;
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::fix::edits::pad;
|
||||
use ruff_allocator::Allocator;
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr};
|
||||
@@ -60,21 +60,26 @@ fn is_static_length(elts: &[Expr]) -> bool {
|
||||
elts.iter().all(|e| !e.is_starred_expr())
|
||||
}
|
||||
|
||||
fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option<Expr> {
|
||||
fn build_fstring<'ast>(
|
||||
joiner: &str,
|
||||
joinees: &[Expr],
|
||||
allocator: &'ast Allocator,
|
||||
) -> Option<Expr<'ast>> {
|
||||
// If all elements are string constants, join them into a single string.
|
||||
if joinees.iter().all(Expr::is_string_literal_expr) {
|
||||
let node = ast::StringLiteral {
|
||||
value: joinees
|
||||
.iter()
|
||||
.filter_map(|expr| {
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = expr {
|
||||
Some(value.to_str())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.join(joiner)
|
||||
.into_boxed_str(),
|
||||
value: allocator.alloc_str(
|
||||
&joinees
|
||||
.iter()
|
||||
.filter_map(|expr| {
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = expr {
|
||||
Some(value.to_str())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.join(joiner),
|
||||
),
|
||||
..ast::StringLiteral::default()
|
||||
};
|
||||
return Some(node.into());
|
||||
@@ -90,9 +95,9 @@ fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option<Expr> {
|
||||
return None;
|
||||
}
|
||||
if !std::mem::take(&mut first) {
|
||||
f_string_elements.push(helpers::to_f_string_literal_element(joiner));
|
||||
f_string_elements.push(helpers::to_f_string_literal_element(joiner, allocator));
|
||||
}
|
||||
f_string_elements.push(helpers::to_f_string_element(expr)?);
|
||||
f_string_elements.push(helpers::to_f_string_element(expr, allocator)?);
|
||||
}
|
||||
|
||||
let node = ast::FString {
|
||||
@@ -131,7 +136,7 @@ pub(crate) fn static_join_to_fstring(checker: &mut Checker, expr: &Expr, joiner:
|
||||
|
||||
// Try to build the fstring (internally checks whether e.g. the elements are
|
||||
// convertible to f-string elements).
|
||||
let Some(new_expr) = build_fstring(joiner, joinees) else {
|
||||
let Some(new_expr) = build_fstring(joiner, joinees, checker.allocator()) else {
|
||||
return;
|
||||
};
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ use crate::rules::isort::helpers;
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct Block<'a> {
|
||||
pub(crate) nested: bool,
|
||||
pub(crate) imports: Vec<&'a Stmt>,
|
||||
pub(crate) imports: Vec<&'a Stmt<'a>>,
|
||||
pub(crate) trailer: Option<Trailer>,
|
||||
}
|
||||
|
||||
@@ -55,7 +55,7 @@ impl<'a> BlockBuilder<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn track_import(&mut self, stmt: &'a Stmt) {
|
||||
fn track_import(&mut self, stmt: &'a Stmt<'a>) {
|
||||
let index = self.blocks.len() - 1;
|
||||
self.blocks[index].imports.push(stmt);
|
||||
self.blocks[index].nested = self.nested;
|
||||
@@ -120,8 +120,8 @@ impl<'a> BlockBuilder<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> StatementVisitor<'a> for BlockBuilder<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
impl<'a> StatementVisitor<'a, 'a> for BlockBuilder<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
|
||||
// Track manual splits (e.g., `# isort: split`).
|
||||
if self
|
||||
.splits
|
||||
@@ -273,7 +273,7 @@ impl<'a> StatementVisitor<'a> for BlockBuilder<'a> {
|
||||
self.nested = prev_nested;
|
||||
}
|
||||
|
||||
fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
|
||||
fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler<'a>) {
|
||||
let prev_nested = self.nested;
|
||||
self.nested = true;
|
||||
|
||||
@@ -287,14 +287,14 @@ impl<'a> StatementVisitor<'a> for BlockBuilder<'a> {
|
||||
self.nested = prev_nested;
|
||||
}
|
||||
|
||||
fn visit_match_case(&mut self, match_case: &'a MatchCase) {
|
||||
fn visit_match_case(&mut self, match_case: &'a MatchCase<'a>) {
|
||||
for stmt in &match_case.body {
|
||||
self.visit_stmt(stmt);
|
||||
}
|
||||
self.finalize(None);
|
||||
}
|
||||
|
||||
fn visit_elif_else_clause(&mut self, elif_else_clause: &'a ElifElseClause) {
|
||||
fn visit_elif_else_clause(&mut self, elif_else_clause: &'a ElifElseClause<'a>) {
|
||||
for stmt in &elif_else_clause.body {
|
||||
self.visit_stmt(stmt);
|
||||
}
|
||||
|
||||
@@ -176,14 +176,14 @@ pub(crate) fn function_is_too_complex(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
|
||||
use ruff_allocator::Allocator;
|
||||
use ruff_python_ast::Suite;
|
||||
use ruff_python_parser::parse_module;
|
||||
|
||||
use super::get_complexity_number;
|
||||
|
||||
fn parse_suite(source: &str) -> Result<Suite> {
|
||||
Ok(parse_module(source)?.into_suite())
|
||||
fn parse_suite<'a>(source: &str, allocator: &'a Allocator) -> Result<Suite<'a>> {
|
||||
Ok(parse_module(source, allocator)?.into_suite())
|
||||
}
|
||||
|
||||
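The rewritten `parse_suite` helper above now takes an allocator, and every test below creates one in its own body before parsing, because the returned `Suite<'a>` borrows from it. A minimal sketch of that ownership pattern, using `bumpalo::Bump` as a stand-in for `ruff_allocator::Allocator` and a hypothetical `parse` helper in place of `parse_module`:

    use bumpalo::Bump;

    // Stand-in for a parsed statement whose data lives in the arena.
    struct Stmt<'ast> {
        source: &'ast str,
    }

    // Stand-in for `parse_module`/`parse_suite`: everything it returns
    // borrows from the arena supplied by the caller.
    fn parse<'ast>(source: &str, arena: &'ast Bump) -> Vec<Stmt<'ast>> {
        source
            .lines()
            .map(|line| Stmt { source: arena.alloc_str(line) })
            .collect()
    }

    fn main() {
        // The arena must be created first and must outlive the AST;
        // dropping it before `stmts` would not compile.
        let arena = Bump::new();
        let stmts = parse("def trivial():\n    pass", &arena);
        assert_eq!(stmts.len(), 2);
    }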
#[test]
|
||||
@@ -192,7 +192,8 @@ mod tests {
|
||||
def trivial():
|
||||
pass
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 1);
|
||||
Ok(())
|
||||
}
|
||||
@@ -203,7 +204,8 @@ def trivial():
|
||||
def expr_as_statement():
|
||||
0xF00D
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 1);
|
||||
Ok(())
|
||||
}
|
||||
@@ -216,7 +218,8 @@ def sequential(n):
|
||||
s = k + n
|
||||
return s
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 1);
|
||||
Ok(())
|
||||
}
|
||||
@@ -232,7 +235,8 @@ def if_elif_else_dead_path(n):
|
||||
else:
|
||||
return "smaller than or equal to three"
|
||||
"#;
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 3);
|
||||
Ok(())
|
||||
}
|
||||
@@ -249,7 +253,8 @@ def nested_ifs():
|
||||
else:
|
||||
return "smaller than or equal to three"
|
||||
"#;
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 3);
|
||||
Ok(())
|
||||
}
|
||||
@@ -261,7 +266,8 @@ def for_loop():
|
||||
for i in range(10):
|
||||
print(i)
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
@@ -275,7 +281,8 @@ def for_else(mylist):
|
||||
else:
|
||||
print(None)
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
@@ -289,7 +296,8 @@ def recursive(n):
|
||||
else:
|
||||
return n
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
@@ -306,7 +314,8 @@ def nested_functions():
|
||||
|
||||
a()
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 3);
|
||||
Ok(())
|
||||
}
|
||||
@@ -324,7 +333,8 @@ def try_else():
|
||||
else:
|
||||
print(4)
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 4);
|
||||
Ok(())
|
||||
}
|
||||
@@ -341,7 +351,8 @@ def nested_try_finally():
|
||||
finally:
|
||||
print(3)
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 1);
|
||||
Ok(())
|
||||
}
|
||||
@@ -358,7 +369,8 @@ async def foobar(a, b, c):
|
||||
async for x in a:
|
||||
pass
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 3);
|
||||
Ok(())
|
||||
}
|
||||
@@ -369,7 +381,8 @@ async def foobar(a, b, c):
|
||||
def annotated_assign():
|
||||
x: Any = None
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 1);
|
||||
Ok(())
|
||||
}
|
||||
@@ -405,7 +418,8 @@ class Class:
|
||||
|
||||
return ServiceProvider(Logger())
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 9);
|
||||
Ok(())
|
||||
}
|
||||
@@ -419,7 +433,8 @@ def process_detect_lines():
|
||||
finally:
|
||||
pass
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 1);
|
||||
Ok(())
|
||||
}
|
||||
@@ -434,7 +449,8 @@ def process_detect_lines():
|
||||
if res:
|
||||
errors.append(f"Non-zero exit code {res}")
|
||||
"#;
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
@@ -447,7 +463,8 @@ def with_lock():
|
||||
if foo:
|
||||
print('bar')
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
@@ -462,7 +479,8 @@ def f():
|
||||
case _:
|
||||
print('bar')
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
@@ -479,7 +497,8 @@ def f():
|
||||
case _:
|
||||
print('baz')
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 3);
|
||||
Ok(())
|
||||
}
|
||||
@@ -494,7 +513,8 @@ def f():
|
||||
case x:
|
||||
print(x)
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
@@ -509,7 +529,8 @@ def f():
|
||||
case 5 | _:
|
||||
print(x)
|
||||
";
|
||||
let stmts = parse_suite(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let stmts = parse_suite(source, &allocator)?;
|
||||
assert_eq!(get_complexity_number(&stmts), 2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -764,7 +764,7 @@ fn is_guarded_by_try_except(
|
||||
try_block_contains_undeprecated_attribute(try_node, &replacement.details, semantic)
|
||||
}
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
let Some(binding_id) = semantic.lookup_symbol(id.as_str()) else {
|
||||
let Some(binding_id) = semantic.lookup_symbol(id) else {
|
||||
return false;
|
||||
};
|
||||
let binding = semantic.binding(binding_id);
|
||||
@@ -840,7 +840,7 @@ impl<'a> AttributeSearcher<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitor<'_> for AttributeSearcher<'_> {
|
||||
impl Visitor<'_, '_> for AttributeSearcher<'_> {
|
||||
fn visit_expr(&mut self, expr: &'_ Expr) {
|
||||
if self.found_attribute {
|
||||
return;
|
||||
@@ -911,7 +911,7 @@ impl<'a> ImportSearcher<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl StatementVisitor<'_> for ImportSearcher<'_> {
|
||||
impl StatementVisitor<'_, '_> for ImportSearcher<'_> {
|
||||
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
|
||||
if self.found_import {
|
||||
return;
|
||||
|
||||
@@ -37,7 +37,7 @@ pub(super) fn test_expression(expr: &Expr, semantic: &SemanticModel) -> Resoluti
|
||||
match &semantic.binding(id).kind {
|
||||
BindingKind::Argument => {
|
||||
// Avoid, e.g., `self.values`.
|
||||
if matches!(name.id.as_str(), "self" | "cls") {
|
||||
if matches!(name.id, "self" | "cls") {
|
||||
Resolution::IrrelevantBinding
|
||||
} else {
|
||||
Resolution::RelevantLocal
|
||||
|
||||
@@ -45,7 +45,7 @@ pub(crate) fn assignment_to_df(targets: &[Expr]) -> Option<Diagnostic> {
|
||||
let Expr::Name(ast::ExprName { id, .. }) = target else {
|
||||
return None;
|
||||
};
|
||||
if id != "df" {
|
||||
if *id != "df" {
|
||||
return None;
|
||||
}
|
||||
Some(Diagnostic::new(PandasDfVariableName, target.range()))
|
||||
|
||||
@@ -58,7 +58,7 @@ impl Violation for PandasUseOfPdMerge {
|
||||
pub(crate) fn use_of_pd_merge(checker: &mut Checker, func: &Expr) {
|
||||
if let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = func {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() {
|
||||
if id == "pd" && attr == "merge" {
|
||||
if *id == "pd" && attr == "merge" {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(PandasUseOfPdMerge, func.range()));
|
||||
|
||||
@@ -56,7 +56,7 @@ pub(crate) fn error_suffix_on_exception_name(
|
||||
if !arguments.is_some_and(|arguments| {
|
||||
arguments.args.iter().any(|base| {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = &base {
|
||||
id == "Exception" || id.ends_with("Error")
|
||||
*id == "Exception" || id.ends_with("Error")
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
||||
@@ -147,8 +147,8 @@ impl<'a> MutationVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> StatementVisitor<'a> for MutationVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
impl<'a> StatementVisitor<'a, 'a> for MutationVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
|
||||
if match_mutation(stmt, self.target) {
|
||||
self.is_mutated = true;
|
||||
} else {
|
||||
@@ -179,13 +179,13 @@ fn match_mutation(stmt: &Stmt, id: &str) -> bool {
|
||||
let Some(ast::ExprName { id: target_id, .. }) = value.as_name_expr() else {
|
||||
return false;
|
||||
};
|
||||
target_id == id
|
||||
*target_id == id
|
||||
}
|
||||
// Ex) `foo[0] = bar`
|
||||
Stmt::Assign(ast::StmtAssign { targets, .. }) => targets.iter().any(|target| {
|
||||
if let Some(ast::ExprSubscript { value: target, .. }) = target.as_subscript_expr() {
|
||||
if let Some(ast::ExprName { id: target_id, .. }) = target.as_name_expr() {
|
||||
return target_id == id;
|
||||
return *target_id == id;
|
||||
}
|
||||
}
|
||||
false
|
||||
@@ -193,7 +193,7 @@ fn match_mutation(stmt: &Stmt, id: &str) -> bool {
|
||||
// Ex) `foo += bar`
|
||||
Stmt::AugAssign(ast::StmtAugAssign { target, .. }) => {
|
||||
if let Some(ast::ExprName { id: target_id, .. }) = target.as_name_expr() {
|
||||
target_id == id
|
||||
*target_id == id
|
||||
} else {
|
||||
false
|
||||
}
|
||||
@@ -202,7 +202,7 @@ fn match_mutation(stmt: &Stmt, id: &str) -> bool {
|
||||
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
|
||||
if let Some(ast::ExprSubscript { value: target, .. }) = target.as_subscript_expr() {
|
||||
if let Some(ast::ExprName { id: target_id, .. }) = target.as_name_expr() {
|
||||
return target_id == id;
|
||||
return *target_id == id;
|
||||
}
|
||||
}
|
||||
false
|
||||
@@ -211,7 +211,7 @@ fn match_mutation(stmt: &Stmt, id: &str) -> bool {
|
||||
Stmt::Delete(ast::StmtDelete { targets, .. }) => targets.iter().any(|target| {
|
||||
if let Some(ast::ExprSubscript { value: target, .. }) = target.as_subscript_expr() {
|
||||
if let Some(ast::ExprName { id: target_id, .. }) = target.as_name_expr() {
|
||||
return target_id == id;
|
||||
return *target_id == id;
|
||||
}
|
||||
}
|
||||
false
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use ruff_allocator::{Allocator, CloneIn};
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{
|
||||
@@ -92,6 +93,7 @@ pub(crate) fn lambda_assignment(
|
||||
annotation,
|
||||
checker.semantic(),
|
||||
checker.generator(),
|
||||
checker.allocator(),
|
||||
)
|
||||
.universal_newlines()
|
||||
.enumerate()
|
||||
@@ -146,7 +148,11 @@ pub(crate) fn lambda_assignment(
|
||||
/// The `Callable` import can be from either `collections.abc` or `typing`.
|
||||
/// If an ellipsis is used for the argument types, an empty list is returned.
|
||||
/// The returned values are cloned, so they can be used as-is.
|
||||
fn extract_types(annotation: &Expr, semantic: &SemanticModel) -> Option<(Vec<Expr>, Expr)> {
|
||||
fn extract_types<'ast>(
|
||||
annotation: &Expr,
|
||||
semantic: &SemanticModel,
|
||||
allocator: &'ast Allocator,
|
||||
) -> Option<(Vec<Expr<'ast>>, Expr<'ast>)> {
|
||||
let Expr::Subscript(ast::ExprSubscript { value, slice, .. }) = &annotation else {
|
||||
return None;
|
||||
};
|
||||
@@ -172,13 +178,16 @@ fn extract_types(annotation: &Expr, semantic: &SemanticModel) -> Option<(Vec<Exp
|
||||
// The first argument to `Callable` must be a list of types, parameter
|
||||
// specification, or ellipsis.
|
||||
let params = match param_types {
|
||||
Expr::List(ast::ExprList { elts, .. }) => elts.clone(),
|
||||
Expr::List(ast::ExprList { elts, .. }) => elts
|
||||
.into_iter()
|
||||
.map(|element| element.clone_in(allocator))
|
||||
.collect(),
|
||||
Expr::EllipsisLiteral(_) => vec![],
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
// The second argument to `Callable` must be a type.
|
||||
let return_type = return_type.clone();
|
||||
let return_type = return_type.clone_in(allocator);
|
||||
|
||||
Some((params, return_type))
|
||||
}
|
||||
@@ -191,9 +200,13 @@ fn function(
|
||||
annotation: Option<&Expr>,
|
||||
semantic: &SemanticModel,
|
||||
generator: Generator,
|
||||
allocator: &Allocator,
|
||||
) -> String {
|
||||
let body = Stmt::Return(ast::StmtReturn {
|
||||
value: Some(Box::new(body.clone())),
|
||||
value: Some(ruff_allocator::Box::new_in(
|
||||
body.clone_in(allocator),
|
||||
allocator,
|
||||
)),
|
||||
range: TextRange::default(),
|
||||
});
|
||||
let parameters = parameters.cloned().unwrap_or_default();
|
||||
@@ -207,9 +220,9 @@ fn function(
|
||||
.enumerate()
|
||||
.map(|(idx, parameter)| ParameterWithDefault {
|
||||
parameter: Parameter {
|
||||
annotation: arg_types
|
||||
.get(idx)
|
||||
.map(|arg_type| Box::new(arg_type.clone())),
|
||||
annotation: arg_types.get(idx).map(|arg_type| {
|
||||
ruff_allocator::Box::new_in(arg_type.clone_in(allocator), allocator)
|
||||
}),
|
||||
..parameter.parameter.clone()
|
||||
},
|
||||
..parameter.clone()
|
||||
@@ -221,9 +234,9 @@ fn function(
|
||||
.enumerate()
|
||||
.map(|(idx, parameter)| ParameterWithDefault {
|
||||
parameter: Parameter {
|
||||
annotation: arg_types
|
||||
.get(idx + new_posonlyargs.len())
|
||||
.map(|arg_type| Box::new(arg_type.clone())),
|
||||
annotation: arg_types.get(idx + new_posonlyargs.len()).map(|arg_type| {
|
||||
ruff_allocator::Box::new_in(arg_type.clone_in(allocator), allocator)
|
||||
}),
|
||||
..parameter.parameter.clone()
|
||||
},
|
||||
..parameter.clone()
|
||||
|
||||
@@ -472,6 +472,7 @@ struct Line {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_allocator::Allocator;
|
||||
use ruff_python_parser::parse_module;
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
@@ -556,7 +557,8 @@ if False:
|
||||
}
|
||||
|
||||
fn assert_logical_lines(contents: &str, expected: &[&str]) {
|
||||
let parsed = parse_module(contents).unwrap();
|
||||
let allocator = Allocator::new();
|
||||
let parsed = parse_module(contents, &allocator).unwrap();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = LogicalLines::from_tokens(parsed.tokens(), &locator)
|
||||
.into_iter()
|
||||
|
||||
@@ -78,7 +78,7 @@ fn is_type(expr: &Expr, semantic: &SemanticModel) -> bool {
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
// Ex) `type(obj) == int`
|
||||
matches!(
|
||||
id.as_str(),
|
||||
*id,
|
||||
"bool"
|
||||
| "bytearray"
|
||||
| "bytes"
|
||||
|
||||
@@ -384,7 +384,7 @@ struct BodyVisitor<'a> {
|
||||
}
|
||||
|
||||
impl<'a> BodyVisitor<'a> {
|
||||
fn new(semantic: &'a SemanticModel) -> Self {
|
||||
fn new(semantic: &'a SemanticModel<'a>) -> Self {
|
||||
Self {
|
||||
returns: Vec::new(),
|
||||
raised_exceptions: Vec::new(),
|
||||
@@ -400,8 +400,8 @@ impl<'a> BodyVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> StatementVisitor<'a> for BodyVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
impl<'a> StatementVisitor<'a, 'a> for BodyVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt<'a>) {
|
||||
match stmt {
|
||||
Stmt::Raise(ast::StmtRaise { exc: Some(exc), .. }) => {
|
||||
if let Some(qualified_name) = extract_raised_exception(self.semantic, exc.as_ref())
|
||||
|
||||
@@ -2014,10 +2014,10 @@ fn parse_numpy_sections(
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_google_sections(
|
||||
fn parse_google_sections<'a>(
|
||||
checker: &mut Checker,
|
||||
docstring: &Docstring,
|
||||
section_contexts: &SectionContexts,
|
||||
section_contexts: &SectionContexts<'a>,
|
||||
) {
|
||||
let mut iterator = section_contexts.iter().peekable();
|
||||
while let Some(context) = iterator.next() {
|
||||
|
||||
@@ -13,11 +13,11 @@ mod tests {
|
||||
use regex::Regex;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_allocator::Allocator;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_python_trivia::textwrap::dedent;
|
||||
use ruff_source_file::Locator;
|
||||
@@ -681,8 +681,12 @@ mod tests {
|
||||
let source_type = PySourceType::default();
|
||||
let source_kind = SourceKind::Python(contents.to_string());
|
||||
let settings = LinterSettings::for_rules(Linter::Pyflakes.rules());
|
||||
let parsed =
|
||||
ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type);
|
||||
let allocator = Allocator::new();
|
||||
let parsed = ruff_python_parser::parse_unchecked_source(
|
||||
source_kind.source_code(),
|
||||
source_type,
|
||||
&allocator,
|
||||
);
|
||||
let locator = Locator::new(&contents);
|
||||
let stylist = Stylist::from_tokens(parsed.tokens(), &locator);
|
||||
let indexer = Indexer::from_tokens(parsed.tokens(), &locator);
|
||||
|
||||
@@ -31,8 +31,8 @@ impl Violation for BreakOutsideLoop {
|
||||
|
||||
/// F701
|
||||
pub(crate) fn break_outside_loop<'a>(
|
||||
stmt: &'a Stmt,
|
||||
parents: &mut impl Iterator<Item = &'a Stmt>,
|
||||
stmt: &Stmt<'a>,
|
||||
parents: &mut impl Iterator<Item = &'a Stmt<'a>>,
|
||||
) -> Option<Diagnostic> {
|
||||
let mut child = stmt;
|
||||
for parent in parents {
|
||||
|
||||
@@ -31,8 +31,8 @@ impl Violation for ContinueOutsideLoop {
|
||||
|
||||
/// F702
|
||||
pub(crate) fn continue_outside_loop<'a>(
|
||||
stmt: &'a Stmt,
|
||||
parents: &mut impl Iterator<Item = &'a Stmt>,
|
||||
stmt: &'a Stmt<'a>,
|
||||
parents: &mut impl Iterator<Item = &'a Stmt<'a>>,
|
||||
) -> Option<Diagnostic> {
|
||||
let mut child = stmt;
|
||||
for parent in parents {
|
||||
|
||||
@@ -237,7 +237,7 @@ impl LocatedCmpOp {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
|
||||
use ruff_allocator::Allocator;
|
||||
use ruff_python_ast::CmpOp;
|
||||
use ruff_python_parser::parse_expression;
|
||||
use ruff_text_size::TextSize;
|
||||
@@ -245,7 +245,8 @@ mod tests {
|
||||
use super::{locate_cmp_ops, LocatedCmpOp};
|
||||
|
||||
fn extract_cmp_op_locations(source: &str) -> Result<Vec<LocatedCmpOp>> {
|
||||
let parsed = parse_expression(source)?;
|
||||
let allocator = Allocator::new();
|
||||
let parsed = parse_expression(source, &allocator)?;
|
||||
Ok(locate_cmp_ops(parsed.expr(), parsed.tokens()))
|
||||
}
|
||||
|
||||
|
||||
@@ -50,17 +50,17 @@ impl Violation for RaiseNotImplemented {
|
||||
}
|
||||
}
|
||||
|
||||
fn match_not_implemented(expr: &Expr) -> Option<&Expr> {
|
||||
fn match_not_implemented<'a, 'ast>(expr: &'a Expr<'ast>) -> Option<&'a Expr<'ast>> {
|
||||
match expr {
|
||||
Expr::Call(ast::ExprCall { func, .. }) => {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = func.as_ref() {
|
||||
if id == "NotImplemented" {
|
||||
if *id == "NotImplemented" {
|
||||
return Some(func);
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
if id == "NotImplemented" {
|
||||
if *id == "NotImplemented" {
|
||||
return Some(expr);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -238,7 +238,7 @@ fn is_first_party(qualified_name: &str, level: u32, checker: &Checker) -> bool {
|
||||
}
|
||||
|
||||
/// Find the `Expr` for top level `__all__` bindings.
|
||||
fn find_dunder_all_exprs<'a>(semantic: &'a SemanticModel) -> Vec<&'a ast::Expr> {
|
||||
fn find_dunder_all_exprs<'a>(semantic: &'a SemanticModel<'a>) -> Vec<&'a ast::Expr<'a>> {
|
||||
semantic
|
||||
.global_scope()
|
||||
.get_all("__all__")
|
||||
|
||||
@@ -10,7 +10,7 @@ use ruff_text_size::TextRange;
|
||||
use crate::settings::LinterSettings;
|
||||
|
||||
/// Returns the value of the `name` parameter to, e.g., a `TypeVar` constructor.
|
||||
pub(super) fn type_param_name(arguments: &Arguments) -> Option<&str> {
|
||||
pub(super) fn type_param_name<'a>(arguments: &'a Arguments) -> Option<&'a str> {
|
||||
// Handle both `TypeVar("T")` and `TypeVar(name="T")`.
|
||||
let name_param = arguments.find_argument("name", 0)?;
|
||||
if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = &name_param {
|
||||
@@ -143,7 +143,7 @@ impl SequenceIndexVisitor<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'_> for SequenceIndexVisitor<'a> {
|
||||
impl<'a> Visitor<'_, '_> for SequenceIndexVisitor<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
||||
if self.modified {
|
||||
return;
|
||||
|
||||
@@ -106,7 +106,7 @@ fn is_open(func: &Expr, semantic: &SemanticModel) -> Option<Kind> {
|
||||
}
|
||||
|
||||
/// Returns the mode argument, if present.
|
||||
fn extract_mode(call: &ast::ExprCall, kind: Kind) -> Option<&Expr> {
|
||||
fn extract_mode<'a, 'ast>(call: &'a ast::ExprCall<'ast>, kind: Kind) -> Option<&'a Expr<'ast>> {
|
||||
match kind {
|
||||
Kind::Builtin => call.arguments.find_argument("mode", 1),
|
Some files were not shown because too many files have changed in this diff.