Compare commits
17 commits
dhruv/keep... ... perf-node-...
| Author | SHA1 | Date |
|---|---|---|
| | 82f33db5e6 | |
| | f873d2ac12 | |
| | ecd9e6a650 | |
| | 785c39927b | |
| | a35cdbb275 | |
| | 0c98b5949c | |
| | e5f37a8254 | |
| | 5c5dfc11f0 | |
| | 678045e1aa | |
| | dedefd73da | |
| | 37a60460ed | |
| | 0bd258a370 | |
| | 9baab8672a | |
| | c65e3310d5 | |
| | 38c19fb96e | |
| | abb4cdbf3d | |
| | fc811f5168 | |
Cargo.lock (generated), 7 changes
@@ -1953,7 +1953,6 @@ dependencies = [
  "red_knot_python_semantic",
  "red_knot_workspace",
  "ruff_db",
- "ruff_linter",
  "ruff_notebook",
  "ruff_python_ast",
  "ruff_source_file",
@@ -2742,7 +2741,7 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"

 [[package]]
 name = "salsa"
 version = "0.18.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
 dependencies = [
  "append-only-vec",
  "arc-swap",

@@ -2762,12 +2761,12 @@ dependencies = [

 [[package]]
 name = "salsa-macro-rules"
 version = "0.1.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"

 [[package]]
 name = "salsa-macros"
 version = "0.18.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
 dependencies = [
  "heck",
  "proc-macro2",

@@ -108,7 +108,7 @@ rand = { version = "0.8.5" }
 rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
 rustc-hash = { version = "2.0.0" }
-salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }
@@ -5,8 +5,8 @@ use colored::Colorize;
 use std::fmt;
 use std::fs::File;
 use std::io::BufWriter;
-use tracing::log::LevelFilter;
 use tracing::{Event, Subscriber};
+use tracing_subscriber::filter::LevelFilter;
 use tracing_subscriber::fmt::format::Writer;
 use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
 use tracing_subscriber::registry::LookupSpan;

@@ -60,10 +60,10 @@ pub(crate) enum VerbosityLevel {
 impl VerbosityLevel {
     const fn level_filter(self) -> LevelFilter {
         match self {
-            VerbosityLevel::Default => LevelFilter::Warn,
-            VerbosityLevel::Verbose => LevelFilter::Info,
-            VerbosityLevel::ExtraVerbose => LevelFilter::Debug,
-            VerbosityLevel::Trace => LevelFilter::Trace,
+            VerbosityLevel::Default => LevelFilter::WARN,
+            VerbosityLevel::Verbose => LevelFilter::INFO,
+            VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
+            VerbosityLevel::Trace => LevelFilter::TRACE,
         }
     }

@@ -88,7 +88,7 @@ pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuar
     match level {
         VerbosityLevel::Default => {
             // Show warning traces
-            EnvFilter::default().add_directive(tracing::level_filters::LevelFilter::WARN.into())
+            EnvFilter::default().add_directive(LevelFilter::WARN.into())
         }
         level => {
            let level_filter = level.level_filter();
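
A note on the hunk above: `tracing::log::LevelFilter` (log-crate style, `Warn`/`Info`) is swapped for `tracing_subscriber::filter::LevelFilter`, whose constants are `WARN`/`INFO`/`DEBUG`/`TRACE`. A minimal sketch of the new-style setup, assuming the `tracing` crate and `tracing-subscriber` with the `env-filter` feature (not the PR's exact code):

```rust
use tracing::level_filters::LevelFilter;
use tracing_subscriber::EnvFilter;

fn main() {
    // Build a filter that defaults to WARN, mirroring the hunk above.
    let filter = EnvFilter::default().add_directive(LevelFilter::WARN.into());

    tracing_subscriber::fmt().with_env_filter(filter).init();

    tracing::warn!("shown: WARN passes the filter");
    tracing::info!("hidden: INFO is below the WARN threshold");
}
```
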
@@ -127,7 +127,6 @@ impl TestCase {
     fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
         let package = self.db().workspace().package(self.db(), path).unwrap();
         let files = package.files(self.db());
-        let files = files.read();
         let mut collected: Vec<_> = files.into_iter().collect();
         collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
         collected
@@ -1,15 +1,18 @@
+use ruff_db::files::File;
 use ruff_db::{Db as SourceDb, Upcast};

 /// Database giving access to semantic information about a Python program.
 #[salsa::db]
-pub trait Db: SourceDb + Upcast<dyn SourceDb> {}
+pub trait Db: SourceDb + Upcast<dyn SourceDb> {
+    fn is_file_open(&self, file: File) -> bool;
+}

 #[cfg(test)]
 pub(crate) mod tests {
     use std::sync::Arc;

     use crate::module_resolver::vendored_typeshed_stubs;
-    use ruff_db::files::Files;
+    use ruff_db::files::{File, Files};
     use ruff_db::system::{DbWithTestSystem, System, TestSystem};
     use ruff_db::vendored::VendoredFileSystem;
     use ruff_db::{Db as SourceDb, Upcast};

@@ -91,7 +94,11 @@ pub(crate) mod tests {
     }

     #[salsa::db]
-    impl Db for TestDb {}
+    impl Db for TestDb {
+        fn is_file_open(&self, file: File) -> bool {
+            !file.path(self).is_vendored_path()
+        }
+    }

     #[salsa::db]
     impl salsa::Database for TestDb {
@@ -41,7 +41,7 @@ pub(crate) fn resolve_module_query<'db>(

     let module = Module::new(name.clone(), kind, search_path, module_file);

-    tracing::debug!(
+    tracing::trace!(
         "Resolved module '{name}' to '{path}'.",
         path = module_file.path(db)
     );

@@ -172,11 +172,11 @@ impl SearchPaths {
         static_paths.push(search_path);
     }

-    tracing::debug!("Adding static search path '{src_root}'");
+    tracing::debug!("Adding first-party search path '{src_root}'");
     static_paths.push(SearchPath::first_party(system, src_root)?);

     static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
-        tracing::debug!("Adding static custom-sdtlib search-path '{custom_typeshed}'");
+        tracing::debug!("Adding custom-stdlib search path '{custom_typeshed}'");

         let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
         files.try_add_root(

@@ -192,7 +192,7 @@ impl SearchPaths {
     let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());

     for path in site_packages_paths {
-        tracing::debug!("Adding site-package path '{path}'");
+        tracing::debug!("Adding site-packages search path '{path}'");
         let search_path = SearchPath::site_packages(system, path)?;
         files.try_add_root(
             db.upcast(),
@@ -1,4 +1,4 @@
-use ruff_python_ast::{AnyNodeRef, NodeKind};
+use ruff_python_ast::{AnyNodeRef, AstNode, NodeKind};
 use ruff_text_size::{Ranged, TextRange};

 /// Compact key for a node for use in a hash map.

@@ -11,7 +11,19 @@ pub(super) struct NodeKey {
 }

 impl NodeKey {
-    pub(super) fn from_node<'a, N>(node: N) -> Self
+    #[inline]
+    pub(super) fn from_node<'a, N>(node: &N) -> Self
+    where
+        N: AstNode,
+    {
+        NodeKey {
+            kind: node.kind(),
+            range: node.range(),
+        }
+    }
+
+    #[inline]
+    pub(super) fn from_ref<'a, N>(node: N) -> Self
     where
         N: Into<AnyNodeRef<'a>>,
     {
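
The `NodeKey` hunk above splits construction in two: `from_node` borrows any concrete AST node type, while `from_ref` accepts anything convertible into the type-erased `AnyNodeRef`. A self-contained sketch of the same two-constructor pattern, using toy stand-ins rather than the `ruff_python_ast` API:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum NodeKind {
    Expr,
}

// Stand-in for `AstNode`: every concrete node knows its kind and range.
trait AstNode {
    fn kind(&self) -> NodeKind;
    fn range(&self) -> (u32, u32);
}

struct ExprNode {
    start: u32,
    end: u32,
}

impl AstNode for ExprNode {
    fn kind(&self) -> NodeKind {
        NodeKind::Expr
    }
    fn range(&self) -> (u32, u32) {
        (self.start, self.end)
    }
}

// Stand-in for the type-erased `AnyNodeRef<'a>`.
enum AnyNodeRef<'a> {
    Expr(&'a ExprNode),
}

impl<'a> From<&'a ExprNode> for AnyNodeRef<'a> {
    fn from(node: &'a ExprNode) -> Self {
        AnyNodeRef::Expr(node)
    }
}

#[derive(Debug, PartialEq, Eq, Hash)]
struct NodeKey {
    kind: NodeKind,
    range: (u32, u32),
}

impl NodeKey {
    // Borrowing `&N` lets callers keep ownership of the node.
    fn from_node<N: AstNode>(node: &N) -> Self {
        NodeKey { kind: node.kind(), range: node.range() }
    }

    // `Into<AnyNodeRef>` accepts the already-erased reference enum.
    fn from_ref<'a, N: Into<AnyNodeRef<'a>>>(node: N) -> Self {
        match node.into() {
            AnyNodeRef::Expr(expr) => NodeKey::from_node(expr),
        }
    }
}

fn main() {
    let expr = ExprNode { start: 0, end: 5 };
    assert_eq!(NodeKey::from_node(&expr), NodeKey::from_ref(&expr));
    println!("both constructors produce the same key");
}
```
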
@@ -24,7 +24,7 @@ impl Program {
         search_paths,
     } = settings;

-    tracing::info!("Target version: {target_version}");
+    tracing::info!("Target version: Python {target_version}");

     let search_paths = SearchPaths::from_settings(db, search_paths)
         .with_context(|| "Invalid search path settings")?;
@@ -154,6 +154,10 @@ impl<'db> SemanticIndex<'db> {
         &self.scopes[id]
     }

+    pub(crate) fn scope_ids(&self) -> impl Iterator<Item = ScopeId> {
+        self.scope_ids_by_scope.iter().copied()
+    }
+
     /// Returns the id of the parent scope.
     pub(crate) fn parent_scope_id(&self, scope_id: FileScopeId) -> Option<FileScopeId> {
         let scope = self.scope(scope_id);

@@ -197,12 +197,14 @@ pub(crate) mod node_key {
     pub(crate) struct ExpressionNodeKey(NodeKey);

     impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
+        #[inline]
         fn from(value: ast::ExpressionRef<'_>) -> Self {
-            Self(NodeKey::from_node(value))
+            Self(NodeKey::from_ref(value))
        }
     }

     impl From<&ast::Expr> for ExpressionNodeKey {
+        #[inline]
         fn from(value: &ast::Expr) -> Self {
             Self(NodeKey::from_node(value))
         }
@@ -5,21 +5,37 @@ use crate::builtins::builtins_scope;
 use crate::semantic_index::definition::Definition;
 use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
 use crate::semantic_index::{
-    global_scope, symbol_table, use_def_map, DefinitionWithConstraints,
+    global_scope, semantic_index, symbol_table, use_def_map, DefinitionWithConstraints,
     DefinitionWithConstraintsIterator,
 };
 use crate::types::narrow::narrowing_constraint;
 use crate::{Db, FxOrderSet};

+pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
+pub(crate) use self::diagnostic::TypeCheckDiagnostics;
+pub(crate) use self::infer::{
+    infer_definition_types, infer_expression_types, infer_scope_types, TypeInference,
+};
+
 mod builder;
+mod diagnostic;
 mod display;
 mod infer;
 mod narrow;

-pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
-pub(crate) use self::infer::{
-    infer_definition_types, infer_expression_types, infer_scope_types, TypeInference,
-};
+pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
+    let _span = tracing::trace_span!("check_types", file=?file.path(db)).entered();
+
+    let index = semantic_index(db, file);
+    let mut diagnostics = TypeCheckDiagnostics::new();
+
+    for scope_id in index.scope_ids() {
+        let result = infer_scope_types(db, scope_id);
+        diagnostics.extend(result.diagnostics());
+    }
+
+    diagnostics
+}

 /// Infer the public type of a symbol (its type as seen from outside its scope).
 pub(crate) fn symbol_ty<'db>(

@@ -206,20 +222,42 @@ impl<'db> Type<'db> {
     }
 }

 /// Resolve a member access of a type.
 ///
 /// For example, if `foo` is `Type::Instance(<Bar>)`,
 /// `foo.member(&db, "baz")` returns the type of `baz` attributes
 /// as accessed from instances of the `Bar` class.
 ///
 /// TODO: use of this method currently requires manually checking
 /// whether the returned type is `Unknown`/`Unbound`
 /// (or a union with `Unknown`/`Unbound`) in many places.
 /// Ideally we'd use a more type-safe pattern, such as returning
 /// an `Option` or a `Result` from this method, which would force
 /// us to explicitly consider whether to handle an error or propagate
 /// it up the call stack.
 #[must_use]
 pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
     match self {
         Type::Any => Type::Any,
-        Type::Never => todo!("attribute lookup on Never type"),
+        Type::Never => {
+            // TODO: attribute lookup on Never type
+            Type::Unknown
+        }
         Type::Unknown => Type::Unknown,
         Type::Unbound => Type::Unbound,
-        Type::None => todo!("attribute lookup on None type"),
-        Type::Function(_) => todo!("attribute lookup on Function type"),
+        Type::None => {
+            // TODO: attribute lookup on None type
+            Type::Unknown
+        }
+        Type::Function(_) => {
+            // TODO: attribute lookup on function type
+            Type::Unknown
+        }
         Type::Module(file) => global_symbol_ty_by_name(db, *file, name),
         Type::Class(class) => class.class_member(db, name),
         Type::Instance(_) => {
             // TODO MRO? get_own_instance_member, get_instance_member
-            todo!("attribute lookup on Instance type")
+            Type::Unknown
         }
         Type::Union(union) => union
             .elements(db)

@@ -231,7 +269,7 @@ impl<'db> Type<'db> {
         Type::Intersection(_) => {
             // TODO perform the get_member on each type in the intersection
             // TODO return the intersection of those results
-            todo!("attribute lookup on Intersection type")
+            Type::Unknown
         }
         Type::IntLiteral(_) => {
             // TODO raise error
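
A pattern worth noting in the `member` hunk: every `todo!()` becomes a sentinel `Type::Unknown`, which is what lets the new `check_types` entry point walk entire files without panicking on not-yet-implemented lookups. A toy illustration of the idea (a stand-in `Type`, not the red_knot one):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Type {
    Any,
    Unknown,
    None,
    IntLiteral(i64),
}

impl Type {
    // Unimplemented lookups fall back to `Unknown` instead of `todo!()`,
    // so a caller iterating over a whole file never panics mid-check.
    fn member(self, name: &str) -> Type {
        match self {
            Type::Any => Type::Any,
            // TODO: real attribute lookup for the remaining variants.
            _ => {
                let _ = name;
                Type::Unknown
            }
        }
    }
}

fn main() {
    // Checking keeps going even though `member` is mostly unimplemented.
    for ty in [Type::Any, Type::None, Type::IntLiteral(1)] {
        println!("{:?}.member(\"attr\") = {:?}", ty, ty.member("attr"));
    }
}
```
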
@@ -333,3 +371,115 @@ pub struct IntersectionType<'db> {
     /// directly in intersections rather than as a separate type.
     negative: FxOrderSet<Type<'db>>,
 }
+
+#[cfg(test)]
+mod tests {
+    use anyhow::Context;
+
+    use ruff_db::files::system_path_to_file;
+    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
+
+    use crate::db::tests::TestDb;
+    use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
+
+    use super::TypeCheckDiagnostics;
+
+    fn setup_db() -> TestDb {
+        let db = TestDb::new();
+        db.memory_file_system()
+            .create_directory_all("/src")
+            .unwrap();
+
+        Program::from_settings(
+            &db,
+            ProgramSettings {
+                target_version: PythonVersion::default(),
+                search_paths: SearchPathSettings {
+                    extra_paths: Vec::new(),
+                    src_root: SystemPathBuf::from("/src"),
+                    site_packages: vec![],
+                    custom_typeshed: None,
+                },
+            },
+        )
+        .expect("Valid search path settings");
+
+        db
+    }
+
+    fn assert_diagnostic_messages(diagnostics: &TypeCheckDiagnostics, expected: &[&str]) {
+        let messages: Vec<&str> = diagnostics
+            .iter()
+            .map(|diagnostic| diagnostic.message())
+            .collect();
+        assert_eq!(&messages, expected);
+    }
+
+    #[test]
+    fn unresolved_import_statement() -> anyhow::Result<()> {
+        let mut db = setup_db();
+
+        db.write_file("src/foo.py", "import bar\n")
+            .context("Failed to write foo.py")?;
+
+        let foo = system_path_to_file(&db, "src/foo.py").context("Failed to resolve foo.py")?;
+
+        let diagnostics = super::check_types(&db, foo);
+        assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
+
+        Ok(())
+    }
+
+    #[test]
+    fn unresolved_import_from_statement() {
+        let mut db = setup_db();
+
+        db.write_file("src/foo.py", "from bar import baz\n")
+            .unwrap();
+        let foo = system_path_to_file(&db, "src/foo.py").unwrap();
+        let diagnostics = super::check_types(&db, foo);
+        assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
+    }
+
+    #[test]
+    fn unresolved_import_from_resolved_module() {
+        let mut db = setup_db();
+
+        db.write_files([("/src/a.py", ""), ("/src/b.py", "from a import thing")])
+            .unwrap();
+
+        let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
+        let b_file_diagnostics = super::check_types(&db, b_file);
+        assert_diagnostic_messages(
+            &b_file_diagnostics,
+            &["Could not resolve import of 'thing' from 'a'"],
+        );
+    }
+
+    #[ignore = "\
+    A spurious second 'Unresolved import' diagnostic message is emitted on `b.py`, \
+    despite the symbol existing in the symbol table for `a.py`"]
+    #[test]
+    fn resolved_import_of_symbol_from_unresolved_import() {
+        let mut db = setup_db();
+
+        db.write_files([
+            ("/src/a.py", "import foo as foo"),
+            ("/src/b.py", "from a import foo"),
+        ])
+        .unwrap();
+
+        let a_file = system_path_to_file(&db, "/src/a.py").unwrap();
+        let a_file_diagnostics = super::check_types(&db, a_file);
+        assert_diagnostic_messages(
+            &a_file_diagnostics,
+            &["Import 'foo' could not be resolved."],
+        );
+
+        // Importing the unresolved import into a second first-party file should not trigger
+        // an additional "unresolved import" violation
+        let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
+        let b_file_diagnostics = super::check_types(&db, b_file);
+        assert_eq!(&*b_file_diagnostics, &[]);
+    }
+}
crates/red_knot_python_semantic/src/types/diagnostic.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
+use ruff_db::files::File;
+use ruff_text_size::{Ranged, TextRange};
+use std::fmt::Formatter;
+use std::ops::Deref;
+use std::sync::Arc;
+
+#[derive(Debug, Eq, PartialEq)]
+pub struct TypeCheckDiagnostic {
+    // TODO: Don't use string keys for rules
+    pub(super) rule: String,
+    pub(super) message: String,
+    pub(super) range: TextRange,
+    pub(super) file: File,
+}
+
+impl TypeCheckDiagnostic {
+    pub fn rule(&self) -> &str {
+        &self.rule
+    }
+
+    pub fn message(&self) -> &str {
+        &self.message
+    }
+
+    pub fn file(&self) -> File {
+        self.file
+    }
+}
+
+impl Ranged for TypeCheckDiagnostic {
+    fn range(&self) -> TextRange {
+        self.range
+    }
+}
+
+/// A collection of type check diagnostics.
+///
+/// The diagnostics are wrapped in an `Arc` because they need to be cloned multiple times
+/// when going from `infer_expression` to `check_file`. We could consider
+/// making [`TypeCheckDiagnostic`] a Salsa struct to have them Arena-allocated (once the Tables refactor is done).
+/// Using a Salsa struct does have the downside that it leaks the Salsa dependency into diagnostics and
+/// each Salsa struct comes with an overhead.
+#[derive(Default, Eq, PartialEq)]
+pub struct TypeCheckDiagnostics {
+    inner: Vec<std::sync::Arc<TypeCheckDiagnostic>>,
+}
+
+impl TypeCheckDiagnostics {
+    pub fn new() -> Self {
+        Self { inner: Vec::new() }
+    }
+
+    pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
+        self.inner.push(Arc::new(diagnostic));
+    }
+
+    pub(crate) fn shrink_to_fit(&mut self) {
+        self.inner.shrink_to_fit();
+    }
+}
+
+impl Extend<TypeCheckDiagnostic> for TypeCheckDiagnostics {
+    fn extend<T: IntoIterator<Item = TypeCheckDiagnostic>>(&mut self, iter: T) {
+        self.inner.extend(iter.into_iter().map(std::sync::Arc::new));
+    }
+}
+
+impl Extend<std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
+    fn extend<T: IntoIterator<Item = Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
+        self.inner.extend(iter);
+    }
+}
+
+impl<'a> Extend<&'a std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
+    fn extend<T: IntoIterator<Item = &'a Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
+        self.inner
+            .extend(iter.into_iter().map(std::sync::Arc::clone));
+    }
+}
+
+impl std::fmt::Debug for TypeCheckDiagnostics {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
+impl Deref for TypeCheckDiagnostics {
+    type Target = [std::sync::Arc<TypeCheckDiagnostic>];
+
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+
+impl IntoIterator for TypeCheckDiagnostics {
+    type Item = Arc<TypeCheckDiagnostic>;
+    type IntoIter = std::vec::IntoIter<std::sync::Arc<TypeCheckDiagnostic>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.inner.into_iter()
+    }
+}
+
+impl<'a> IntoIterator for &'a TypeCheckDiagnostics {
+    type Item = &'a Arc<TypeCheckDiagnostic>;
+    type IntoIter = std::slice::Iter<'a, std::sync::Arc<TypeCheckDiagnostic>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.inner.iter()
+    }
+}
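
The new `TypeCheckDiagnostics` stores `Arc<TypeCheckDiagnostic>` so that aggregating per-region diagnostics into per-file results clones pointers, never the message strings. A minimal sketch of that design with stand-in types (not the red_knot ones):

```rust
use std::sync::Arc;

#[derive(Debug)]
struct Diagnostic {
    message: String,
}

#[derive(Debug, Default)]
struct Diagnostics {
    inner: Vec<Arc<Diagnostic>>,
}

impl Diagnostics {
    fn push(&mut self, message: &str) {
        self.inner.push(Arc::new(Diagnostic {
            message: message.to_string(),
        }));
    }
}

// Extending from borrowed `Arc`s bumps reference counts; the `String`
// payloads are never copied, which is what makes the per-region
// `TypeInference` -> `check_file` aggregation cheap.
impl<'a> Extend<&'a Arc<Diagnostic>> for Diagnostics {
    fn extend<T: IntoIterator<Item = &'a Arc<Diagnostic>>>(&mut self, iter: T) {
        self.inner.extend(iter.into_iter().map(Arc::clone));
    }
}

fn main() {
    let mut region = Diagnostics::default();
    region.push("Import 'bar' could not be resolved.");

    let mut file_level = Diagnostics::default();
    file_level.extend(&region.inner);

    // Both collections point at the same allocation.
    assert!(Arc::ptr_eq(&region.inner[0], &file_level.inner[0]));
    println!("{file_level:?}");
}
```
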
@@ -29,7 +29,8 @@ use salsa::plumbing::AsId;
 use ruff_db::files::File;
 use ruff_db::parsed::parsed_module;
 use ruff_python_ast as ast;
-use ruff_python_ast::{Expr, ExprContext};
+use ruff_python_ast::{AnyNodeRef, ExprContext};
+use ruff_text_size::Ranged;

 use crate::builtins::builtins_scope;
 use crate::module_name::ModuleName;

@@ -40,6 +41,7 @@ use crate::semantic_index::expression::Expression;
 use crate::semantic_index::semantic_index;
 use crate::semantic_index::symbol::{FileScopeId, NodeWithScopeKind, NodeWithScopeRef, ScopeId};
 use crate::semantic_index::SemanticIndex;
+use crate::types::diagnostic::{TypeCheckDiagnostic, TypeCheckDiagnostics};
 use crate::types::{
     builtins_symbol_ty_by_name, definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType,
     Name, Type, UnionBuilder,

@@ -123,13 +125,16 @@ pub(crate) enum InferenceRegion<'db> {
 }

 /// The inferred types for a single region.
-#[derive(Debug, Eq, PartialEq, Default, Clone)]
+#[derive(Debug, Eq, PartialEq, Default)]
 pub(crate) struct TypeInference<'db> {
     /// The types of every expression in this region.
     expressions: FxHashMap<ScopedExpressionId, Type<'db>>,

     /// The types of every definition in this region.
     definitions: FxHashMap<Definition<'db>, Type<'db>>,
+
+    /// The diagnostics for this region.
+    diagnostics: TypeCheckDiagnostics,
 }

 impl<'db> TypeInference<'db> {

@@ -142,9 +147,14 @@ impl<'db> TypeInference<'db> {
         self.definitions[&definition]
     }

+    pub(crate) fn diagnostics(&self) -> &[std::sync::Arc<TypeCheckDiagnostic>] {
+        &self.diagnostics
+    }
+
     fn shrink_to_fit(&mut self) {
         self.expressions.shrink_to_fit();
         self.definitions.shrink_to_fit();
+        self.diagnostics.shrink_to_fit();
     }
 }
@@ -235,6 +245,7 @@ impl<'db> TypeInferenceBuilder<'db> {
     fn extend(&mut self, inference: &TypeInference<'db>) {
         self.types.definitions.extend(inference.definitions.iter());
         self.types.expressions.extend(inference.expressions.iter());
+        self.types.diagnostics.extend(&inference.diagnostics);
     }

     /// Infers types in the given [`InferenceRegion`].

@@ -855,7 +866,26 @@ impl<'db> TypeInferenceBuilder<'db> {
         asname: _,
     } = alias;

-    let module_ty = self.module_ty_from_name(ModuleName::new(name));
+    let module_ty = ModuleName::new(name)
+        .ok_or(ModuleResolutionError::InvalidSyntax)
+        .and_then(|module_name| self.module_ty_from_name(module_name));
+
+    let module_ty = match module_ty {
+        Ok(ty) => ty,
+        Err(ModuleResolutionError::InvalidSyntax) => {
+            tracing::debug!("Failed to resolve import due to invalid syntax");
+            Type::Unknown
+        }
+        Err(ModuleResolutionError::UnresolvedModule) => {
+            self.add_diagnostic(
+                AnyNodeRef::Alias(alias),
+                "unresolved-import",
+                format_args!("Import '{name}' could not be resolved."),
+            );
+            Type::Unknown
+        }
+    };

     self.types.definitions.insert(definition, module_ty);
 }
@@ -903,10 +933,18 @@ impl<'db> TypeInferenceBuilder<'db> {
     /// - `tail` is the relative module name stripped of all leading dots:
     ///   - `from .foo import bar` => `tail == "foo"`
     ///   - `from ..foo.bar import baz` => `tail == "foo.bar"`
-    fn relative_module_name(&self, tail: Option<&str>, level: NonZeroU32) -> Option<ModuleName> {
+    fn relative_module_name(
+        &self,
+        tail: Option<&str>,
+        level: NonZeroU32,
+    ) -> Result<ModuleName, ModuleResolutionError> {
         let Some(module) = file_to_module(self.db, self.file) else {
-            tracing::debug!("Failed to resolve file {:?} to a module", self.file);
-            return None;
+            tracing::debug!(
+                "Relative module resolution '{}' failed; could not resolve file '{}' to a module",
+                format_import_from_module(level.get(), tail),
+                self.file.path(self.db)
+            );
+            return Err(ModuleResolutionError::UnresolvedModule);
         };
         let mut level = level.get();
         if module.kind().is_package() {

@@ -914,17 +952,19 @@ impl<'db> TypeInferenceBuilder<'db> {
     }
     let mut module_name = module.name().to_owned();
     for _ in 0..level {
-        module_name = module_name.parent()?;
+        module_name = module_name
+            .parent()
+            .ok_or(ModuleResolutionError::UnresolvedModule)?;
     }
     if let Some(tail) = tail {
         if let Some(valid_tail) = ModuleName::new(tail) {
             module_name.extend(&valid_tail);
         } else {
-            tracing::debug!("Failed to resolve relative import due to invalid syntax");
-            return None;
+            tracing::debug!("Relative module resolution failed: invalid syntax");
+            return Err(ModuleResolutionError::InvalidSyntax);
         }
     }
-    Some(module_name)
+    Ok(module_name)
 }

 fn infer_import_from_definition(
@@ -944,16 +984,27 @@ impl<'db> TypeInferenceBuilder<'db> {
     // `follow_nonexistent_import_bare_to_module()`.
     let ast::StmtImportFrom { module, level, .. } = import_from;
-    tracing::trace!("Resolving imported object {alias:?} from statement {import_from:?}");
+    let module = module.as_deref();
+
     let module_name = if let Some(level) = NonZeroU32::new(*level) {
-        self.relative_module_name(module.as_deref(), level)
+        tracing::trace!(
+            "Resolving imported object '{}' from module '{}' relative to file '{}'",
+            alias.name,
+            format_import_from_module(level.get(), module),
+            self.file.path(self.db),
+        );
+        self.relative_module_name(module, level)
     } else {
-        let module_name = module
-            .as_ref()
-            .expect("Non-relative import should always have a non-None `module`!");
-        ModuleName::new(module_name)
+        tracing::trace!(
+            "Resolving imported object '{}' from module '{}'",
+            alias.name,
+            format_import_from_module(*level, module),
+        );
+        module
+            .and_then(ModuleName::new)
+            .ok_or(ModuleResolutionError::InvalidSyntax)
     };

-    let module_ty = self.module_ty_from_name(module_name);
+    let module_ty = module_name.and_then(|module_name| self.module_ty_from_name(module_name));

     let ast::Alias {
         range: _,

@@ -966,11 +1017,34 @@ impl<'db> TypeInferenceBuilder<'db> {
     // the runtime error will occur immediately (rather than when the symbol is *used*,
     // as would be the case for a symbol with type `Unbound`), so it's appropriate to
     // think of the type of the imported symbol as `Unknown` rather than `Unbound`
-    let ty = module_ty
+    let member_ty = module_ty
+        .unwrap_or(Type::Unbound)
         .member(self.db, &Name::new(&name.id))
         .replace_unbound_with(self.db, Type::Unknown);

-    self.types.definitions.insert(definition, ty);
+    if matches!(module_ty, Err(ModuleResolutionError::UnresolvedModule)) {
+        self.add_diagnostic(
+            AnyNodeRef::StmtImportFrom(import_from),
+            "unresolved-import",
+            format_args!(
+                "Import '{}{}' could not be resolved.",
+                ".".repeat(*level as usize),
+                module.unwrap_or_default()
+            ),
+        );
+    } else if module_ty.is_ok() && member_ty.is_unknown() {
+        self.add_diagnostic(
+            AnyNodeRef::Alias(alias),
+            "unresolved-import",
+            format_args!(
+                "Could not resolve import of '{name}' from '{}{}'",
+                ".".repeat(*level as usize),
+                module.unwrap_or_default()
+            ),
+        );
+    }
+
+    self.types.definitions.insert(definition, member_ty);
 }

 fn infer_return_statement(&mut self, ret: &ast::StmtReturn) {
@@ -984,10 +1058,13 @@ impl<'db> TypeInferenceBuilder<'db> {
         }
     }

-    fn module_ty_from_name(&self, module_name: Option<ModuleName>) -> Type<'db> {
-        module_name
-            .and_then(|module_name| resolve_module(self.db, module_name))
-            .map_or(Type::Unknown, |module| Type::Module(module.file()))
+    fn module_ty_from_name(
+        &self,
+        module_name: ModuleName,
+    ) -> Result<Type<'db>, ModuleResolutionError> {
+        resolve_module(self.db, module_name)
+            .map(|module| Type::Module(module.file()))
+            .ok_or(ModuleResolutionError::UnresolvedModule)
     }

     fn infer_decorator(&mut self, decorator: &ast::Decorator) -> Type<'db> {
@@ -1059,7 +1136,7 @@ impl<'db> TypeInferenceBuilder<'db> {
         ast::Expr::Yield(yield_expression) => self.infer_yield_expression(yield_expression),
         ast::Expr::YieldFrom(yield_from) => self.infer_yield_from_expression(yield_from),
         ast::Expr::Await(await_expression) => self.infer_await_expression(await_expression),
-        Expr::IpyEscapeCommand(_) => todo!("Implement Ipy escape command support"),
+        ast::Expr::IpyEscapeCommand(_) => todo!("Implement Ipy escape command support"),
     };

     let expr_id = expression.scoped_ast_id(self.db, self.scope);

@@ -1117,21 +1194,7 @@ impl<'db> TypeInferenceBuilder<'db> {
         flags: _,
     } = fstring;
     for element in elements {
-        match element {
-            ast::FStringElement::Literal(_) => {
-                // TODO string literal type
-            }
-            ast::FStringElement::Expression(expr_element) => {
-                let ast::FStringExpressionElement {
-                    range: _,
-                    expression,
-                    debug_text: _,
-                    conversion: _,
-                    format_spec: _,
-                } = expr_element;
-                self.infer_expression(expression);
-            }
-        }
+        self.infer_fstring_element(element);
     }
 }

@@ -1141,6 +1204,30 @@ impl<'db> TypeInferenceBuilder<'db> {
         Type::Unknown
     }

+    fn infer_fstring_element(&mut self, element: &ast::FStringElement) {
+        match element {
+            ast::FStringElement::Literal(_) => {
+                // TODO string literal type
+            }
+            ast::FStringElement::Expression(expr_element) => {
+                let ast::FStringExpressionElement {
+                    range: _,
+                    expression,
+                    debug_text: _,
+                    conversion: _,
+                    format_spec,
+                } = expr_element;
+                self.infer_expression(expression);
+
+                if let Some(format_spec) = format_spec {
+                    for spec_element in &format_spec.elements {
+                        self.infer_fstring_element(spec_element);
+                    }
+                }
+            }
+        }
+    }
+
     #[allow(clippy::unused_self)]
     fn infer_ellipsis_literal_expression(
         &mut self,
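
The f-string refactor above matters because format specs can nest expressions (`f"{x:{width}}"`); extracting a recursive `infer_fstring_element` visits those too. A toy model of the recursive traversal (stand-in element types, not ruff's AST):

```rust
// Toy model of an f-string: literals, and expressions whose format spec may
// itself contain further elements (as in f"{x:{width}}").
enum Element {
    Literal(String),
    Expression {
        name: String,
        format_spec: Vec<Element>,
    },
}

// Recursing into `format_spec` is the fix: a non-recursive visitor would
// miss `width` in f"{x:{width}}".
fn visit(element: &Element, seen: &mut Vec<String>) {
    match element {
        Element::Literal(_) => {}
        Element::Expression { name, format_spec } => {
            seen.push(name.clone());
            for spec_element in format_spec {
                visit(spec_element, seen);
            }
        }
    }
}

fn main() {
    // Model of f"pre{x:{width}}"
    let elements = vec![
        Element::Literal("pre".to_string()),
        Element::Expression {
            name: "x".to_string(),
            format_spec: vec![Element::Expression {
                name: "width".to_string(),
                format_spec: vec![],
            }],
        },
    ];

    let mut seen = Vec::new();
    for element in &elements {
        visit(element, &mut seen);
    }
    assert_eq!(seen, ["x", "width"]);
}
```
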
@@ -1706,6 +1793,28 @@ impl<'db> TypeInferenceBuilder<'db> {
         }
     }

+    /// Adds a new diagnostic.
+    ///
+    /// The diagnostic does not get added if the rule isn't enabled for this file.
+    fn add_diagnostic(&mut self, node: AnyNodeRef, rule: &str, message: std::fmt::Arguments) {
+        if !self.db.is_file_open(self.file) {
+            return;
+        }
+
+        // TODO: Don't emit the diagnostic if:
+        // * The enclosing node contains any syntax errors
+        // * The rule is disabled for this file. We probably want to introduce a new query that
+        //   returns a rule selector for a given file that respects the package's settings,
+        //   any global pragma comments in the file, and any per-file-ignores.
+
+        self.types.diagnostics.push(TypeCheckDiagnostic {
+            file: self.file,
+            rule: rule.to_string(),
+            message: message.to_string(),
+            range: node.range(),
+        });
+    }
+
     pub(super) fn finish(mut self) -> TypeInference<'db> {
         self.infer_region();
         self.types.shrink_to_fit();
@@ -1713,6 +1822,20 @@ impl<'db> TypeInferenceBuilder<'db> {
     }
 }

+fn format_import_from_module(level: u32, module: Option<&str>) -> String {
+    format!(
+        "{}{}",
+        ".".repeat(level as usize),
+        module.unwrap_or_default()
+    )
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+enum ModuleResolutionError {
+    InvalidSyntax,
+    UnresolvedModule,
+}
+
 #[cfg(test)]
 mod tests {
     use anyhow::Context;
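
Threading this small error enum through the resolution helpers (instead of returning `Option`) is what lets the caller distinguish "the module doesn't exist" (emit a diagnostic) from "the import statement itself is syntactically invalid" (stay silent, since the parser already reported it). A minimal sketch of that flow with stand-in functions:

```rust
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum ModuleResolutionError {
    InvalidSyntax,
    UnresolvedModule,
}

// Stand-in for `ModuleName::new`: rejects syntactically invalid names.
fn parse_module_name(raw: &str) -> Result<String, ModuleResolutionError> {
    if raw.is_empty() {
        Err(ModuleResolutionError::InvalidSyntax)
    } else {
        Ok(raw.to_string())
    }
}

// Stand-in for `module_ty_from_name`: only "os" resolves.
fn resolve(name: String) -> Result<&'static str, ModuleResolutionError> {
    if name == "os" {
        Ok("module type for 'os'")
    } else {
        Err(ModuleResolutionError::UnresolvedModule)
    }
}

fn infer_import(raw: &str) -> &'static str {
    match parse_module_name(raw).and_then(resolve) {
        Ok(ty) => ty,
        // Invalid syntax: the parser already reported it, so fall back silently.
        Err(ModuleResolutionError::InvalidSyntax) => "Unknown (no diagnostic)",
        // A well-formed name that doesn't resolve deserves a diagnostic.
        Err(ModuleResolutionError::UnresolvedModule) => "Unknown (diagnostic emitted)",
    }
}

fn main() {
    assert_eq!(infer_import("os"), "module type for 'os'");
    assert_eq!(infer_import(""), "Unknown (no diagnostic)");
    assert_eq!(infer_import("bar"), "Unknown (diagnostic emitted)");
}
```
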
@@ -1966,6 +2089,16 @@ mod tests {
         Ok(())
     }

+    #[test]
+    fn from_import_with_no_module_name() -> anyhow::Result<()> {
+        // This test checks that invalid syntax in a `StmtImportFrom` node
+        // leads to the type being inferred as `Unknown`
+        let mut db = setup_db();
+        db.write_file("src/foo.py", "from import bar")?;
+        assert_public_ty(&db, "src/foo.py", "bar", "Unknown");
+        Ok(())
+    }
+
     #[test]
     fn resolve_base_class_by_name() -> anyhow::Result<()> {
         let mut db = setup_db();
@@ -14,7 +14,6 @@ license = { workspace = true }
 red_knot_python_semantic = { workspace = true }
 red_knot_workspace = { workspace = true }
 ruff_db = { workspace = true }
-ruff_linter = { workspace = true }
 ruff_notebook = { workspace = true }
 ruff_python_ast = { workspace = true }
 ruff_source_file = { workspace = true }
@@ -1,5 +1,4 @@
 use lsp_types::ClientCapabilities;
-use ruff_linter::display_settings;

 #[derive(Debug, Clone, PartialEq, Eq, Default)]
 #[allow(clippy::struct_excessive_bools)]

@@ -66,20 +65,3 @@ impl ResolvedClientCapabilities {
         }
     }
 }
-
-impl std::fmt::Display for ResolvedClientCapabilities {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        display_settings! {
-            formatter = f,
-            namespace = "capabilities",
-            fields = [
-                self.code_action_deferred_edit_resolution,
-                self.apply_edit,
-                self.document_changes,
-                self.workspace_refresh,
-                self.pull_diagnostics,
-            ]
-        };
-        Ok(())
-    }
-}
@@ -278,18 +278,6 @@ impl DocumentQuery {
         }
     }

-    /// Generate a source kind used by the linter.
-    pub(crate) fn make_source_kind(&self) -> ruff_linter::source_kind::SourceKind {
-        match self {
-            Self::Text { document, .. } => {
-                ruff_linter::source_kind::SourceKind::Python(document.contents().to_string())
-            }
-            Self::Notebook { notebook, .. } => {
-                ruff_linter::source_kind::SourceKind::IpyNotebook(notebook.make_ruff_notebook())
-            }
-        }
-    }
-
     /// Attempts to access the underlying notebook document that this query is selecting.
     pub fn as_notebook(&self) -> Option<&NotebookDocument> {
         match self {
@@ -109,7 +109,7 @@ impl Workspace {
     pub fn check_file(&self, file_id: &FileHandle) -> Result<Vec<String>, Error> {
         let result = self.db.check_file(file_id.file).map_err(into_error)?;

-        Ok(result.to_vec())
+        Ok(result.clone())
     }

     /// Checks all open files
@@ -17,5 +17,8 @@ fn check() {

     let result = workspace.check_file(&test).expect("Check to succeed");

-    assert_eq!(result, vec!["/test.py:1:8: Unresolved import 'random22'"]);
+    assert_eq!(
+        result,
+        vec!["/test.py:1:8: Import 'random22' could not be resolved.",]
+    );
 }
@@ -11,7 +11,6 @@ use ruff_db::{Db as SourceDb, Upcast};
 use salsa::plumbing::ZalsaDatabase;
 use salsa::{Cancelled, Event};

-use crate::lint::Diagnostics;
 use crate::workspace::{check_file, Workspace, WorkspaceMetadata};

 mod changes;

@@ -61,7 +60,7 @@ impl RootDatabase {
         self.with_db(|db| db.workspace().check(db))
     }

-    pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
+    pub fn check_file(&self, file: File) -> Result<Vec<String>, Cancelled> {
         self.with_db(|db| check_file(db, file))
     }

@@ -115,7 +114,15 @@ impl Upcast<dyn SourceDb> for RootDatabase {
 }

 #[salsa::db]
-impl SemanticDb for RootDatabase {}
+impl SemanticDb for RootDatabase {
+    fn is_file_open(&self, file: File) -> bool {
+        let Some(workspace) = &self.workspace else {
+            return false;
+        };
+
+        workspace.is_file_open(self, file)
+    }
+}

 #[salsa::db]
 impl SourceDb for RootDatabase {

@@ -242,7 +249,12 @@ pub(crate) mod tests {
     }

     #[salsa::db]
-    impl red_knot_python_semantic::Db for TestDb {}
+    impl red_knot_python_semantic::Db for TestDb {
+        fn is_file_open(&self, file: ruff_db::files::File) -> bool {
+            !file.path(self).is_vendored_path()
+        }
+    }

     #[salsa::db]
     impl Db for TestDb {}
@@ -120,7 +120,7 @@ impl RootDatabase {
     if workspace_change {
         match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
             Ok(metadata) => {
-                tracing::debug!("Reload workspace after structural change.");
+                tracing::debug!("Reloading workspace after structural change.");
                 // TODO: Handle changes in the program settings.
                 workspace.reload(self, metadata);
             }
@@ -1,5 +1,4 @@
 use std::cell::RefCell;
-use std::ops::Deref;
 use std::time::Duration;

 use tracing::debug_span;

@@ -22,7 +21,7 @@ use crate::db::Db;
 pub(crate) fn unwind_if_cancelled(db: &dyn Db) {}

 #[salsa::tracked(return_ref)]
-pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
+pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Vec<String> {
     #[allow(clippy::print_stdout)]
     if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
         for i in 0..10 {

@@ -64,7 +63,7 @@ pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
         }));
     }

-    Diagnostics::from(diagnostics)
+    diagnostics
 }

 fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {

@@ -86,7 +85,7 @@ fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {

 #[allow(unreachable_pub)]
 #[salsa::tracked(return_ref)]
-pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
+pub fn lint_semantic(db: &dyn Db, file_id: File) -> Vec<String> {
     let _span = debug_span!("lint_semantic", file=%file_id.path(db)).entered();

     let source = source_text(db.upcast(), file_id);

@@ -94,7 +93,7 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
     let semantic = SemanticModel::new(db.upcast(), file_id);

     if !parsed.is_valid() {
-        return Diagnostics::Empty;
+        return vec![];
     }

     let context = SemanticLintContext {

@@ -106,7 +105,7 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {

     SemanticVisitor { context: &context }.visit_body(parsed.suite());

-    Diagnostics::from(context.diagnostics.take())
+    context.diagnostics.take()
 }

 fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSize) -> String {

@@ -116,48 +115,13 @@ fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSi
         .source_location(start, context.source_text());
     format!(
         "{}:{}:{}: {}",
-        context.semantic.file_path().as_str(),
+        context.semantic.file_path(),
         source_location.row,
         source_location.column,
         message,
     )
 }

-fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
-    // TODO: this treats any symbol with `Type::Unknown` as an unresolved import,
-    // which isn't really correct: if it exists but has `Type::Unknown` in the
-    // module we're importing it from, we shouldn't really emit a diagnostic here,
-    // but currently do.
-    match import {
-        AnyImportRef::Import(import) => {
-            for alias in &import.names {
-                let ty = alias.ty(&context.semantic);
-
-                if ty.is_unknown() {
-                    context.push_diagnostic(format_diagnostic(
-                        context,
-                        &format!("Unresolved import '{}'", &alias.name),
-                        alias.start(),
-                    ));
-                }
-            }
-        }
-        AnyImportRef::ImportFrom(import) => {
-            for alias in &import.names {
-                let ty = alias.ty(&context.semantic);
-
-                if ty.is_unknown() {
-                    context.push_diagnostic(format_diagnostic(
-                        context,
-                        &format!("Unresolved import '{}'", &alias.name),
-                        alias.start(),
-                    ));
-                }
-            }
-        }
-    }
-}
-
 fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
     if !matches!(name.ctx, ast::ExprContext::Load) {
         return;

@@ -280,17 +244,8 @@ struct SemanticVisitor<'a> {

 impl Visitor<'_> for SemanticVisitor<'_> {
     fn visit_stmt(&mut self, stmt: &ast::Stmt) {
-        match stmt {
-            ast::Stmt::ClassDef(class) => {
-                lint_bad_override(self.context, class);
-            }
-            ast::Stmt::Import(import) => {
-                lint_unresolved_imports(self.context, AnyImportRef::Import(import));
-            }
-            ast::Stmt::ImportFrom(import) => {
-                lint_unresolved_imports(self.context, AnyImportRef::ImportFrom(import));
-            }
-            _ => {}
+        if let ast::Stmt::ClassDef(class) = stmt {
+            lint_bad_override(self.context, class);
         }

         walk_stmt(self, stmt);

@@ -308,53 +263,6 @@ impl Visitor<'_> for SemanticVisitor<'_> {
     }
 }

-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum Diagnostics {
-    Empty,
-    List(Vec<String>),
-}
-
-impl Diagnostics {
-    pub fn as_slice(&self) -> &[String] {
-        match self {
-            Diagnostics::Empty => &[],
-            Diagnostics::List(list) => list.as_slice(),
-        }
-    }
-}
-
-impl Deref for Diagnostics {
-    type Target = [String];
-    fn deref(&self) -> &Self::Target {
-        self.as_slice()
-    }
-}
-
-impl From<Vec<String>> for Diagnostics {
-    fn from(value: Vec<String>) -> Self {
-        if value.is_empty() {
-            Diagnostics::Empty
-        } else {
-            Diagnostics::List(value)
-        }
-    }
-}
-
-#[derive(Copy, Clone, Debug)]
-enum AnyImportRef<'a> {
-    Import(&'a ast::StmtImport),
-    ImportFrom(&'a ast::StmtImportFrom),
-}
-
-impl Ranged for AnyImportRef<'_> {
-    fn range(&self) -> ruff_text_size::TextRange {
-        match self {
-            AnyImportRef::Import(import) => import.range(),
-            AnyImportRef::ImportFrom(import) => import.range(),
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use red_knot_python_semantic::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
@@ -363,7 +271,7 @@ mod tests {

     use crate::db::tests::TestDb;

-    use super::{lint_semantic, Diagnostics};
+    use super::lint_semantic;

     fn setup_db() -> TestDb {
         setup_db_with_root(SystemPathBuf::from("/src"))

@@ -409,9 +317,9 @@ mod tests {
         .unwrap();

     let file = system_path_to_file(&db, "/src/a.py").expect("file to exist");
-    let Diagnostics::List(messages) = lint_semantic(&db, file) else {
-        panic!("expected some diagnostics");
-    };
+    let messages = lint_semantic(&db, file);
+
+    assert_ne!(messages, &[] as &[String], "expected some diagnostics");

     assert_eq!(
         *messages,
@@ -55,7 +55,7 @@ impl VirtualEnvironment {

     let venv_path = SysPrefixPath::new(path, system)?;
     let pyvenv_cfg_path = venv_path.join("pyvenv.cfg");
-    tracing::debug!("Attempting to parse virtual environment metadata at {pyvenv_cfg_path}");
+    tracing::debug!("Attempting to parse virtual environment metadata at '{pyvenv_cfg_path}'");

     let pyvenv_cfg = system
         .read_to_string(&pyvenv_cfg_path)

@@ -191,7 +191,7 @@ impl VirtualEnvironment {
     } else {
         tracing::warn!(
             "Failed to resolve `sys.prefix` of the system Python installation \
-            from the `home` value in the `pyvenv.cfg` file at {}. \
+            from the `home` value in the `pyvenv.cfg` file at '{}'. \
             System site-packages will not be used for module resolution.",
             venv_path.join("pyvenv.cfg")
         );

@@ -425,7 +425,7 @@ impl Deref for SysPrefixPath {

 impl fmt::Display for SysPrefixPath {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "`sys.prefix` path {}", self.0)
+        write!(f, "`sys.prefix` path '{}'", self.0)
     }
 }

@@ -482,7 +482,7 @@ impl Deref for PythonHomePath {

 impl fmt::Display for PythonHomePath {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "`home` location {}", self.0)
+        write!(f, "`home` location '{}'", self.0)
     }
 }
@@ -109,7 +109,7 @@ struct WatcherInner {
 impl Watcher {
     /// Sets up file watching for `path`.
     pub fn watch(&mut self, path: &SystemPath) -> notify::Result<()> {
-        tracing::debug!("Watching path: {path}.");
+        tracing::debug!("Watching path: '{path}'.");

         self.inner_mut()
             .watcher

@@ -118,7 +118,7 @@ impl Watcher {

     /// Stops file watching for `path`.
     pub fn unwatch(&mut self, path: &SystemPath) -> notify::Result<()> {
-        tracing::debug!("Unwatching path: {path}.");
+        tracing::debug!("Unwatching path: '{path}'.");

         self.inner_mut().watcher.unwatch(path.as_std_path())
     }

@@ -351,7 +351,7 @@ impl Debouncer {
     }

     EventKind::Any => {
-        tracing::debug!("Skip any FS event for {path}.");
+        tracing::debug!("Skipping any FS event for '{path}'.");
         return;
     }
 };
@@ -4,17 +4,19 @@ use rustc_hash::{FxBuildHasher, FxHashSet};
 use salsa::{Durability, Setter as _};

 pub use metadata::{PackageMetadata, WorkspaceMetadata};
-use ruff_db::source::{source_text, SourceDiagnostic};
+use red_knot_python_semantic::types::check_types;
+use ruff_db::source::{line_index, source_text, SourceDiagnostic};
 use ruff_db::{
     files::{system_path_to_file, File},
     system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
 };
 use ruff_python_ast::{name::Name, PySourceType};
+use ruff_text_size::Ranged;

-use crate::workspace::files::{Index, IndexedFiles, PackageFiles};
+use crate::workspace::files::{Index, Indexed, PackageFiles};
 use crate::{
     db::Db,
-    lint::{lint_semantic, lint_syntax, Diagnostics},
+    lint::{lint_semantic, lint_syntax},
 };

 mod files;

@@ -92,8 +94,8 @@ pub struct Package {
     root_buf: SystemPathBuf,

     /// The files that are part of this package.
-    #[return_ref]
+    #[default]
+    #[return_ref]
     file_set: PackageFiles,
     // TODO: Add the loaded settings.
 }

@@ -141,9 +143,7 @@ impl Workspace {
         new_packages.insert(path, package);
     }

-    self.set_package_tree(db)
-        .with_durability(Durability::MEDIUM)
-        .to(new_packages);
+    self.set_package_tree(db).to(new_packages);
 }

 pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {

@@ -197,7 +197,7 @@ impl Workspace {
     ///
     /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
     pub fn open_file(self, db: &mut dyn Db, file: File) {
-        tracing::debug!("Opening file {}", file.path(db));
+        tracing::debug!("Opening file '{}'", file.path(db));

         let mut open_files = self.take_open_files(db);
         open_files.insert(file);

@@ -206,7 +206,7 @@ impl Workspace {

     /// Closes a file in the workspace.
     pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
-        tracing::debug!("Closing file {}", file.path(db));
+        tracing::debug!("Closing file '{}'", file.path(db));

         let mut open_files = self.take_open_files(db);
         let removed = open_files.remove(&file);

@@ -249,6 +249,23 @@ impl Workspace {
             FxHashSet::default()
         }
     }
+
+    /// Returns `true` if the file is open in the workspace.
+    ///
+    /// A file is considered open when:
+    /// * explicitly set as an open file using [`open_file`](Self::open_file)
+    /// * It has a [`SystemPath`] and belongs to a package's `src` files
+    /// * It has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
+    pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
+        if let Some(open_files) = self.open_files(db) {
+            open_files.contains(&file)
+        } else if let Some(system_path) = file.path(db).as_system_path() {
+            self.package(db, system_path)
+                .map_or(false, |package| package.contains_file(db, file))
+        } else {
+            file.path(db).is_system_virtual_path()
+        }
+    }
 }

 #[salsa::tracked]
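
The open-file check above is a three-step fallback: an explicit open set wins, then package membership for system paths, then "virtual files are always open". A standalone sketch of that decision ladder with plain std types (the real version consults the Salsa db):

```rust
use std::collections::HashSet;

enum FilePath {
    System(String),
    Virtual(String),
}

struct Workspace {
    // `None` means "no explicit open set": every package file counts as open.
    open_files: Option<HashSet<String>>,
    package_files: HashSet<String>,
}

impl Workspace {
    fn is_file_open(&self, path: &FilePath) -> bool {
        let key = match path {
            FilePath::System(p) | FilePath::Virtual(p) => p,
        };
        if let Some(open_files) = &self.open_files {
            // An explicit open set (e.g. editor tabs) takes precedence.
            open_files.contains(key)
        } else if let FilePath::System(p) = path {
            // Otherwise any first-party package file is "open" for checking.
            self.package_files.contains(p)
        } else {
            // Virtual files (unsaved buffers, etc.) are always considered open.
            true
        }
    }
}

fn main() {
    let ws = Workspace {
        open_files: None,
        package_files: HashSet::from(["/src/a.py".to_string()]),
    };
    assert!(ws.is_file_open(&FilePath::System("/src/a.py".into())));
    assert!(!ws.is_file_open(&FilePath::System("/site-packages/b.py".into())));
    assert!(ws.is_file_open(&FilePath::Virtual("untitled:1".into())));
}
```
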
@@ -259,13 +276,13 @@ impl Package {

     /// Returns `true` if `file` is a first-party file part of this package.
     pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
-        self.files(db).read().contains(&file)
+        self.files(db).contains(&file)
     }

     #[tracing::instrument(level = "debug", skip(db))]
     pub fn remove_file(self, db: &mut dyn Db, file: File) {
         tracing::debug!(
-            "Remove file {} from package {}",
+            "Removing file '{}' from package '{}'",
             file.path(db),
             self.name(db)
         );

@@ -278,7 +295,11 @@ impl Package {
     }

     pub fn add_file(self, db: &mut dyn Db, file: File) {
-        tracing::debug!("Add file {} to package {}", file.path(db), self.name(db));
+        tracing::debug!(
+            "Adding file '{}' to package '{}'",
+            file.path(db),
+            self.name(db)
+        );

         let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
             return;
@@ -289,10 +310,10 @@ impl Package {

     #[tracing::instrument(level = "debug", skip(db))]
     pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
-        tracing::debug!("Checking package {}", self.root(db));
+        tracing::debug!("Checking package '{}'", self.root(db));

         let mut result = Vec::new();
-        for file in &self.files(db).read() {
+        for file in &self.files(db) {
             let diagnostics = check_file(db, file);
             result.extend_from_slice(&diagnostics);
         }

@@ -301,15 +322,20 @@ impl Package {
     }

     /// Returns the files belonging to this package.
     #[salsa::tracked]
-    pub fn files(self, db: &dyn Db) -> IndexedFiles {
-        let _entered = tracing::debug_span!("files").entered();
+    pub fn files(self, db: &dyn Db) -> Indexed<'_> {
         let files = self.file_set(db);

         let indexed = match files.get() {
             Index::Lazy(vacant) => {
-                tracing::debug!("Indexing files for package {}", self.name(db));
+                let _entered =
+                    tracing::debug_span!("index_package_files", package = %self.name(db)).entered();
+
                 let files = discover_package_files(db, self.root(db));
+                tracing::info!(
+                    "Indexed {} files for package '{}'",
+                    files.len(),
+                    self.name(db)
+                );
                 vacant.set(files)
             }
             Index::Indexed(indexed) => indexed,
@@ -330,14 +356,12 @@ impl Package {
     assert_eq!(root, metadata.root());

     if self.name(db) != metadata.name() {
-        self.set_name(db)
-            .with_durability(Durability::MEDIUM)
-            .to(metadata.name);
+        self.set_name(db).to(metadata.name);
     }
 }

 pub fn reload_files(self, db: &mut dyn Db) {
-    tracing::debug!("Reload files for package {}", self.name(db));
+    tracing::debug!("Reloading files for package '{}'", self.name(db));

     if !self.file_set(db).is_lazy() {
         // Force a re-index of the files in the next revision.
@@ -347,10 +371,10 @@ impl Package {
 }

 #[salsa::tracked]
-pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
+pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<String> {
     let path = file.path(db);
     let _span = tracing::debug_span!("check_file", file=%path).entered();
-    tracing::debug!("Checking file {path}");
+    tracing::debug!("Checking file '{path}'");

     let mut diagnostics = Vec::new();
@@ -363,13 +387,25 @@ pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
     );

     // Abort checking if there are IO errors.
-    if source_text(db.upcast(), file).has_read_error() {
-        return Diagnostics::from(diagnostics);
+    let source = source_text(db.upcast(), file);
+
+    if source.has_read_error() {
+        return diagnostics;
     }

+    for diagnostic in check_types(db.upcast(), file) {
+        let index = line_index(db.upcast(), diagnostic.file());
+        let location = index.source_location(diagnostic.start(), source.as_str());
+        diagnostics.push(format!(
+            "{path}:{location}: {message}",
+            path = file.path(db),
+            message = diagnostic.message()
+        ));
+    }
+
     diagnostics.extend_from_slice(lint_syntax(db, file));
     diagnostics.extend_from_slice(lint_semantic(db, file));
-    Diagnostics::from(diagnostics)
+    diagnostics
 }

 fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
@@ -423,7 +459,7 @@ mod tests {
     use ruff_db::testing::assert_function_query_was_not_run;

     use crate::db::tests::TestDb;
-    use crate::lint::{lint_syntax, Diagnostics};
+    use crate::lint::lint_syntax;
     use crate::workspace::check_file;

     #[test]

@@ -441,9 +477,7 @@ mod tests {
     assert_eq!(source_text(&db, file).as_str(), "");
     assert_eq!(
         check_file(&db, file),
-        Diagnostics::List(vec![
-            "Failed to read file: No such file or directory".to_string()
-        ])
+        vec!["Failed to read file: No such file or directory".to_string()]
     );

     let events = db.take_salsa_events();

@@ -454,7 +488,7 @@ mod tests {
     db.write_file(path, "").unwrap();

     assert_eq!(source_text(&db, file).as_str(), "");
-    assert_eq!(check_file(&db, file), Diagnostics::Empty);
+    assert_eq!(check_file(&db, file), vec![] as Vec<String>);

     Ok(())
 }
@@ -1,4 +1,4 @@
 use std::iter::FusedIterator;
+use std::marker::PhantomData;
-use std::ops::Deref;
 use std::sync::Arc;

@@ -10,6 +10,9 @@ use ruff_db::files::File;
 use crate::db::Db;
 use crate::workspace::Package;

+/// Cheap cloneable hash set of files.
+type FileSet = Arc<FxHashSet<File>>;
+
 /// The indexed files of a package.
 ///
 /// The indexing happens lazily, but the files are then cached for subsequent reads.

@@ -18,7 +21,7 @@ use crate::workspace::Package;
 /// The implementation uses internal mutability to transition between the lazy and indexed state
 /// without triggering a new salsa revision. This is safe because the initial indexing happens on first access,
 /// so no query can be depending on the contents of the indexed files before that. All subsequent mutations to
-/// the indexed files must go through `IndexedFilesMut`, which uses the Salsa setter `package.set_file_set` to
+/// the indexed files must go through `IndexedMut`, which uses the Salsa setter `package.set_file_set` to
 /// ensure that Salsa always knows when the set of indexed files have changed.
 #[derive(Debug)]
 pub struct PackageFiles {
@@ -32,46 +35,67 @@ impl PackageFiles {
|
||||
}
|
||||
}
|
||||
|
||||
fn indexed(indexed_files: IndexedFiles) -> Self {
|
||||
fn indexed(files: FileSet) -> Self {
|
||||
Self {
|
||||
state: std::sync::Mutex::new(State::Indexed(indexed_files)),
|
||||
state: std::sync::Mutex::new(State::Indexed(files)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get(&self) -> Index {
|
||||
pub(super) fn get(&self) -> Index {
|
||||
let state = self.state.lock().unwrap();
|
||||
|
||||
match &*state {
|
||||
State::Lazy => Index::Lazy(LazyFiles { files: state }),
|
||||
State::Indexed(files) => Index::Indexed(files.clone()),
|
||||
State::Indexed(files) => Index::Indexed(Indexed {
|
||||
files: Arc::clone(files),
|
||||
_lifetime: PhantomData,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_lazy(&self) -> bool {
|
||||
pub(super) fn is_lazy(&self) -> bool {
|
||||
matches!(*self.state.lock().unwrap(), State::Lazy)
|
||||
}
|
||||
|
||||
/// Returns a mutable view on the index that allows cheap in-place mutations.
|
||||
///
|
||||
/// The changes are automatically written back to the database once the view is dropped.
|
||||
pub fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedFilesMut> {
|
||||
pub(super) fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedMut> {
|
||||
// Calling `zalsa_mut` cancels all pending salsa queries. This ensures that there are no pending
|
||||
// reads to the file set.
|
||||
// TODO: Use a non-internal API instead https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries
|
||||
let _ = db.as_dyn_database_mut().zalsa_mut();
|
||||
|
||||
let files = package.file_set(db);
|
||||
|
||||
let indexed = match &*files.state.lock().unwrap() {
|
||||
State::Lazy => return None,
|
||||
State::Indexed(indexed) => indexed.clone(),
|
||||
// Replace the state with lazy. The `IndexedMut` guard restores the state
|
||||
// to `State::Indexed` or sets a new `PackageFiles` when it gets dropped to ensure the state
|
||||
// is restored to how it has been before replacing the value.
|
||||
//
|
||||
// It isn't necessary to hold on to the lock after this point:
|
||||
// * The above call to `zalsa_mut` guarantees that there's exactly **one** DB reference.
|
||||
// * `Indexed` has a `'db` lifetime, and this method requires a `&mut db`.
|
||||
// This means that there can't be any pending reference to `Indexed` because Rust
|
||||
// doesn't allow borrowing `db` as mutable (to call this method) and immutable (`Indexed<'db>`) at the same time.
|
||||
// There can't be any other `Indexed<'db>` references created by clones of this DB because
|
||||
// all clones must have been dropped at this point and the `Indexed`
|
||||
// can't outlive the database (constrained by the `db` lifetime).
|
||||
let state = {
|
||||
let files = package.file_set(db);
|
||||
let mut locked = files.state.lock().unwrap();
|
||||
std::mem::replace(&mut *locked, State::Lazy)
|
||||
};
|
||||
|
||||
Some(IndexedFilesMut {
|
||||
let indexed = match state {
|
||||
// If it's already lazy, just return. We also don't need to restore anything because the
|
||||
// replace above was a no-op.
|
||||
State::Lazy => return None,
|
||||
State::Indexed(indexed) => indexed,
|
||||
};
|
||||
|
||||
Some(IndexedMut {
|
||||
db: Some(db),
|
||||
package,
|
||||
new_revision: indexed.revision,
|
||||
indexed,
|
||||
files: indexed,
|
||||
did_change: false,
|
||||
})
|
||||
}
|
||||
}
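A minimal usage sketch of this API: the first `get` pays the indexing cost, and later mutations go through the guard. `discover_package_files` is from this diff; `db`, `package`, `new_file`, and the `/src` root are hypothetical stand-ins assumed to be in scope:

```rust
// Sketch only, not code from the diff.
let package_root = SystemPath::new("/src");

// First read: the index is still lazy, so compute and store it.
let indexed = match package.file_set(db).get() {
    Index::Lazy(lazy) => lazy.set(discover_package_files(db, package_root)),
    Index::Indexed(indexed) => indexed,
};

// Later, e.g. after a file-system event: mutate in place through the guard.
if let Some(mut index) = PackageFiles::indexed_mut(db, package) {
    index.insert(new_file); // written back to Salsa when the guard drops
}
```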
@@ -88,152 +112,93 @@ enum State {
    Lazy,

    /// The files are indexed. Stores the known files of a package.
    Indexed(IndexedFiles),
    Indexed(FileSet),
}

pub enum Index<'a> {
pub(super) enum Index<'db> {
    /// The index has not yet been computed. Allows inserting the files.
    Lazy(LazyFiles<'a>),
    Lazy(LazyFiles<'db>),

    Indexed(IndexedFiles),
    Indexed(Indexed<'db>),
}

/// Package files that have not been indexed yet.
pub struct LazyFiles<'a> {
    files: std::sync::MutexGuard<'a, State>,
pub(super) struct LazyFiles<'db> {
    files: std::sync::MutexGuard<'db, State>,
}

impl<'a> LazyFiles<'a> {
impl<'db> LazyFiles<'db> {
    /// Sets the indexed files of a package to `files`.
    pub fn set(mut self, files: FxHashSet<File>) -> IndexedFiles {
        let files = IndexedFiles::new(files);
        *self.files = State::Indexed(files.clone());
    pub(super) fn set(mut self, files: FxHashSet<File>) -> Indexed<'db> {
        let files = Indexed {
            files: Arc::new(files),
            _lifetime: PhantomData,
        };
        *self.files = State::Indexed(Arc::clone(&files.files));
        files
    }
}

/// The indexed files of a package.
///
/// # Salsa integration
/// The type is cheaply cloneable and allows for in-place mutation of the files. The in-place mutation requires
/// extra care because the type is used as the result of Salsa queries and Salsa relies on a type's equality
/// to determine if the output has changed. This is accomplished by using a `revision` that gets incremented
/// whenever the files are changed. The revision ensures that salsa's comparison of the
/// previous [`IndexedFiles`] with the next [`IndexedFiles`] returns false even though they both
/// point to the same underlying hash set.
///
/// # Equality
/// Two [`IndexedFiles`] are only equal if they have the same revision and point to the **same** (identity) hash set.
#[derive(Debug, Clone)]
pub struct IndexedFiles {
    revision: u64,
    files: Arc<std::sync::Mutex<FxHashSet<File>>>,
/// Note: This type is intentionally non-cloneable. Making it cloneable requires
/// revisiting the locking behavior in [`PackageFiles::indexed_mut`].
#[derive(Debug, PartialEq, Eq)]
pub struct Indexed<'db> {
    files: FileSet,
    // Preserve the lifetime of `PackageFiles`.
    _lifetime: PhantomData<&'db ()>,
}

impl IndexedFiles {
    fn new(files: FxHashSet<File>) -> Self {
        Self {
            files: Arc::new(std::sync::Mutex::new(files)),
            revision: 0,
        }
    }

    /// Locks the file index for reading.
    pub fn read(&self) -> IndexedFilesGuard {
        IndexedFilesGuard {
            guard: self.files.lock().unwrap(),
        }
    }
}

impl PartialEq for IndexedFiles {
    fn eq(&self, other: &Self) -> bool {
        self.revision == other.revision && Arc::ptr_eq(&self.files, &other.files)
    }
}

impl Eq for IndexedFiles {}

pub struct IndexedFilesGuard<'a> {
    guard: std::sync::MutexGuard<'a, FxHashSet<File>>,
}

impl Deref for IndexedFilesGuard<'_> {
impl Deref for Indexed<'_> {
    type Target = FxHashSet<File>;

    fn deref(&self) -> &Self::Target {
        &self.guard
        &self.files
    }
}

impl<'a> IntoIterator for &'a IndexedFilesGuard<'a> {
impl<'a> IntoIterator for &'a Indexed<'_> {
    type Item = File;
    type IntoIter = IndexedFilesIter<'a>;
    type IntoIter = std::iter::Copied<std::collections::hash_set::Iter<'a, File>>;

    fn into_iter(self) -> Self::IntoIter {
        IndexedFilesIter {
            inner: self.guard.iter(),
        }
        self.files.iter().copied()
    }
}

/// Iterator over the indexed files.
///
/// # Locks
/// Holding on to the iterator locks the file index for reading.
pub struct IndexedFilesIter<'a> {
    inner: std::collections::hash_set::Iter<'a, File>,
}

impl<'a> Iterator for IndexedFilesIter<'a> {
    type Item = File;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().copied()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl FusedIterator for IndexedFilesIter<'_> {}

impl ExactSizeIterator for IndexedFilesIter<'_> {}

/// A mutable view of a package's indexed files.
///
/// Allows in-place mutation of the files without deep cloning the hash set.
/// The changes are written back when the mutable view is dropped or by calling [`Self::set`] manually.
pub struct IndexedFilesMut<'db> {
pub(super) struct IndexedMut<'db> {
    db: Option<&'db mut dyn Db>,
    package: Package,
    indexed: IndexedFiles,
    new_revision: u64,
    files: FileSet,
    did_change: bool,
}

impl IndexedFilesMut<'_> {
    pub fn insert(&mut self, file: File) -> bool {
        if self.indexed.files.lock().unwrap().insert(file) {
            self.new_revision += 1;
impl IndexedMut<'_> {
    pub(super) fn insert(&mut self, file: File) -> bool {
        if self.files_mut().insert(file) {
            self.did_change = true;
            true
        } else {
            false
        }
    }

    pub fn remove(&mut self, file: File) -> bool {
        if self.indexed.files.lock().unwrap().remove(&file) {
            self.new_revision += 1;
    pub(super) fn remove(&mut self, file: File) -> bool {
        if self.files_mut().remove(&file) {
            self.did_change = true;
            true
        } else {
            false
        }
    }

    /// Writes the changes back to the database.
    pub fn set(mut self) {
        self.set_impl();
    fn files_mut(&mut self) -> &mut FxHashSet<File> {
        Arc::get_mut(&mut self.files).expect("All references to `FileSet` to have been dropped")
    }

    fn set_impl(&mut self) {
@@ -241,19 +206,70 @@ impl IndexedFilesMut<'_> {
            return;
        };

        if self.indexed.revision != self.new_revision {
        let files = Arc::clone(&self.files);

        if self.did_change {
            // If there are changes, set the new file_set to trigger a salsa revision change.
            self.package
                .set_file_set(db)
                .to(PackageFiles::indexed(IndexedFiles {
                    revision: self.new_revision,
                    files: self.indexed.files.clone(),
                }));
                .to(PackageFiles::indexed(files));
        } else {
            // The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state.
            *self.package.file_set(db).state.lock().unwrap() = State::Indexed(files);
        }
    }
}

impl Drop for IndexedFilesMut<'_> {
impl Drop for IndexedMut<'_> {
    fn drop(&mut self) {
        self.set_impl();
    }
}
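The guard implements a write-back-on-drop (RAII) pattern: mutations accumulate in the guard and are committed exactly once when it goes out of scope. A stripped-down sketch of the same idea, independent of Salsa; `Store` is an illustrative stand-in for the database, not a type from this diff:

```rust
// Minimal sketch of a write-back-on-drop guard.
struct Store {
    value: Vec<u32>,
}

struct StoreGuard<'a> {
    store: &'a mut Store,
    staged: Vec<u32>,
    did_change: bool,
}

impl StoreGuard<'_> {
    fn push(&mut self, item: u32) {
        self.staged.push(item);
        self.did_change = true;
    }
}

impl Drop for StoreGuard<'_> {
    // Commit the staged changes when the guard is dropped; skip the
    // write entirely if nothing changed.
    fn drop(&mut self) {
        if self.did_change {
            self.store.value = std::mem::take(&mut self.staged);
        }
    }
}
```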

#[cfg(test)]
mod tests {
    use rustc_hash::FxHashSet;

    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
    use ruff_python_ast::name::Name;

    use crate::db::tests::TestDb;
    use crate::workspace::files::Index;
    use crate::workspace::Package;

    #[test]
    fn re_entrance() -> anyhow::Result<()> {
        let mut db = TestDb::new();

        db.write_file("test.py", "")?;

        let package = Package::new(&db, Name::new("test"), SystemPathBuf::from("/test"));

        let file = system_path_to_file(&db, "test.py").unwrap();

        let files = match package.file_set(&db).get() {
            Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file])),
            Index::Indexed(files) => files,
        };

        // Calling `get` a second time should not deadlock.
        // This can e.g. happen when `check_file` iterates over all files and
        // `is_file_open` queries the open files.
        let files_2 = package.file_set(&db).get();

        match files_2 {
            Index::Lazy(_) => {
                panic!("Expected indexed files, got lazy files");
            }
            Index::Indexed(files_2) => {
                assert_eq!(
                    files_2.iter().collect::<Vec<_>>(),
                    files.iter().collect::<Vec<_>>()
                );
            }
        }

        Ok(())
    }
}
@@ -2,6 +2,7 @@

use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch::{ChangeEvent, ChangedKind};
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use ruff_benchmark::TestFile;
@@ -12,13 +13,53 @@ use ruff_db::system::{MemoryFileSystem, SystemPath, TestSystem};
struct Case {
    db: RootDatabase,
    fs: MemoryFileSystem,
    parser: File,
    re: File,
    re_path: &'static SystemPath,
}

const TOMLLIB_312_URL: &str = "https://raw.githubusercontent.com/python/cpython/8e8a4baf652f6e1cee7acde9d78c4b6154539748/Lib/tomllib";

// The first "unresolved import" is because we don't understand `*` imports yet.
// The following "unresolved import" violations are because we currently can't distinguish between
// "Symbol exists in the module but its type is unknown" and
// "Symbol does not exist in the module".
static EXPECTED_DIAGNOSTICS: &[&str] = &[
    "/src/tomllib/_parser.py:7:29: Could not resolve import of 'Iterable' from 'collections.abc'",
    "/src/tomllib/_parser.py:10:20: Could not resolve import of 'Any' from 'typing'",
    "/src/tomllib/_parser.py:13:5: Could not resolve import of 'RE_DATETIME' from '._re'",
    "/src/tomllib/_parser.py:14:5: Could not resolve import of 'RE_LOCALTIME' from '._re'",
    "/src/tomllib/_parser.py:15:5: Could not resolve import of 'RE_NUMBER' from '._re'",
    "/src/tomllib/_parser.py:20:21: Could not resolve import of 'Key' from '._types'",
    "/src/tomllib/_parser.py:20:26: Could not resolve import of 'ParseFloat' from '._types'",
    "Line 69 is too long (89 characters)",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "/src/tomllib/_parser.py:153:22: Name 'key' used when not defined.",
    "/src/tomllib/_parser.py:153:27: Name 'flag' used when not defined.",
    "/src/tomllib/_parser.py:159:16: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:161:25: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:168:16: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:169:22: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:170:25: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:180:16: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:182:31: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:206:16: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:207:22: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:208:25: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:330:32: Name 'header' used when not defined.",
    "/src/tomllib/_parser.py:330:41: Name 'key' used when not defined.",
    "/src/tomllib/_parser.py:333:26: Name 'cont_key' used when not defined.",
    "/src/tomllib/_parser.py:334:71: Name 'cont_key' used when not defined.",
    "/src/tomllib/_parser.py:337:31: Name 'cont_key' used when not defined.",
    "/src/tomllib/_parser.py:628:75: Name 'e' used when not defined.",
    "/src/tomllib/_parser.py:686:23: Name 'parse_float' used when not defined.",
];

fn get_test_file(name: &str) -> TestFile {
    let path = format!("tomllib/{name}");
    let url = format!("{TOMLLIB_312_URL}/{name}");
@@ -28,15 +69,19 @@ fn get_test_file(name: &str) -> TestFile {
fn setup_case() -> Case {
    let system = TestSystem::default();
    let fs = system.memory_file_system().clone();
    let init_path = SystemPath::new("/src/tomllib/__init__.py");
    let parser_path = SystemPath::new("/src/tomllib/_parser.py");
    let re_path = SystemPath::new("/src/tomllib/_re.py");
    let types_path = SystemPath::new("/src/tomllib/_types.py");
    fs.write_files([
        (init_path, get_test_file("__init__.py").code()),
        (
            SystemPath::new("/src/tomllib/__init__.py"),
            get_test_file("__init__.py").code(),
        ),
        (parser_path, get_test_file("_parser.py").code()),
        (re_path, get_test_file("_re.py").code()),
        (types_path, get_test_file("_types.py").code()),
        (
            SystemPath::new("/src/tomllib/_types.py"),
            get_test_file("_types.py").code(),
        ),
    ])
    .unwrap();

@@ -62,7 +107,6 @@ fn setup_case() -> Case {
    Case {
        db,
        fs,
        parser,
        re,
        re_path,
    }
@@ -72,8 +116,8 @@ fn benchmark_incremental(criterion: &mut Criterion) {
    criterion.bench_function("red_knot_check_file[incremental]", |b| {
        b.iter_batched_ref(
            || {
                let mut case = setup_case();
                case.db.check_file(case.parser).unwrap();
                let case = setup_case();
                case.db.check().unwrap();

                case.fs
                    .write_file(
@@ -82,14 +126,19 @@ fn benchmark_incremental(criterion: &mut Criterion) {
                    )
                    .unwrap();

                case.re.sync(&mut case.db);
                case
            },
            |case| {
                let Case { db, parser, .. } = case;
                let result = db.check_file(*parser).unwrap();
                let Case { db, .. } = case;

                assert_eq!(result.len(), 34);
                db.apply_changes(vec![ChangeEvent::Changed {
                    path: case.re_path.to_path_buf(),
                    kind: ChangedKind::FileContent,
                }]);

                let result = db.check().unwrap();

                assert_eq!(result, EXPECTED_DIAGNOSTICS);
            },
            BatchSize::SmallInput,
        );
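`iter_batched_ref` splits per-iteration setup from the measured routine, so the file edit and cache priming above run outside the timed section. A minimal, self-contained sketch of the same pattern with a hypothetical workload, not the benchmark itself:

```rust
use ruff_benchmark::criterion::{BatchSize, Criterion};

fn bench_sort(criterion: &mut Criterion) {
    criterion.bench_function("sort[example]", |b| {
        b.iter_batched_ref(
            // Setup: runs before every batch and is excluded from the measurement.
            || (0..10_000u32).rev().collect::<Vec<_>>(),
            // Routine: only this closure is timed; it gets `&mut` access to the setup value.
            |data| data.sort_unstable(),
            BatchSize::SmallInput,
        );
    });
}
```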
@@ -101,10 +150,10 @@ fn benchmark_cold(criterion: &mut Criterion) {
        b.iter_batched_ref(
            setup_case,
            |case| {
                let Case { db, parser, .. } = case;
                let result = db.check_file(*parser).unwrap();
                let Case { db, .. } = case;
                let result = db.check().unwrap();

                assert_eq!(result.len(), 34);
                assert_eq!(result, EXPECTED_DIAGNOSTICS);
            },
            BatchSize::SmallInput,
        );
@@ -85,7 +85,7 @@ impl Files {
            .system_by_path
            .entry(absolute.clone())
            .or_insert_with(|| {
                tracing::trace!("Adding file {path}");
                tracing::trace!("Adding file '{path}'");

                let metadata = db.system().path_metadata(path);
                let durability = self
@@ -131,7 +131,7 @@ impl Files {
            Err(_) => return Err(FileError::NotFound),
        };

        tracing::trace!("Adding vendored file {}", path);
        tracing::trace!("Adding vendored file '{}'", path);
        let file = File::builder(FilePath::Vendored(path.to_path_buf()))
            .permissions(Some(0o444))
            .revision(metadata.revision())
@@ -158,7 +158,7 @@ impl Files {
            Entry::Vacant(entry) => {
                let metadata = db.system().virtual_path_metadata(path).ok()?;

                tracing::trace!("Adding virtual file {}", path);
                tracing::trace!("Adding virtual file '{}'", path);

                let file = File::builder(FilePath::SystemVirtual(path.to_path_buf()))
                    .revision(metadata.revision())
@@ -211,7 +211,7 @@ impl Files {
    /// That's why [`File::sync_path`] is preferred if it is known that the path is a file.
    pub fn sync_recursively(db: &mut dyn Db, path: &SystemPath) {
        let path = SystemPath::absolute(path, db.system().current_directory());
        tracing::debug!("Syncing all files in {path}");
        tracing::debug!("Syncing all files in '{path}'");

        let inner = Arc::clone(&db.files().inner);
        for entry in inner.system_by_path.iter_mut() {
@@ -224,9 +224,7 @@ impl Files {

        for root in roots.all() {
            if root.path(db).starts_with(&path) {
                root.set_revision(db)
                    .with_durability(Durability::HIGH)
                    .to(FileRevision::now());
                root.set_revision(db).to(FileRevision::now());
            }
        }
    }
@@ -249,9 +247,7 @@ impl Files {
        let roots = inner.roots.read().unwrap();

        for root in roots.all() {
            root.set_revision(db)
                .with_durability(Durability::HIGH)
                .to(FileRevision::now());
            root.set_revision(db).to(FileRevision::now());
        }
    }

@@ -381,23 +377,17 @@ impl File {
            return;
        };
        let metadata = db.system().path_metadata(path);
        let durability = db.files().root(db, path).map(|root| root.durability(db));
        Self::sync_impl(db, metadata, file, durability);
        Self::sync_impl(db, metadata, file);
    }

    fn sync_system_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath, file: File) {
        let metadata = db.system().virtual_path_metadata(path);
        Self::sync_impl(db, metadata, file, None);
        Self::sync_impl(db, metadata, file);
    }

    /// Private method providing the implementation for [`Self::sync_system_path`] and
    /// [`Self::sync_system_virtual_path`].
    fn sync_impl(
        db: &mut dyn Db,
        metadata: crate::system::Result<Metadata>,
        file: File,
        durability: Option<Durability>,
    ) {
    fn sync_impl(db: &mut dyn Db, metadata: crate::system::Result<Metadata>, file: File) {
        let (status, revision, permission) = match metadata {
            Ok(metadata) if metadata.file_type().is_file() => (
                FileStatus::Exists,
@@ -410,25 +400,19 @@ impl File {
            _ => (FileStatus::NotFound, FileRevision::zero(), None),
        };

        let durability = durability.unwrap_or_default();

        if file.status(db) != status {
            tracing::debug!("Updating the status of {}", file.path(db));
            file.set_status(db).with_durability(durability).to(status);
            tracing::debug!("Updating the status of '{}'", file.path(db));
            file.set_status(db).to(status);
        }

        if file.revision(db) != revision {
            tracing::debug!("Updating the revision of {}", file.path(db));
            file.set_revision(db)
                .with_durability(durability)
                .to(revision);
            tracing::debug!("Updating the revision of '{}'", file.path(db));
            file.set_revision(db).to(revision);
        }

        if file.permissions(db) != permission {
            tracing::debug!("Updating the permissions of {}", file.path(db));
            file.set_permissions(db)
                .with_durability(durability)
                .to(permission);
            tracing::debug!("Updating the permissions of '{}'", file.path(db));
            file.set_permissions(db).to(permission);
        }
    }
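The removed `.with_durability(...)` calls use Salsa's setter-builder API: a setter returns a builder that is finalized with `.to(value)`. A minimal sketch mirroring that shape on a hypothetical salsa input named `Document`, not a type from this diff:

```rust
use salsa::Durability;

// Hypothetical input, for illustration only.
#[salsa::input]
pub struct Document {
    pub revision: u64,
}

fn bump(db: &mut dyn salsa::Database, document: Document) {
    // Plain write with default durability (what the diff switches to):
    document.set_revision(db).to(1);

    // Write with an explicit durability (what the diff removes):
    document
        .set_revision(db)
        .with_durability(Durability::HIGH)
        .to(2);
}
```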

@@ -22,7 +22,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    let kind = if is_notebook(file.path(db)) {
        file.read_to_notebook(db)
            .unwrap_or_else(|error| {
                tracing::debug!("Failed to read notebook {path}: {error}");
                tracing::debug!("Failed to read notebook '{path}': {error}");

                has_read_error = true;
                SourceDiagnostic(Arc::new(SourceTextError::FailedToReadNotebook(error)))
@@ -33,7 +33,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    } else {
        file.read_to_string(db)
            .unwrap_or_else(|error| {
                tracing::debug!("Failed to read file {path}: {error}");
                tracing::debug!("Failed to read file '{path}': {error}");

                has_read_error = true;
                SourceDiagnostic(Arc::new(SourceTextError::FailedToReadFile(error))).accumulate(db);

@@ -31,10 +31,20 @@ pub fn assert_const_function_query_was_not_run<Db, Q, QDb, R>(
    Db: salsa::Database,
    Q: Fn(QDb) -> R,
{
    let (query_name, will_execute_event) = find_will_execute_event(db, query, (), events);
    // Salsa now interns singleton ingredients, but we know this query is a singleton, so we can
    // just search for any event of that ingredient.
    let query_name = query_name(&query);

    let event = events.iter().find(|event| {
        if let salsa::EventKind::WillExecute { database_key } = event.kind {
            db.ingredient_debug_name(database_key.ingredient_index()) == query_name
        } else {
            false
        }
    });

    db.attach(|_| {
        if let Some(will_execute_event) = will_execute_event {
        if let Some(will_execute_event) = event {
            panic!(
                "Expected query {query_name}() not to have run but it did: {will_execute_event:?}"
            );
@@ -97,7 +97,16 @@ impl VendoredFileSystem {
fn read_to_string(fs: &VendoredFileSystem, path: &VendoredPath) -> Result<String> {
    let mut archive = fs.lock_archive();
    let mut zip_file = archive.lookup_path(&NormalizedVendoredPath::from(path))?;
    let mut buffer = String::new();

    // Pre-allocate the buffer with the size specified in the ZIP file metadata
    // because `read_to_string` passes `None` as the size hint.
    // But let's not trust the zip file metadata (even though it's vendored)
    // and limit it to a reasonable size.
    let mut buffer = String::with_capacity(
        usize::try_from(zip_file.size())
            .unwrap_or(usize::MAX)
            .min(10_000_000),
    );
    zip_file.read_to_string(&mut buffer)?;
    Ok(buffer)
}
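The capacity computation treats the ZIP-reported size as untrusted input: convert to `usize`, fall back to `usize::MAX` on overflow, then clamp to a 10 MB ceiling. The same arithmetic as a tiny illustrative helper; the names are not from the diff:

```rust
/// Clamp an untrusted size hint before pre-allocating a buffer.
fn clamped_capacity(reported_size: u64, max: usize) -> usize {
    usize::try_from(reported_size).unwrap_or(usize::MAX).min(max)
}

fn main() {
    // A 4 GiB lie in the ZIP metadata still allocates at most 10 MB.
    assert_eq!(clamped_capacity(4 * 1024 * 1024 * 1024, 10_000_000), 10_000_000);
    // Small files pre-allocate exactly their reported size.
    assert_eq!(clamped_capacity(4_096, 10_000_000), 4_096);
}
```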

@@ -66,3 +66,6 @@ def not_warnings_dot_deprecated(
def not_a_deprecated_function() -> None: ...

fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg"  # Error: PYI053

# see https://github.com/astral-sh/ruff/issues/12995
def foo(bar: typing.Literal["a", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]):...
@@ -59,6 +59,10 @@ pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike
        return;
    }

    if semantic.in_annotation() {
        return;
    }

    let length = match string {
        StringLike::String(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
        StringLike::Bytes(ast::ExprBytesLiteral { value, .. }) => value.len(),

@@ -152,6 +152,8 @@ PYI053.pyi:68:13: PYI053 [*] String and bytes literals longer than 50 characters
67 |
68 | fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg"  # Error: PYI053
   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
69 |
70 | # see https://github.com/astral-sh/ruff/issues/12995
   |
   = help: Replace with `...`

@@ -161,3 +163,6 @@ PYI053.pyi:68:13: PYI053 [*] String and bytes literals longer than 50 characters
67 67 |
68    |-fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg"  # Error: PYI053
   68 |+fbaz: str = ...  # Error: PYI053
69 69 |
70 70 | # see https://github.com/astral-sh/ruff/issues/12995
71 71 | def foo(bar: typing.Literal["a", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]):...
@@ -42,7 +42,7 @@ use super::helpers::{
/// import pytest
///
///
/// @pytest.fixture
/// @pytest.fixture()
/// def my_fixture(): ...
/// ```
///
@@ -52,7 +52,7 @@ use super::helpers::{
/// import pytest
///
///
/// @pytest.fixture()
/// @pytest.fixture
/// def my_fixture(): ...
/// ```
///

@@ -218,6 +218,9 @@ impl Violation for UndocumentedPublicClass {
/// raise ValueError("Tried to greet an unhappy cat.")
/// ```
///
/// ## Options
/// - `lint.pydocstyle.ignore-decorators`
///
/// ## References
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -305,6 +308,9 @@ impl Violation for UndocumentedPublicMethod {
/// raise FasterThanLightError from exc
/// ```
///
/// ## Options
/// - `lint.pydocstyle.ignore-decorators`
///
/// ## References
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -402,6 +408,9 @@ impl Violation for UndocumentedPublicPackage {
/// print(cat)  # "Cat: Dusty"
/// ```
///
/// ## Options
/// - `lint.pydocstyle.ignore-decorators`
///
/// ## References
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -502,6 +511,9 @@ impl Violation for UndocumentedPublicNestedClass {
/// self.population: int = population
/// ```
///
/// ## Options
/// - `lint.pydocstyle.ignore-decorators`
///
/// ## References
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
File diff suppressed because it is too large
@@ -3,6 +3,7 @@ use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

use anyhow::Context;
use ignore::{WalkBuilder, WalkState};

use ruff_linter::{
@@ -100,7 +101,7 @@ impl RuffSettings {

impl RuffSettingsIndex {
    pub(super) fn new(root: &Path, editor_settings: &ResolvedEditorSettings) -> Self {
        let mut error = false;
        let mut has_error = false;
        let mut index = BTreeMap::default();
        let mut respect_gitignore = None;

@@ -127,20 +128,27 @@ impl RuffSettingsIndex {
                        );
                        break;
                    }
                    Err(err) => {
                    error => {
                        tracing::error!(
                            "Error while resolving settings from {}: {err}",
                            pyproject.display()
                            "{:#}",
                            error
                                .with_context(|| {
                                    format!(
                                        "Failed to resolve settings for {}",
                                        pyproject.display()
                                    )
                                })
                                .unwrap_err()
                        );
                        error = true;
                        has_error = true;
                        continue;
                    }
                }
            }
            Ok(None) => continue,
            Err(err) => {
                tracing::error!("{err}");
                error = true;
                tracing::error!("{err:#}");
                has_error = true;
                continue;
            }
        }
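The switch from `{err}` to `{err:#}` and the `with_context(...)` chain rely on `anyhow`'s alternate `Display` formatting, which prints the error together with its full context chain on one line. A small, self-contained illustration with a hypothetical error and path, not code from this diff:

```rust
use anyhow::{Context, Result};

fn main() {
    let result: Result<()> = Err(std::io::Error::other("permission denied"))
        .context("Failed to resolve settings for /work/pyproject.toml");

    if let Err(error) = result {
        // `{:#}` renders the whole chain:
        // "Failed to resolve settings for /work/pyproject.toml: permission denied"
        eprintln!("{error:#}");
    }
}
```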

@@ -162,7 +170,7 @@ impl RuffSettingsIndex {
        let walker = builder.build_parallel();

        let index = std::sync::RwLock::new(index);
        let error = AtomicBool::new(error);
        let has_error = AtomicBool::new(has_error);

        walker.run(|| {
            Box::new(|result| {
@@ -224,19 +232,26 @@ impl RuffSettingsIndex {
                            }),
                        );
                    }
                    Err(err) => {
                    error => {
                        tracing::error!(
                            "Error while resolving settings from {}: {err}",
                            pyproject.display()
                            "{:#}",
                            error
                                .with_context(|| {
                                    format!(
                                        "Failed to resolve settings for {}",
                                        pyproject.display()
                                    )
                                })
                                .unwrap_err()
                        );
                        error.store(true, Ordering::Relaxed);
                        has_error.store(true, Ordering::Relaxed);
                    }
                }
            }
            Ok(None) => {}
            Err(err) => {
                tracing::error!("{err}");
                error.store(true, Ordering::Relaxed);
                tracing::error!("{err:#}");
                has_error.store(true, Ordering::Relaxed);
            }
        }

@@ -244,7 +259,7 @@ impl RuffSettingsIndex {
            })
        });

        if error.load(Ordering::Relaxed) {
        if has_error.load(Ordering::Relaxed) {
            let root = root.display();
            show_err_msg!(
                "Error while resolving settings from workspace {root}. Please refer to the logs for more details.",
@@ -254,6 +254,12 @@ impl Debug for SourceLocation {
    }
}

impl std::fmt::Display for SourceLocation {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{row}:{column}", row = self.row, column = self.column)
    }
}
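With this impl, a location formats as `row:column`, which is what the new `check_file` interpolates into its diagnostic strings. A hedged usage sketch; the field types are assumed to be the one-indexed `row`/`column` values used elsewhere in `ruff_source_file`:

```rust
use ruff_source_file::{OneIndexed, SourceLocation};

fn demo() {
    // Assumed construction; `from_zero_indexed` converts 0-based offsets.
    let location = SourceLocation {
        row: OneIndexed::from_zero_indexed(152),
        column: OneIndexed::from_zero_indexed(21),
    };

    // Renders as "153:22", so "{path}:{location}: {message}" becomes
    // "/src/tomllib/_parser.py:153:22: Name 'key' used when not defined."
    assert_eq!(location.to_string(), "153:22");
}
```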

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum SourceRow {
    /// A row within a cell in a Jupyter Notebook.
@@ -310,7 +310,12 @@ See [LSP Client documentation](https://docs.kde.org/stable5/en/kate/kate/kate-ap
on how to configure the server from there.

!!! important
    Kate's LSP Client plugin does not support multiple servers for the same language.
    Kate's LSP Client plugin does not support multiple servers for the same language. As a
    workaround, you can use the [`python-lsp-server`](https://github.com/python-lsp/python-lsp-server)
    along with the [`python-lsp-ruff`](https://github.com/python-lsp/python-lsp-ruff) plugin to
    use Ruff alongside another language server. Note that this setup won't use the [server settings](settings.md)
    because the [`python-lsp-ruff`](https://github.com/python-lsp/python-lsp-ruff) plugin uses the
    `ruff` executable and not the language server.

## Sublime Text