Compare commits

1 commit

Author: Dhruv Manilawala
SHA1: 2f9602e16e
Message: Keep public / insider docs requirements in sync
Date: 2024-08-20 11:48:19 +05:30
40 changed files with 451 additions and 1943 deletions

Cargo.lock (generated)

@@ -1953,6 +1953,7 @@ dependencies = [
"red_knot_python_semantic",
"red_knot_workspace",
"ruff_db",
"ruff_linter",
"ruff_notebook",
"ruff_python_ast",
"ruff_source_file",
@@ -2741,7 +2742,7 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "salsa"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
dependencies = [
"append-only-vec",
"arc-swap",
@@ -2761,12 +2762,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.1.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
[[package]]
name = "salsa-macros"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
dependencies = [
"heck",
"proc-macro2",

@@ -108,7 +108,7 @@ rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }

@@ -5,8 +5,8 @@ use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::log::LevelFilter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
@@ -60,10 +60,10 @@ pub(crate) enum VerbosityLevel {
impl VerbosityLevel {
const fn level_filter(self) -> LevelFilter {
match self {
VerbosityLevel::Default => LevelFilter::WARN,
VerbosityLevel::Verbose => LevelFilter::INFO,
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
VerbosityLevel::Trace => LevelFilter::TRACE,
VerbosityLevel::Default => LevelFilter::Warn,
VerbosityLevel::Verbose => LevelFilter::Info,
VerbosityLevel::ExtraVerbose => LevelFilter::Debug,
VerbosityLevel::Trace => LevelFilter::Trace,
}
}
@@ -88,7 +88,7 @@ pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuar
match level {
VerbosityLevel::Default => {
// Show warning traces
EnvFilter::default().add_directive(LevelFilter::WARN.into())
EnvFilter::default().add_directive(tracing::level_filters::LevelFilter::WARN.into())
}
level => {
let level_filter = level.level_filter();

@@ -127,6 +127,7 @@ impl TestCase {
fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
let package = self.db().workspace().package(self.db(), path).unwrap();
let files = package.files(self.db());
let files = files.read();
let mut collected: Vec<_> = files.into_iter().collect();
collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
collected
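
The only functional change in this hunk is the extra `files.read()` call: the package's file set now sits behind a lock, so callers take a read guard before iterating. A minimal, self-contained sketch of that pattern, with `u32` standing in for `File` and a plain `Mutex` standing in for the real index type (all names here are illustrative):

use std::collections::HashSet;
use std::sync::{Arc, Mutex};

// Take a read guard on the shared set, then copy the contents out and sort
// them so the helper returns deterministic output.
fn collect_sorted(files: &Arc<Mutex<HashSet<u32>>>) -> Vec<u32> {
    let guard = files.lock().unwrap(); // `files.read()` in the diff wraps this lock
    let mut collected: Vec<u32> = guard.iter().copied().collect();
    collected.sort_unstable();
    collected
}

fn main() {
    let files = Arc::new(Mutex::new(HashSet::from([3, 1, 2])));
    assert_eq!(collect_sorted(&files), vec![1, 2, 3]);
}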

@@ -1,18 +1,15 @@
use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};
/// Database giving access to semantic information about a Python program.
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
fn is_file_open(&self, file: File) -> bool;
}
pub trait Db: SourceDb + Upcast<dyn SourceDb> {}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::Arc;
use crate::module_resolver::vendored_typeshed_stubs;
use ruff_db::files::{File, Files};
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
@@ -94,11 +91,7 @@ pub(crate) mod tests {
}
#[salsa::db]
impl Db for TestDb {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}
}
impl Db for TestDb {}
#[salsa::db]
impl salsa::Database for TestDb {

@@ -41,7 +41,7 @@ pub(crate) fn resolve_module_query<'db>(
let module = Module::new(name.clone(), kind, search_path, module_file);
tracing::trace!(
tracing::debug!(
"Resolved module '{name}' to '{path}'.",
path = module_file.path(db)
);
@@ -172,11 +172,11 @@ impl SearchPaths {
static_paths.push(search_path);
}
tracing::debug!("Adding first-party search path '{src_root}'");
tracing::debug!("Adding static search path '{src_root}'");
static_paths.push(SearchPath::first_party(system, src_root)?);
static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
tracing::debug!("Adding custom-stdlib search path '{custom_typeshed}'");
tracing::debug!("Adding static custom-sdtlib search-path '{custom_typeshed}'");
let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
files.try_add_root(
@@ -192,7 +192,7 @@ impl SearchPaths {
let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());
for path in site_packages_paths {
tracing::debug!("Adding site-packages search path '{path}'");
tracing::debug!("Adding site-package path '{path}'");
let search_path = SearchPath::site_packages(system, path)?;
files.try_add_root(
db.upcast(),

@@ -1,4 +1,4 @@
use ruff_python_ast::{AnyNodeRef, AstNode, NodeKind};
use ruff_python_ast::{AnyNodeRef, NodeKind};
use ruff_text_size::{Ranged, TextRange};
/// Compact key for a node for use in a hash map.
@@ -11,19 +11,7 @@ pub(super) struct NodeKey {
}
impl NodeKey {
#[inline]
pub(super) fn from_node<'a, N>(node: &N) -> Self
where
N: AstNode,
{
NodeKey {
kind: node.kind(),
range: node.range(),
}
}
#[inline]
pub(super) fn from_ref<'a, N>(node: N) -> Self
pub(super) fn from_node<'a, N>(node: N) -> Self
where
N: Into<AnyNodeRef<'a>>,
{
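
This hunk collapses two constructors into one: instead of a `from_node` bound by `AstNode` and a separate `from_ref` bound by `Into<AnyNodeRef>`, a single generic `from_node` accepts anything convertible into the node-reference enum. A self-contained sketch of that pattern with invented stand-in types:

// `AnyRef` plays the role of `AnyNodeRef`: a borrowed sum type that many
// concrete reference types can convert into.
#[derive(Copy, Clone)]
enum AnyRef<'a> {
    Num(&'a i64),
    Text(&'a str),
}

impl<'a> From<&'a i64> for AnyRef<'a> {
    fn from(n: &'a i64) -> Self {
        AnyRef::Num(n)
    }
}

impl<'a> From<&'a str> for AnyRef<'a> {
    fn from(s: &'a str) -> Self {
        AnyRef::Text(s)
    }
}

#[derive(Debug, PartialEq)]
struct Key {
    kind: &'static str,
}

impl Key {
    // One generic constructor replaces the separate `from_node`/`from_ref` pair.
    fn from_node<'a, N: Into<AnyRef<'a>>>(node: N) -> Self {
        match node.into() {
            AnyRef::Num(_) => Key { kind: "num" },
            AnyRef::Text(_) => Key { kind: "text" },
        }
    }
}

fn main() {
    assert_eq!(Key::from_node(&5i64), Key { kind: "num" });
    assert_eq!(Key::from_node("hi"), Key { kind: "text" });
}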

@@ -24,7 +24,7 @@ impl Program {
search_paths,
} = settings;
tracing::info!("Target version: Python {target_version}");
tracing::info!("Target version: {target_version}");
let search_paths = SearchPaths::from_settings(db, search_paths)
.with_context(|| "Invalid search path settings")?;

@@ -154,10 +154,6 @@ impl<'db> SemanticIndex<'db> {
&self.scopes[id]
}
pub(crate) fn scope_ids(&self) -> impl Iterator<Item = ScopeId> {
self.scope_ids_by_scope.iter().copied()
}
/// Returns the id of the parent scope.
pub(crate) fn parent_scope_id(&self, scope_id: FileScopeId) -> Option<FileScopeId> {
let scope = self.scope(scope_id);

@@ -197,14 +197,12 @@ pub(crate) mod node_key {
pub(crate) struct ExpressionNodeKey(NodeKey);
impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
#[inline]
fn from(value: ast::ExpressionRef<'_>) -> Self {
Self(NodeKey::from_ref(value))
Self(NodeKey::from_node(value))
}
}
impl From<&ast::Expr> for ExpressionNodeKey {
#[inline]
fn from(value: &ast::Expr) -> Self {
Self(NodeKey::from_node(value))
}

@@ -5,37 +5,21 @@ use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
use crate::semantic_index::{
global_scope, semantic_index, symbol_table, use_def_map, DefinitionWithConstraints,
global_scope, symbol_table, use_def_map, DefinitionWithConstraints,
DefinitionWithConstraintsIterator,
};
use crate::types::narrow::narrowing_constraint;
use crate::{Db, FxOrderSet};
pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
pub(crate) use self::diagnostic::TypeCheckDiagnostics;
pub(crate) use self::infer::{
infer_definition_types, infer_expression_types, infer_scope_types, TypeInference,
};
mod builder;
mod diagnostic;
mod display;
mod infer;
mod narrow;
pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
let _span = tracing::trace_span!("check_types", file=?file.path(db)).entered();
let index = semantic_index(db, file);
let mut diagnostics = TypeCheckDiagnostics::new();
for scope_id in index.scope_ids() {
let result = infer_scope_types(db, scope_id);
diagnostics.extend(result.diagnostics());
}
diagnostics
}
pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
pub(crate) use self::infer::{
infer_definition_types, infer_expression_types, infer_scope_types, TypeInference,
};
/// Infer the public type of a symbol (its type as seen from outside its scope).
pub(crate) fn symbol_ty<'db>(
@@ -222,42 +206,20 @@ impl<'db> Type<'db> {
}
}
/// Resolve a member access of a type.
///
/// For example, if `foo` is `Type::Instance(<Bar>)`,
/// `foo.member(&db, "baz")` returns the type of `baz` attributes
/// as accessed from instances of the `Bar` class.
///
/// TODO: use of this method currently requires manually checking
/// whether the returned type is `Unknown`/`Unbound`
/// (or a union with `Unknown`/`Unbound`) in many places.
/// Ideally we'd use a more type-safe pattern, such as returning
/// an `Option` or a `Result` from this method, which would force
/// us to explicitly consider whether to handle an error or propagate
/// it up the call stack.
#[must_use]
pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
match self {
Type::Any => Type::Any,
Type::Never => {
// TODO: attribute lookup on Never type
Type::Unknown
}
Type::Never => todo!("attribute lookup on Never type"),
Type::Unknown => Type::Unknown,
Type::Unbound => Type::Unbound,
Type::None => {
// TODO: attribute lookup on None type
Type::Unknown
}
Type::Function(_) => {
// TODO: attribute lookup on function type
Type::Unknown
}
Type::None => todo!("attribute lookup on None type"),
Type::Function(_) => todo!("attribute lookup on Function type"),
Type::Module(file) => global_symbol_ty_by_name(db, *file, name),
Type::Class(class) => class.class_member(db, name),
Type::Instance(_) => {
// TODO MRO? get_own_instance_member, get_instance_member
Type::Unknown
todo!("attribute lookup on Instance type")
}
Type::Union(union) => union
.elements(db)
@@ -269,7 +231,7 @@ impl<'db> Type<'db> {
Type::Intersection(_) => {
// TODO perform the get_member on each type in the intersection
// TODO return the intersection of those results
Type::Unknown
todo!("attribute lookup on Intersection type")
}
Type::IntLiteral(_) => {
// TODO raise error
@@ -371,115 +333,3 @@ pub struct IntersectionType<'db> {
/// directly in intersections rather than as a separate type.
negative: FxOrderSet<Type<'db>>,
}
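
The `member` implementation above distributes attribute lookup over union elements: `(A | B).attr` is looked up on `A` and on `B` and the results are recombined. A toy, self-contained sketch of that one arm (the `Ty` enum and the `os`/`os.path` names are invented for illustration; the real code interns types in Salsa and recombines results through `UnionBuilder`):

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Unknown,
    Module(&'static str),
    Union(Vec<Ty>),
}

impl Ty {
    fn member(&self, name: &str) -> Ty {
        match self {
            // A union's member is the union of each element's member.
            Ty::Union(elements) => Ty::Union(elements.iter().map(|e| e.member(name)).collect()),
            Ty::Module(module) if *module == "os" && name == "path" => Ty::Module("os.path"),
            // Unresolved lookups fall back to `Unknown` (or `todo!()` in this revision).
            _ => Ty::Unknown,
        }
    }
}

fn main() {
    let ty = Ty::Union(vec![Ty::Module("os"), Ty::Unknown]);
    assert_eq!(ty.member("path"), Ty::Union(vec![Ty::Module("os.path"), Ty::Unknown]));
}
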
#[cfg(test)]
mod tests {
use anyhow::Context;
use ruff_db::files::system_path_to_file;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use crate::db::tests::TestDb;
use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
use super::TypeCheckDiagnostics;
fn setup_db() -> TestDb {
let db = TestDb::new();
db.memory_file_system()
.create_directory_all("/src")
.unwrap();
Program::from_settings(
&db,
ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings {
extra_paths: Vec::new(),
src_root: SystemPathBuf::from("/src"),
site_packages: vec![],
custom_typeshed: None,
},
},
)
.expect("Valid search path settings");
db
}
fn assert_diagnostic_messages(diagnostics: &TypeCheckDiagnostics, expected: &[&str]) {
let messages: Vec<&str> = diagnostics
.iter()
.map(|diagnostic| diagnostic.message())
.collect();
assert_eq!(&messages, expected);
}
#[test]
fn unresolved_import_statement() -> anyhow::Result<()> {
let mut db = setup_db();
db.write_file("src/foo.py", "import bar\n")
.context("Failed to write foo.py")?;
let foo = system_path_to_file(&db, "src/foo.py").context("Failed to resolve foo.py")?;
let diagnostics = super::check_types(&db, foo);
assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
Ok(())
}
#[test]
fn unresolved_import_from_statement() {
let mut db = setup_db();
db.write_file("src/foo.py", "from bar import baz\n")
.unwrap();
let foo = system_path_to_file(&db, "src/foo.py").unwrap();
let diagnostics = super::check_types(&db, foo);
assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
}
#[test]
fn unresolved_import_from_resolved_module() {
let mut db = setup_db();
db.write_files([("/src/a.py", ""), ("/src/b.py", "from a import thing")])
.unwrap();
let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
let b_file_diagnostics = super::check_types(&db, b_file);
assert_diagnostic_messages(
&b_file_diagnostics,
&["Could not resolve import of 'thing' from 'a'"],
);
}
#[ignore = "\
A spurious second 'Unresolved import' diagnostic message is emitted on `b.py`, \
despite the symbol existing in the symbol table for `a.py`"]
#[test]
fn resolved_import_of_symbol_from_unresolved_import() {
let mut db = setup_db();
db.write_files([
("/src/a.py", "import foo as foo"),
("/src/b.py", "from a import foo"),
])
.unwrap();
let a_file = system_path_to_file(&db, "/src/a.py").unwrap();
let a_file_diagnostics = super::check_types(&db, a_file);
assert_diagnostic_messages(
&a_file_diagnostics,
&["Import 'foo' could not be resolved."],
);
// Importing the unresolved import into a second first-party file should not trigger
// an additional "unresolved import" violation
let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
let b_file_diagnostics = super::check_types(&db, b_file);
assert_eq!(&*b_file_diagnostics, &[]);
}
}

@@ -1,111 +0,0 @@
use ruff_db::files::File;
use ruff_text_size::{Ranged, TextRange};
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;
#[derive(Debug, Eq, PartialEq)]
pub struct TypeCheckDiagnostic {
// TODO: Don't use string keys for rules
pub(super) rule: String,
pub(super) message: String,
pub(super) range: TextRange,
pub(super) file: File,
}
impl TypeCheckDiagnostic {
pub fn rule(&self) -> &str {
&self.rule
}
pub fn message(&self) -> &str {
&self.message
}
pub fn file(&self) -> File {
self.file
}
}
impl Ranged for TypeCheckDiagnostic {
fn range(&self) -> TextRange {
self.range
}
}
/// A collection of type check diagnostics.
///
/// The diagnostics are wrapped in an `Arc` because they need to be cloned multiple times
/// when going from `infer_expression` to `check_file`. We could consider
/// making [`TypeCheckDiagnostic`] a Salsa struct to have them Arena-allocated (once the Tables refactor is done).
/// Using Salsa struct does have the downside that it leaks the Salsa dependency into diagnostics and
/// each Salsa-struct comes with an overhead.
#[derive(Default, Eq, PartialEq)]
pub struct TypeCheckDiagnostics {
inner: Vec<std::sync::Arc<TypeCheckDiagnostic>>,
}
impl TypeCheckDiagnostics {
pub fn new() -> Self {
Self { inner: Vec::new() }
}
pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
self.inner.push(Arc::new(diagnostic));
}
pub(crate) fn shrink_to_fit(&mut self) {
self.inner.shrink_to_fit();
}
}
impl Extend<TypeCheckDiagnostic> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = TypeCheckDiagnostic>>(&mut self, iter: T) {
self.inner.extend(iter.into_iter().map(std::sync::Arc::new));
}
}
impl Extend<std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
self.inner.extend(iter);
}
}
impl<'a> Extend<&'a std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = &'a Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
self.inner
.extend(iter.into_iter().map(std::sync::Arc::clone));
}
}
impl std::fmt::Debug for TypeCheckDiagnostics {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.inner.fmt(f)
}
}
impl Deref for TypeCheckDiagnostics {
type Target = [std::sync::Arc<TypeCheckDiagnostic>];
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl IntoIterator for TypeCheckDiagnostics {
type Item = Arc<TypeCheckDiagnostic>;
type IntoIter = std::vec::IntoIter<std::sync::Arc<TypeCheckDiagnostic>>;
fn into_iter(self) -> Self::IntoIter {
self.inner.into_iter()
}
}
impl<'a> IntoIterator for &'a TypeCheckDiagnostics {
type Item = &'a Arc<TypeCheckDiagnostic>;
type IntoIter = std::slice::Iter<'a, std::sync::Arc<TypeCheckDiagnostic>>;
fn into_iter(self) -> Self::IntoIter {
self.inner.iter()
}
}
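
The doc comment above justifies the `Arc` wrapper: diagnostics get cloned every time they flow from one query result into another, and cloning an `Arc` copies a pointer rather than the message text. A small self-contained illustration (the `Diagnostic` struct here is a stand-in, not the real type):

use std::sync::Arc;

struct Diagnostic {
    message: String,
}

fn main() {
    let diag = Arc::new(Diagnostic {
        message: "unresolved import".to_string(),
    });

    // Propagating the diagnostic from a per-expression result into a per-file
    // result clones handles, never the underlying allocation.
    let per_expression: Vec<Arc<Diagnostic>> = vec![Arc::clone(&diag)];
    let per_file: Vec<Arc<Diagnostic>> = per_expression.iter().map(Arc::clone).collect();

    assert_eq!(Arc::strong_count(&diag), 3); // three handles, one allocation
    assert_eq!(per_file[0].message, per_expression[0].message);
}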

@@ -29,8 +29,7 @@ use salsa::plumbing::AsId;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast as ast;
use ruff_python_ast::{AnyNodeRef, ExprContext};
use ruff_text_size::Ranged;
use ruff_python_ast::{Expr, ExprContext};
use crate::builtins::builtins_scope;
use crate::module_name::ModuleName;
@@ -41,7 +40,6 @@ use crate::semantic_index::expression::Expression;
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::{FileScopeId, NodeWithScopeKind, NodeWithScopeRef, ScopeId};
use crate::semantic_index::SemanticIndex;
use crate::types::diagnostic::{TypeCheckDiagnostic, TypeCheckDiagnostics};
use crate::types::{
builtins_symbol_ty_by_name, definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType,
Name, Type, UnionBuilder,
@@ -125,16 +123,13 @@ pub(crate) enum InferenceRegion<'db> {
}
/// The inferred types for a single region.
#[derive(Debug, Eq, PartialEq, Default)]
#[derive(Debug, Eq, PartialEq, Default, Clone)]
pub(crate) struct TypeInference<'db> {
/// The types of every expression in this region.
expressions: FxHashMap<ScopedExpressionId, Type<'db>>,
/// The types of every definition in this region.
definitions: FxHashMap<Definition<'db>, Type<'db>>,
/// The diagnostics for this region.
diagnostics: TypeCheckDiagnostics,
}
impl<'db> TypeInference<'db> {
@@ -147,14 +142,9 @@ impl<'db> TypeInference<'db> {
self.definitions[&definition]
}
pub(crate) fn diagnostics(&self) -> &[std::sync::Arc<TypeCheckDiagnostic>] {
&self.diagnostics
}
fn shrink_to_fit(&mut self) {
self.expressions.shrink_to_fit();
self.definitions.shrink_to_fit();
self.diagnostics.shrink_to_fit();
}
}
@@ -245,7 +235,6 @@ impl<'db> TypeInferenceBuilder<'db> {
fn extend(&mut self, inference: &TypeInference<'db>) {
self.types.definitions.extend(inference.definitions.iter());
self.types.expressions.extend(inference.expressions.iter());
self.types.diagnostics.extend(&inference.diagnostics);
}
/// Infers types in the given [`InferenceRegion`].
@@ -866,26 +855,7 @@ impl<'db> TypeInferenceBuilder<'db> {
asname: _,
} = alias;
let module_ty = ModuleName::new(name)
.ok_or(ModuleResolutionError::InvalidSyntax)
.and_then(|module_name| self.module_ty_from_name(module_name));
let module_ty = match module_ty {
Ok(ty) => ty,
Err(ModuleResolutionError::InvalidSyntax) => {
tracing::debug!("Failed to resolve import due to invalid syntax");
Type::Unknown
}
Err(ModuleResolutionError::UnresolvedModule) => {
self.add_diagnostic(
AnyNodeRef::Alias(alias),
"unresolved-import",
format_args!("Import '{name}' could not be resolved."),
);
Type::Unknown
}
};
let module_ty = self.module_ty_from_name(ModuleName::new(name));
self.types.definitions.insert(definition, module_ty);
}
@@ -933,18 +903,10 @@ impl<'db> TypeInferenceBuilder<'db> {
/// - `tail` is the relative module name stripped of all leading dots:
/// - `from .foo import bar` => `tail == "foo"`
/// - `from ..foo.bar import baz` => `tail == "foo.bar"`
fn relative_module_name(
&self,
tail: Option<&str>,
level: NonZeroU32,
) -> Result<ModuleName, ModuleResolutionError> {
fn relative_module_name(&self, tail: Option<&str>, level: NonZeroU32) -> Option<ModuleName> {
let Some(module) = file_to_module(self.db, self.file) else {
tracing::debug!(
"Relative module resolution '{}' failed; could not resolve file '{}' to a module",
format_import_from_module(level.get(), tail),
self.file.path(self.db)
);
return Err(ModuleResolutionError::UnresolvedModule);
tracing::debug!("Failed to resolve file {:?} to a module", self.file);
return None;
};
let mut level = level.get();
if module.kind().is_package() {
@@ -952,19 +914,17 @@ impl<'db> TypeInferenceBuilder<'db> {
}
let mut module_name = module.name().to_owned();
for _ in 0..level {
module_name = module_name
.parent()
.ok_or(ModuleResolutionError::UnresolvedModule)?;
module_name = module_name.parent()?;
}
if let Some(tail) = tail {
if let Some(valid_tail) = ModuleName::new(tail) {
module_name.extend(&valid_tail);
} else {
tracing::debug!("Relative module resolution failed: invalid syntax");
return Err(ModuleResolutionError::InvalidSyntax);
tracing::debug!("Failed to resolve relative import due to invalid syntax");
return None;
}
}
Ok(module_name)
Some(module_name)
}
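
This hunk swaps the `Result<_, ModuleResolutionError>` return for a plain `Option`, dropping the distinction between "invalid syntax" and "unresolved module". The dot-stripping rule itself is unchanged and easy to get wrong, so here is a worked, self-contained approximation (plain strings stand in for `ModuleName`, and `is_package` stands in for `module.kind().is_package()`; a sketch of the rule, not the real implementation):

// Strip one parent per leading dot (a package counts as its own first dot),
// then append the tail, e.g. `from ..foo import bar` inside `pkg.sub.mod`
// resolves against `pkg.foo`.
fn relative_module_name(current: &str, is_package: bool, level: u32, tail: Option<&str>) -> Option<String> {
    let mut parts: Vec<&str> = current.split('.').collect();
    let strip = if is_package { level.saturating_sub(1) } else { level };
    for _ in 0..strip {
        parts.pop()?;
        if parts.is_empty() {
            return None; // walked past the top-level package
        }
    }
    let mut name = parts.join(".");
    if let Some(tail) = tail {
        name.push('.');
        name.push_str(tail);
    }
    Some(name)
}

fn main() {
    assert_eq!(
        relative_module_name("pkg.sub.mod", false, 2, Some("foo")),
        Some("pkg.foo".to_string())
    );
}
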
fn infer_import_from_definition(
@@ -984,27 +944,16 @@ impl<'db> TypeInferenceBuilder<'db> {
// `follow_nonexistent_import_bare_to_module()`.
let ast::StmtImportFrom { module, level, .. } = import_from;
tracing::trace!("Resolving imported object {alias:?} from statement {import_from:?}");
let module = module.as_deref();
let module_name = if let Some(level) = NonZeroU32::new(*level) {
tracing::trace!(
"Resolving imported object '{}' from module '{}' relative to file '{}'",
alias.name,
format_import_from_module(level.get(), module),
self.file.path(self.db),
);
self.relative_module_name(module, level)
self.relative_module_name(module.as_deref(), level)
} else {
tracing::trace!(
"Resolving imported object '{}' from module '{}'",
alias.name,
format_import_from_module(*level, module),
);
module
.and_then(ModuleName::new)
.ok_or(ModuleResolutionError::InvalidSyntax)
let module_name = module
.as_ref()
.expect("Non-relative import should always have a non-None `module`!");
ModuleName::new(module_name)
};
let module_ty = module_name.and_then(|module_name| self.module_ty_from_name(module_name));
let module_ty = self.module_ty_from_name(module_name);
let ast::Alias {
range: _,
@@ -1017,34 +966,11 @@ impl<'db> TypeInferenceBuilder<'db> {
// the runtime error will occur immediately (rather than when the symbol is *used*,
// as would be the case for a symbol with type `Unbound`), so it's appropriate to
// think of the type of the imported symbol as `Unknown` rather than `Unbound`
let member_ty = module_ty
.unwrap_or(Type::Unbound)
let ty = module_ty
.member(self.db, &Name::new(&name.id))
.replace_unbound_with(self.db, Type::Unknown);
if matches!(module_ty, Err(ModuleResolutionError::UnresolvedModule)) {
self.add_diagnostic(
AnyNodeRef::StmtImportFrom(import_from),
"unresolved-import",
format_args!(
"Import '{}{}' could not be resolved.",
".".repeat(*level as usize),
module.unwrap_or_default()
),
);
} else if module_ty.is_ok() && member_ty.is_unknown() {
self.add_diagnostic(
AnyNodeRef::Alias(alias),
"unresolved-import",
format_args!(
"Could not resolve import of '{name}' from '{}{}'",
".".repeat(*level as usize),
module.unwrap_or_default()
),
);
}
self.types.definitions.insert(definition, member_ty);
self.types.definitions.insert(definition, ty);
}
fn infer_return_statement(&mut self, ret: &ast::StmtReturn) {
@@ -1058,13 +984,10 @@ impl<'db> TypeInferenceBuilder<'db> {
}
}
fn module_ty_from_name(
&self,
module_name: ModuleName,
) -> Result<Type<'db>, ModuleResolutionError> {
resolve_module(self.db, module_name)
.map(|module| Type::Module(module.file()))
.ok_or(ModuleResolutionError::UnresolvedModule)
fn module_ty_from_name(&self, module_name: Option<ModuleName>) -> Type<'db> {
module_name
.and_then(|module_name| resolve_module(self.db, module_name))
.map_or(Type::Unknown, |module| Type::Module(module.file()))
}
fn infer_decorator(&mut self, decorator: &ast::Decorator) -> Type<'db> {
@@ -1136,7 +1059,7 @@ impl<'db> TypeInferenceBuilder<'db> {
ast::Expr::Yield(yield_expression) => self.infer_yield_expression(yield_expression),
ast::Expr::YieldFrom(yield_from) => self.infer_yield_from_expression(yield_from),
ast::Expr::Await(await_expression) => self.infer_await_expression(await_expression),
ast::Expr::IpyEscapeCommand(_) => todo!("Implement Ipy escape command support"),
Expr::IpyEscapeCommand(_) => todo!("Implement Ipy escape command support"),
};
let expr_id = expression.scoped_ast_id(self.db, self.scope);
@@ -1194,7 +1117,21 @@ impl<'db> TypeInferenceBuilder<'db> {
flags: _,
} = fstring;
for element in elements {
self.infer_fstring_element(element);
match element {
ast::FStringElement::Literal(_) => {
// TODO string literal type
}
ast::FStringElement::Expression(expr_element) => {
let ast::FStringExpressionElement {
range: _,
expression,
debug_text: _,
conversion: _,
format_spec: _,
} = expr_element;
self.infer_expression(expression);
}
}
}
}
}
@@ -1204,30 +1141,6 @@ impl<'db> TypeInferenceBuilder<'db> {
Type::Unknown
}
fn infer_fstring_element(&mut self, element: &ast::FStringElement) {
match element {
ast::FStringElement::Literal(_) => {
// TODO string literal type
}
ast::FStringElement::Expression(expr_element) => {
let ast::FStringExpressionElement {
range: _,
expression,
debug_text: _,
conversion: _,
format_spec,
} = expr_element;
self.infer_expression(expression);
if let Some(format_spec) = format_spec {
for spec_element in &format_spec.elements {
self.infer_fstring_element(spec_element);
}
}
}
}
}
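
The removed helper differs from the inlined replacement above in one detail: it also recursed into `format_spec.elements`, since a format spec can itself contain expressions (as in `f"{x:{width}d}"`). A toy, self-contained sketch of that recursion (the `Element` type is invented; the real code walks `ast::FStringElement`):

enum Element {
    Literal(&'static str),
    Expression {
        debug: &'static str,
        format_spec: Vec<Element>,
    },
}

fn infer_element(element: &Element, inferred: &mut Vec<&'static str>) {
    match element {
        Element::Literal(_) => {}
        Element::Expression { debug, format_spec } => {
            inferred.push(*debug);
            // A format spec may nest further expression elements; recurse.
            for spec_element in format_spec {
                infer_element(spec_element, inferred);
            }
        }
    }
}

fn main() {
    // Roughly `f"{x:{width}d}"`: the spec of `x` contains the expression `width`.
    let element = Element::Expression {
        debug: "x",
        format_spec: vec![Element::Expression {
            debug: "width",
            format_spec: vec![Element::Literal("d")],
        }],
    };
    let mut inferred = Vec::new();
    infer_element(&element, &mut inferred);
    assert_eq!(inferred, vec!["x", "width"]);
}
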
#[allow(clippy::unused_self)]
fn infer_ellipsis_literal_expression(
&mut self,
@@ -1793,28 +1706,6 @@ impl<'db> TypeInferenceBuilder<'db> {
}
}
/// Adds a new diagnostic.
///
/// The diagnostic does not get added if the rule isn't enabled for this file.
fn add_diagnostic(&mut self, node: AnyNodeRef, rule: &str, message: std::fmt::Arguments) {
if !self.db.is_file_open(self.file) {
return;
}
// TODO: Don't emit the diagnostic if:
// * The enclosing node contains any syntax errors
// * The rule is disabled for this file. We probably want to introduce a new query that
// returns a rule selector for a given file that respects the package's settings,
// any global pragma comments in the file, and any per-file-ignores.
self.types.diagnostics.push(TypeCheckDiagnostic {
file: self.file,
rule: rule.to_string(),
message: message.to_string(),
range: node.range(),
});
}
pub(super) fn finish(mut self) -> TypeInference<'db> {
self.infer_region();
self.types.shrink_to_fit();
@@ -1822,20 +1713,6 @@ impl<'db> TypeInferenceBuilder<'db> {
}
}
fn format_import_from_module(level: u32, module: Option<&str>) -> String {
format!(
"{}{}",
".".repeat(level as usize),
module.unwrap_or_default()
)
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum ModuleResolutionError {
InvalidSyntax,
UnresolvedModule,
}
#[cfg(test)]
mod tests {
use anyhow::Context;
@@ -2089,16 +1966,6 @@ mod tests {
Ok(())
}
#[test]
fn from_import_with_no_module_name() -> anyhow::Result<()> {
// This test checks that invalid syntax in a `StmtImportFrom` node
// leads to the type being inferred as `Unknown`
let mut db = setup_db();
db.write_file("src/foo.py", "from import bar")?;
assert_public_ty(&db, "src/foo.py", "bar", "Unknown");
Ok(())
}
#[test]
fn resolve_base_class_by_name() -> anyhow::Result<()> {
let mut db = setup_db();

@@ -14,6 +14,7 @@ license = { workspace = true }
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
ruff_db = { workspace = true }
ruff_linter = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_source_file = { workspace = true }

@@ -1,4 +1,5 @@
use lsp_types::ClientCapabilities;
use ruff_linter::display_settings;
#[derive(Debug, Clone, PartialEq, Eq, Default)]
#[allow(clippy::struct_excessive_bools)]
@@ -65,3 +66,20 @@ impl ResolvedClientCapabilities {
}
}
}
impl std::fmt::Display for ResolvedClientCapabilities {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
display_settings! {
formatter = f,
namespace = "capabilities",
fields = [
self.code_action_deferred_edit_resolution,
self.apply_edit,
self.document_changes,
self.workspace_refresh,
self.pull_diagnostics,
]
};
Ok(())
}
}

@@ -278,6 +278,18 @@ impl DocumentQuery {
}
}
/// Generate a source kind used by the linter.
pub(crate) fn make_source_kind(&self) -> ruff_linter::source_kind::SourceKind {
match self {
Self::Text { document, .. } => {
ruff_linter::source_kind::SourceKind::Python(document.contents().to_string())
}
Self::Notebook { notebook, .. } => {
ruff_linter::source_kind::SourceKind::IpyNotebook(notebook.make_ruff_notebook())
}
}
}
/// Attempts to access the underlying notebook document that this query is selecting.
pub fn as_notebook(&self) -> Option<&NotebookDocument> {
match self {

@@ -109,7 +109,7 @@ impl Workspace {
pub fn check_file(&self, file_id: &FileHandle) -> Result<Vec<String>, Error> {
let result = self.db.check_file(file_id.file).map_err(into_error)?;
Ok(result.clone())
Ok(result.to_vec())
}
/// Checks all open files

@@ -17,8 +17,5 @@ fn check() {
let result = workspace.check_file(&test).expect("Check to succeed");
assert_eq!(
result,
vec!["/test.py:1:8: Import 'random22' could not be resolved.",]
);
assert_eq!(result, vec!["/test.py:1:8: Unresolved import 'random22'"]);
}

@@ -11,6 +11,7 @@ use ruff_db::{Db as SourceDb, Upcast};
use salsa::plumbing::ZalsaDatabase;
use salsa::{Cancelled, Event};
use crate::lint::Diagnostics;
use crate::workspace::{check_file, Workspace, WorkspaceMetadata};
mod changes;
@@ -60,7 +61,7 @@ impl RootDatabase {
self.with_db(|db| db.workspace().check(db))
}
pub fn check_file(&self, file: File) -> Result<Vec<String>, Cancelled> {
pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
self.with_db(|db| check_file(db, file))
}
@@ -114,15 +115,7 @@ impl Upcast<dyn SourceDb> for RootDatabase {
}
#[salsa::db]
impl SemanticDb for RootDatabase {
fn is_file_open(&self, file: File) -> bool {
let Some(workspace) = &self.workspace else {
return false;
};
workspace.is_file_open(self, file)
}
}
impl SemanticDb for RootDatabase {}
#[salsa::db]
impl SourceDb for RootDatabase {
@@ -249,12 +242,7 @@ pub(crate) mod tests {
}
#[salsa::db]
impl red_knot_python_semantic::Db for TestDb {
fn is_file_open(&self, file: ruff_db::files::File) -> bool {
!file.path(self).is_vendored_path()
}
}
impl red_knot_python_semantic::Db for TestDb {}
#[salsa::db]
impl Db for TestDb {}

@@ -120,7 +120,7 @@ impl RootDatabase {
if workspace_change {
match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
Ok(metadata) => {
tracing::debug!("Reloading workspace after structural change.");
tracing::debug!("Reload workspace after structural change.");
// TODO: Handle changes in the program settings.
workspace.reload(self, metadata);
}

@@ -1,4 +1,5 @@
use std::cell::RefCell;
use std::ops::Deref;
use std::time::Duration;
use tracing::debug_span;
@@ -21,7 +22,7 @@ use crate::db::Db;
pub(crate) fn unwind_if_cancelled(db: &dyn Db) {}
#[salsa::tracked(return_ref)]
pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Vec<String> {
pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
#[allow(clippy::print_stdout)]
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
for i in 0..10 {
@@ -63,7 +64,7 @@ pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Vec<String> {
}));
}
diagnostics
Diagnostics::from(diagnostics)
}
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
@@ -85,7 +86,7 @@ fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
#[allow(unreachable_pub)]
#[salsa::tracked(return_ref)]
pub fn lint_semantic(db: &dyn Db, file_id: File) -> Vec<String> {
pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
let _span = debug_span!("lint_semantic", file=%file_id.path(db)).entered();
let source = source_text(db.upcast(), file_id);
@@ -93,7 +94,7 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Vec<String> {
let semantic = SemanticModel::new(db.upcast(), file_id);
if !parsed.is_valid() {
return vec![];
return Diagnostics::Empty;
}
let context = SemanticLintContext {
@@ -105,7 +106,7 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Vec<String> {
SemanticVisitor { context: &context }.visit_body(parsed.suite());
context.diagnostics.take()
Diagnostics::from(context.diagnostics.take())
}
fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSize) -> String {
@@ -115,13 +116,48 @@ fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSi
.source_location(start, context.source_text());
format!(
"{}:{}:{}: {}",
context.semantic.file_path(),
context.semantic.file_path().as_str(),
source_location.row,
source_location.column,
message,
)
}
fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
// TODO: this treats any symbol with `Type::Unknown` as an unresolved import,
// which isn't really correct: if it exists but has `Type::Unknown` in the
// module we're importing it from, we shouldn't really emit a diagnostic here,
// but currently do.
match import {
AnyImportRef::Import(import) => {
for alias in &import.names {
let ty = alias.ty(&context.semantic);
if ty.is_unknown() {
context.push_diagnostic(format_diagnostic(
context,
&format!("Unresolved import '{}'", &alias.name),
alias.start(),
));
}
}
}
AnyImportRef::ImportFrom(import) => {
for alias in &import.names {
let ty = alias.ty(&context.semantic);
if ty.is_unknown() {
context.push_diagnostic(format_diagnostic(
context,
&format!("Unresolved import '{}'", &alias.name),
alias.start(),
));
}
}
}
}
}
fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
if !matches!(name.ctx, ast::ExprContext::Load) {
return;
@@ -244,8 +280,17 @@ struct SemanticVisitor<'a> {
impl Visitor<'_> for SemanticVisitor<'_> {
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
if let ast::Stmt::ClassDef(class) = stmt {
lint_bad_override(self.context, class);
match stmt {
ast::Stmt::ClassDef(class) => {
lint_bad_override(self.context, class);
}
ast::Stmt::Import(import) => {
lint_unresolved_imports(self.context, AnyImportRef::Import(import));
}
ast::Stmt::ImportFrom(import) => {
lint_unresolved_imports(self.context, AnyImportRef::ImportFrom(import));
}
_ => {}
}
walk_stmt(self, stmt);
@@ -263,6 +308,53 @@ impl Visitor<'_> for SemanticVisitor<'_> {
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Diagnostics {
Empty,
List(Vec<String>),
}
impl Diagnostics {
pub fn as_slice(&self) -> &[String] {
match self {
Diagnostics::Empty => &[],
Diagnostics::List(list) => list.as_slice(),
}
}
}
impl Deref for Diagnostics {
type Target = [String];
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl From<Vec<String>> for Diagnostics {
fn from(value: Vec<String>) -> Self {
if value.is_empty() {
Diagnostics::Empty
} else {
Diagnostics::List(value)
}
}
}
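
The new `Diagnostics` type above is a small two-state container: the common "no diagnostics" result costs nothing to compare or store, while `Deref` keeps slice-style access working at call sites. A runnable restatement with a usage check (the logic is taken verbatim from the diff, minus the crate-internal context):

use std::ops::Deref;

#[derive(Debug, Clone, PartialEq, Eq)]
enum Diagnostics {
    Empty,
    List(Vec<String>),
}

impl Diagnostics {
    fn as_slice(&self) -> &[String] {
        match self {
            Diagnostics::Empty => &[],
            Diagnostics::List(list) => list.as_slice(),
        }
    }
}

impl Deref for Diagnostics {
    type Target = [String];
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl From<Vec<String>> for Diagnostics {
    fn from(value: Vec<String>) -> Self {
        if value.is_empty() {
            Diagnostics::Empty
        } else {
            Diagnostics::List(value)
        }
    }
}

fn main() {
    // An empty vec normalizes to `Empty`, so equality checks stay cheap.
    assert_eq!(Diagnostics::from(Vec::new()), Diagnostics::Empty);
    let diags = Diagnostics::from(vec!["oops".to_string()]);
    assert_eq!(diags.len(), 1); // slice methods come for free via `Deref`
}
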
#[derive(Copy, Clone, Debug)]
enum AnyImportRef<'a> {
Import(&'a ast::StmtImport),
ImportFrom(&'a ast::StmtImportFrom),
}
impl Ranged for AnyImportRef<'_> {
fn range(&self) -> ruff_text_size::TextRange {
match self {
AnyImportRef::Import(import) => import.range(),
AnyImportRef::ImportFrom(import) => import.range(),
}
}
}
#[cfg(test)]
mod tests {
use red_knot_python_semantic::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
@@ -271,7 +363,7 @@ mod tests {
use crate::db::tests::TestDb;
use super::lint_semantic;
use super::{lint_semantic, Diagnostics};
fn setup_db() -> TestDb {
setup_db_with_root(SystemPathBuf::from("/src"))
@@ -317,9 +409,9 @@ mod tests {
.unwrap();
let file = system_path_to_file(&db, "/src/a.py").expect("file to exist");
let messages = lint_semantic(&db, file);
assert_ne!(messages, &[] as &[String], "expected some diagnostics");
let Diagnostics::List(messages) = lint_semantic(&db, file) else {
panic!("expected some diagnostics");
};
assert_eq!(
*messages,

@@ -55,7 +55,7 @@ impl VirtualEnvironment {
let venv_path = SysPrefixPath::new(path, system)?;
let pyvenv_cfg_path = venv_path.join("pyvenv.cfg");
tracing::debug!("Attempting to parse virtual environment metadata at '{pyvenv_cfg_path}'");
tracing::debug!("Attempting to parse virtual environment metadata at {pyvenv_cfg_path}");
let pyvenv_cfg = system
.read_to_string(&pyvenv_cfg_path)
@@ -191,7 +191,7 @@ impl VirtualEnvironment {
} else {
tracing::warn!(
"Failed to resolve `sys.prefix` of the system Python installation \
from the `home` value in the `pyvenv.cfg` file at '{}'. \
from the `home` value in the `pyvenv.cfg` file at {}. \
System site-packages will not be used for module resolution.",
venv_path.join("pyvenv.cfg")
);
@@ -425,7 +425,7 @@ impl Deref for SysPrefixPath {
impl fmt::Display for SysPrefixPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "`sys.prefix` path '{}'", self.0)
write!(f, "`sys.prefix` path {}", self.0)
}
}
@@ -482,7 +482,7 @@ impl Deref for PythonHomePath {
impl fmt::Display for PythonHomePath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "`home` location '{}'", self.0)
write!(f, "`home` location {}", self.0)
}
}

@@ -109,7 +109,7 @@ struct WatcherInner {
impl Watcher {
/// Sets up file watching for `path`.
pub fn watch(&mut self, path: &SystemPath) -> notify::Result<()> {
tracing::debug!("Watching path: '{path}'.");
tracing::debug!("Watching path: {path}.");
self.inner_mut()
.watcher
@@ -118,7 +118,7 @@ impl Watcher {
/// Stops file watching for `path`.
pub fn unwatch(&mut self, path: &SystemPath) -> notify::Result<()> {
tracing::debug!("Unwatching path: '{path}'.");
tracing::debug!("Unwatching path: {path}.");
self.inner_mut().watcher.unwatch(path.as_std_path())
}
@@ -351,7 +351,7 @@ impl Debouncer {
}
EventKind::Any => {
tracing::debug!("Skipping any FS event for '{path}'.");
tracing::debug!("Skip any FS event for {path}.");
return;
}
};

@@ -4,19 +4,17 @@ use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::{Durability, Setter as _};
pub use metadata::{PackageMetadata, WorkspaceMetadata};
use red_knot_python_semantic::types::check_types;
use ruff_db::source::{line_index, source_text, SourceDiagnostic};
use ruff_db::source::{source_text, SourceDiagnostic};
use ruff_db::{
files::{system_path_to_file, File},
system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
};
use ruff_python_ast::{name::Name, PySourceType};
use ruff_text_size::Ranged;
use crate::workspace::files::{Index, Indexed, PackageFiles};
use crate::workspace::files::{Index, IndexedFiles, PackageFiles};
use crate::{
db::Db,
lint::{lint_semantic, lint_syntax},
lint::{lint_semantic, lint_syntax, Diagnostics},
};
mod files;
@@ -94,8 +92,8 @@ pub struct Package {
root_buf: SystemPathBuf,
/// The files that are part of this package.
#[default]
#[return_ref]
#[default]
file_set: PackageFiles,
// TODO: Add the loaded settings.
}
@@ -143,7 +141,9 @@ impl Workspace {
new_packages.insert(path, package);
}
self.set_package_tree(db).to(new_packages);
self.set_package_tree(db)
.with_durability(Durability::MEDIUM)
.to(new_packages);
}
pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
@@ -197,7 +197,7 @@ impl Workspace {
///
/// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
pub fn open_file(self, db: &mut dyn Db, file: File) {
tracing::debug!("Opening file '{}'", file.path(db));
tracing::debug!("Opening file {}", file.path(db));
let mut open_files = self.take_open_files(db);
open_files.insert(file);
@@ -206,7 +206,7 @@ impl Workspace {
/// Closes a file in the workspace.
pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
tracing::debug!("Closing file '{}'", file.path(db));
tracing::debug!("Closing file {}", file.path(db));
let mut open_files = self.take_open_files(db);
let removed = open_files.remove(&file);
@@ -249,23 +249,6 @@ impl Workspace {
FxHashSet::default()
}
}
/// Returns `true` if the file is open in the workspace.
///
/// A file is considered open when:
/// * explicitly set as an open file using [`open_file`](Self::open_file)
/// * It has a [`SystemPath`] and belongs to a package's `src` files
/// * It has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
if let Some(open_files) = self.open_files(db) {
open_files.contains(&file)
} else if let Some(system_path) = file.path(db).as_system_path() {
self.package(db, system_path)
.map_or(false, |package| package.contains_file(db, file))
} else {
file.path(db).is_system_virtual_path()
}
}
}
#[salsa::tracked]
@@ -276,13 +259,13 @@ impl Package {
/// Returns `true` if `file` is a first-party file part of this package.
pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
self.files(db).contains(&file)
self.files(db).read().contains(&file)
}
#[tracing::instrument(level = "debug", skip(db))]
pub fn remove_file(self, db: &mut dyn Db, file: File) {
tracing::debug!(
"Removing file '{}' from package '{}'",
"Remove file {} from package {}",
file.path(db),
self.name(db)
);
@@ -295,11 +278,7 @@ impl Package {
}
pub fn add_file(self, db: &mut dyn Db, file: File) {
tracing::debug!(
"Adding file '{}' to package '{}'",
file.path(db),
self.name(db)
);
tracing::debug!("Add file {} to package {}", file.path(db), self.name(db));
let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
return;
@@ -310,10 +289,10 @@ impl Package {
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
tracing::debug!("Checking package '{}'", self.root(db));
tracing::debug!("Checking package {}", self.root(db));
let mut result = Vec::new();
for file in &self.files(db) {
for file in &self.files(db).read() {
let diagnostics = check_file(db, file);
result.extend_from_slice(&diagnostics);
}
@@ -322,20 +301,15 @@ impl Package {
}
/// Returns the files belonging to this package.
pub fn files(self, db: &dyn Db) -> Indexed<'_> {
#[salsa::tracked]
pub fn files(self, db: &dyn Db) -> IndexedFiles {
let _entered = tracing::debug_span!("files").entered();
let files = self.file_set(db);
let indexed = match files.get() {
Index::Lazy(vacant) => {
let _entered =
tracing::debug_span!("index_package_files", package = %self.name(db)).entered();
tracing::debug!("Indexing files for package {}", self.name(db));
let files = discover_package_files(db, self.root(db));
tracing::info!(
"Indexed {} files for package '{}'",
files.len(),
self.name(db)
);
vacant.set(files)
}
Index::Indexed(indexed) => indexed,
@@ -356,12 +330,14 @@ impl Package {
assert_eq!(root, metadata.root());
if self.name(db) != metadata.name() {
self.set_name(db).to(metadata.name);
self.set_name(db)
.with_durability(Durability::MEDIUM)
.to(metadata.name);
}
}
pub fn reload_files(self, db: &mut dyn Db) {
tracing::debug!("Reloading files for package '{}'", self.name(db));
tracing::debug!("Reload files for package {}", self.name(db));
if !self.file_set(db).is_lazy() {
// Force a re-index of the files in the next revision.
@@ -371,10 +347,10 @@ impl Package {
}
#[salsa::tracked]
pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<String> {
pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
let path = file.path(db);
let _span = tracing::debug_span!("check_file", file=%path).entered();
tracing::debug!("Checking file '{path}'");
tracing::debug!("Checking file {path}");
let mut diagnostics = Vec::new();
@@ -387,25 +363,13 @@ pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<String> {
);
// Abort checking if there are IO errors.
let source = source_text(db.upcast(), file);
if source.has_read_error() {
return diagnostics;
}
for diagnostic in check_types(db.upcast(), file) {
let index = line_index(db.upcast(), diagnostic.file());
let location = index.source_location(diagnostic.start(), source.as_str());
diagnostics.push(format!(
"{path}:{location}: {message}",
path = file.path(db),
message = diagnostic.message()
));
if source_text(db.upcast(), file).has_read_error() {
return Diagnostics::from(diagnostics);
}
diagnostics.extend_from_slice(lint_syntax(db, file));
diagnostics.extend_from_slice(lint_semantic(db, file));
diagnostics
Diagnostics::from(diagnostics)
}
fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
@@ -459,7 +423,7 @@ mod tests {
use ruff_db::testing::assert_function_query_was_not_run;
use crate::db::tests::TestDb;
use crate::lint::lint_syntax;
use crate::lint::{lint_syntax, Diagnostics};
use crate::workspace::check_file;
#[test]
@@ -477,7 +441,9 @@ mod tests {
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(
check_file(&db, file),
vec!["Failed to read file: No such file or directory".to_string()]
Diagnostics::List(vec![
"Failed to read file: No such file or directory".to_string()
])
);
let events = db.take_salsa_events();
@@ -488,7 +454,7 @@ mod tests {
db.write_file(path, "").unwrap();
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(check_file(&db, file), vec![] as Vec<String>);
assert_eq!(check_file(&db, file), Diagnostics::Empty);
Ok(())
}

@@ -1,4 +1,4 @@
use std::marker::PhantomData;
use std::iter::FusedIterator;
use std::ops::Deref;
use std::sync::Arc;
@@ -10,9 +10,6 @@ use ruff_db::files::File;
use crate::db::Db;
use crate::workspace::Package;
/// Cheap cloneable hash set of files.
type FileSet = Arc<FxHashSet<File>>;
/// The indexed files of a package.
///
/// The indexing happens lazily, but the files are then cached for subsequent reads.
@@ -21,7 +18,7 @@ type FileSet = Arc<FxHashSet<File>>;
/// The implementation uses internal mutability to transition between the lazy and indexed state
/// without triggering a new salsa revision. This is safe because the initial indexing happens on first access,
/// so no query can be depending on the contents of the indexed files before that. All subsequent mutations to
/// the indexed files must go through `IndexedMut`, which uses the Salsa setter `package.set_file_set` to
/// the indexed files must go through `IndexedFilesMut`, which uses the Salsa setter `package.set_file_set` to
/// ensure that Salsa always knows when the set of indexed files have changed.
#[derive(Debug)]
pub struct PackageFiles {
@@ -35,67 +32,46 @@ impl PackageFiles {
}
}
fn indexed(files: FileSet) -> Self {
fn indexed(indexed_files: IndexedFiles) -> Self {
Self {
state: std::sync::Mutex::new(State::Indexed(files)),
state: std::sync::Mutex::new(State::Indexed(indexed_files)),
}
}
pub(super) fn get(&self) -> Index {
pub fn get(&self) -> Index {
let state = self.state.lock().unwrap();
match &*state {
State::Lazy => Index::Lazy(LazyFiles { files: state }),
State::Indexed(files) => Index::Indexed(Indexed {
files: Arc::clone(files),
_lifetime: PhantomData,
}),
State::Indexed(files) => Index::Indexed(files.clone()),
}
}
pub(super) fn is_lazy(&self) -> bool {
pub fn is_lazy(&self) -> bool {
matches!(*self.state.lock().unwrap(), State::Lazy)
}
/// Returns a mutable view on the index that allows cheap in-place mutations.
///
/// The changes are automatically written back to the database once the view is dropped.
pub(super) fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedMut> {
pub fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedFilesMut> {
// Calling `zalsa_mut` cancels all pending salsa queries. This ensures that there are no pending
// reads to the file set.
// TODO: Use a non-internal API instead https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries
let _ = db.as_dyn_database_mut().zalsa_mut();
// Replace the state with lazy. The `IndexedMut` guard restores the state
// to `State::Indexed` or sets a new `PackageFiles` when it gets dropped to ensure the state
// is restored to how it has been before replacing the value.
//
// It isn't necessary to hold on to the lock after this point:
// * The above call to `zalsa_mut` guarantees that there's exactly **one** DB reference.
// * `Indexed` has a `'db` lifetime, and this method requires a `&mut db`.
// This means that there can't be any pending reference to `Indexed` because Rust
// doesn't allow borrowing `db` as mutable (to call this method) and immutable (`Indexed<'db>`) at the same time.
// There can't be any other `Indexed<'db>` references created by clones of this DB because
// all clones must have been dropped at this point and the `Indexed`
// can't outlive the database (constrained by the `db` lifetime).
let state = {
let files = package.file_set(db);
let mut locked = files.state.lock().unwrap();
std::mem::replace(&mut *locked, State::Lazy)
};
let files = package.file_set(db);
let indexed = match state {
// If it's already lazy, just return. We also don't need to restore anything because the
// replace above was a no-op.
let indexed = match &*files.state.lock().unwrap() {
State::Lazy => return None,
State::Indexed(indexed) => indexed,
State::Indexed(indexed) => indexed.clone(),
};
Some(IndexedMut {
Some(IndexedFilesMut {
db: Some(db),
package,
files: indexed,
did_change: false,
new_revision: indexed.revision,
indexed,
})
}
}
@@ -112,93 +88,152 @@ enum State {
Lazy,
/// The files are indexed. Stores the known files of a package.
Indexed(FileSet),
Indexed(IndexedFiles),
}
pub(super) enum Index<'db> {
pub enum Index<'a> {
/// The index has not yet been computed. Allows inserting the files.
Lazy(LazyFiles<'db>),
Lazy(LazyFiles<'a>),
Indexed(Indexed<'db>),
Indexed(IndexedFiles),
}
/// Package files that have not been indexed yet.
pub(super) struct LazyFiles<'db> {
files: std::sync::MutexGuard<'db, State>,
pub struct LazyFiles<'a> {
files: std::sync::MutexGuard<'a, State>,
}
impl<'db> LazyFiles<'db> {
impl<'a> LazyFiles<'a> {
/// Sets the indexed files of a package to `files`.
pub(super) fn set(mut self, files: FxHashSet<File>) -> Indexed<'db> {
let files = Indexed {
files: Arc::new(files),
_lifetime: PhantomData,
};
*self.files = State::Indexed(Arc::clone(&files.files));
pub fn set(mut self, files: FxHashSet<File>) -> IndexedFiles {
let files = IndexedFiles::new(files);
*self.files = State::Indexed(files.clone());
files
}
}
/// The indexed files of a package.
///
/// Note: This type is intentionally non-cloneable. Making it cloneable requires
/// revisiting the locking behavior in [`PackageFiles::indexed_mut`].
#[derive(Debug, PartialEq, Eq)]
pub struct Indexed<'db> {
files: FileSet,
// Preserve the lifetime of `PackageFiles`.
_lifetime: PhantomData<&'db ()>,
/// # Salsa integration
/// The type is cheaply cloneable and allows for in-place mutation of the files. The in-place mutation requires
/// extra care because the type is used as the result of Salsa queries and Salsa relies on a type's equality
/// to determine if the output has changed. This is accomplished by using a `revision` that gets incremented
/// whenever the files are changed. The revision ensures that salsa's comparison of the
/// previous [`IndexedFiles`] with the next [`IndexedFiles`] returns false even though they both
/// point to the same underlying hash set.
///
/// # Equality
/// Two [`IndexedFiles`] are only equal if they have the same revision and point to the **same** (identity) hash set.
#[derive(Debug, Clone)]
pub struct IndexedFiles {
revision: u64,
files: Arc<std::sync::Mutex<FxHashSet<File>>>,
}
impl Deref for Indexed<'_> {
impl IndexedFiles {
fn new(files: FxHashSet<File>) -> Self {
Self {
files: Arc::new(std::sync::Mutex::new(files)),
revision: 0,
}
}
/// Locks the file index for reading.
pub fn read(&self) -> IndexedFilesGuard {
IndexedFilesGuard {
guard: self.files.lock().unwrap(),
}
}
}
impl PartialEq for IndexedFiles {
fn eq(&self, other: &Self) -> bool {
self.revision == other.revision && Arc::ptr_eq(&self.files, &other.files)
}
}
impl Eq for IndexedFiles {}
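
A self-contained illustration of the equality rule documented above: two handles compare equal only when they share the same revision and the same underlying set, so bumping the revision after an in-place edit is what makes Salsa observe a change (`u32` stands in for `File` here; a sketch, not the real type):

use std::collections::HashSet;
use std::sync::{Arc, Mutex};

#[derive(Debug, Clone)]
struct IndexedFiles {
    revision: u64,
    files: Arc<Mutex<HashSet<u32>>>,
}

impl PartialEq for IndexedFiles {
    fn eq(&self, other: &Self) -> bool {
        // Same revision *and* the same (identity) hash set.
        self.revision == other.revision && Arc::ptr_eq(&self.files, &other.files)
    }
}

impl Eq for IndexedFiles {}

fn main() {
    let a = IndexedFiles {
        revision: 0,
        files: Arc::new(Mutex::new(HashSet::new())),
    };
    let b = a.clone();
    assert_eq!(a, b); // cheap clone: same set, same revision

    // An in-place mutation bumps the revision, so the comparison reports a
    // change even though both handles still point at the same set.
    let mut c = b.clone();
    c.files.lock().unwrap().insert(1);
    c.revision += 1;
    assert_ne!(a, c);
}
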
pub struct IndexedFilesGuard<'a> {
guard: std::sync::MutexGuard<'a, FxHashSet<File>>,
}
impl Deref for IndexedFilesGuard<'_> {
type Target = FxHashSet<File>;
fn deref(&self) -> &Self::Target {
&self.files
&self.guard
}
}
impl<'a> IntoIterator for &'a Indexed<'_> {
impl<'a> IntoIterator for &'a IndexedFilesGuard<'a> {
type Item = File;
type IntoIter = std::iter::Copied<std::collections::hash_set::Iter<'a, File>>;
type IntoIter = IndexedFilesIter<'a>;
fn into_iter(self) -> Self::IntoIter {
self.files.iter().copied()
IndexedFilesIter {
inner: self.guard.iter(),
}
}
}
/// Iterator over the indexed files.
///
/// # Locks
/// Holding on to the iterator locks the file index for reading.
pub struct IndexedFilesIter<'a> {
inner: std::collections::hash_set::Iter<'a, File>,
}
impl<'a> Iterator for IndexedFilesIter<'a> {
type Item = File;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().copied()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
impl FusedIterator for IndexedFilesIter<'_> {}
impl ExactSizeIterator for IndexedFilesIter<'_> {}
/// A Mutable view of a package's indexed files.
///
/// Allows in-place mutation of the files without deep cloning the hash set.
/// The changes are written back when the mutable view is dropped or by calling [`Self::set`] manually.
pub(super) struct IndexedMut<'db> {
pub struct IndexedFilesMut<'db> {
db: Option<&'db mut dyn Db>,
package: Package,
files: FileSet,
did_change: bool,
indexed: IndexedFiles,
new_revision: u64,
}
impl IndexedMut<'_> {
pub(super) fn insert(&mut self, file: File) -> bool {
if self.files_mut().insert(file) {
self.did_change = true;
impl IndexedFilesMut<'_> {
pub fn insert(&mut self, file: File) -> bool {
if self.indexed.files.lock().unwrap().insert(file) {
self.new_revision += 1;
true
} else {
false
}
}
pub(super) fn remove(&mut self, file: File) -> bool {
if self.files_mut().remove(&file) {
self.did_change = true;
pub fn remove(&mut self, file: File) -> bool {
if self.indexed.files.lock().unwrap().remove(&file) {
self.new_revision += 1;
true
} else {
false
}
}
fn files_mut(&mut self) -> &mut FxHashSet<File> {
Arc::get_mut(&mut self.files).expect("All references to `FilesSet` to have been dropped")
/// Writes the changes back to the database.
pub fn set(mut self) {
self.set_impl();
}
fn set_impl(&mut self) {
@@ -206,70 +241,19 @@ impl IndexedMut<'_> {
return;
};
let files = Arc::clone(&self.files);
if self.did_change {
// If there are changes, set the new file_set to trigger a salsa revision change.
if self.indexed.revision != self.new_revision {
self.package
.set_file_set(db)
.to(PackageFiles::indexed(files));
} else {
// The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state.
*self.package.file_set(db).state.lock().unwrap() = State::Indexed(files);
.to(PackageFiles::indexed(IndexedFiles {
revision: self.new_revision,
files: self.indexed.files.clone(),
}));
}
}
}
impl Drop for IndexedMut<'_> {
impl Drop for IndexedFilesMut<'_> {
fn drop(&mut self) {
self.set_impl();
}
}
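
`IndexedFilesMut` is a write-back-on-drop guard: edits mutate the shared set in place, and the `Drop` impl decides, via the revision counter, whether the Salsa setter needs to run. A stripped-down, self-contained sketch of the same RAII shape (a plain `Vec<String>` replaces the package file set; illustrative only):

struct FilesMut<'a> {
    target: &'a mut Vec<String>,
    pending: Vec<String>,
    did_change: bool,
}

impl FilesMut<'_> {
    fn insert(&mut self, file: &str) {
        self.pending.push(file.to_string());
        self.did_change = true;
    }
}

impl Drop for FilesMut<'_> {
    fn drop(&mut self) {
        // In the real code this is where `set_file_set` runs (and only when
        // the revision changed); here we just apply the pending edits.
        if self.did_change {
            self.target.append(&mut self.pending);
        }
    }
}

fn main() {
    let mut files = Vec::new();
    {
        let mut view = FilesMut {
            target: &mut files,
            pending: Vec::new(),
            did_change: false,
        };
        view.insert("a.py");
    } // the guard drops here and writes the change back
    assert_eq!(files, vec!["a.py".to_string()]);
}
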
#[cfg(test)]
mod tests {
use rustc_hash::FxHashSet;
use ruff_db::files::system_path_to_file;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use ruff_python_ast::name::Name;
use crate::db::tests::TestDb;
use crate::workspace::files::Index;
use crate::workspace::Package;
#[test]
fn re_entrance() -> anyhow::Result<()> {
let mut db = TestDb::new();
db.write_file("test.py", "")?;
let package = Package::new(&db, Name::new("test"), SystemPathBuf::from("/test"));
let file = system_path_to_file(&db, "test.py").unwrap();
let files = match package.file_set(&db).get() {
Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file])),
Index::Indexed(files) => files,
};
// Calling files a second time should not dead-lock.
// This can e.g. happen when `check_file` iterates over all files and
// `is_file_open` queries the open files.
let files_2 = package.file_set(&db).get();
match files_2 {
Index::Lazy(_) => {
panic!("Expected indexed files, got lazy files");
}
Index::Indexed(files_2) => {
assert_eq!(
files_2.iter().collect::<Vec<_>>(),
files.iter().collect::<Vec<_>>()
);
}
}
Ok(())
}
}

@@ -2,7 +2,6 @@
use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch::{ChangeEvent, ChangedKind};
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use ruff_benchmark::TestFile;
@@ -13,53 +12,13 @@ use ruff_db::system::{MemoryFileSystem, SystemPath, TestSystem};
struct Case {
db: RootDatabase,
fs: MemoryFileSystem,
parser: File,
re: File,
re_path: &'static SystemPath,
}
const TOMLLIB_312_URL: &str = "https://raw.githubusercontent.com/python/cpython/8e8a4baf652f6e1cee7acde9d78c4b6154539748/Lib/tomllib";
// This first "unresolved import" is because we don't understand `*` imports yet.
// The following "unresolved import" violations are because we can't distinguish currently from
// "Symbol exists in the module but its type is unknown" and
// "Symbol does not exist in the module"
-static EXPECTED_DIAGNOSTICS: &[&str] = &[
-"/src/tomllib/_parser.py:7:29: Could not resolve import of 'Iterable' from 'collections.abc'",
-"/src/tomllib/_parser.py:10:20: Could not resolve import of 'Any' from 'typing'",
-"/src/tomllib/_parser.py:13:5: Could not resolve import of 'RE_DATETIME' from '._re'",
-"/src/tomllib/_parser.py:14:5: Could not resolve import of 'RE_LOCALTIME' from '._re'",
-"/src/tomllib/_parser.py:15:5: Could not resolve import of 'RE_NUMBER' from '._re'",
-"/src/tomllib/_parser.py:20:21: Could not resolve import of 'Key' from '._types'",
-"/src/tomllib/_parser.py:20:26: Could not resolve import of 'ParseFloat' from '._types'",
-"Line 69 is too long (89 characters)",
-"Use double quotes for strings",
-"Use double quotes for strings",
-"Use double quotes for strings",
-"Use double quotes for strings",
-"Use double quotes for strings",
-"Use double quotes for strings",
-"Use double quotes for strings",
-"/src/tomllib/_parser.py:153:22: Name 'key' used when not defined.",
-"/src/tomllib/_parser.py:153:27: Name 'flag' used when not defined.",
-"/src/tomllib/_parser.py:159:16: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:161:25: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:168:16: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:169:22: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:170:25: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:180:16: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:182:31: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:206:16: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:207:22: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:208:25: Name 'k' used when not defined.",
-"/src/tomllib/_parser.py:330:32: Name 'header' used when not defined.",
-"/src/tomllib/_parser.py:330:41: Name 'key' used when not defined.",
-"/src/tomllib/_parser.py:333:26: Name 'cont_key' used when not defined.",
-"/src/tomllib/_parser.py:334:71: Name 'cont_key' used when not defined.",
-"/src/tomllib/_parser.py:337:31: Name 'cont_key' used when not defined.",
-"/src/tomllib/_parser.py:628:75: Name 'e' used when not defined.",
-"/src/tomllib/_parser.py:686:23: Name 'parse_float' used when not defined.",
-];
fn get_test_file(name: &str) -> TestFile {
let path = format!("tomllib/{name}");
let url = format!("{TOMLLIB_312_URL}/{name}");
@@ -69,19 +28,15 @@ fn get_test_file(name: &str) -> TestFile {
fn setup_case() -> Case {
let system = TestSystem::default();
let fs = system.memory_file_system().clone();
+let init_path = SystemPath::new("/src/tomllib/__init__.py");
let parser_path = SystemPath::new("/src/tomllib/_parser.py");
let re_path = SystemPath::new("/src/tomllib/_re.py");
+let types_path = SystemPath::new("/src/tomllib/_types.py");
fs.write_files([
-(
-SystemPath::new("/src/tomllib/__init__.py"),
-get_test_file("__init__.py").code(),
-),
+(init_path, get_test_file("__init__.py").code()),
(parser_path, get_test_file("_parser.py").code()),
(re_path, get_test_file("_re.py").code()),
-(
-SystemPath::new("/src/tomllib/_types.py"),
-get_test_file("_types.py").code(),
-),
+(types_path, get_test_file("_types.py").code()),
])
.unwrap();
@@ -107,6 +62,7 @@ fn setup_case() -> Case {
Case {
db,
fs,
+parser,
re,
re_path,
}
@@ -116,8 +72,8 @@ fn benchmark_incremental(criterion: &mut Criterion) {
criterion.bench_function("red_knot_check_file[incremental]", |b| {
b.iter_batched_ref(
|| {
-let case = setup_case();
-case.db.check().unwrap();
+let mut case = setup_case();
+case.db.check_file(case.parser).unwrap();
case.fs
.write_file(
@@ -126,19 +82,14 @@ fn benchmark_incremental(criterion: &mut Criterion) {
)
.unwrap();
+case.re.sync(&mut case.db);
case
},
|case| {
-let Case { db, .. } = case;
+let Case { db, parser, .. } = case;
+let result = db.check_file(*parser).unwrap();
-db.apply_changes(vec![ChangeEvent::Changed {
-path: case.re_path.to_path_buf(),
-kind: ChangedKind::FileContent,
-}]);
-let result = db.check().unwrap();
-assert_eq!(result, EXPECTED_DIAGNOSTICS);
+assert_eq!(result.len(), 34);
},
BatchSize::SmallInput,
);
@@ -150,10 +101,10 @@ fn benchmark_cold(criterion: &mut Criterion) {
b.iter_batched_ref(
setup_case,
|case| {
-let Case { db, .. } = case;
-let result = db.check().unwrap();
+let Case { db, parser, .. } = case;
+let result = db.check_file(*parser).unwrap();
-assert_eq!(result, EXPECTED_DIAGNOSTICS);
+assert_eq!(result.len(), 34);
},
BatchSize::SmallInput,
);
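Both benchmark variants lean on criterion's `iter_batched_ref`, which runs a setup closure outside the timed region and measures only the second closure against a mutable reference to the state it produced. A reduced sketch of the incremental benchmark's shape; `State` and `expensive_check` are stand-ins for the red-knot database and its check entry point, not the real types:

    use criterion::{criterion_group, criterion_main, BatchSize, Criterion};

    struct State {
        warm: bool,
    }

    fn expensive_check(state: &mut State) -> usize {
        // Pretend the first run populates caches and later runs are cheap.
        let cost = if state.warm { 1 } else { 1000 };
        state.warm = true;
        cost
    }

    fn benchmark_incremental(criterion: &mut Criterion) {
        criterion.bench_function("incremental", |b| {
            b.iter_batched_ref(
                || {
                    // Setup (not measured): build the state and warm it once,
                    // like the initial check in the benchmark above.
                    let mut state = State { warm: false };
                    expensive_check(&mut state);
                    state
                },
                |state| {
                    // Measured body: only the incremental re-check.
                    assert_eq!(expensive_check(state), 1);
                },
                BatchSize::SmallInput,
            );
        });
    }

    criterion_group!(benches, benchmark_incremental);
    criterion_main!(benches);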

View File

@@ -85,7 +85,7 @@ impl Files {
.system_by_path
.entry(absolute.clone())
.or_insert_with(|| {
tracing::trace!("Adding file '{path}'");
tracing::trace!("Adding file {path}");
let metadata = db.system().path_metadata(path);
let durability = self
@@ -131,7 +131,7 @@ impl Files {
Err(_) => return Err(FileError::NotFound),
};
tracing::trace!("Adding vendored file '{}'", path);
tracing::trace!("Adding vendored file {}", path);
let file = File::builder(FilePath::Vendored(path.to_path_buf()))
.permissions(Some(0o444))
.revision(metadata.revision())
@@ -158,7 +158,7 @@ impl Files {
Entry::Vacant(entry) => {
let metadata = db.system().virtual_path_metadata(path).ok()?;
tracing::trace!("Adding virtual file '{}'", path);
tracing::trace!("Adding virtual file {}", path);
let file = File::builder(FilePath::SystemVirtual(path.to_path_buf()))
.revision(metadata.revision())
@@ -211,7 +211,7 @@ impl Files {
/// That's why [`File::sync_path`] is preferred if it is known that the path is a file.
pub fn sync_recursively(db: &mut dyn Db, path: &SystemPath) {
let path = SystemPath::absolute(path, db.system().current_directory());
tracing::debug!("Syncing all files in '{path}'");
tracing::debug!("Syncing all files in {path}");
let inner = Arc::clone(&db.files().inner);
for entry in inner.system_by_path.iter_mut() {
@@ -224,7 +224,9 @@ impl Files {
for root in roots.all() {
if root.path(db).starts_with(&path) {
-root.set_revision(db).to(FileRevision::now());
+root.set_revision(db)
+.with_durability(Durability::HIGH)
+.to(FileRevision::now());
}
}
}
@@ -247,7 +249,9 @@ impl Files {
let roots = inner.roots.read().unwrap();
for root in roots.all() {
-root.set_revision(db).to(FileRevision::now());
+root.set_revision(db)
+.with_durability(Durability::HIGH)
+.to(FileRevision::now());
}
}
@@ -377,17 +381,23 @@ impl File {
return;
};
let metadata = db.system().path_metadata(path);
-Self::sync_impl(db, metadata, file);
+let durability = db.files().root(db, path).map(|root| root.durability(db));
+Self::sync_impl(db, metadata, file, durability);
}
fn sync_system_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath, file: File) {
let metadata = db.system().virtual_path_metadata(path);
-Self::sync_impl(db, metadata, file);
+Self::sync_impl(db, metadata, file, None);
}
/// Private method providing the implementation for [`Self::sync_system_path`] and
/// [`Self::sync_system_virtual_path`].
-fn sync_impl(db: &mut dyn Db, metadata: crate::system::Result<Metadata>, file: File) {
+fn sync_impl(
+db: &mut dyn Db,
+metadata: crate::system::Result<Metadata>,
+file: File,
+durability: Option<Durability>,
+) {
let (status, revision, permission) = match metadata {
Ok(metadata) if metadata.file_type().is_file() => (
FileStatus::Exists,
@@ -400,19 +410,25 @@ impl File {
_ => (FileStatus::NotFound, FileRevision::zero(), None),
};
+let durability = durability.unwrap_or_default();
if file.status(db) != status {
tracing::debug!("Updating the status of '{}'", file.path(db),);
file.set_status(db).to(status);
tracing::debug!("Updating the status of {}", file.path(db),);
file.set_status(db).with_durability(durability).to(status);
}
if file.revision(db) != revision {
tracing::debug!("Updating the revision of '{}'", file.path(db));
file.set_revision(db).to(revision);
tracing::debug!("Updating the revision of {}", file.path(db));
file.set_revision(db)
.with_durability(durability)
.to(revision);
}
if file.permissions(db) != permission {
tracing::debug!("Updating the permissions of '{}'", file.path(db),);
file.set_permissions(db).to(permission);
tracing::debug!("Updating the permissions of {}", file.path(db),);
file.set_permissions(db)
.with_durability(durability)
.to(permission);
}
}
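Two details in `sync_impl` above are worth calling out: every field is compared before it is written, so a no-op sync does not publish a new salsa revision, and each write reuses the durability of the file's root (falling back via `unwrap_or_default`). The change-detection half in isolation, as a sketch with a plain struct and a hypothetical `writes` counter standing in for the salsa setters:

    #[derive(PartialEq, Debug, Clone, Copy)]
    enum FileStatus {
        Exists,
        NotFound,
    }

    struct Store {
        status: FileStatus,
        revision: u64,
        writes: u32,
    }

    impl Store {
        // Mirrors `sync_impl`: compare first, write only on change.
        fn sync(&mut self, status: FileStatus, revision: u64) {
            if self.status != status {
                self.status = status;
                self.writes += 1; // stands in for `file.set_status(db)...`
            }
            if self.revision != revision {
                self.revision = revision;
                self.writes += 1; // stands in for `file.set_revision(db)...`
            }
        }
    }

    fn main() {
        let mut store = Store { status: FileStatus::Exists, revision: 1, writes: 0 };
        store.sync(FileStatus::Exists, 1);
        assert_eq!(store.writes, 0); // unchanged file: no writes, no invalidation
        store.sync(FileStatus::NotFound, 2);
        assert_eq!(store.writes, 2);
    }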

View File

@@ -22,7 +22,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
let kind = if is_notebook(file.path(db)) {
file.read_to_notebook(db)
.unwrap_or_else(|error| {
tracing::debug!("Failed to read notebook '{path}': {error}");
tracing::debug!("Failed to read notebook {path}: {error}");
has_read_error = true;
SourceDiagnostic(Arc::new(SourceTextError::FailedToReadNotebook(error)))
@@ -33,7 +33,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
} else {
file.read_to_string(db)
.unwrap_or_else(|error| {
tracing::debug!("Failed to read file '{path}': {error}");
tracing::debug!("Failed to read file {path}: {error}");
has_read_error = true;
SourceDiagnostic(Arc::new(SourceTextError::FailedToReadFile(error))).accumulate(db);

View File

@@ -31,20 +31,10 @@ pub fn assert_const_function_query_was_not_run<Db, Q, QDb, R>(
Db: salsa::Database,
Q: Fn(QDb) -> R,
{
-// Salsa now interns singleton ingredients. But we know that it is a singleton, so we can just search for
-// any event of that ingredient.
-let query_name = query_name(&query);
-let event = events.iter().find(|event| {
-if let salsa::EventKind::WillExecute { database_key } = event.kind {
-db.ingredient_debug_name(database_key.ingredient_index()) == query_name
-} else {
-false
-}
-});
+let (query_name, will_execute_event) = find_will_execute_event(db, query, (), events);
db.attach(|_| {
-if let Some(will_execute_event) = event {
+if let Some(will_execute_event) = will_execute_event {
panic!(
"Expected query {query_name}() not to have run but it did: {will_execute_event:?}"
);
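One side of this hunk inlines the event search with a note about salsa's interned singletons; the other delegates to a shared `find_will_execute_event` helper so the scan is not repeated in every assertion helper. The generic shape of such a helper, sketched over plain strings with a hypothetical `find_event` rather than the real salsa event types:

    fn find_event<'a, E>(events: &'a [E], matches: impl Fn(&E) -> bool) -> Option<&'a E> {
        // Return the first event satisfying the predicate, if any.
        events.iter().find(|event| matches(event))
    }

    fn main() {
        let events = ["WillCheckCancellation", "WillExecute", "DidValidateMemoizedValue"];
        let hit = find_event(&events, |event| *event == "WillExecute");
        assert_eq!(hit, Some(&"WillExecute"));
    }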

View File

@@ -97,16 +97,7 @@ impl VendoredFileSystem {
fn read_to_string(fs: &VendoredFileSystem, path: &VendoredPath) -> Result<String> {
let mut archive = fs.lock_archive();
let mut zip_file = archive.lookup_path(&NormalizedVendoredPath::from(path))?;
-// Pre-allocate the buffer with the size specified in the ZIP file metadata
-// because `read_to_string` passes `None` as the size hint.
-// But let's not trust the zip file metadata (even though it's vendored)
-// and limit it to a reasonable size.
-let mut buffer = String::with_capacity(
-usize::try_from(zip_file.size())
-.unwrap_or(usize::MAX)
-.min(10_000_000),
-);
+let mut buffer = String::new();
zip_file.read_to_string(&mut buffer)?;
Ok(buffer)
}
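The longer side of this hunk pre-sizes the buffer from the ZIP entry's declared size, capped so untrusted metadata cannot force a huge allocation; `Read::read_to_string` otherwise starts from an empty string and grows as it goes. That pattern as a standalone sketch over any `Read` source (the 10 MB cap copies the one in the diff; the function name is illustrative):

    use std::io::{Read, Result};

    fn read_to_string_with_hint<R: Read>(mut source: R, size_hint: u64) -> Result<String> {
        // Trust the declared size only up to a sane bound so corrupt
        // metadata cannot trigger an enormous allocation.
        let capacity = usize::try_from(size_hint).unwrap_or(usize::MAX).min(10_000_000);
        let mut buffer = String::with_capacity(capacity);
        source.read_to_string(&mut buffer)?;
        Ok(buffer)
    }

    fn main() -> Result<()> {
        let data: &[u8] = b"hello";
        let text = read_to_string_with_hint(data, data.len() as u64)?;
        assert_eq!(text, "hello");
        Ok(())
    }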

View File

@@ -66,6 +66,3 @@ def not_warnings_dot_deprecated(
def not_a_deprecated_function() -> None: ...
fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
-# see https://github.com/astral-sh/ruff/issues/12995
-def foo(bar: typing.Literal["a", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]):...

View File

@@ -59,10 +59,6 @@ pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike
return;
}
-if semantic.in_annotation() {
-return;
-}
let length = match string {
StringLike::String(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
StringLike::Bytes(ast::ExprBytesLiteral { value, .. }) => value.len(),
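Note the asymmetry this `match` encodes: string literals are measured in characters (`value.chars().count()`), bytes literals in bytes (`value.len()`). For non-ASCII text the two differ, which is why the distinction matters for a 50-character limit:

    fn main() {
        let s = "naïve";
        assert_eq!(s.chars().count(), 5); // what the string arm counts
        assert_eq!(s.len(), 6);           // UTF-8 byte length, as used for bytes literals
    }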

View File

@@ -152,8 +152,6 @@ PYI053.pyi:68:13: PYI053 [*] String and bytes literals longer than 50 characters
67 |
68 | fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
-69 |
-70 | # see https://github.com/astral-sh/ruff/issues/12995
|
= help: Replace with `...`
@@ -163,6 +161,3 @@ PYI053.pyi:68:13: PYI053 [*] String and bytes literals longer than 50 characters
67 67 |
68 |-fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
68 |+fbaz: str = ... # Error: PYI053
-69 69 |
-70 70 | # see https://github.com/astral-sh/ruff/issues/12995
-71 71 | def foo(bar: typing.Literal["a", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]):...

View File

@@ -42,7 +42,7 @@ use super::helpers::{
/// import pytest
///
///
-/// @pytest.fixture()
+/// @pytest.fixture
/// def my_fixture(): ...
/// ```
///
@@ -52,7 +52,7 @@ use super::helpers::{
/// import pytest
///
///
-/// @pytest.fixture
+/// @pytest.fixture()
/// def my_fixture(): ...
/// ```
///

View File

@@ -218,9 +218,6 @@ impl Violation for UndocumentedPublicClass {
/// raise ValueError("Tried to greet an unhappy cat.")
/// ```
///
-/// ## Options
-/// - `lint.pydocstyle.ignore-decorators`
-///
/// ## References
/// - [PEP 257 Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -308,9 +305,6 @@ impl Violation for UndocumentedPublicMethod {
/// raise FasterThanLightError from exc
/// ```
///
-/// ## Options
-/// - `lint.pydocstyle.ignore-decorators`
-///
/// ## References
/// - [PEP 257 Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -408,9 +402,6 @@ impl Violation for UndocumentedPublicPackage {
/// print(cat) # "Cat: Dusty"
/// ```
///
-/// ## Options
-/// - `lint.pydocstyle.ignore-decorators`
-///
/// ## References
/// - [PEP 257 Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -511,9 +502,6 @@ impl Violation for UndocumentedPublicNestedClass {
/// self.population: int = population
/// ```
///
-/// ## Options
-/// - `lint.pydocstyle.ignore-decorators`
-///
/// ## References
/// - [PEP 257 Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 reStructuredText Docstring Format](https://peps.python.org/pep-0287/)

File diff suppressed because it is too large

View File

@@ -3,7 +3,6 @@ use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
-use anyhow::Context;
use ignore::{WalkBuilder, WalkState};
use ruff_linter::{
@@ -101,7 +100,7 @@ impl RuffSettings {
impl RuffSettingsIndex {
pub(super) fn new(root: &Path, editor_settings: &ResolvedEditorSettings) -> Self {
-let mut has_error = false;
+let mut error = false;
let mut index = BTreeMap::default();
let mut respect_gitignore = None;
@@ -128,27 +127,20 @@ impl RuffSettingsIndex {
);
break;
}
-error => {
+Err(err) => {
tracing::error!(
-"{:#}",
-error
-.with_context(|| {
-format!(
-"Failed to resolve settings for {}",
-pyproject.display()
-)
-})
-.unwrap_err()
+"Error while resolving settings from {}: {err}",
+pyproject.display()
);
-has_error = true;
+error = true;
continue;
}
}
}
Ok(None) => continue,
Err(err) => {
tracing::error!("{err:#}");
has_error = true;
tracing::error!("{err}");
error = true;
continue;
}
}
@@ -170,7 +162,7 @@ impl RuffSettingsIndex {
let walker = builder.build_parallel();
let index = std::sync::RwLock::new(index);
-let has_error = AtomicBool::new(has_error);
+let error = AtomicBool::new(error);
walker.run(|| {
Box::new(|result| {
@@ -232,26 +224,19 @@ impl RuffSettingsIndex {
}),
);
}
-error => {
+Err(err) => {
tracing::error!(
-"{:#}",
-error
-.with_context(|| {
-format!(
-"Failed to resolve settings for {}",
-pyproject.display()
-)
-})
-.unwrap_err()
+"Error while resolving settings from {}: {err}",
+pyproject.display()
);
-has_error.store(true, Ordering::Relaxed);
+error.store(true, Ordering::Relaxed);
}
}
}
Ok(None) => {}
Err(err) => {
tracing::error!("{err:#}");
has_error.store(true, Ordering::Relaxed);
tracing::error!("{err}");
error.store(true, Ordering::Relaxed);
}
}
@@ -259,7 +244,7 @@ impl RuffSettingsIndex {
})
});
-if has_error.load(Ordering::Relaxed) {
+if error.load(Ordering::Relaxed) {
let root = root.display();
show_err_msg!(
"Error while resolving settings from workspace {root}. Please refer to the logs for more details.",

View File

@@ -254,12 +254,6 @@ impl Debug for SourceLocation {
}
}
-impl std::fmt::Display for SourceLocation {
-fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-write!(f, "{row}:{column}", row = self.row, column = self.column)
-}
-}
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum SourceRow {
/// A row within a cell in a Jupyter Notebook.
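The removed impl above printed locations as `row:column`. For reference, the same `Display` pattern on a stand-in type, runnable in isolation:

    use std::fmt::{self, Display, Formatter};

    struct SourceLocation {
        row: usize,
        column: usize,
    }

    impl Display for SourceLocation {
        fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
            write!(f, "{row}:{column}", row = self.row, column = self.column)
        }
    }

    fn main() {
        let location = SourceLocation { row: 3, column: 14 };
        assert_eq!(location.to_string(), "3:14");
    }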

View File

@@ -310,12 +310,7 @@ See [LSP Client documentation](https://docs.kde.org/stable5/en/kate/kate/kate-ap
on how to configure the server from there.
!!! important
-Kate's LSP Client plugin does not support multiple servers for the same language. As a
-workaround, you can use the [`python-lsp-server`](https://github.com/python-lsp/python-lsp-server)
-along with the [`python-lsp-ruff`](https://github.com/python-lsp/python-lsp-ruff) plugin to
-use Ruff alongside another language server. Note that this setup won't use the [server settings](settings.md)
-because the [`python-lsp-ruff`](https://github.com/python-lsp/python-lsp-ruff) plugin uses the
-`ruff` executable and not the language server.
+Kate's LSP Client plugin does not support multiple servers for the same language.
## Sublime Text

View File

@@ -1,8 +1,2 @@
-PyYAML==6.0.2
-black==24.8.0
-mkdocs==1.6.0
+-r requirements.txt
mkdocs-material @ git+ssh://git@github.com/astral-sh/mkdocs-material-insiders.git@38c0b8187325c3bab386b666daf3518ac036f2f4
-mkdocs-redirects==1.2.1
-mdformat==0.7.17
-mdformat-mkdocs==3.0.0
-mdformat-admon==2.0.6