Compare commits

9 Commits: 0.5.3...editables-

| Author | SHA1 | Date |
|---|---|---|
|  | 09e8599e91 |  |
|  | 5f96f69151 |  |
|  | ad19b3fd0e |  |
|  | a62e2d2000 |  |
|  | d61747093c |  |
|  | 0ba7fc63d0 |  |
|  | fa5b19d4b6 |  |
|  | 181e7b3c0d |  |
|  | 519eca9fe7 |  |
crates/red_knot/src/cli/mod.rs (new file, +2)

@@ -0,0 +1,2 @@
+pub(crate) mod target_version;
+pub(crate) mod verbosity;
crates/red_knot/src/cli/verbosity.rs (new file, +34)

@@ -0,0 +1,34 @@
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
+pub(crate) enum VerbosityLevel {
+    Info,
+    Debug,
+    Trace,
+}
+
+/// Logging flags to `#[command(flatten)]` into your CLI
+#[derive(clap::Args, Debug, Clone, Default)]
+#[command(about = None, long_about = None)]
+pub(crate) struct Verbosity {
+    #[arg(
+        long,
+        short = 'v',
+        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
+        action = clap::ArgAction::Count,
+        global = true,
+    )]
+    verbose: u8,
+}
+
+impl Verbosity {
+    /// Returns the verbosity level based on the number of `-v` flags.
+    ///
+    /// Returns `None` if the user did not specify any verbosity flags.
+    pub(crate) fn level(&self) -> Option<VerbosityLevel> {
+        match self.verbose {
+            0 => None,
+            1 => Some(VerbosityLevel::Info),
+            2 => Some(VerbosityLevel::Debug),
+            _ => Some(VerbosityLevel::Trace),
+        }
+    }
+}
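As a minimal sketch of how these flags are consumed: the `Cli` struct and `main` below are illustrative only (they are not part of this diff), but they mirror how the crate's `Args` flattens `Verbosity` and calls `level()`, assuming the `Verbosity` and `VerbosityLevel` types from the file above are in scope.

```rust
use clap::Parser;

// Hypothetical top-level CLI; the real diff flattens `Verbosity` into `Args` the same way.
#[derive(Debug, Parser)]
struct Cli {
    #[clap(flatten)]
    verbosity: Verbosity,
}

fn main() {
    // `-v` => Some(Info), `-vv` => Some(Debug), `-vvv` or more => Some(Trace),
    // no flag => None (the caller then falls back to warnings only).
    let cli = Cli::parse();
    match cli.verbosity.level() {
        None => eprintln!("default verbosity"),
        Some(level) => eprintln!("verbosity: {level:?}"),
    }
}
```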
@@ -130,7 +130,7 @@ fn lint_bad_override(context: &SemanticLintContext, class: &ast::StmtClassDef) {
         return;
     };
 
-    let override_ty = semantic.module_global_symbol_ty(&typing, "override");
+    let override_ty = semantic.global_symbol_ty(&typing, "override");
 
     let Type::Class(class_ty) = class.ty(semantic) else {
         return;
@@ -17,9 +17,10 @@ use red_knot::workspace::WorkspaceMetadata;
 use ruff_db::program::{ProgramSettings, SearchPathSettings};
 use ruff_db::system::{OsSystem, System, SystemPathBuf};
 
-use self::target_version::TargetVersion;
+use cli::target_version::TargetVersion;
+use cli::verbosity::{Verbosity, VerbosityLevel};
 
-mod target_version;
+mod cli;
 
 #[derive(Debug, Parser)]
 #[command(
@@ -43,14 +44,19 @@ struct Args {
|
||||
help = "Custom directory to use for stdlib typeshed stubs"
|
||||
)]
|
||||
custom_typeshed_dir: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "PATH",
|
||||
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
|
||||
)]
|
||||
extra_search_path: Vec<SystemPathBuf>,
|
||||
|
||||
#[arg(long, help = "Python version to assume when resolving types", default_value_t = TargetVersion::default(), value_name="VERSION")]
|
||||
target_version: TargetVersion,
|
||||
|
||||
#[clap(flatten)]
|
||||
verbosity: Verbosity,
|
||||
}
|
||||
|
||||
#[allow(
|
||||
@@ -60,16 +66,18 @@ struct Args {
|
||||
clippy::dbg_macro
|
||||
)]
|
||||
pub fn main() -> anyhow::Result<()> {
|
||||
countme::enable(true);
|
||||
setup_tracing();
|
||||
|
||||
let Args {
|
||||
current_directory,
|
||||
custom_typeshed_dir,
|
||||
extra_search_path: extra_paths,
|
||||
target_version,
|
||||
verbosity,
|
||||
} = Args::parse_from(std::env::args().collect::<Vec<_>>());
|
||||
|
||||
let verbosity = verbosity.level();
|
||||
countme::enable(verbosity == Some(VerbosityLevel::Trace));
|
||||
setup_tracing(verbosity);
|
||||
|
||||
let cwd = if let Some(cwd) = current_directory {
|
||||
let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
|
||||
SystemPathBuf::from_utf8_path_buf(canonicalized)
|
||||
@@ -97,7 +105,7 @@ pub fn main() -> anyhow::Result<()> {
|
||||
// cache and load the cache if it exists.
|
||||
let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
||||
@@ -126,18 +134,19 @@ pub fn main() -> anyhow::Result<()> {
|
||||
}
|
||||
|
||||
struct MainLoop {
|
||||
orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
verbosity: Option<VerbosityLevel>,
|
||||
orchestrator: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
}
|
||||
|
||||
impl MainLoop {
|
||||
fn new() -> (Self, MainLoopCancellationToken) {
|
||||
fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
|
||||
let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
|
||||
let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
|
||||
|
||||
let mut orchestrator = Orchestrator {
|
||||
receiver: orchestrator_receiver,
|
||||
sender: main_loop_sender.clone(),
|
||||
main_loop: main_loop_sender.clone(),
|
||||
revision: 0,
|
||||
};
|
||||
|
||||
@@ -147,8 +156,9 @@ impl MainLoop {
|
||||
|
||||
(
|
||||
Self {
|
||||
orchestrator_sender,
|
||||
main_loop_receiver,
|
||||
verbosity,
|
||||
orchestrator: orchestrator_sender,
|
||||
receiver: main_loop_receiver,
|
||||
},
|
||||
MainLoopCancellationToken {
|
||||
sender: main_loop_sender,
|
||||
@@ -158,29 +168,27 @@ impl MainLoop {
|
||||
|
||||
fn file_changes_notifier(&self) -> FileChangesNotifier {
|
||||
FileChangesNotifier {
|
||||
sender: self.orchestrator_sender.clone(),
|
||||
sender: self.orchestrator.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::print_stderr)]
|
||||
fn run(self, db: &mut RootDatabase) {
|
||||
self.orchestrator_sender
|
||||
.send(OrchestratorMessage::Run)
|
||||
.unwrap();
|
||||
self.orchestrator.send(OrchestratorMessage::Run).unwrap();
|
||||
|
||||
for message in &self.main_loop_receiver {
|
||||
for message in &self.receiver {
|
||||
tracing::trace!("Main Loop: Tick");
|
||||
|
||||
match message {
|
||||
MainLoopMessage::CheckWorkspace { revision } => {
|
||||
let db = db.snapshot();
|
||||
let sender = self.orchestrator_sender.clone();
|
||||
let orchestrator = self.orchestrator.clone();
|
||||
|
||||
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
|
||||
// to prevent blocking the main loop here.
|
||||
rayon::spawn(move || {
|
||||
if let Ok(result) = db.check() {
|
||||
sender
|
||||
orchestrator
|
||||
.send(OrchestratorMessage::CheckCompleted {
|
||||
diagnostics: result,
|
||||
revision,
|
||||
@@ -195,10 +203,14 @@ impl MainLoop {
|
||||
}
|
||||
MainLoopMessage::CheckCompleted(diagnostics) => {
|
||||
eprintln!("{}", diagnostics.join("\n"));
|
||||
eprintln!("{}", countme::get_all());
|
||||
if self.verbosity == Some(VerbosityLevel::Trace) {
|
||||
eprintln!("{}", countme::get_all());
|
||||
}
|
||||
}
|
||||
MainLoopMessage::Exit => {
|
||||
eprintln!("{}", countme::get_all());
|
||||
if self.verbosity == Some(VerbosityLevel::Trace) {
|
||||
eprintln!("{}", countme::get_all());
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -208,7 +220,7 @@ impl MainLoop {
|
||||
|
||||
impl Drop for MainLoop {
|
||||
fn drop(&mut self) {
|
||||
self.orchestrator_sender
|
||||
self.orchestrator
|
||||
.send(OrchestratorMessage::Shutdown)
|
||||
.unwrap();
|
||||
}
|
||||
@@ -240,7 +252,7 @@ impl MainLoopCancellationToken {
|
||||
|
||||
struct Orchestrator {
|
||||
/// Sends messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
main_loop: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
/// Receives messages from the main loop.
|
||||
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
|
||||
revision: usize,
|
||||
@@ -252,7 +264,7 @@ impl Orchestrator {
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
OrchestratorMessage::Run => {
|
||||
self.sender
|
||||
self.main_loop
|
||||
.send(MainLoopMessage::CheckWorkspace {
|
||||
revision: self.revision,
|
||||
})
|
||||
@@ -265,7 +277,7 @@ impl Orchestrator {
|
||||
} => {
|
||||
// Only take the diagnostics if they are for the latest revision.
|
||||
if self.revision == revision {
|
||||
self.sender
|
||||
self.main_loop
|
||||
.send(MainLoopMessage::CheckCompleted(diagnostics))
|
||||
.unwrap();
|
||||
} else {
|
||||
@@ -313,8 +325,8 @@ impl Orchestrator {
|
||||
},
|
||||
default(std::time::Duration::from_millis(10)) => {
|
||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
||||
self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace { revision: self.revision}).unwrap();
|
||||
self.main_loop.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
|
||||
self.main_loop.send(MainLoopMessage::CheckWorkspace { revision: self.revision}).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -349,7 +361,14 @@ enum OrchestratorMessage {
|
||||
FileChanges(Vec<FileWatcherChange>),
|
||||
}
|
||||
|
||||
fn setup_tracing() {
|
||||
fn setup_tracing(verbosity: Option<VerbosityLevel>) {
|
||||
let trace_level = match verbosity {
|
||||
None => Level::WARN,
|
||||
Some(VerbosityLevel::Info) => Level::INFO,
|
||||
Some(VerbosityLevel::Debug) => Level::DEBUG,
|
||||
Some(VerbosityLevel::Trace) => Level::TRACE,
|
||||
};
|
||||
|
||||
let subscriber = Registry::default().with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
@@ -359,9 +378,7 @@ fn setup_tracing() {
|
||||
.with_targets(true)
|
||||
.with_writer(|| Box::new(std::io::stderr()))
|
||||
.with_timer(Uptime::default())
|
||||
.with_filter(LoggingFilter {
|
||||
trace_level: Level::TRACE,
|
||||
}),
|
||||
.with_filter(LoggingFilter { trace_level }),
|
||||
);
|
||||
|
||||
tracing::subscriber::set_global_default(subscriber).unwrap();
|
||||
|
||||
@@ -369,10 +369,9 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
|
||||
|
||||
#[must_use]
|
||||
fn is_regular_package(&self, search_path: Self, resolver: &ResolverState) -> bool {
|
||||
fn is_non_stdlib_pkg(state: &ResolverState, path: &SystemPath) -> bool {
|
||||
let file_system = state.system();
|
||||
file_system.path_exists(&path.join("__init__.py"))
|
||||
|| file_system.path_exists(&path.join("__init__.pyi"))
|
||||
fn is_non_stdlib_pkg(resolver: &ResolverState, path: &SystemPath) -> bool {
|
||||
system_path_to_file(resolver.db.upcast(), path.join("__init__.py")).is_some()
|
||||
|| system_path_to_file(resolver.db.upcast(), path.join("__init__.py")).is_some()
|
||||
}
|
||||
|
||||
match (self, search_path) {
|
||||
@@ -387,8 +386,13 @@ impl<'a> ModuleResolutionPathRefInner<'a> {
|
||||
match Self::query_stdlib_version( path, search_path, &stdlib_root, resolver) {
|
||||
TypeshedVersionsQueryResult::DoesNotExist => false,
|
||||
TypeshedVersionsQueryResult::Exists | TypeshedVersionsQueryResult::MaybeExists => match path {
|
||||
FilePathRef::System(path) => resolver.db.system().path_exists(&path.join("__init__.pyi")),
|
||||
FilePathRef::Vendored(path) => resolver.db.vendored().exists(path.join("__init__.pyi")),
|
||||
FilePathRef::System(path) => system_path_to_file(resolver.db.upcast(),path.join("__init__.pyi")).is_some(),
|
||||
// No need to use `vendored_path_to_file` here:
|
||||
// (1) The vendored filesystem is immutable, so we don't need to worry about Salsa invalidation
|
||||
// (2) The caching Salsa provides probably won't speed us up that much
|
||||
// (TODO: check that assumption when we're able to run red-knot on larger code bases)
|
||||
// (3) We don't need the `File` object that `vendored_path_to_file` would return; we just need to know if the file exists
|
||||
FilePathRef::Vendored(path) => resolver.db.vendored().exists(path.join("__init__.pyi"))
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -125,11 +125,11 @@ pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSetting
|
||||
} = program.search_paths(db.upcast());
|
||||
|
||||
if let Some(custom_typeshed) = custom_typeshed {
|
||||
tracing::debug!("Custom typeshed directory: {custom_typeshed}");
|
||||
tracing::info!("Custom typeshed directory: {custom_typeshed}");
|
||||
}
|
||||
|
||||
if !extra_paths.is_empty() {
|
||||
tracing::debug!("extra search paths: {extra_paths:?}");
|
||||
tracing::info!("extra search paths: {extra_paths:?}");
|
||||
}
|
||||
|
||||
let current_directory = db.system().current_directory();
|
||||
@@ -174,7 +174,7 @@ pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSetting
|
||||
// TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
|
||||
|
||||
let target_version = program.target_version(db.upcast());
|
||||
tracing::debug!("Target version: {target_version}");
|
||||
tracing::info!("Target version: {target_version}");
|
||||
|
||||
// Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
|
||||
// (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
|
||||
@@ -1603,4 +1603,28 @@ not_a_directory
|
||||
ModuleResolutionPathBuf::editable_installation_root(db.system(), "/src").unwrap()
|
||||
)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicate_editable_search_paths_added() {
|
||||
let TestCase { mut db, .. } = TestCaseBuilder::new()
|
||||
.with_site_packages_files(&[("_foo.pth", "/x"), ("_bar.pth", "/x")])
|
||||
.build();
|
||||
|
||||
db.write_file("/x/foo.py", "").unwrap();
|
||||
|
||||
let search_paths: Vec<&SearchPathRoot> =
|
||||
module_resolution_settings(&db).search_paths(&db).collect();
|
||||
|
||||
let editable_install =
|
||||
ModuleResolutionPathBuf::editable_installation_root(db.system(), "/x").unwrap();
|
||||
|
||||
assert_eq!(
|
||||
search_paths
|
||||
.iter()
|
||||
.filter(|path| ****path == editable_install)
|
||||
.count(),
|
||||
1,
|
||||
"Unexpected search paths: {search_paths:?}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ use ruff_db::{Db as SourceDb, Upcast};
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::expression::Expression;
|
||||
use crate::semantic_index::symbol::ScopeId;
|
||||
use crate::semantic_index::{module_global_scope, semantic_index, symbol_table, use_def_map};
|
||||
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
|
||||
use crate::types::{
|
||||
infer_definition_types, infer_expression_types, infer_scope_types, ClassType, FunctionType,
|
||||
IntersectionType, UnionType,
|
||||
@@ -23,7 +23,7 @@ pub struct Jar(
|
||||
IntersectionType<'_>,
|
||||
symbol_table,
|
||||
use_def_map,
|
||||
module_global_scope,
|
||||
global_scope,
|
||||
semantic_index,
|
||||
infer_definition_types,
|
||||
infer_expression_types,
|
||||
|
||||
@@ -68,10 +68,10 @@ pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseD
|
||||
|
||||
/// Returns the module global scope of `file`.
|
||||
#[salsa::tracked]
|
||||
pub(crate) fn module_global_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
|
||||
let _span = tracing::trace_span!("module_global_scope", ?file).entered();
|
||||
pub(crate) fn global_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
|
||||
let _span = tracing::trace_span!("global_scope", ?file).entered();
|
||||
|
||||
FileScopeId::module_global().to_scope_id(db, file)
|
||||
FileScopeId::global().to_scope_id(db, file)
|
||||
}
|
||||
|
||||
/// The symbol tables and use-def maps for all scopes in a file.
|
||||
@@ -309,7 +309,7 @@ mod tests {
|
||||
use crate::semantic_index::ast_ids::HasScopedUseId;
|
||||
use crate::semantic_index::definition::DefinitionKind;
|
||||
use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
|
||||
use crate::semantic_index::{module_global_scope, semantic_index, symbol_table, use_def_map};
|
||||
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
|
||||
use crate::Db;
|
||||
|
||||
struct TestCase {
|
||||
@@ -336,38 +336,38 @@ mod tests {
|
||||
#[test]
|
||||
fn empty() {
|
||||
let TestCase { db, file } = test_case("");
|
||||
let module_global_table = symbol_table(&db, module_global_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
let module_global_names = names(&module_global_table);
|
||||
let global_names = names(&global_table);
|
||||
|
||||
assert_eq!(module_global_names, Vec::<&str>::new());
|
||||
assert_eq!(global_names, Vec::<&str>::new());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple() {
|
||||
let TestCase { db, file } = test_case("x");
|
||||
let module_global_table = symbol_table(&db, module_global_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["x"]);
|
||||
assert_eq!(names(&global_table), vec!["x"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn annotation_only() {
|
||||
let TestCase { db, file } = test_case("x: int");
|
||||
let module_global_table = symbol_table(&db, module_global_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["int", "x"]);
|
||||
assert_eq!(names(&global_table), vec!["int", "x"]);
|
||||
// TODO record definition
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import() {
|
||||
let TestCase { db, file } = test_case("import foo");
|
||||
let scope = module_global_scope(&db, file);
|
||||
let module_global_table = symbol_table(&db, scope);
|
||||
let scope = global_scope(&db, file);
|
||||
let global_table = symbol_table(&db, scope);
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["foo"]);
|
||||
let foo = module_global_table.symbol_id_by_name("foo").unwrap();
|
||||
assert_eq!(names(&global_table), vec!["foo"]);
|
||||
let foo = global_table.symbol_id_by_name("foo").unwrap();
|
||||
|
||||
let use_def = use_def_map(&db, scope);
|
||||
let [definition] = use_def.public_definitions(foo) else {
|
||||
@@ -379,28 +379,28 @@ mod tests {
|
||||
#[test]
|
||||
fn import_sub() {
|
||||
let TestCase { db, file } = test_case("import foo.bar");
|
||||
let module_global_table = symbol_table(&db, module_global_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["foo"]);
|
||||
assert_eq!(names(&global_table), vec!["foo"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import_as() {
|
||||
let TestCase { db, file } = test_case("import foo.bar as baz");
|
||||
let module_global_table = symbol_table(&db, module_global_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["baz"]);
|
||||
assert_eq!(names(&global_table), vec!["baz"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn import_from() {
|
||||
let TestCase { db, file } = test_case("from bar import foo");
|
||||
let scope = module_global_scope(&db, file);
|
||||
let module_global_table = symbol_table(&db, scope);
|
||||
let scope = global_scope(&db, file);
|
||||
let global_table = symbol_table(&db, scope);
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["foo"]);
|
||||
assert_eq!(names(&global_table), vec!["foo"]);
|
||||
assert!(
|
||||
module_global_table
|
||||
global_table
|
||||
.symbol_by_name("foo")
|
||||
.is_some_and(|symbol| { symbol.is_defined() && !symbol.is_used() }),
|
||||
"symbols that are defined get the defined flag"
|
||||
@@ -408,7 +408,7 @@ mod tests {
|
||||
|
||||
let use_def = use_def_map(&db, scope);
|
||||
let [definition] = use_def.public_definitions(
|
||||
module_global_table
|
||||
global_table
|
||||
.symbol_id_by_name("foo")
|
||||
.expect("symbol to exist"),
|
||||
) else {
|
||||
@@ -423,22 +423,20 @@ mod tests {
|
||||
#[test]
|
||||
fn assign() {
|
||||
let TestCase { db, file } = test_case("x = foo");
|
||||
let scope = module_global_scope(&db, file);
|
||||
let module_global_table = symbol_table(&db, scope);
|
||||
let scope = global_scope(&db, file);
|
||||
let global_table = symbol_table(&db, scope);
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["foo", "x"]);
|
||||
assert_eq!(names(&global_table), vec!["foo", "x"]);
|
||||
assert!(
|
||||
module_global_table
|
||||
global_table
|
||||
.symbol_by_name("foo")
|
||||
.is_some_and(|symbol| { !symbol.is_defined() && symbol.is_used() }),
|
||||
"a symbol used but not defined in a scope should have only the used flag"
|
||||
);
|
||||
let use_def = use_def_map(&db, scope);
|
||||
let [definition] = use_def.public_definitions(
|
||||
module_global_table
|
||||
.symbol_id_by_name("x")
|
||||
.expect("symbol exists"),
|
||||
) else {
|
||||
let [definition] =
|
||||
use_def.public_definitions(global_table.symbol_id_by_name("x").expect("symbol exists"))
|
||||
else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
assert!(matches!(
|
||||
@@ -456,14 +454,14 @@ class C:
|
||||
y = 2
|
||||
",
|
||||
);
|
||||
let module_global_table = symbol_table(&db, module_global_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["C", "y"]);
|
||||
assert_eq!(names(&global_table), vec!["C", "y"]);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
|
||||
let [(class_scope_id, class_scope)] = index
|
||||
.child_scopes(FileScopeId::module_global())
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
@@ -496,12 +494,12 @@ y = 2
|
||||
",
|
||||
);
|
||||
let index = semantic_index(&db, file);
|
||||
let module_global_table = index.symbol_table(FileScopeId::module_global());
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["func", "y"]);
|
||||
assert_eq!(names(&global_table), vec!["func", "y"]);
|
||||
|
||||
let [(function_scope_id, function_scope)] = index
|
||||
.child_scopes(FileScopeId::module_global())
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
@@ -537,11 +535,11 @@ def func():
|
||||
",
|
||||
);
|
||||
let index = semantic_index(&db, file);
|
||||
let module_global_table = index.symbol_table(FileScopeId::module_global());
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["func"]);
|
||||
assert_eq!(names(&global_table), vec!["func"]);
|
||||
let [(func_scope1_id, func_scope_1), (func_scope2_id, func_scope_2)] = index
|
||||
.child_scopes(FileScopeId::module_global())
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected two child scopes");
|
||||
@@ -558,9 +556,9 @@ def func():
|
||||
assert_eq!(names(&func1_table), vec!["x"]);
|
||||
assert_eq!(names(&func2_table), vec!["y"]);
|
||||
|
||||
let use_def = index.use_def_map(FileScopeId::module_global());
|
||||
let use_def = index.use_def_map(FileScopeId::global());
|
||||
let [definition] = use_def.public_definitions(
|
||||
module_global_table
|
||||
global_table
|
||||
.symbol_id_by_name("func")
|
||||
.expect("symbol exists"),
|
||||
) else {
|
||||
@@ -579,12 +577,12 @@ def func[T]():
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let module_global_table = index.symbol_table(FileScopeId::module_global());
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["func"]);
|
||||
assert_eq!(names(&global_table), vec!["func"]);
|
||||
|
||||
let [(ann_scope_id, ann_scope)] = index
|
||||
.child_scopes(FileScopeId::module_global())
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope");
|
||||
@@ -616,12 +614,12 @@ class C[T]:
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let module_global_table = index.symbol_table(FileScopeId::module_global());
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&module_global_table), vec!["C"]);
|
||||
assert_eq!(names(&global_table), vec!["C"]);
|
||||
|
||||
let [(ann_scope_id, ann_scope)] = index
|
||||
.child_scopes(FileScopeId::module_global())
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope");
|
||||
@@ -653,7 +651,7 @@ class C[T]:
|
||||
fn reachability_trivial() {
|
||||
let TestCase { db, file } = test_case("x = 1; x");
|
||||
let parsed = parsed_module(&db, file);
|
||||
let scope = module_global_scope(&db, file);
|
||||
let scope = global_scope(&db, file);
|
||||
let ast = parsed.syntax();
|
||||
let ast::Stmt::Expr(ast::StmtExpr {
|
||||
value: x_use_expr, ..
|
||||
@@ -694,7 +692,7 @@ class C[T]:
|
||||
let x = &x_stmt.targets[0];
|
||||
|
||||
assert_eq!(index.expression_scope(x).kind(), ScopeKind::Module);
|
||||
assert_eq!(index.expression_scope_id(x), FileScopeId::module_global());
|
||||
assert_eq!(index.expression_scope_id(x), FileScopeId::global());
|
||||
|
||||
let def = ast.body[1].as_function_def_stmt().unwrap();
|
||||
let y_stmt = def.body[0].as_assign_stmt().unwrap();
|
||||
@@ -731,20 +729,16 @@ def x():
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
|
||||
let descendents = index.descendent_scopes(FileScopeId::module_global());
|
||||
let descendents = index.descendent_scopes(FileScopeId::global());
|
||||
assert_eq!(
|
||||
scope_names(descendents, &db, file),
|
||||
vec!["Test", "foo", "bar", "baz", "x"]
|
||||
);
|
||||
|
||||
let children = index.child_scopes(FileScopeId::module_global());
|
||||
let children = index.child_scopes(FileScopeId::global());
|
||||
assert_eq!(scope_names(children, &db, file), vec!["Test", "x"]);
|
||||
|
||||
let test_class = index
|
||||
.child_scopes(FileScopeId::module_global())
|
||||
.next()
|
||||
.unwrap()
|
||||
.0;
|
||||
let test_class = index.child_scopes(FileScopeId::global()).next().unwrap().0;
|
||||
let test_child_scopes = index.child_scopes(test_class);
|
||||
assert_eq!(
|
||||
scope_names(test_child_scopes, &db, file),
|
||||
@@ -752,7 +746,7 @@ def x():
|
||||
);
|
||||
|
||||
let bar_scope = index
|
||||
.descendent_scopes(FileScopeId::module_global())
|
||||
.descendent_scopes(FileScopeId::global())
|
||||
.nth(2)
|
||||
.unwrap()
|
||||
.0;
|
||||
|
||||
@@ -103,6 +103,17 @@ pub struct ScopeId<'db> {
|
||||
}
|
||||
|
||||
impl<'db> ScopeId<'db> {
|
||||
pub(crate) fn is_function_like(self, db: &'db dyn Db) -> bool {
|
||||
// Type parameter scopes behave like function scopes in terms of name resolution; CPython
|
||||
// symbol table also uses the term "function-like" for these scopes.
|
||||
matches!(
|
||||
self.node(db),
|
||||
NodeWithScopeKind::ClassTypeParameters(_)
|
||||
| NodeWithScopeKind::FunctionTypeParameters(_)
|
||||
| NodeWithScopeKind::Function(_)
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn name(self, db: &'db dyn Db) -> &'db str {
|
||||
match self.node(db) {
|
||||
@@ -122,7 +133,7 @@ pub struct FileScopeId;
|
||||
|
||||
impl FileScopeId {
|
||||
/// Returns the scope id of the module-global scope.
|
||||
pub fn module_global() -> Self {
|
||||
pub fn global() -> Self {
|
||||
FileScopeId::from_u32(0)
|
||||
}
|
||||
|
||||
@@ -193,7 +204,6 @@ impl SymbolTable {
|
||||
}
|
||||
|
||||
/// Returns the symbol named `name`.
|
||||
#[allow(unused)]
|
||||
pub(crate) fn symbol_by_name(&self, name: &str) -> Option<&Symbol> {
|
||||
let id = self.symbol_id_by_name(name)?;
|
||||
Some(self.symbol(id))
|
||||
|
||||
@@ -5,7 +5,7 @@ use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
|
||||
|
||||
use crate::semantic_index::ast_ids::HasScopedAstId;
|
||||
use crate::semantic_index::semantic_index;
|
||||
use crate::types::{definition_ty, infer_scope_types, module_global_symbol_ty_by_name, Type};
|
||||
use crate::types::{definition_ty, global_symbol_ty_by_name, infer_scope_types, Type};
|
||||
use crate::Db;
|
||||
|
||||
pub struct SemanticModel<'db> {
|
||||
@@ -28,8 +28,8 @@ impl<'db> SemanticModel<'db> {
|
||||
resolve_module(self.db.upcast(), module_name)
|
||||
}
|
||||
|
||||
pub fn module_global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
|
||||
module_global_symbol_ty_by_name(self.db, module.file(), symbol_name)
|
||||
pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
|
||||
global_symbol_ty_by_name(self.db, module.file(), symbol_name)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ use ruff_python_ast::name::Name;
|
||||
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
|
||||
use crate::semantic_index::{module_global_scope, symbol_table, use_def_map};
|
||||
use crate::semantic_index::{global_scope, symbol_table, use_def_map};
|
||||
use crate::{Db, FxOrderSet};
|
||||
|
||||
mod display;
|
||||
@@ -23,7 +23,9 @@ pub(crate) fn symbol_ty<'db>(
|
||||
definitions_ty(
|
||||
db,
|
||||
use_def.public_definitions(symbol),
|
||||
use_def.public_may_be_unbound(symbol),
|
||||
use_def
|
||||
.public_may_be_unbound(symbol)
|
||||
.then_some(Type::Unbound),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -41,12 +43,8 @@ pub(crate) fn symbol_ty_by_name<'db>(
|
||||
}
|
||||
|
||||
/// Shorthand for `symbol_ty` that looks up a module-global symbol in a file.
|
||||
pub(crate) fn module_global_symbol_ty_by_name<'db>(
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
name: &str,
|
||||
) -> Type<'db> {
|
||||
symbol_ty_by_name(db, module_global_scope(db, file), name)
|
||||
pub(crate) fn global_symbol_ty_by_name<'db>(db: &'db dyn Db, file: File, name: &str) -> Type<'db> {
|
||||
symbol_ty_by_name(db, global_scope(db, file), name)
|
||||
}
|
||||
|
||||
/// Infer the type of a [`Definition`].
|
||||
@@ -55,24 +53,31 @@ pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -
|
||||
inference.definition_ty(definition)
|
||||
}
|
||||
|
||||
/// Infer the combined type of an array of [`Definition`].
|
||||
/// Will return a union if there are more than definition, or at least one plus the possibility of
|
||||
/// Unbound.
|
||||
/// Infer the combined type of an array of [`Definition`]s, plus one optional "unbound type".
|
||||
///
|
||||
/// Will return a union if there is more than one definition, or at least one plus an unbound
|
||||
/// type.
|
||||
///
|
||||
/// The "unbound type" represents the type in case control flow may not have passed through any
|
||||
/// definitions in this scope. If this isn't possible, then it will be `None`. If it is possible,
|
||||
/// and the result in that case should be Unbound (e.g. an unbound function local), then it will be
|
||||
/// `Some(Type::Unbound)`. If it is possible and the result should be something else (e.g. an
|
||||
/// implicit global lookup), then `unbound_type` will be `Some(the_global_symbol_type)`.
|
||||
///
|
||||
/// # Panics
|
||||
/// Will panic if called with zero definitions and no `unbound_ty`. This is a logic error,
|
||||
/// as any symbol with zero visible definitions clearly may be unbound, and the caller should
|
||||
/// provide an `unbound_ty`.
|
||||
pub(crate) fn definitions_ty<'db>(
|
||||
db: &'db dyn Db,
|
||||
definitions: &[Definition<'db>],
|
||||
may_be_unbound: bool,
|
||||
unbound_ty: Option<Type<'db>>,
|
||||
) -> Type<'db> {
|
||||
let unbound_iter = if may_be_unbound {
|
||||
[Type::Unbound].iter()
|
||||
} else {
|
||||
[].iter()
|
||||
};
|
||||
let def_types = definitions.iter().map(|def| definition_ty(db, *def));
|
||||
let mut all_types = unbound_iter.copied().chain(def_types);
|
||||
let mut all_types = unbound_ty.into_iter().chain(def_types);
|
||||
|
||||
let Some(first) = all_types.next() else {
|
||||
return Type::Unbound;
|
||||
panic!("definitions_ty should never be called with zero definitions and no unbound_ty.")
|
||||
};
|
||||
|
||||
if let Some(second) = all_types.next() {
|
||||
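The reworked `definitions_ty` above replaces the old `may_be_unbound: bool` flag with an explicit `unbound_ty: Option<Type>`. As a standalone illustration (simplified types, not the crate's real API), the union-building and panic behaviour described by the new doc comment looks roughly like this:

```rust
#[derive(Debug, PartialEq)]
enum Ty {
    Unbound,
    IntLiteral(i64),
    Union(Vec<Ty>),
}

fn definitions_ty(def_types: Vec<Ty>, unbound_ty: Option<Ty>) -> Ty {
    // Chain the optional "unbound type" in front of the definition types...
    let mut all = unbound_ty.into_iter().chain(def_types).peekable();
    // ...and panic when both are empty, as the new doc comment requires.
    let first = all
        .next()
        .expect("definitions_ty called with zero definitions and no unbound_ty");
    if all.peek().is_some() {
        // More than one contributing type: build a union.
        Ty::Union(std::iter::once(first).chain(all).collect())
    } else {
        first
    }
}

fn main() {
    // One definition (`x = 1`) plus a possibly-unbound code path -> `Unbound | Literal[1]`.
    let ty = definitions_ty(vec![Ty::IntLiteral(1)], Some(Ty::Unbound));
    assert_eq!(ty, Ty::Union(vec![Ty::Unbound, Ty::IntLiteral(1)]));

    // A single definition with no unbound path collapses to that definition's type.
    assert_eq!(definitions_ty(vec![Ty::IntLiteral(2)], None), Ty::IntLiteral(2));
}
```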
@@ -136,7 +141,7 @@ impl<'db> Type<'db> {
|
||||
Type::Unbound => Type::Unbound,
|
||||
Type::None => todo!("attribute lookup on None type"),
|
||||
Type::Function(_) => todo!("attribute lookup on Function type"),
|
||||
Type::Module(file) => module_global_symbol_ty_by_name(db, *file, name),
|
||||
Type::Module(file) => global_symbol_ty_by_name(db, *file, name),
|
||||
Type::Class(class) => class.class_member(db, name),
|
||||
Type::Instance(_) => {
|
||||
// TODO MRO? get_own_instance_member, get_instance_member
|
||||
|
||||
@@ -36,7 +36,9 @@ use crate::semantic_index::semantic_index;
|
||||
use crate::semantic_index::symbol::NodeWithScopeKind;
|
||||
use crate::semantic_index::symbol::{NodeWithScopeRef, ScopeId};
|
||||
use crate::semantic_index::SemanticIndex;
|
||||
use crate::types::{definitions_ty, ClassType, FunctionType, Name, Type, UnionTypeBuilder};
|
||||
use crate::types::{
|
||||
definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType, Name, Type, UnionTypeBuilder,
|
||||
};
|
||||
use crate::Db;
|
||||
|
||||
/// Infer all types for a [`ScopeId`], including all definitions and expressions in that scope.
|
||||
@@ -667,18 +669,30 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
}
|
||||
|
||||
fn infer_name_expression(&mut self, name: &ast::ExprName) -> Type<'db> {
|
||||
let ast::ExprName {
|
||||
range: _,
|
||||
id: _,
|
||||
ctx,
|
||||
} = name;
|
||||
let ast::ExprName { range: _, id, ctx } = name;
|
||||
|
||||
match ctx {
|
||||
ExprContext::Load => {
|
||||
let use_def = self.index.use_def_map(self.scope.file_scope_id(self.db));
|
||||
let file_scope_id = self.scope.file_scope_id(self.db);
|
||||
let use_def = self.index.use_def_map(file_scope_id);
|
||||
let use_id = name.scoped_use_id(self.db, self.scope);
|
||||
let definitions = use_def.use_definitions(use_id);
|
||||
definitions_ty(self.db, definitions, use_def.use_may_be_unbound(use_id))
|
||||
let may_be_unbound = use_def.use_may_be_unbound(use_id);
|
||||
|
||||
let unbound_ty = if may_be_unbound {
|
||||
let symbols = self.index.symbol_table(file_scope_id);
|
||||
// SAFETY: the symbol table always creates a symbol for every Name node.
|
||||
let symbol = symbols.symbol_by_name(id).unwrap();
|
||||
if !symbol.is_defined() || !self.scope.is_function_like(self.db) {
|
||||
// implicit global
|
||||
Some(global_symbol_ty_by_name(self.db, self.file, id))
|
||||
} else {
|
||||
Some(Type::Unbound)
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
definitions_ty(self.db, use_def.use_definitions(use_id), unbound_ty)
|
||||
}
|
||||
ExprContext::Store | ExprContext::Del => Type::None,
|
||||
ExprContext::Invalid => Type::Unknown,
|
||||
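The interesting part of the `infer_name_expression` change is deciding what the "unbound type" should be when a load may be unbound. Extracted into a standalone sketch (a hypothetical helper, not the crate's API), the rule added above is:

```rust
/// What to use as the "unbound type" for a load that may be unbound.
#[derive(Debug, Clone, Copy, PartialEq)]
enum UnboundFallback {
    /// Fall back to the module-global symbol's type (implicit global lookup).
    ImplicitGlobal,
    /// Keep `Unbound` (an unbound function local does not see globals).
    Unbound,
}

fn unbound_fallback(defined_in_scope: bool, scope_is_function_like: bool) -> UnboundFallback {
    if !defined_in_scope || !scope_is_function_like {
        UnboundFallback::ImplicitGlobal
    } else {
        UnboundFallback::Unbound
    }
}

fn main() {
    // `y = x` before `x = 2` inside a function: `x` is a local, so it stays Unbound.
    assert_eq!(unbound_fallback(true, true), UnboundFallback::Unbound);
    // `y = x` inside a class body (not function-like): falls back to the global `x`.
    assert_eq!(unbound_fallback(true, false), UnboundFallback::ImplicitGlobal);
    // `y = x` in a function that never defines `x`: implicit global lookup.
    assert_eq!(unbound_fallback(false, true), UnboundFallback::ImplicitGlobal);
}
```

The `unbound_function_local`, `implicit_global_in_function`, and `unbound_class_local` tests added later in this diff exercise exactly these three cases.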
@@ -778,9 +792,11 @@ mod tests {
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::semantic_index;
|
||||
use crate::semantic_index::symbol::FileScopeId;
|
||||
use crate::types::{
|
||||
infer_definition_types, module_global_scope, module_global_symbol_ty_by_name, symbol_table,
|
||||
use_def_map, Type,
|
||||
global_scope, global_symbol_ty_by_name, infer_definition_types, symbol_table,
|
||||
symbol_ty_by_name, use_def_map, Type,
|
||||
};
|
||||
use crate::{HasTy, SemanticModel};
|
||||
|
||||
@@ -804,7 +820,7 @@ mod tests {
|
||||
fn assert_public_ty(db: &TestDb, file_name: &str, symbol_name: &str, expected: &str) {
|
||||
let file = system_path_to_file(db, file_name).expect("Expected file to exist.");
|
||||
|
||||
let ty = module_global_symbol_ty_by_name(db, file, symbol_name);
|
||||
let ty = global_symbol_ty_by_name(db, file, symbol_name);
|
||||
assert_eq!(ty.display(db).to_string(), expected);
|
||||
}
|
||||
|
||||
@@ -838,7 +854,7 @@ mod tests {
|
||||
)?;
|
||||
|
||||
let mod_file = system_path_to_file(&db, "src/mod.py").expect("Expected file to exist.");
|
||||
let ty = module_global_symbol_ty_by_name(&db, mod_file, "Sub");
|
||||
let ty = global_symbol_ty_by_name(&db, mod_file, "Sub");
|
||||
|
||||
let Type::Class(class) = ty else {
|
||||
panic!("Sub is not a Class")
|
||||
@@ -868,7 +884,7 @@ mod tests {
|
||||
)?;
|
||||
|
||||
let mod_file = system_path_to_file(&db, "src/mod.py").unwrap();
|
||||
let ty = module_global_symbol_ty_by_name(&db, mod_file, "C");
|
||||
let ty = global_symbol_ty_by_name(&db, mod_file, "C");
|
||||
|
||||
let Type::Class(class_id) = ty else {
|
||||
panic!("C is not a Class");
|
||||
@@ -1217,7 +1233,7 @@ mod tests {
|
||||
)?;
|
||||
|
||||
let a = system_path_to_file(&db, "src/a.py").expect("Expected file to exist.");
|
||||
let c_ty = module_global_symbol_ty_by_name(&db, a, "C");
|
||||
let c_ty = global_symbol_ty_by_name(&db, a, "C");
|
||||
let Type::Class(c_class) = c_ty else {
|
||||
panic!("C is not a Class")
|
||||
};
|
||||
@@ -1237,6 +1253,102 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// An unbound function local that has definitions in the scope does not fall back to globals.
|
||||
#[test]
|
||||
fn unbound_function_local() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_dedented(
|
||||
"src/a.py",
|
||||
"
|
||||
x = 1
|
||||
def f():
|
||||
y = x
|
||||
x = 2
|
||||
",
|
||||
)?;
|
||||
|
||||
let file = system_path_to_file(&db, "src/a.py").expect("Expected file to exist.");
|
||||
let index = semantic_index(&db, file);
|
||||
let function_scope = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.next()
|
||||
.unwrap()
|
||||
.0
|
||||
.to_scope_id(&db, file);
|
||||
let y_ty = symbol_ty_by_name(&db, function_scope, "y");
|
||||
let x_ty = symbol_ty_by_name(&db, function_scope, "x");
|
||||
|
||||
assert_eq!(y_ty.display(&db).to_string(), "Unbound");
|
||||
assert_eq!(x_ty.display(&db).to_string(), "Literal[2]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// A name reference to a never-defined symbol in a function is implicitly a global lookup.
|
||||
#[test]
|
||||
fn implicit_global_in_function() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_dedented(
|
||||
"src/a.py",
|
||||
"
|
||||
x = 1
|
||||
def f():
|
||||
y = x
|
||||
",
|
||||
)?;
|
||||
|
||||
let file = system_path_to_file(&db, "src/a.py").expect("Expected file to exist.");
|
||||
let index = semantic_index(&db, file);
|
||||
let function_scope = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.next()
|
||||
.unwrap()
|
||||
.0
|
||||
.to_scope_id(&db, file);
|
||||
let y_ty = symbol_ty_by_name(&db, function_scope, "y");
|
||||
let x_ty = symbol_ty_by_name(&db, function_scope, "x");
|
||||
|
||||
assert_eq!(x_ty.display(&db).to_string(), "Unbound");
|
||||
assert_eq!(y_ty.display(&db).to_string(), "Literal[1]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Class name lookups do fall back to globals, but the public type never does.
|
||||
#[test]
|
||||
fn unbound_class_local() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_dedented(
|
||||
"src/a.py",
|
||||
"
|
||||
x = 1
|
||||
class C:
|
||||
y = x
|
||||
if flag:
|
||||
x = 2
|
||||
",
|
||||
)?;
|
||||
|
||||
let file = system_path_to_file(&db, "src/a.py").expect("Expected file to exist.");
|
||||
let index = semantic_index(&db, file);
|
||||
let class_scope = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.next()
|
||||
.unwrap()
|
||||
.0
|
||||
.to_scope_id(&db, file);
|
||||
let y_ty = symbol_ty_by_name(&db, class_scope, "y");
|
||||
let x_ty = symbol_ty_by_name(&db, class_scope, "x");
|
||||
|
||||
assert_eq!(x_ty.display(&db).to_string(), "Literal[2] | Unbound");
|
||||
assert_eq!(y_ty.display(&db).to_string(), "Literal[1]");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn local_inference() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
@@ -1257,7 +1369,7 @@ mod tests {
|
||||
}
|
||||
|
||||
fn first_public_def<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
|
||||
let scope = module_global_scope(db, file);
|
||||
let scope = global_scope(db, file);
|
||||
*use_def_map(db, scope)
|
||||
.public_definitions(symbol_table(db, scope).symbol_id_by_name(name).unwrap())
|
||||
.first()
|
||||
@@ -1274,7 +1386,7 @@ mod tests {
|
||||
])?;
|
||||
|
||||
let a = system_path_to_file(&db, "/src/a.py").unwrap();
|
||||
let x_ty = module_global_symbol_ty_by_name(&db, a, "x");
|
||||
let x_ty = global_symbol_ty_by_name(&db, a, "x");
|
||||
|
||||
assert_eq!(x_ty.display(&db).to_string(), "Literal[10]");
|
||||
|
||||
@@ -1283,7 +1395,7 @@ mod tests {
|
||||
|
||||
let a = system_path_to_file(&db, "/src/a.py").unwrap();
|
||||
|
||||
let x_ty_2 = module_global_symbol_ty_by_name(&db, a, "x");
|
||||
let x_ty_2 = global_symbol_ty_by_name(&db, a, "x");
|
||||
|
||||
assert_eq!(x_ty_2.display(&db).to_string(), "Literal[20]");
|
||||
|
||||
@@ -1300,7 +1412,7 @@ mod tests {
|
||||
])?;
|
||||
|
||||
let a = system_path_to_file(&db, "/src/a.py").unwrap();
|
||||
let x_ty = module_global_symbol_ty_by_name(&db, a, "x");
|
||||
let x_ty = global_symbol_ty_by_name(&db, a, "x");
|
||||
|
||||
assert_eq!(x_ty.display(&db).to_string(), "Literal[10]");
|
||||
|
||||
@@ -1310,7 +1422,7 @@ mod tests {
|
||||
|
||||
db.clear_salsa_events();
|
||||
|
||||
let x_ty_2 = module_global_symbol_ty_by_name(&db, a, "x");
|
||||
let x_ty_2 = global_symbol_ty_by_name(&db, a, "x");
|
||||
|
||||
assert_eq!(x_ty_2.display(&db).to_string(), "Literal[10]");
|
||||
|
||||
@@ -1336,7 +1448,7 @@ mod tests {
|
||||
])?;
|
||||
|
||||
let a = system_path_to_file(&db, "/src/a.py").unwrap();
|
||||
let x_ty = module_global_symbol_ty_by_name(&db, a, "x");
|
||||
let x_ty = global_symbol_ty_by_name(&db, a, "x");
|
||||
|
||||
assert_eq!(x_ty.display(&db).to_string(), "Literal[10]");
|
||||
|
||||
@@ -1346,7 +1458,7 @@ mod tests {
|
||||
|
||||
db.clear_salsa_events();
|
||||
|
||||
let x_ty_2 = module_global_symbol_ty_by_name(&db, a, "x");
|
||||
let x_ty_2 = global_symbol_ty_by_name(&db, a, "x");
|
||||
|
||||
assert_eq!(x_ty_2.display(&db).to_string(), "Literal[10]");
|
||||
|
||||
|
||||
@@ -5,10 +5,11 @@ use red_knot::workspace::WorkspaceMetadata;
|
||||
use ruff_benchmark::criterion::{
|
||||
criterion_group, criterion_main, BatchSize, Criterion, Throughput,
|
||||
};
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::files::{system_path_to_file, vendored_path_to_file, File};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::program::{ProgramSettings, SearchPathSettings, TargetVersion};
|
||||
use ruff_db::system::{MemoryFileSystem, SystemPath, TestSystem};
|
||||
use ruff_db::vendored::VendoredPath;
|
||||
use ruff_db::Upcast;
|
||||
|
||||
static FOO_CODE: &str = r#"
|
||||
@@ -17,17 +18,17 @@ import typing
|
||||
from bar import Bar
|
||||
|
||||
class Foo(Bar):
|
||||
def foo() -> str:
|
||||
def foo() -> object:
|
||||
return "foo"
|
||||
|
||||
@typing.override
|
||||
def bar() -> str:
|
||||
def bar() -> object:
|
||||
return "foo_bar"
|
||||
"#;
|
||||
|
||||
static BAR_CODE: &str = r#"
|
||||
class Bar:
|
||||
def bar() -> str:
|
||||
def bar() -> object:
|
||||
return "bar"
|
||||
|
||||
def random(arg: int) -> int:
|
||||
@@ -48,6 +49,7 @@ struct Case {
|
||||
foo: File,
|
||||
bar: File,
|
||||
typing: File,
|
||||
builtins: File,
|
||||
}
|
||||
|
||||
fn setup_case() -> Case {
|
||||
@@ -56,6 +58,7 @@ fn setup_case() -> Case {
|
||||
let foo_path = SystemPath::new("/src/foo.py");
|
||||
let bar_path = SystemPath::new("/src/bar.py");
|
||||
let typing_path = SystemPath::new("/src/typing.pyi");
|
||||
let builtins_path = VendoredPath::new("stdlib/builtins.pyi");
|
||||
fs.write_files([
|
||||
(foo_path, FOO_CODE),
|
||||
(bar_path, BAR_CODE),
|
||||
@@ -82,6 +85,7 @@ fn setup_case() -> Case {
|
||||
|
||||
let bar = system_path_to_file(&db, bar_path).unwrap();
|
||||
let typing = system_path_to_file(&db, typing_path).unwrap();
|
||||
let builtins = vendored_path_to_file(&db, builtins_path).unwrap();
|
||||
|
||||
Case {
|
||||
db,
|
||||
@@ -89,6 +93,7 @@ fn setup_case() -> Case {
|
||||
foo,
|
||||
bar,
|
||||
typing,
|
||||
builtins,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -104,6 +109,7 @@ fn benchmark_without_parse(criterion: &mut Criterion) {
|
||||
parsed_module(case.db.upcast(), case.foo);
|
||||
parsed_module(case.db.upcast(), case.bar);
|
||||
parsed_module(case.db.upcast(), case.typing);
|
||||
parsed_module(case.db.upcast(), case.builtins);
|
||||
case
|
||||
},
|
||||
|case| {
|
||||
@@ -172,7 +178,7 @@ fn benchmark_cold(criterion: &mut Criterion) {
     group.finish();
 }
 
-criterion_group!(cold, benchmark_without_parse);
-criterion_group!(without_parse, benchmark_cold);
+criterion_group!(cold, benchmark_cold);
+criterion_group!(without_parse, benchmark_without_parse);
 criterion_group!(incremental, benchmark_incremental);
 criterion_main!(without_parse, cold, incremental);
|
||||
@@ -58,7 +58,7 @@ impl Files {
|
||||
///
|
||||
/// The operation always succeeds even if the path doesn't exist on disk, isn't accessible or if the path points to a directory.
|
||||
/// In these cases, a file with status [`FileStatus::Deleted`] is returned.
|
||||
#[tracing::instrument(level = "debug", skip(self, db), ret)]
|
||||
#[tracing::instrument(level = "trace", skip(self, db), ret)]
|
||||
fn system(&self, db: &dyn Db, path: &SystemPath) -> File {
|
||||
let absolute = SystemPath::absolute(path, db.system().current_directory());
|
||||
let absolute = FilePath::System(absolute);
|
||||
@@ -102,7 +102,7 @@ impl Files {
|
||||
|
||||
/// Looks up a vendored file by its path. Returns `Some` if a vendored file for the given path
|
||||
/// exists and `None` otherwise.
|
||||
#[tracing::instrument(level = "debug", skip(self, db), ret)]
|
||||
#[tracing::instrument(level = "trace", skip(self, db), ret)]
|
||||
fn vendored(&self, db: &dyn Db, path: &VendoredPath) -> Option<File> {
|
||||
let file = match self
|
||||
.inner
|
||||
|
||||
@@ -35,3 +35,67 @@ def make_unique_pod_id(pod_id: str) -> str | None:
|
||||
|
||||
def shouldnt_add_raw_here2():
|
||||
u"Sum\\mary."
|
||||
|
||||
|
||||
def shouldnt_add_raw_for_double_quote_docstring_contains_docstring():
|
||||
"""
|
||||
This docstring contains another double-quote docstring.
|
||||
|
||||
def foo():
|
||||
\"\"\"Foo.\"\"\"
|
||||
"""
|
||||
|
||||
|
||||
def shouldnt_add_raw_for_double_quote_docstring_contains_docstring2():
|
||||
"""
|
||||
This docstring contains another double-quote docstring.
|
||||
|
||||
def bar():
|
||||
\"""Bar.\"""
|
||||
|
||||
More content here.
|
||||
"""
|
||||
|
||||
|
||||
def shouldnt_add_raw_for_single_quote_docstring_contains_docstring():
|
||||
'''
|
||||
This docstring contains another single-quote docstring.
|
||||
|
||||
def foo():
|
||||
\'\'\'Foo.\'\'\'
|
||||
|
||||
More content here.
|
||||
'''
|
||||
|
||||
|
||||
def shouldnt_add_raw_for_single_quote_docstring_contains_docstring2():
|
||||
'''
|
||||
This docstring contains another single-quote docstring.
|
||||
|
||||
def bar():
|
||||
\'''Bar.\'''
|
||||
|
||||
More content here.
|
||||
'''
|
||||
|
||||
def shouldnt_add_raw_for_docstring_contains_escaped_double_triple_quotes():
|
||||
"""
|
||||
Escaped triple quote \""" or \"\"\".
|
||||
"""
|
||||
|
||||
def shouldnt_add_raw_for_docstring_contains_escaped_single_triple_quotes():
|
||||
'''
|
||||
Escaped triple quote \''' or \'\'\'.
|
||||
'''
|
||||
|
||||
|
||||
def should_add_raw_for_single_double_quote_escape():
|
||||
"""
|
||||
This is single quote escape \".
|
||||
"""
|
||||
|
||||
|
||||
def should_add_raw_for_single_single_quote_escape():
|
||||
'''
|
||||
This is single quote escape \'.
|
||||
'''
|
||||
|
||||
@@ -850,9 +850,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::PytestFailWithoutMessage) {
|
||||
flake8_pytest_style::rules::fail_call(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::PairwiseOverZipped) {
|
||||
if checker.enabled(Rule::ZipInsteadOfPairwise) {
|
||||
if checker.settings.target_version >= PythonVersion::Py310 {
|
||||
ruff::rules::pairwise_over_zipped(checker, func, args);
|
||||
ruff::rules::zip_instead_of_pairwise(checker, func, args);
|
||||
}
|
||||
}
|
||||
if checker.any_enabled(&[
|
||||
|
||||
@@ -918,7 +918,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Ruff, "003") => (RuleGroup::Stable, rules::ruff::rules::AmbiguousUnicodeCharacterComment),
|
||||
(Ruff, "005") => (RuleGroup::Stable, rules::ruff::rules::CollectionLiteralConcatenation),
|
||||
(Ruff, "006") => (RuleGroup::Stable, rules::ruff::rules::AsyncioDanglingTask),
|
||||
(Ruff, "007") => (RuleGroup::Stable, rules::ruff::rules::PairwiseOverZipped),
|
||||
(Ruff, "007") => (RuleGroup::Stable, rules::ruff::rules::ZipInsteadOfPairwise),
|
||||
(Ruff, "008") => (RuleGroup::Stable, rules::ruff::rules::MutableDataclassDefault),
|
||||
(Ruff, "009") => (RuleGroup::Stable, rules::ruff::rules::FunctionCallInDataclassDefaultArgument),
|
||||
(Ruff, "010") => (RuleGroup::Stable, rules::ruff::rules::ExplicitFStringTypeConversion),
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
use memchr::memchr_iter;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
@@ -69,20 +67,47 @@ pub(crate) fn backslashes(checker: &mut Checker, docstring: &Docstring) {
|
||||
// Docstring contains at least one backslash.
|
||||
let body = docstring.body();
|
||||
let bytes = body.as_bytes();
|
||||
if memchr_iter(b'\\', bytes).any(|position| {
|
||||
let escaped_char = bytes.get(position.saturating_add(1));
|
||||
// Allow continuations (backslashes followed by newlines) and Unicode escapes.
|
||||
!matches!(escaped_char, Some(b'\r' | b'\n' | b'u' | b'U' | b'N'))
|
||||
}) {
|
||||
let mut diagnostic = Diagnostic::new(EscapeSequenceInDocstring, docstring.range());
|
||||
|
||||
if !docstring.leading_quote().contains(['u', 'U']) {
|
||||
diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement(
|
||||
"r".to_owned() + docstring.contents,
|
||||
docstring.range(),
|
||||
)));
|
||||
let mut offset = 0;
|
||||
while let Some(position) = memchr::memchr(b'\\', &bytes[offset..]) {
|
||||
if position + offset + 1 >= body.len() {
|
||||
break;
|
||||
}
|
||||
|
||||
checker.diagnostics.push(diagnostic);
|
||||
let after_escape = &body[position + offset + 1..];
|
||||
|
||||
// End of Docstring.
|
||||
let Some(escaped_char) = &after_escape.chars().next() else {
|
||||
break;
|
||||
};
|
||||
|
||||
if matches!(escaped_char, '"' | '\'') {
|
||||
// If the next three characters are equal to """, it indicates an escaped docstring pattern.
|
||||
if after_escape.starts_with("\"\"\"") || after_escape.starts_with("\'\'\'") {
|
||||
offset += position + 3;
|
||||
continue;
|
||||
}
|
||||
// If the next three characters are equal to "\"\", it indicates an escaped docstring pattern.
|
||||
if after_escape.starts_with("\"\\\"\\\"") || after_escape.starts_with("\'\\\'\\\'") {
|
||||
offset += position + 5;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
offset += position + escaped_char.len_utf8();
|
||||
|
||||
// Only allow continuations (backslashes followed by newlines) and Unicode escapes.
|
||||
if !matches!(*escaped_char, '\r' | '\n' | 'u' | 'U' | 'N') {
|
||||
let mut diagnostic = Diagnostic::new(EscapeSequenceInDocstring, docstring.range());
|
||||
|
||||
if !docstring.leading_quote().contains(['u', 'U']) {
|
||||
diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement(
|
||||
"r".to_owned() + docstring.contents,
|
||||
docstring.range(),
|
||||
)));
|
||||
}
|
||||
|
||||
checker.diagnostics.push(diagnostic);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,4 +25,43 @@ D301.py:37:5: D301 Use `r"""` if any backslashes in a docstring
|
||||
|
|
||||
= help: Add `r` prefix
|
||||
|
||||
D301.py:93:5: D301 [*] Use `r"""` if any backslashes in a docstring
|
||||
|
|
||||
92 | def should_add_raw_for_single_double_quote_escape():
|
||||
93 | """
|
||||
| _____^
|
||||
94 | | This is single quote escape \".
|
||||
95 | | """
|
||||
| |_______^ D301
|
||||
|
|
||||
= help: Add `r` prefix
|
||||
|
||||
ℹ Unsafe fix
|
||||
90 90 |
|
||||
91 91 |
|
||||
92 92 | def should_add_raw_for_single_double_quote_escape():
|
||||
93 |- """
|
||||
93 |+ r"""
|
||||
94 94 | This is single quote escape \".
|
||||
95 95 | """
|
||||
96 96 |
|
||||
|
||||
D301.py:99:5: D301 [*] Use `r"""` if any backslashes in a docstring
|
||||
|
|
||||
98 | def should_add_raw_for_single_single_quote_escape():
|
||||
99 | '''
|
||||
| _____^
|
||||
100 | | This is single quote escape \'.
|
||||
101 | | '''
|
||||
| |_______^ D301
|
||||
|
|
||||
= help: Add `r` prefix
|
||||
|
||||
ℹ Unsafe fix
|
||||
96 96 |
|
||||
97 97 |
|
||||
98 98 | def should_add_raw_for_single_single_quote_escape():
|
||||
99 |- '''
|
||||
99 |+ r'''
|
||||
100 100 | This is single quote escape \'.
|
||||
101 101 | '''
|
||||
|
||||
@@ -32,7 +32,7 @@ mod tests {
|
||||
#[test_case(Rule::ImplicitOptional, Path::new("RUF013_3.py"))]
|
||||
#[test_case(Rule::MutableClassDefault, Path::new("RUF012.py"))]
|
||||
#[test_case(Rule::MutableDataclassDefault, Path::new("RUF008.py"))]
|
||||
#[test_case(Rule::PairwiseOverZipped, Path::new("RUF007.py"))]
|
||||
#[test_case(Rule::ZipInsteadOfPairwise, Path::new("RUF007.py"))]
|
||||
#[test_case(
|
||||
Rule::UnnecessaryIterableAllocationForFirstElement,
|
||||
Path::new("RUF015.py")
|
||||
|
||||
@@ -15,7 +15,6 @@ pub(crate) use mutable_class_default::*;
|
||||
pub(crate) use mutable_dataclass_default::*;
|
||||
pub(crate) use mutable_fromkeys_value::*;
|
||||
pub(crate) use never_union::*;
|
||||
pub(crate) use pairwise_over_zipped::*;
|
||||
pub(crate) use parenthesize_logical_operators::*;
|
||||
pub(crate) use quadratic_list_summation::*;
|
||||
pub(crate) use redirected_noqa::*;
|
||||
@@ -29,6 +28,7 @@ pub(crate) use unnecessary_iterable_allocation_for_first_element::*;
|
||||
pub(crate) use unnecessary_key_check::*;
|
||||
pub(crate) use unused_async::*;
|
||||
pub(crate) use unused_noqa::*;
|
||||
pub(crate) use zip_instead_of_pairwise::*;
|
||||
|
||||
mod ambiguous_unicode_character;
|
||||
mod assert_with_print_message;
|
||||
@@ -49,7 +49,6 @@ mod mutable_class_default;
|
||||
mod mutable_dataclass_default;
|
||||
mod mutable_fromkeys_value;
|
||||
mod never_union;
|
||||
mod pairwise_over_zipped;
|
||||
mod parenthesize_logical_operators;
|
||||
mod quadratic_list_summation;
|
||||
mod redirected_noqa;
|
||||
@@ -65,6 +64,7 @@ mod unnecessary_iterable_allocation_for_first_element;
|
||||
mod unnecessary_key_check;
|
||||
mod unused_async;
|
||||
mod unused_noqa;
|
||||
mod zip_instead_of_pairwise;
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(crate) enum Context {
|
||||
|
||||
@@ -32,9 +32,9 @@ use crate::checkers::ast::Checker;
|
||||
/// ## References
|
||||
/// - [Python documentation: `itertools.pairwise`](https://docs.python.org/3/library/itertools.html#itertools.pairwise)
|
||||
#[violation]
|
||||
pub struct PairwiseOverZipped;
|
||||
pub struct ZipInsteadOfPairwise;
|
||||
|
||||
impl Violation for PairwiseOverZipped {
|
||||
impl Violation for ZipInsteadOfPairwise {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Prefer `itertools.pairwise()` over `zip()` when iterating over successive pairs")
|
||||
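RUF007 targets Python, but the intent behind the renamed rule is easy to show with a Rust analogue (illustrative only): zipping a sequence with an offset copy of itself is just iterating successive pairs, which Python spells `itertools.pairwise()`.

```rust
fn main() {
    let xs = [1, 2, 3, 4];

    // The pattern the rule flags in Python: `zip(xs, xs[1:])`.
    let zipped: Vec<(i32, i32)> = xs.iter().copied().zip(xs.iter().copied().skip(1)).collect();

    // The suggested spelling, `itertools.pairwise(xs)`: successive pairs.
    let pairwise: Vec<(i32, i32)> = xs.windows(2).map(|w| (w[0], w[1])).collect();

    assert_eq!(zipped, pairwise);
}
```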
@@ -95,7 +95,7 @@ fn match_slice_info(expr: &Expr) -> Option<SliceInfo> {
|
||||
}
|
||||
|
||||
/// RUF007
|
||||
pub(crate) fn pairwise_over_zipped(checker: &mut Checker, func: &Expr, args: &[Expr]) {
|
||||
pub(crate) fn zip_instead_of_pairwise(checker: &mut Checker, func: &Expr, args: &[Expr]) {
|
||||
// Require exactly two positional arguments.
|
||||
let [first, second] = args else {
|
||||
return;
|
||||
@@ -141,5 +141,5 @@ pub(crate) fn pairwise_over_zipped(checker: &mut Checker, func: &Expr, args: &[E
|
||||
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(PairwiseOverZipped, func.range()));
|
||||
.push(Diagnostic::new(ZipInsteadOfPairwise, func.range()));
|
||||
}
|
||||