Compare commits

..

10 Commits

Author SHA1 Message Date
David Peter
0252ee6531 Equivalence 2025-06-05 15:48:09 +02:00
David Peter
969efae929 Update tests 2025-06-04 16:46:46 +02:00
David Peter
e08024ffda Revert "Update lists"
This reverts commit 9981ac7ecc.
2025-06-04 16:42:17 +02:00
David Peter
9981ac7ecc Update lists 2025-06-04 16:23:53 +02:00
David Peter
30c9a5d47e Cycle recovery and todo types 2025-06-04 16:01:45 +02:00
David Peter
7684e23612 Change inference 2025-06-04 14:49:41 +02:00
David Peter
e9b1fc3942 Introduce TypeAliasRef 2025-06-04 14:32:51 +02:00
David Peter
11db567b0b [ty] ty_ide: Hotfix for expression_scope_id panics (#18455)
## Summary

Implement a hotfix for the playground/LSP crashes related to missing
`expression_scope_id`s.

relates to: https://github.com/astral-sh/ty/issues/572

## Test Plan

* Regression tests from https://github.com/astral-sh/ruff/pull/18441
* Ran the playground locally to check if panics occur / completions
still work.

---------

Co-authored-by: Andrew Gallant <andrew@astral.sh>
2025-06-04 10:39:16 +02:00
David Peter
9f8c3de462 [ty] Improve docs for Class{Literal,Type}::instance_member (#18454)
## Summary

Mostly just refer to `Type::instance_member` which has much more
details.
2025-06-04 09:55:45 +02:00
David Peter
293d4ac388 [ty] Add meta-type tests for legacy TypeVars (#18453)
## Summary

Follow up to the comment by @dcreager
[here](https://github.com/astral-sh/ruff/pull/18439#discussion_r2123802784).
2025-06-04 07:44:44 +00:00
29 changed files with 485 additions and 887 deletions

4
Cargo.lock generated
View File

@@ -3933,16 +3933,12 @@ name = "ty_project"
version = "0.0.0"
dependencies = [
"anyhow",
"camino",
"crossbeam",
"glob",
"globset",
"ignore",
"insta",
"notify",
"pep440_rs",
"rayon",
"regex-automata 0.4.9",
"ruff_cache",
"ruff_db",
"ruff_macros",

View File

@@ -126,7 +126,6 @@ quote = { version = "1.0.23" }
rand = { version = "0.9.0" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
@@ -166,7 +165,7 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
"env-filter",
"fmt",
"ansi",
"smallvec",
"smallvec"
] }
tryfn = { version = "0.2.1" }
typed-arena = { version = "2.0.2" }
@@ -176,7 +175,11 @@ unicode-width = { version = "0.2.0" }
unicode_names2 = { version = "1.2.2" }
unicode-normalization = { version = "0.1.23" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
@@ -211,8 +214,8 @@ must_use_candidate = "allow"
similar_names = "allow"
single_match_else = "allow"
too_many_lines = "allow"
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
needless_continue = "allow" # An explicit continue can be more readable, especially if the alternative is an empty block.
unnecessary_debug_formatting = "allow" # too many instances, the display also doesn't quote the path which is often desired in logs where we use them the most often.
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
needless_raw_string_hashes = "allow"
# Disallowed restriction lints

View File

@@ -4,6 +4,10 @@ extend-exclude = [
"crates/ty_vendored/vendor/**/*",
"**/resources/**/*",
"**/snapshots/**/*",
# Completion tests tend to have a lot of incomplete
# words naturally. It's annoying to have to make all
# of them actually words. So just ignore typos here.
"crates/ty_ide/src/completion.rs",
]
[default.extend-words]

View File

@@ -652,8 +652,6 @@ pub enum DiagnosticId {
/// Some I/O operation failed
Io,
NoFiles,
/// Some code contains a syntax error
InvalidSyntax,
@@ -701,7 +699,6 @@ impl DiagnosticId {
DiagnosticId::Lint(name) => name.as_str(),
DiagnosticId::RevealedType => "revealed-type",
DiagnosticId::UnknownRule => "unknown-rule",
DiagnosticId::NoFiles => "no-files",
}
}

View File

@@ -369,30 +369,6 @@ impl SystemPath {
Some(SystemPath::new(Utf8Path::from_path(path)?))
}
/// Returns `true` if the `SystemPath` is absolute, i.e., if it is independent of
/// the current directory.
///
/// * On Unix, a path is absolute if it starts with the root, so
/// `is_absolute` and [`has_root`] are equivalent.
///
/// * On Windows, a path is absolute if it has a prefix and starts with the
/// root: `c:\windows` is absolute, while `c:temp` and `\temp` are not.
///
/// # Examples
///
/// ```
/// use ruff_db::system::SystemPath;
///
/// assert!(!SystemPath::new("foo.txt").is_absolute());
/// ```
///
/// [`has_root`]: Utf8Path::has_root
#[inline]
#[must_use]
pub fn is_absolute(&self) -> bool {
self.0.is_absolute()
}
/// Makes a path absolute and normalizes it without accessing the file system.
///
/// Adapted from [cargo](https://github.com/rust-lang/cargo/blob/fede83ccf973457de319ba6fa0e36ead454d2e20/src/cargo/util/paths.rs#L61)
@@ -558,10 +534,6 @@ impl SystemPathBuf {
self.0
}
pub fn into_string(self) -> String {
self.0.into_string()
}
pub fn into_std_path_buf(self) -> PathBuf {
self.0.into_std_path_buf()
}
@@ -850,7 +822,7 @@ impl ruff_cache::CacheKey for SystemVirtualPathBuf {
///
/// # Examples
/// ```rust
/// use ruff_db::system::{SystemPath, deduplicate_nested_paths};
/// use ruff_db::system::{SystemPath, deduplicate_nested_paths};///
///
/// let paths = vec![SystemPath::new("/a/b/c"), SystemPath::new("/a/b"), SystemPath::new("/a/beta"), SystemPath::new("/a/b/c")];
/// assert_eq!(deduplicate_nested_paths(paths).collect::<Vec<_>>(), &[SystemPath::new("/a/b"), SystemPath::new("/a/beta")]);

View File

@@ -143,23 +143,6 @@ typeshed = "/path/to/custom/typeshed"
## `src`
#### `files`
TODO
**Default value**: `null`
**Type**: `list[pattern]`
**Example usage** (`pyproject.toml`):
```toml
[tool.ty.src]
files = ["./app", "!app/build"]
```
---
#### `respect-ignore-files`
Whether to automatically exclude files that are ignored by `.ignore`,

View File

@@ -19,7 +19,7 @@ use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use rayon::ThreadPoolBuilder;
use ruff_db::Upcast;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, DisplayDiagnosticConfig, Severity};
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, Severity};
use ruff_db::max_parallelism;
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;
@@ -276,7 +276,7 @@ impl MainLoop {
}
MainLoopMessage::CheckCompleted {
mut result,
result,
revision: check_revision,
} => {
let terminal_settings = db.project().settings(db).terminal();
@@ -286,11 +286,7 @@ impl MainLoop {
if check_revision == revision {
if db.project().files(db).is_empty() {
result.push(Diagnostic::new(
DiagnosticId::NoFiles,
Severity::Warning,
"No python files found under the given path(s)",
));
tracing::warn!("No python files found under the given path(s)");
}
let mut stdout = stdout().lock();

View File

@@ -861,6 +861,162 @@ print(f\"{some<CURSOR>
");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_function_identifier1() {
let test = cursor_test(
"\
def m<CURSOR>
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_function_identifier2() {
let test = cursor_test(
"\
def m<CURSOR>(): pass
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn fscope_id_missing_function_identifier3() {
let test = cursor_test(
"\
def m(): pass
<CURSOR>
",
);
assert_snapshot!(test.completions(), @r"
m
");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_class_identifier1() {
let test = cursor_test(
"\
class M<CURSOR>
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_type_alias1() {
let test = cursor_test(
"\
Fo<CURSOR> = float
",
);
assert_snapshot!(test.completions(), @r"
Fo
float
");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_import1() {
let test = cursor_test(
"\
import fo<CURSOR>
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_import2() {
let test = cursor_test(
"\
import foo as ba<CURSOR>
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_from_import1() {
let test = cursor_test(
"\
from fo<CURSOR> import wat
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_from_import2() {
let test = cursor_test(
"\
from foo import wa<CURSOR>
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_from_import3() {
let test = cursor_test(
"\
from foo import wat as ba<CURSOR>
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_try_except1() {
let test = cursor_test(
"\
try:
pass
except Type<CURSOR>:
pass
",
);
assert_snapshot!(test.completions(), @r"
Type
");
}
// Ref: https://github.com/astral-sh/ty/issues/572
#[test]
fn scope_id_missing_global1() {
let test = cursor_test(
"\
def _():
global fo<CURSOR>
",
);
assert_snapshot!(test.completions(), @"<No completions found>");
}
impl CursorTest {
fn completions(&self) -> String {
let completions = completion(&self.db, self.file, self.cursor_offset);

View File

@@ -24,11 +24,8 @@ ty_python_semantic = { workspace = true, features = ["serde"] }
ty_vendored = { workspace = true }
anyhow = { workspace = true }
camino = { workspace = true }
crossbeam = { workspace = true }
ignore = { workspace = true }
glob = { workspace = true }
globset = { workspace = true }
notify = { workspace = true }
pep440_rs = { workspace = true, features = ["version-ranges"] }
rayon = { workspace = true }
@@ -38,7 +35,6 @@ schemars = { workspace = true, optional = true }
serde = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
regex-automata = { workspace = true }
tracing = { workspace = true }
[dev-dependencies]

View File

@@ -7,7 +7,7 @@ use std::collections::BTreeSet;
use crate::walk::ProjectFilesWalker;
use ruff_db::Db as _;
use ruff_db::files::{File, Files};
use ruff_db::system::{FileType, SystemPath};
use ruff_db::system::SystemPath;
use rustc_hash::FxHashSet;
use ty_python_semantic::Program;
@@ -113,16 +113,8 @@ impl ProjectDatabase {
// should be included in the project. We can skip this check for
// paths that aren't part of the project or shouldn't be included
// when checking the project.
let metadata = self
.system()
.path_metadata(&path)
.map(|metadata| metadata.file_type());
if project.is_path_included(
self,
&path,
matches!(metadata, Ok(FileType::Directory)),
) {
if matches!(metadata, Ok(FileType::File)) {
if project.is_path_included(self, &path) {
if self.system().is_file(&path) {
// Add the parent directory because `walkdir` always visits explicitly passed files
// even if they match an exclude filter.
added_paths.insert(path.parent().unwrap().to_path_buf());
@@ -161,7 +153,7 @@ impl ProjectDatabase {
result.custom_stdlib_changed = true;
}
if project.is_path_included(self, &path, true) || path == project_root {
if project.is_path_included(self, &path) || path == project_root {
// TODO: Shouldn't it be enough to simply traverse the project files and remove all
// that start with the given path?
tracing::debug!(

View File

@@ -127,7 +127,7 @@ impl Reporter for DummyReporter {
#[salsa::tracked]
impl Project {
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
let (settings, settings_diagnostics) = metadata.options().to_settings(db, metadata.root());
let (settings, settings_diagnostics) = metadata.options().to_settings(db);
Project::builder(metadata, settings, settings_diagnostics)
.durability(Durability::MEDIUM)
@@ -160,8 +160,8 @@ impl Project {
/// the project's include and exclude settings as well as the paths that were passed to `ty check <paths>`.
/// This means, that this method is an over-approximation of `Self::files` and may return `true` for paths
/// that won't be included when checking the project because they're ignored in a `.gitignore` file.
pub fn is_path_included(self, db: &dyn Db, path: &SystemPath, is_directory: bool) -> bool {
ProjectFilesFilter::from_project(db, self).is_included(path, is_directory)
pub fn is_path_included(self, db: &dyn Db, path: &SystemPath) -> bool {
ProjectFilesFilter::from_project(db, self).is_included(path)
}
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
@@ -169,8 +169,7 @@ impl Project {
assert_eq!(self.root(db), metadata.root());
if &metadata != self.metadata(db) {
let (settings, settings_diagnostics) =
metadata.options().to_settings(db, metadata.root());
let (settings, settings_diagnostics) = metadata.options().to_settings(db);
if self.settings(db) != &settings {
self.set_settings(db).to(settings);

View File

@@ -1,9 +1,5 @@
use crate::Db;
use crate::metadata::settings::SrcSettings;
use crate::metadata::value::{
RangedValue, RelativePathBuf, RelativePathPattern, ValueSource, ValueSourceGuard,
};
use crate::walk::FilePatternsBuilder;
use crate::metadata::value::{RangedValue, RelativePathBuf, ValueSource, ValueSourceGuard};
use ruff_db::diagnostic::{Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, Severity, Span};
use ruff_db::files::system_path_to_file;
use ruff_db::system::{System, SystemPath, SystemPathBuf};
@@ -203,20 +199,10 @@ impl Options {
}
#[must_use]
pub(crate) fn to_settings(
&self,
db: &dyn Db,
project_root: &SystemPath,
) -> (Settings, Vec<OptionDiagnostic>) {
pub(crate) fn to_settings(&self, db: &dyn Db) -> (Settings, Vec<OptionDiagnostic>) {
let (rules, diagnostics) = self.to_rule_selection(db);
let mut settings = Settings::new(rules);
if let Some(src) = self.src.as_ref() {
tracing::debug!("found src options: {src:?}");
// TODO: Error handling
settings.set_src(src.to_settings(db.system(), project_root).unwrap());
}
let mut settings = Settings::new(rules, self.src.as_ref());
if let Some(terminal) = self.terminal.as_ref() {
settings.set_terminal(TerminalSettings {
@@ -422,17 +408,6 @@ pub struct SrcOptions {
)]
pub root: Option<RelativePathBuf>,
/// TODO
#[serde(skip_serializing_if = "Option::is_none")]
#[option(
default = r#"null"#,
value_type = "list[pattern]",
example = r#"
files = ["./app", "!app/build"]
"#
)]
pub files: Option<Vec<RelativePathPattern>>,
/// Whether to automatically exclude files that are ignored by `.ignore`,
/// `.gitignore`, `.git/info/exclude`, and global `gitignore` files.
/// Enabled by default.
@@ -447,31 +422,6 @@ pub struct SrcOptions {
pub respect_ignore_files: Option<bool>,
}
impl SrcOptions {
fn to_settings(
&self,
system: &dyn System,
project_root: &SystemPath,
// diagnostics: &mut Vec<OptionDiagnostic>,
) -> Result<SrcSettings, ()> {
// TODO: Error handling, default exclusions
let mut files = FilePatternsBuilder::new();
for pattern in self.files.iter().flatten() {
files.add(&pattern.absolute(project_root, system)).unwrap();
}
let src = SrcSettings {
respect_ignore_files: self.respect_ignore_files.unwrap_or(true),
files: files.build().unwrap(),
};
tracing::debug!("Resolved src settings: {src:?}");
Ok(src)
}
}
#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", transparent)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]

View File

@@ -1,8 +1,8 @@
use ruff_db::diagnostic::DiagnosticFormat;
use std::sync::Arc;
use ty_python_semantic::lint::RuleSelection;
use crate::walk::FilePatterns;
use crate::metadata::options::SrcOptions;
use ruff_db::diagnostic::DiagnosticFormat;
use ty_python_semantic::lint::RuleSelection;
/// The resolved [`super::Options`] for the project.
///
@@ -23,15 +23,19 @@ pub struct Settings {
terminal: TerminalSettings,
src: SrcSettings,
respect_ignore_files: bool,
}
impl Settings {
pub fn new(rules: RuleSelection) -> Self {
pub fn new(rules: RuleSelection, src_options: Option<&SrcOptions>) -> Self {
let respect_ignore_files = src_options
.and_then(|src| src.respect_ignore_files)
.unwrap_or(true);
Self {
rules: Arc::new(rules),
terminal: TerminalSettings::default(),
src: SrcSettings::default(),
respect_ignore_files,
}
}
@@ -39,12 +43,8 @@ impl Settings {
&self.rules
}
pub fn src(&self) -> &SrcSettings {
&self.src
}
pub fn set_src(&mut self, src: SrcSettings) {
self.src = src;
pub fn respect_ignore_files(&self) -> bool {
self.respect_ignore_files
}
pub fn to_rules(&self) -> Arc<RuleSelection> {
@@ -65,20 +65,3 @@ pub struct TerminalSettings {
pub output_format: DiagnosticFormat,
pub error_on_warning: bool,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SrcSettings {
pub respect_ignore_files: bool,
pub files: FilePatterns,
}
impl Default for SrcSettings {
fn default() -> Self {
Self {
respect_ignore_files: true,
// TODO: This should include all files by default
files: FilePatterns::empty(),
}
}
}

View File

@@ -344,56 +344,3 @@ impl RelativePathBuf {
SystemPath::absolute(&self.0, relative_to)
}
}
/// A relative path pattern that allows for negative patterns (git ignore style).
#[derive(
Debug,
Clone,
serde::Serialize,
serde::Deserialize,
PartialEq,
Eq,
PartialOrd,
Ord,
Hash,
Combine,
)]
#[serde(transparent)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct RelativePathPattern(RangedValue<String>);
impl RelativePathPattern {
pub fn new(pattern: String, source: ValueSource) -> Self {
Self(RangedValue::new(pattern, source))
}
pub fn cli(pattern: String) -> Self {
Self::new(pattern, ValueSource::Cli)
}
/// Returns the relative pattern.
pub fn pattern(&self) -> &str {
&self.0
}
/// Resolves the relative pattern to an absolute pattern.
pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> String {
let relative_to = match &self.0.source {
ValueSource::File(_) => project_root,
ValueSource::Cli => system.current_directory(),
};
if let Some(after) = self.0.strip_prefix('!') {
// Patterns starting with `**` don't need to be anchored.
if after.starts_with("**") {
self.0.to_string()
} else {
format!("!{}", SystemPath::absolute(after, relative_to))
}
} else if self.0.starts_with("**") {
self.0.to_string()
} else {
SystemPath::absolute(&self.0, relative_to).into_string()
}
}
}

View File

@@ -1,14 +1,10 @@
use crate::{Db, IOErrorDiagnostic, IOErrorKind, Project};
use globset::{Candidate, GlobBuilder, GlobSet, GlobSetBuilder};
use regex_automata::util::pool::Pool;
use ruff_db::files::{File, system_path_to_file};
use ruff_db::system::walk_directory::{ErrorKind, WalkDirectoryBuilder, WalkState};
use ruff_db::system::{FileType, SystemPath, SystemPathBuf};
use ruff_python_ast::PySourceType;
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use std::borrow::Cow;
use rustc_hash::{FxBuildHasher, FxHashSet};
use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;
/// Filter that decides which files are included in the project.
@@ -17,15 +13,11 @@ use thiserror::Error;
///
/// This struct mainly exists because `dyn Db` isn't `Send` or `Sync`, making it impossible
/// to access fields from within the walker.
#[derive(Debug)]
#[derive(Default, Debug)]
pub(crate) struct ProjectFilesFilter<'a> {
/// The same as [`Project::included_paths_or_root`].
included_paths: &'a [SystemPathBuf],
files_patterns: &'a FilePatterns,
project_root: &'a SystemPath,
/// The filter skips checking if the path is in `included_paths` if set to `true`.
///
/// Skipping this check is useful when the walker only walks over `included_paths`.
@@ -36,8 +28,6 @@ impl<'a> ProjectFilesFilter<'a> {
pub(crate) fn from_project(db: &'a dyn Db, project: Project) -> Self {
Self {
included_paths: project.included_paths_or_root(db),
project_root: project.root(db),
files_patterns: &project.settings(db).src().files,
skip_included_paths: false,
}
}
@@ -55,7 +45,7 @@ impl<'a> ProjectFilesFilter<'a> {
/// This method may return `true` for files that don't end up being included when walking the
/// project tree because it doesn't consider `.gitignore` and other ignore files when deciding
/// if a file's included.
pub(crate) fn is_included(&self, path: &SystemPath, is_directory: bool) -> bool {
pub(crate) fn is_included(&self, path: &SystemPath) -> bool {
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum CheckPathMatch {
/// The path is a partial match of the checked path (it's a sub path)
@@ -88,26 +78,8 @@ impl<'a> ProjectFilesFilter<'a> {
match m {
None => false,
Some(CheckPathMatch::Partial) => {
if path == self.project_root {
return true;
}
// TODO: Do we need to use `matched_path_or_any_parents` when not walking?
let matched = self.files_patterns.matches_path(path, is_directory);
tracing::debug!("path `{path} matches {matched:?}");
// TODO: For partial matches, only include the file if it is included by the project's include/exclude settings.
match matched {
// We need to traverse directories that don't match because `a` doesn't match the pattern `a/b/c/d.py`
// but we need to traverse the directory to successfully match `a/b/c/d.py`.
// This is very unfortunate because it means ty traverses all directories when e.g. using `files = ["src"]`.
// TODO(micha): 04.06.2025: It would be nice if we could avoid traversing directories
// that are known to never match because they don't share a common prefix with any of the globs.
// But we'd need to be careful in the presence of `**/test` patterns because they can match any path.
PatternMatch::None => true,
PatternMatch::Exclude(_) => false,
PatternMatch::Include => true,
}
true
}
Some(CheckPathMatch::Full) => true,
}
@@ -160,7 +132,7 @@ impl<'a> ProjectFilesWalker<'a> {
let mut walker = db
.system()
.walk_directory(paths.next()?.as_ref())
.standard_filters(db.project().settings(db).src().respect_ignore_files)
.standard_filters(db.project().settings(db).respect_ignore_files())
.ignore_hidden(false);
for path in paths {
@@ -180,10 +152,7 @@ impl<'a> ProjectFilesWalker<'a> {
Box::new(|entry| {
match entry {
Ok(entry) => {
if !self
.filter
.is_included(entry.path(), entry.file_type().is_directory())
{
if !self.filter.is_included(entry.path()) {
tracing::debug!("Ignoring not-included path: {}", entry.path());
return WalkState::Skip;
}
@@ -289,583 +258,3 @@ pub(crate) enum WalkError {
#[error("`{path}` is not a valid UTF-8 path")]
NonUtf8Path { path: PathBuf },
}
#[derive(Clone)]
pub struct FilePatterns {
set: GlobSet,
patterns: Box<[FilePattern]>,
matches: Option<Arc<Pool<Vec<usize>>>>,
static_prefixes: IncludedPrefixes,
num_positive: usize,
}
impl FilePatterns {
pub(crate) fn empty() -> Self {
Self {
set: GlobSet::empty(),
patterns: Box::default(),
matches: None,
static_prefixes: IncludedPrefixes::new(),
num_positive: 0,
}
}
pub(crate) fn matches(&self, path: impl AsRef<SystemPath>) -> PatternMatch {
self.matches_path(path.as_ref(), false)
}
pub(crate) fn matches_directory(&self, path: impl AsRef<SystemPath>) -> PatternMatch {
self.matches_path(path.as_ref(), true)
}
pub(crate) fn matches_path(&self, path: &SystemPath, is_directory: bool) -> PatternMatch {
debug_assert!(path.is_absolute(), "Path {path} isn't absolute");
if self.patterns.is_empty() {
return PatternMatch::None;
}
let candidate = Candidate::new(path);
let mut matches = self.matches.as_ref().unwrap().get();
self.set.matches_candidate_into(&candidate, &mut *matches);
for &i in matches.iter().rev() {
let pattern = &self.patterns[i];
if pattern.is_only_directory && !is_directory {
continue;
}
return if pattern.negated {
PatternMatch::Exclude(ExcludeReason::Match)
} else {
PatternMatch::Include
};
}
if self.num_positive > 0 {
if is_directory {
// Skip directories for which we know that no glob has a shared prefix with.
// E.g. if `files = ["src"], skip `tests`
if dbg!(self.static_prefixes.is_statically_excluded(path)) {
return PatternMatch::Exclude(ExcludeReason::NoIncludePattern);
}
} else {
// If this is a file and there's at least one include pattern but the file doesn't match it,
// then the file is excluded. If there are only exclude patterns, then the file should be included.
return PatternMatch::Exclude(ExcludeReason::NoIncludePattern);
}
}
PatternMatch::None
}
pub(crate) fn match_once(&self, path: &SystemPath) -> PatternMatch {
for parent in path.ancestors().skip(1) {
match self.matches_directory(parent) {
PatternMatch::Include | PatternMatch::None => {
continue;
}
PatternMatch::Exclude(exclude_reason) => {
return PatternMatch::Exclude(exclude_reason);
}
}
}
// At this point it is known that no parent path is excluded.
// TODO: This could be a directory too
self.matches(path)
}
}
impl PartialEq for FilePatterns {
fn eq(&self, other: &Self) -> bool {
self.patterns == other.patterns
}
}
impl Eq for FilePatterns {}
impl std::fmt::Debug for FilePatterns {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("FilePatterns")
.field("patterns", &self.patterns)
.finish()
}
}
#[derive(Debug)]
pub(crate) struct FilePatternsBuilder {
set: GlobSetBuilder,
patterns: Vec<FilePattern>,
static_prefixes: IncludedPrefixes,
num_positive: usize,
}
impl FilePatternsBuilder {
pub(crate) fn new() -> Self {
Self {
set: GlobSetBuilder::new(),
patterns: Vec::new(),
static_prefixes: IncludedPrefixes::new(),
num_positive: 0,
}
}
pub(crate) fn add(&mut self, input: &str) -> Result<&mut Self, globset::Error> {
let mut pattern = FilePattern {
negated: false,
is_only_directory: false,
original: input.to_string(),
};
let mut glob = input;
if let Some(after) = glob.strip_prefix('!') {
pattern.negated = true;
glob = after;
}
debug_assert!(
SystemPath::new(glob).is_absolute(),
"The glob {input} isn't anchored"
);
// A pattern ending with a `/` should only match directories. E.g. `src/` only matches directories
// whereas `src` matches both files and directories.
// We need to remove the `/` to ensure that a path missing the trailing `/` matches.
if let Some(before) = glob.strip_suffix('/') {
pattern.is_only_directory = true;
glob = before;
// If the slash was escaped, then remove the escape.
// See: https://github.com/BurntSushi/ripgrep/issues/2236
let trailing_backslashes = glob.chars().rev().filter(|c| *c == '\\').count();
if trailing_backslashes % 2 == 1 {
glob = &glob[..glob.len() - trailing_backslashes]
}
}
// If the last component contains no wildcards or extension, consider it an implicit glob
// This turns `src` into `src/**/*`
// TODO: Should we also enable this behavior for `is_only_directory` patterns?
if !glob.ends_with("**") && !pattern.negated {
let parsed = GlobBuilder::new(&format!("{glob}/**"))
.literal_separator(true)
.backslash_escape(true)
// TODO: Map the error to the pattern the user provided.
.build()?;
self.set.add(parsed);
self.patterns.push(FilePattern {
is_only_directory: false,
..pattern.clone()
});
}
let mut actual = Cow::Borrowed(glob);
// If the glob ends with `/**`, then we should only match everything
// inside a directory, but not the directory itself. Standard globs
// will match the directory. So we add `/*` to force the issue.
if actual.ends_with("/**") {
actual = Cow::Owned(format!("{}/*", actual));
}
// Unlike gitignore, anchor paths (don't insert a `**` prefix).
let parsed = GlobBuilder::new(&*actual)
.literal_separator(true)
.backslash_escape(true)
// TODO: Map the error to the pattern the user provided.
.build()?;
if !pattern.negated {
self.num_positive += 1;
let mut parent = self.static_prefixes.root_mut();
// Do a best effort at extracting a static prefix from a positive include match.
// This allows short-circuiting traversal of folders that are known to not overlap with any positive
// match. However, we have to be careful. Any path starting with a `**` requires visiting all folders.
for component in SystemPath::new(glob).components() {
if glob::Pattern::escape(component.as_str()) != component.as_str() {
*parent = IncludedPrefix::Dynamic;
break;
}
let static_parent = match parent {
IncludedPrefix::Dynamic => {
break;
}
IncludedPrefix::Static(static_prefix) => static_prefix,
};
parent = static_parent
.0
.entry(component.to_string())
.or_insert_with(|| IncludedPrefix::Static(StaticPrefix::default()));
}
}
self.set.add(parsed);
self.patterns.push(pattern);
Ok(self)
}
pub(crate) fn build(self) -> Result<FilePatterns, globset::Error> {
Ok(FilePatterns {
set: self.set.build()?,
patterns: self.patterns.into(),
matches: Some(Arc::new(Pool::new(|| vec![]))),
static_prefixes: self.static_prefixes,
num_positive: self.num_positive,
})
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(crate) enum PatternMatch {
/// The highest precedence pattern is an include pattern.
Include,
/// The highest precedence pattern is a negated pattern (the file should not be included).
Exclude(ExcludeReason),
/// No pattern matched the path.
None,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(crate) enum ExcludeReason {
/// The path is excluded because it matches a negative pattern.
Match,
/// It's a path that doesn't match any include pattern.
NoIncludePattern,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct FilePattern {
/// The pattern as specified by the user.
original: String,
/// Whether the glob should only match directories (`src/` matches only directories).
is_only_directory: bool,
/// Whether this pattern was negated.
negated: bool,
}
fn is_implicit_glob(pattern: &str) -> bool {
let as_path = SystemPath::new(pattern);
as_path
.components()
.last()
.is_some_and(|last| !last.as_str().contains(['.', '*', '?']))
}
#[derive(Clone, Debug)]
struct IncludedPrefixes {
root: IncludedPrefix,
}
impl IncludedPrefixes {
fn new() -> Self {
Self {
root: IncludedPrefix::Static(StaticPrefix(FxHashMap::default())),
}
}
fn root_mut(&mut self) -> &mut IncludedPrefix {
&mut self.root
}
fn is_statically_excluded(&self, path: &SystemPath) -> bool {
let mut current = &self.root;
for component in path.components() {
match current {
IncludedPrefix::Dynamic => return false,
IncludedPrefix::Static(static_prefix) => {
match static_prefix.0.get(component.as_str()) {
Some(parent) => {
current = parent;
}
None => {
return true;
}
}
}
}
}
false
}
}
#[derive(Clone, Debug)]
enum IncludedPrefix {
/// The path contains at least one dynamic child pattern. E.g. if `a/*/b` and `a/c`, then `a` is dynamic because of the first pattern.
Dynamic,
/// All its children are fixed.
Static(StaticPrefix),
}
#[derive(Clone, Debug, Default)]
struct StaticPrefix(FxHashMap<String, IncludedPrefix>);
#[cfg(test)]
mod tests {
    use crate::walk::{ExcludeReason, FilePatterns, FilePatternsBuilder, PatternMatch};

    /// Builds a `FilePatterns` from the given patterns, panicking on invalid input.
    fn create_patterns(patterns: impl IntoIterator<Item = &'static str>) -> FilePatterns {
        let mut builder = FilePatternsBuilder::new();
        for pattern in patterns {
            builder.add(pattern).unwrap_or_else(|err| {
                // Fixed: the message previously closed the opening `'` with a backtick.
                panic!("Invalid pattern '{pattern}': {err}");
            });
        }
        builder.build().unwrap()
    }

    /// The pattern set matching `**` always returns `Include`
    #[test]
    fn all() {
        let patterns = create_patterns(["/**"]);

        assert_eq!(patterns.matches_directory("/src"), PatternMatch::Include);
        assert_eq!(patterns.matches_directory("/src/"), PatternMatch::Include);
        assert_eq!(patterns.matches_directory("/"), PatternMatch::Include);
        assert_eq!(patterns.matches("/test.py"), PatternMatch::Include);
    }

    /// The empty pattern set always returns `None`.
    #[test]
    fn empty() {
        let patterns = create_patterns([]);

        assert_eq!(patterns.matches("/a.foo"), PatternMatch::None);
        assert_eq!(patterns.matches("/a"), PatternMatch::None);
        assert_eq!(patterns.matches("/"), PatternMatch::None);
    }

    #[test]
    fn simple() {
        let patterns = create_patterns(["/*.foo", "!/*.bar"]);

        assert_eq!(patterns.matches("/a.foo"), PatternMatch::Include);
        assert_eq!(patterns.matches_directory("/a.foo"), PatternMatch::Include);

        assert_eq!(
            patterns.matches("/a.rs"),
            PatternMatch::Exclude(ExcludeReason::NoIncludePattern)
        );
        assert_eq!(patterns.matches_directory("/a.rs"), PatternMatch::None);

        assert_eq!(
            patterns.matches("/a.bar"),
            PatternMatch::Exclude(ExcludeReason::Match)
        );
        assert_eq!(
            patterns.matches_directory("/a.bar"),
            PatternMatch::Exclude(ExcludeReason::Match)
        );
    }

    #[test]
    fn only_excludes() {
        let patterns = create_patterns(["!/*.bar"]);

        assert_eq!(patterns.matches("/a.rs"), PatternMatch::None);
        assert_eq!(patterns.matches_directory("/a.rs"), PatternMatch::None);

        assert_eq!(
            patterns.matches("/a.bar"),
            PatternMatch::Exclude(ExcludeReason::Match)
        );
        assert_eq!(
            patterns.matches_directory("/a.bar"),
            PatternMatch::Exclude(ExcludeReason::Match)
        );
    }

    /// A later, more specific exclude takes precedence over an earlier include.
    #[test]
    fn precedence() {
        let patterns = create_patterns(["/*.foo", "!/*.bar.foo"]);

        assert_eq!(patterns.matches("/a.foo"), PatternMatch::Include);
        assert_eq!(
            patterns.matches("/a.baz"),
            PatternMatch::Exclude(ExcludeReason::NoIncludePattern)
        );
        assert_eq!(
            patterns.matches("/a.bar.foo"),
            PatternMatch::Exclude(ExcludeReason::Match)
        );
    }

    /// `directory/` matches the directory `directory` and its contents. It doesn't match files.
    #[test]
    fn implicit_directory_pattern() {
        let patterns = create_patterns(["/src/"]);

        assert_eq!(patterns.matches_directory("/src"), PatternMatch::Include);
        assert_eq!(patterns.matches_directory("/src/"), PatternMatch::Include);

        // Don't include files, because the pattern ends with `/`
        assert_eq!(
            patterns.matches("/src"),
            PatternMatch::Exclude(ExcludeReason::NoIncludePattern)
        );

        // But include the content of src
        assert_eq!(patterns.matches("/src/test.py"), PatternMatch::Include);

        // Deep nesting
        assert_eq!(
            patterns.matches("/src/glob/builder.py"),
            PatternMatch::Include
        );

        // Or a file with the same name
        assert_eq!(patterns.matches("/src/src"), PatternMatch::Include);

        // Or a directory with the same name
        assert_eq!(
            patterns.matches_directory("/src/src"),
            PatternMatch::Include
        );
    }

    #[test]
    fn implicit_pattern() {
        // Patterns ending without a slash include both files and directories.
        // It includes all files in said directory
        let patterns = create_patterns(["/src"]);

        assert_eq!(patterns.matches_directory("/src"), PatternMatch::Include);
        assert_eq!(patterns.matches("/src/"), PatternMatch::Include);

        // Also include files
        assert_eq!(patterns.matches("/src"), PatternMatch::Include);
        assert_eq!(patterns.matches("/src/test.py"), PatternMatch::Include);

        // Deep nesting
        assert_eq!(
            patterns.matches("/src/glob/builder.py"),
            PatternMatch::Include
        );

        // Or a file with the same name
        assert_eq!(patterns.matches("/src/src"), PatternMatch::Include);

        // Or a directory with the same name
        assert_eq!(
            patterns.matches_directory("/src/src"),
            PatternMatch::Include
        );
    }

    /// Patterns where the last part has an extension match files or directories (without their content).
    #[test]
    fn pattern_with_extension() {
        let patterns = create_patterns(["/test.py"]);

        assert_eq!(
            patterns.matches_directory("/test.py"),
            PatternMatch::Include
        );
        assert_eq!(
            patterns.matches_directory("/test.py"),
            PatternMatch::Include
        );

        assert_eq!(
            patterns.matches("/test.py/abcd"),
            PatternMatch::Exclude(ExcludeReason::NoIncludePattern)
        );
        assert_eq!(
            patterns.matches_directory("/test.py/abcd"),
            PatternMatch::Exclude(ExcludeReason::NoIncludePattern)
        );
    }

    /// Tests that `matches` returns `Exclude` if:
    ///
    /// * There's at least one include
    /// * The parent component of `paths` are known to not overlap with any include pattern
    ///
    /// This allows to avoid traversing directories for which it is known that no file will match
    /// any include pattern. For example, we want to avoid traversing `tests` if the pattern is `["src"]`.
    #[test]
    fn directory_pruning() {
        let patterns = create_patterns(["/a/b/test-*/d", "/a/b/c/e", "/b/c"]);

        // Paths that can be statically pruned because they match no known prefix
        assert_eq!(
            patterns.matches_directory("/a/x"),
            PatternMatch::Exclude(ExcludeReason::NoIncludePattern),
        );
        assert_eq!(
            patterns.matches_directory("/x"),
            PatternMatch::Exclude(ExcludeReason::NoIncludePattern),
        );

        // Paths that are known to be included
        assert_eq!(patterns.matches_directory("/b/c"), PatternMatch::Include);
        assert_eq!(
            patterns.matches_directory("/a/b/test-x/d"),
            PatternMatch::Include
        );
        assert_eq!(
            patterns.matches_directory("/a/b/c/e"),
            PatternMatch::Include
        );

        // Paths that can't be pruned because they could match the `test-*` wildcard pattern
        assert_eq!(patterns.matches_directory("/a/b/b/d"), PatternMatch::None);

        // Paths that can't be pruned because they match a known prefix (in this case `/b/c`) but they don't
        // match a pattern themselves
        assert_eq!(patterns.matches_directory("/b"), PatternMatch::None);
    }

    #[test]
    fn prefix_wildcard_include() {
        let patterns = create_patterns(["/**/test/**", "/a/b/c/e", "/b/c"]);

        assert_eq!(
            patterns.matches_directory("/src/test/"),
            PatternMatch::Include
        );
        assert_eq!(
            patterns.matches_directory("/a/b/c/e"),
            PatternMatch::Include
        );
        assert_eq!(patterns.matches_directory("/b/c"), PatternMatch::Include);

        // We can't skip over the following directories because of the `**` wildcard
        assert_eq!(
            patterns.matches_directory("/not_included/a/b"),
            PatternMatch::None
        );
    }

    #[test]
    fn nested_prefix_wildcard_include() {
        let patterns = create_patterns(["/src/**/test", "/a/b", "/src/abcd/main.py"]);

        assert_eq!(patterns.matches_directory("/a/b"), PatternMatch::Include);
        assert_eq!(
            patterns.matches_directory("/src/test"),
            PatternMatch::Include
        );

        // We can't skip over the following directories because of the `**` wildcard
        assert_eq!(
            patterns.matches_directory("/src/not_included/a/b"),
            PatternMatch::None
        );
    }
}

View File

@@ -196,4 +196,33 @@ def constrained(f: T):
reveal_type(f()) # revealed: int | str
```
## Meta-type
The meta-type of a typevar is the same as the meta-type of the upper bound, or the union of the
meta-types of the constraints:
```py
from typing import TypeVar
T_normal = TypeVar("T_normal")
def normal(x: T_normal):
reveal_type(type(x)) # revealed: type
T_bound_object = TypeVar("T_bound_object", bound=object)
def bound_object(x: T_bound_object):
reveal_type(type(x)) # revealed: type
T_bound_int = TypeVar("T_bound_int", bound=int)
def bound_int(x: T_bound_int):
reveal_type(type(x)) # revealed: type[int]
T_constrained = TypeVar("T_constrained", int, str)
def constrained(x: T_constrained):
reveal_type(type(x)) # revealed: type[int] | type[str]
```
[generics]: https://typing.python.org/en/latest/spec/generics.html

View File

@@ -20,7 +20,99 @@ x: IntOrStr = 1
reveal_type(x) # revealed: Literal[1]
def f() -> None:
reveal_type(x) # revealed: int | str
reveal_type(x) # revealed: IntOrStr
```
## Type properties
### Equivalence
```py
from ty_extensions import static_assert, is_equivalent_to
type IntOrStr = int | str
type StrOrInt = str | int
static_assert(is_equivalent_to(IntOrStr, IntOrStr))
static_assert(is_equivalent_to(IntOrStr, StrOrInt))
type Rec1 = tuple[Rec1, int]
type Rec2 = tuple[Rec2, int]
type Other = tuple[Other, str]
static_assert(is_equivalent_to(Rec1, Rec2))
static_assert(not is_equivalent_to(Rec1, Other))
type Cycle1A = tuple[Cycle1B, int]
type Cycle1B = tuple[Cycle1A, str]
type Cycle2A = tuple[Cycle2B, int]
type Cycle2B = tuple[Cycle2A, str]
static_assert(is_equivalent_to(Cycle1A, Cycle2A))
static_assert(is_equivalent_to(Cycle1B, Cycle2B))
static_assert(not is_equivalent_to(Cycle1A, Cycle1B))
static_assert(not is_equivalent_to(Cycle1A, Cycle2B))
# type Cycle3A = tuple[Cycle3B] | None
# type Cycle3B = tuple[Cycle3A] | None
# static_assert(is_equivalent_to(Cycle3A, Cycle3A))
# static_assert(is_equivalent_to(Cycle3A, Cycle3B))
```
### Assignability
```py
type IntOrStr = int | str
x1: IntOrStr = 1
x2: IntOrStr = "1"
x3: IntOrStr | None = None
def _(int_or_str: IntOrStr) -> None:
# TODO: those should not be errors
x3: int | str = int_or_str # error: [invalid-assignment]
x4: int | str | None = int_or_str # error: [invalid-assignment]
x5: int | str | None = int_or_str or None # error: [invalid-assignment]
```
### Narrowing (intersections)
```py
class P: ...
class Q: ...
type EitherOr = P | Q
def _(x: EitherOr) -> None:
if isinstance(x, P):
reveal_type(x) # revealed: P
elif isinstance(x, Q):
reveal_type(x) # revealed: Q & ~P
else:
# TODO: This should be Never
reveal_type(x) # revealed: EitherOr & ~P & ~Q
```
### Fully static
```py
from typing import Any
from ty_extensions import static_assert, is_fully_static
type IntOrStr = int | str
type RecFullyStatic = int | tuple[RecFullyStatic]
static_assert(is_fully_static(IntOrStr))
static_assert(is_fully_static(RecFullyStatic))
type IntOrAny = int | Any
type RecNotFullyStatic = Any | tuple[RecNotFullyStatic]
static_assert(not is_fully_static(IntOrAny))
static_assert(not is_fully_static(RecNotFullyStatic))
```
## `__value__` attribute
@@ -49,7 +141,7 @@ type IntOrStrOrBytes = IntOrStr | bytes
x: IntOrStrOrBytes = 1
def f() -> None:
reveal_type(x) # revealed: int | str | bytes
reveal_type(x) # revealed: IntOrStrOrBytes
```
## Aliased type aliases
@@ -109,7 +201,7 @@ reveal_type(IntOrStr) # revealed: typing.TypeAliasType
reveal_type(IntOrStr.__name__) # revealed: Literal["IntOrStr"]
def f(x: IntOrStr) -> None:
reveal_type(x) # revealed: int | str
reveal_type(x) # revealed: IntOrStr
```
### Generic example
@@ -138,3 +230,12 @@ def get_name() -> str:
# error: [invalid-type-alias-type] "The name of a `typing.TypeAlias` must be a string literal"
IntOrStr = TypeAliasType(get_name(), int | str)
```
## Recursive type aliases
```py
type Recursive = dict[str, "Recursive"]
# TODO: this should not be an error
r: Recursive = {"key": {}} # error: [invalid-assignment]
```

View File

@@ -396,10 +396,11 @@ type LiteralInt = TypeOf[int]
type LiteralStr = TypeOf[str]
type LiteralObject = TypeOf[object]
assert_type(bool, LiteralBool)
assert_type(int, LiteralInt)
assert_type(str, LiteralStr)
assert_type(object, LiteralObject)
# TODO: these should not be errors
assert_type(bool, LiteralBool) # error: [type-assertion-failure]
assert_type(int, LiteralInt) # error: [type-assertion-failure]
assert_type(str, LiteralStr) # error: [type-assertion-failure]
assert_type(object, LiteralObject) # error: [type-assertion-failure]
# bool
@@ -462,9 +463,10 @@ type LiteralBase = TypeOf[Base]
type LiteralDerived = TypeOf[Derived]
type LiteralUnrelated = TypeOf[Unrelated]
assert_type(Base, LiteralBase)
assert_type(Derived, LiteralDerived)
assert_type(Unrelated, LiteralUnrelated)
# TODO: these should not be errors
assert_type(Base, LiteralBase) # error: [type-assertion-failure]
assert_type(Derived, LiteralDerived) # error: [type-assertion-failure]
assert_type(Unrelated, LiteralUnrelated) # error: [type-assertion-failure]
static_assert(is_subtype_of(LiteralBase, type))
static_assert(is_subtype_of(LiteralBase, object))

View File

@@ -196,21 +196,21 @@ def _(
bytes_or_falsy: bytes | AlwaysFalsy,
falsy_or_bytes: AlwaysFalsy | bytes,
):
reveal_type(strings_or_truthy) # revealed: Literal[""] | AlwaysTruthy
reveal_type(truthy_or_strings) # revealed: AlwaysTruthy | Literal[""]
reveal_type(strings_or_truthy) # revealed: strings | AlwaysTruthy
reveal_type(truthy_or_strings) # revealed: AlwaysTruthy | strings
reveal_type(strings_or_falsy) # revealed: Literal["foo"] | AlwaysFalsy
reveal_type(falsy_or_strings) # revealed: AlwaysFalsy | Literal["foo"]
reveal_type(strings_or_falsy) # revealed: strings | AlwaysFalsy
reveal_type(falsy_or_strings) # revealed: AlwaysFalsy | strings
reveal_type(ints_or_truthy) # revealed: Literal[0] | AlwaysTruthy
reveal_type(truthy_or_ints) # revealed: AlwaysTruthy | Literal[0]
reveal_type(ints_or_truthy) # revealed: ints | AlwaysTruthy
reveal_type(truthy_or_ints) # revealed: AlwaysTruthy | ints
reveal_type(ints_or_falsy) # revealed: Literal[1] | AlwaysFalsy
reveal_type(falsy_or_ints) # revealed: AlwaysFalsy | Literal[1]
reveal_type(ints_or_falsy) # revealed: ints | AlwaysFalsy
reveal_type(falsy_or_ints) # revealed: AlwaysFalsy | ints
reveal_type(bytes_or_truthy) # revealed: Literal[b""] | AlwaysTruthy
reveal_type(truthy_or_bytes) # revealed: AlwaysTruthy | Literal[b""]
reveal_type(bytes_or_truthy) # revealed: bytes | AlwaysTruthy
reveal_type(truthy_or_bytes) # revealed: AlwaysTruthy | bytes
reveal_type(bytes_or_falsy) # revealed: Literal[b"foo"] | AlwaysFalsy
reveal_type(falsy_or_bytes) # revealed: AlwaysFalsy | Literal[b"foo"]
reveal_type(bytes_or_falsy) # revealed: bytes | AlwaysFalsy
reveal_type(falsy_or_bytes) # revealed: AlwaysFalsy | bytes
```

View File

@@ -259,6 +259,14 @@ impl<'db> SemanticIndex<'db> {
self.scopes_by_expression[&expression.into()]
}
/// Returns the ID of the `expression`'s enclosing scope.
pub(crate) fn try_expression_scope_id(
&self,
expression: impl Into<ExpressionNodeKey>,
) -> Option<FileScopeId> {
self.scopes_by_expression.get(&expression.into()).copied()
}
/// Returns the [`Scope`] of the `expression`'s enclosing scope.
#[allow(unused)]
#[track_caller]

View File

@@ -47,15 +47,22 @@ impl<'db> SemanticModel<'db> {
/// scope of this model's `File` are returned.
pub fn completions(&self, node: ast::AnyNodeRef<'_>) -> Vec<Name> {
let index = semantic_index(self.db, self.file);
let file_scope = match node {
ast::AnyNodeRef::Identifier(identifier) => index.expression_scope_id(identifier),
// TODO: We currently use `try_expression_scope_id` here as a hotfix for [1].
// Revert this to use `expression_scope_id` once a proper fix is in place.
//
// [1] https://github.com/astral-sh/ty/issues/572
let Some(file_scope) = (match node {
ast::AnyNodeRef::Identifier(identifier) => index.try_expression_scope_id(identifier),
node => match node.as_expr_ref() {
// If we couldn't identify a specific
// expression that we're in, then just
// fall back to the global scope.
None => FileScopeId::global(),
Some(expr) => index.expression_scope_id(expr),
None => Some(FileScopeId::global()),
Some(expr) => index.try_expression_scope_id(expr),
},
}) else {
return vec![];
};
let mut symbols = vec![];
for (file_scope, _) in index.ancestor_scopes(file_scope) {

View File

@@ -260,6 +260,40 @@ fn member_lookup_cycle_initial<'db>(
Symbol::bound(Type::Never).into()
}
#[expect(clippy::trivially_copy_pass_by_ref)]
fn is_fully_static_cycle_recover<'db>(
_db: &'db dyn Db,
_value: &bool,
_count: u32,
_self: Type<'db>,
_dummy: (),
) -> salsa::CycleRecoveryAction<bool> {
salsa::CycleRecoveryAction::Iterate
}
fn is_fully_static_cycle_initial<'db>(_db: &'db dyn Db, _self: Type<'db>, _dummy: ()) -> bool {
true
}
#[expect(clippy::trivially_copy_pass_by_ref)]
fn is_equivalent_to_cycle_recover<'db>(
_db: &'db dyn Db,
_value: &bool,
_count: u32,
_self: Type<'db>,
_other: Type<'db>,
) -> salsa::CycleRecoveryAction<bool> {
salsa::CycleRecoveryAction::Iterate
}
fn is_equivalent_to_cycle_initial<'db>(
_db: &'db dyn Db,
_self: Type<'db>,
_other: Type<'db>,
) -> bool {
true
}
/// Meta data for `Type::Todo`, which represents a known limitation in ty.
#[cfg(debug_assertions)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
@@ -532,6 +566,7 @@ pub enum Type<'db> {
// a `Type::NominalInstance` of `builtins.super`.
BoundSuper(BoundSuperType<'db>),
// TODO protocols, overloads, generics
TypeAliasRef(TypeAliasType<'db>),
}
#[salsa::tracked]
@@ -660,6 +695,8 @@ impl<'db> Type<'db> {
| Self::DataclassTransformer(_)
| Self::SubclassOf(_)
| Self::BoundSuper(_) => *self,
Type::TypeAliasRef(_) => todo_type!("replace_self_reference for TypeAliasRef"),
}
}
@@ -670,6 +707,8 @@ impl<'db> Type<'db> {
}
match self {
Self::TypeAliasRef(_) => false, // TODO
Self::AlwaysFalsy
| Self::AlwaysTruthy
| Self::Never
@@ -987,6 +1026,7 @@ impl<'db> Type<'db> {
#[must_use]
pub fn normalized(self, db: &'db dyn Db) -> Self {
match self {
Type::TypeAliasRef(_) => self,
Type::Union(union) => Type::Union(union.normalized(db)),
Type::Intersection(intersection) => Type::Intersection(intersection.normalized(db)),
Type::Tuple(tuple) => Type::Tuple(tuple.normalized(db)),
@@ -1063,6 +1103,9 @@ impl<'db> Type<'db> {
(Type::Never, _) => true,
(_, Type::Never) => false,
(left, Type::TypeAliasRef(right)) => left.is_subtype_of(db, right.value_type(db)),
(Type::TypeAliasRef(left), right) => left.value_type(db).is_subtype_of(db, right),
// Everything is a subtype of `object`.
(_, Type::NominalInstance(instance)) if instance.class.is_object(db) => true,
@@ -1682,6 +1725,8 @@ impl<'db> Type<'db> {
/// This method returns `false` if either `self` or `other` is not fully static.
///
/// [equivalent to]: https://typing.python.org/en/latest/spec/glossary.html#term-equivalent
#[salsa::tracked(cycle_fn=is_equivalent_to_cycle_recover, cycle_initial=is_equivalent_to_cycle_initial)]
pub(crate) fn is_equivalent_to(self, db: &'db dyn Db, other: Type<'db>) -> bool {
// TODO equivalent but not identical types: TypedDicts, Protocols, type aliases, etc.
@@ -1711,6 +1756,9 @@ impl<'db> Type<'db> {
| (nominal @ Type::NominalInstance(n), Type::ProtocolInstance(protocol)) => {
n.class.is_object(db) && protocol.normalized(db) == nominal
}
(Type::TypeAliasRef(left), right) => left.value_type(db).is_equivalent_to(db, right),
(left, Type::TypeAliasRef(right)) => left.is_equivalent_to(db, right.value_type(db)),
_ => self == other && self.is_fully_static(db) && other.is_fully_static(db),
}
}
@@ -1789,6 +1837,9 @@ impl<'db> Type<'db> {
(Type::Dynamic(_), _) | (_, Type::Dynamic(_)) => false,
(Type::TypeAliasRef(left), right) => left.value_type(db).is_disjoint_from(db, right),
(left, Type::TypeAliasRef(right)) => left.is_disjoint_from(db, right.value_type(db)),
// A typevar is never disjoint from itself, since all occurrences of the typevar must
// be specialized to the same type. (This is an important difference between typevars
// and `Any`!) Different typevars might be disjoint, depending on their bounds and
@@ -2235,8 +2286,15 @@ impl<'db> Type<'db> {
}
/// Returns true if the type does not contain any gradual forms (as a sub-part).
pub(crate) fn is_fully_static(&self, db: &'db dyn Db) -> bool {
pub(crate) fn is_fully_static(self, db: &'db dyn Db) -> bool {
self.is_fully_static_impl(db, ())
}
#[allow(clippy::used_underscore_binding)]
#[salsa::tracked(cycle_fn=is_fully_static_cycle_recover, cycle_initial=is_fully_static_cycle_initial)]
pub(crate) fn is_fully_static_impl(self, db: &'db dyn Db, _dummy: ()) -> bool {
match self {
Type::TypeAliasRef(alias) => alias.value_type(db).is_fully_static(db),
Type::Dynamic(_) => false,
Type::Never
| Type::FunctionLiteral(..)
@@ -2311,6 +2369,7 @@ impl<'db> Type<'db> {
/// for more complicated types that are actually singletons.
pub(crate) fn is_singleton(self, db: &'db dyn Db) -> bool {
match self {
Type::TypeAliasRef(_) => false, //TODO
Type::Dynamic(_)
| Type::Never
| Type::IntLiteral(..)
@@ -2432,6 +2491,7 @@ impl<'db> Type<'db> {
/// Return true if this type is non-empty and all inhabitants of this type compare equal.
pub(crate) fn is_single_valued(self, db: &'db dyn Db) -> bool {
match self {
Type::TypeAliasRef(_) => false, // TODO
Type::FunctionLiteral(..)
| Type::BoundMethod(_)
| Type::WrapperDescriptor(_)
@@ -2515,6 +2575,9 @@ impl<'db> Type<'db> {
policy: MemberLookupPolicy,
) -> Option<SymbolAndQualifiers<'db>> {
match self {
Type::TypeAliasRef(_) => {
Some(Symbol::bound(todo_type!("TypeAliasRef::find_name_in_mro_with_policy")).into())
}
Type::Union(union) => Some(union.map_with_boundness_and_qualifiers(db, |elem| {
elem.find_name_in_mro_with_policy(db, name, policy)
// If some elements are classes, and some are not, we simply fall back to `Unbound` for the non-class
@@ -2666,6 +2729,9 @@ impl<'db> Type<'db> {
/// ```
fn instance_member(&self, db: &'db dyn Db, name: &str) -> SymbolAndQualifiers<'db> {
match self {
Type::TypeAliasRef(_) => {
Symbol::bound(todo_type!("TypeAliasRef::instance_member")).into()
}
Type::Union(union) => {
union.map_with_boundness_and_qualifiers(db, |elem| elem.instance_member(db, name))
}
@@ -3047,6 +3113,7 @@ impl<'db> Type<'db> {
let name_str = name.as_str();
match self {
Type::TypeAliasRef(_) => Symbol::bound(todo_type!("type alias member lookup")).into(),
Type::Union(union) => union
.map_with_boundness(db, |elem| {
elem.member_lookup_with_policy(db, name_str.into(), policy)
@@ -3466,6 +3533,8 @@ impl<'db> Type<'db> {
};
let truthiness = match self {
Type::TypeAliasRef(_) => Truthiness::Ambiguous, // TODO
Type::Dynamic(_) | Type::Never | Type::Callable(_) | Type::LiteralString => {
Truthiness::Ambiguous
}
@@ -3590,6 +3659,9 @@ impl<'db> Type<'db> {
/// argument list, via [`try_call`][Self::try_call] and [`CallErrorKind::NotCallable`].
fn bindings(self, db: &'db dyn Db) -> Bindings<'db> {
match self {
Type::TypeAliasRef(_) => {
CallableBinding::not_callable(todo_type!("type alias bindings")).into()
}
Type::Callable(callable) => {
CallableBinding::from_overloads(self, callable.signatures(db).iter().cloned())
.into()
@@ -4847,6 +4919,7 @@ impl<'db> Type<'db> {
#[must_use]
pub fn to_instance(&self, db: &'db dyn Db) -> Option<Type<'db>> {
match self {
Type::TypeAliasRef(_) => Some(todo_type!("Type::TypeAliasRef.to_instance")),
Type::Dynamic(_) | Type::Never => Some(*self),
Type::ClassLiteral(class) => Some(Type::instance(db, class.default_specialization(db))),
Type::GenericAlias(alias) => Some(Type::instance(db, ClassType::from(*alias))),
@@ -4969,7 +5042,8 @@ impl<'db> Type<'db> {
| Type::FunctionLiteral(_)
| Type::BoundSuper(_)
| Type::ProtocolInstance(_)
| Type::PropertyInstance(_) => Err(InvalidTypeExpressionError {
| Type::PropertyInstance(_)
| Type::TypeAliasRef(_) => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::InvalidType(
*self, scope_id
)],
@@ -4977,7 +5051,7 @@ impl<'db> Type<'db> {
}),
Type::KnownInstance(known_instance) => match known_instance {
KnownInstanceType::TypeAliasType(alias) => Ok(alias.value_type(db)),
KnownInstanceType::TypeAliasType(alias) => Ok(Type::TypeAliasRef(*alias)),
KnownInstanceType::TypeVar(typevar) => Ok(Type::TypeVar(*typevar)),
KnownInstanceType::SubscriptedProtocol(_) => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::Protocol],
@@ -5201,6 +5275,7 @@ impl<'db> Type<'db> {
#[must_use]
pub fn to_meta_type(&self, db: &'db dyn Db) -> Type<'db> {
match self {
Type::TypeAliasRef(_) => todo_type!("Type::TypeAliasRef.to_meta_type"),
Type::Never => Type::Never,
Type::NominalInstance(instance) => instance.to_meta_type(db),
Type::KnownInstance(known_instance) => known_instance.to_meta_type(db),
@@ -5291,6 +5366,7 @@ impl<'db> Type<'db> {
type_mapping: &TypeMapping<'a, 'db>,
) -> Type<'db> {
match self {
Type::TypeAliasRef(_) => self,
Type::TypeVar(typevar) => match type_mapping {
TypeMapping::Specialization(specialization) => {
specialization.get(db, typevar).unwrap_or(self)
@@ -5420,6 +5496,7 @@ impl<'db> Type<'db> {
typevars: &mut FxOrderSet<TypeVarInstance<'db>>,
) {
match self {
Type::TypeAliasRef(_) => {} // TODO
Type::TypeVar(typevar) => {
if typevar.is_legacy(db) {
typevars.insert(typevar);
@@ -5563,6 +5640,7 @@ impl<'db> Type<'db> {
/// specific to the call site.
pub fn definition(&self, db: &'db dyn Db) -> Option<TypeDefinition<'db>> {
match self {
Type::TypeAliasRef(_) => None, // TODO
Self::BoundMethod(method) => {
Some(TypeDefinition::Function(method.function(db).definition(db)))
}

View File

@@ -472,12 +472,9 @@ impl<'db> ClassType<'db> {
.map_type(|ty| ty.apply_optional_specialization(db, specialization))
}
/// Returns the `name` attribute of an instance of this class.
/// Look up an instance attribute (available in `__dict__`) of the given name.
///
/// The attribute could be defined in the class body, but it could also be an implicitly
/// defined attribute that is only present in a method (typically `__init__`).
///
/// The attribute might also be defined in a superclass of this class.
/// See [`Type::instance_member`] for more details.
pub(super) fn instance_member(self, db: &'db dyn Db, name: &str) -> SymbolAndQualifiers<'db> {
let (class_literal, specialization) = self.class_literal(db);
class_literal
@@ -1537,12 +1534,9 @@ impl<'db> ClassLiteral<'db> {
attributes
}
/// Returns the `name` attribute of an instance of this class.
/// Look up an instance attribute (available in `__dict__`) of the given name.
///
/// The attribute could be defined in the class body, but it could also be an implicitly
/// defined attribute that is only present in a method (typically `__init__`).
///
/// The attribute might also be defined in a superclass of this class.
/// See [`Type::instance_member`] for more details.
pub(super) fn instance_member(
self,
db: &'db dyn Db,

View File

@@ -63,6 +63,7 @@ impl<'db> ClassBase<'db> {
/// Return `None` if `ty` is not an acceptable type for a class base.
pub(super) fn try_from_type(db: &'db dyn Db, ty: Type<'db>) -> Option<Self> {
match ty {
Type::TypeAliasRef(_) => None, //TODO
Type::Dynamic(dynamic) => Some(Self::Dynamic(dynamic)),
Type::ClassLiteral(literal) => {
if literal.is_known(db, KnownClass::Any) {

View File

@@ -67,6 +67,7 @@ struct DisplayRepresentation<'db> {
impl Display for DisplayRepresentation<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self.ty {
Type::TypeAliasRef(alias) => f.write_str(alias.name(self.db)),
Type::Dynamic(dynamic) => dynamic.fmt(f),
Type::Never => f.write_str("Never"),
Type::NominalInstance(instance) => {

View File

@@ -23,6 +23,8 @@ impl AllMembers {
fn extend_with_type<'db>(&mut self, db: &'db dyn Db, ty: Type<'db>) {
match ty {
Type::TypeAliasRef(_) => {} // TODO
Type::Union(union) => self.members.extend(
union
.elements(db)

View File

@@ -3012,6 +3012,7 @@ impl<'db> TypeInferenceBuilder<'db> {
};
match object_ty {
Type::TypeAliasRef(_) => true, // TODO
Type::Union(union) => {
if union.elements(self.db()).iter().all(|elem| {
self.validate_attribute_assignment(target, *elem, attribute, value_ty, false)
@@ -6033,6 +6034,8 @@ impl<'db> TypeInferenceBuilder<'db> {
let operand_type = self.infer_expression(operand);
match (op, operand_type) {
(_, Type::TypeAliasRef(_)) => todo_type!("type alias in unary expression"),
(_, Type::Dynamic(_)) => operand_type,
(_, Type::Never) => Type::Never,
@@ -6174,6 +6177,21 @@ impl<'db> TypeInferenceBuilder<'db> {
}
match (left_ty, right_ty, op) {
(Type::TypeAliasRef(alias), _, _) => self.infer_binary_expression_type(
node,
emitted_division_by_zero_diagnostic,
alias.value_type(self.db()),
right_ty,
op,
),
(_, Type::TypeAliasRef(alias), _) => self.infer_binary_expression_type(
node,
emitted_division_by_zero_diagnostic,
left_ty,
alias.value_type(self.db()),
op,
),
(Type::Union(lhs_union), rhs, _) => {
let mut union = UnionBuilder::new(self.db());
for lhs in lhs_union.elements(self.db()) {

View File

@@ -187,6 +187,10 @@ pub(super) fn union_or_intersection_elements_ordering<'db>(
(Type::KnownInstance(_), _) => Ordering::Less,
(_, Type::KnownInstance(_)) => Ordering::Greater,
(Type::TypeAliasRef(left), Type::TypeAliasRef(right)) => left.cmp(right),
(Type::TypeAliasRef(_), _) => Ordering::Less,
(_, Type::TypeAliasRef(_)) => Ordering::Greater,
(Type::PropertyInstance(left), Type::PropertyInstance(right)) => left.cmp(right),
(Type::PropertyInstance(_), _) => Ordering::Less,
(_, Type::PropertyInstance(_)) => Ordering::Greater,

10
ty.schema.json generated
View File

@@ -851,16 +851,6 @@
"SrcOptions": {
"type": "object",
"properties": {
"files": {
"description": "TODO",
"type": [
"array",
"null"
],
"items": {
"type": "string"
}
},
"respect-ignore-files": {
"description": "Whether to automatically exclude files that are ignored by `.ignore`, `.gitignore`, `.git/info/exclude`, and global `gitignore` files. Enabled by default.",
"type": [