Compare commits

..

1 Commits

Author SHA1 Message Date
Carl Meyer
b9d8d5033d [ty] Fully support 'if typing.TYPE_CHECKING' 2025-07-17 14:19:02 -06:00
118 changed files with 1219 additions and 3870 deletions

View File

@@ -1,36 +1,5 @@
# Changelog
## 0.12.4
### Preview features
- \[`flake8-type-checking`, `pyupgrade`, `ruff`\] Add `from __future__ import annotations` when it would allow new fixes (`TC001`, `TC002`, `TC003`, `UP037`, `RUF013`) ([#19100](https://github.com/astral-sh/ruff/pull/19100))
- \[`flake8-use-pathlib`\] Add autofix for `PTH109` ([#19245](https://github.com/astral-sh/ruff/pull/19245))
- \[`pylint`\] Detect indirect `pathlib.Path` usages for `unspecified-encoding` (`PLW1514`) ([#19304](https://github.com/astral-sh/ruff/pull/19304))
### Bug fixes
- \[`flake8-bugbear`\] Fix `B017` false negatives for keyword exception arguments ([#19217](https://github.com/astral-sh/ruff/pull/19217))
- \[`flake8-use-pathlib`\] Fix false negative on direct `Path()` instantiation (`PTH210`) ([#19388](https://github.com/astral-sh/ruff/pull/19388))
- \[`flake8-django`\] Fix `DJ008` false positive for abstract models with type-annotated `abstract` field ([#19221](https://github.com/astral-sh/ruff/pull/19221))
- \[`isort`\] Fix `I002` import insertion after docstring with multiple string statements ([#19222](https://github.com/astral-sh/ruff/pull/19222))
- \[`isort`\] Treat form feed as valid whitespace before a semicolon ([#19343](https://github.com/astral-sh/ruff/pull/19343))
- \[`pydoclint`\] Fix `SyntaxError` from fixes with line continuations (`D201`, `D202`) ([#19246](https://github.com/astral-sh/ruff/pull/19246))
- \[`refurb`\] `FURB164` fix should validate arguments and should usually be marked unsafe ([#19136](https://github.com/astral-sh/ruff/pull/19136))
### Rule changes
- \[`flake8-use-pathlib`\] Skip single dots for `invalid-pathlib-with-suffix` (`PTH210`) on versions >= 3.14 ([#19331](https://github.com/astral-sh/ruff/pull/19331))
- \[`pep8_naming`\] Avoid false positives on standard library functions with uppercase names (`N802`) ([#18907](https://github.com/astral-sh/ruff/pull/18907))
- \[`pycodestyle`\] Handle brace escapes for t-strings in logical lines ([#19358](https://github.com/astral-sh/ruff/pull/19358))
- \[`pylint`\] Extend invalid string character rules to include t-strings ([#19355](https://github.com/astral-sh/ruff/pull/19355))
- \[`ruff`\] Allow `strict` kwarg when checking for `starmap-zip` (`RUF058`) in Python 3.14+ ([#19333](https://github.com/astral-sh/ruff/pull/19333))
### Documentation
- \[`flake8-type-checking`\] Make `TC010` docs example more realistic ([#19356](https://github.com/astral-sh/ruff/pull/19356))
- Make more documentation examples error out-of-the-box ([#19288](https://github.com/astral-sh/ruff/pull/19288), [#19272](https://github.com/astral-sh/ruff/pull/19272), [#19291](https://github.com/astral-sh/ruff/pull/19291), [#19296](https://github.com/astral-sh/ruff/pull/19296), [#19292](https://github.com/astral-sh/ruff/pull/19292), [#19295](https://github.com/astral-sh/ruff/pull/19295), [#19297](https://github.com/astral-sh/ruff/pull/19297), [#19309](https://github.com/astral-sh/ruff/pull/19309))
## 0.12.3
### Preview features

46
Cargo.lock generated
View File

@@ -1557,15 +1557,6 @@ dependencies = [
"memoffset",
]
[[package]]
name = "inventory"
version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab08d7cd2c5897f2c949e5383ea7c7db03fb19130ffcfbf7eda795137ae3cb83"
dependencies = [
"rustversion",
]
[[package]]
name = "is-docker"
version = "0.2.0"
@@ -2132,6 +2123,16 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "papaya"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
dependencies = [
"equivalent",
"seize",
]
[[package]]
name = "parking_lot"
version = "0.12.3"
@@ -2710,7 +2711,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.12.4"
version = "0.12.3"
dependencies = [
"anyhow",
"argfile",
@@ -2838,7 +2839,6 @@ dependencies = [
"insta",
"matchit",
"path-slash",
"quick-junit",
"ruff_annotate_snippets",
"ruff_cache",
"ruff_diagnostics",
@@ -2962,7 +2962,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.12.4"
version = "0.12.3"
dependencies = [
"aho-corasick",
"anyhow",
@@ -2987,6 +2987,7 @@ dependencies = [
"pathdiff",
"pep440_rs",
"pyproject-toml",
"quick-junit",
"regex",
"ruff_annotate_snippets",
"ruff_cache",
@@ -3294,7 +3295,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.12.4"
version = "0.12.3"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3408,7 +3409,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
dependencies = [
"boxcar",
"compact_str",
@@ -3418,7 +3419,7 @@ dependencies = [
"hashlink",
"indexmap",
"intrusive-collections",
"inventory",
"papaya",
"parking_lot",
"portable-atomic",
"rayon",
@@ -3433,12 +3434,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
[[package]]
name = "salsa-macros"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa?rev=dba66f1a37acca014c2402f231ed5b361bd7d8fe#dba66f1a37acca014c2402f231ed5b361bd7d8fe"
source = "git+https://github.com/salsa-rs/salsa?rev=fc00eba89e5dcaa5edba51c41aa5f309b5cb126b#fc00eba89e5dcaa5edba51c41aa5f309b5cb126b"
dependencies = [
"proc-macro2",
"quote",
@@ -3491,6 +3492,16 @@ version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "seize"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "serde"
version = "1.0.219"
@@ -4289,7 +4300,6 @@ name = "ty_server"
version = "0.0.0"
dependencies = [
"anyhow",
"bitflags 2.9.1",
"crossbeam",
"jod-thread",
"libc",

View File

@@ -138,7 +138,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "dba66f1a37acca014c2402f231ed5b361bd7d8fe" }
salsa = { git = "https://github.com/salsa-rs/salsa", rev = "fc00eba89e5dcaa5edba51c41aa5f309b5cb126b" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -150,7 +150,7 @@ serde_with = { version = "3.6.0", default-features = false, features = [
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
smallvec = { version = "1.13.2", features = ["union", "const_generics", "const_new"] }
smallvec = { version = "1.13.2" }
snapbox = { version = "0.6.0", features = [
"diff",
"term-svg",

View File

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.12.4/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.4/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.12.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.12.3/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.4
rev: v0.12.3
hooks:
# Run the linter.
- id: ruff-check

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.12.4"
version = "0.12.3"
publish = true
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -15,8 +15,8 @@ use ruff_db::diagnostic::{
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, SarifEmitter,
TextEmitter,
Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, JunitEmitter,
SarifEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@@ -252,11 +252,7 @@ impl Printer {
write!(writer, "{value}")?;
}
OutputFormat::Junit => {
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Junit)
.preview(preview);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
JunitEmitter.emit(writer, &diagnostics.inner, &context)?;
}
OutputFormat::Concise | OutputFormat::Full => {
TextEmitter::default()

View File

@@ -5718,11 +5718,8 @@ match 42: # invalid-syntax
let snapshot = format!("output_format_{output_format}");
let project_dir = dunce::canonicalize(tempdir.path())?;
insta::with_settings!({
filters => vec![
(tempdir_filter(&project_dir).as_str(), "[TMP]/"),
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),

View File

@@ -25,7 +25,7 @@ exit_code: 1
<testcase name="org.ruff.F821" classname="[TMP]/input" line="2" column="5">
<failure message="Undefined name `y`">line 2, col 5, Undefined name `y`</failure>
</testcase>
<testcase name="org.ruff.invalid-syntax" classname="[TMP]/input" line="3" column="1">
<testcase name="org.ruff" classname="[TMP]/input" line="3" column="1">
<failure message="SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)">line 3, col 1, SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)</failure>
</testcase>
</testsuite>

View File

@@ -18,7 +18,7 @@ use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ChangedKind};
use ty_project::{CheckMode, Db, ProjectDatabase, ProjectMetadata};
use ty_project::{Db, ProjectDatabase, ProjectMetadata};
struct Case {
db: ProjectDatabase,
@@ -102,7 +102,6 @@ fn setup_tomllib_case() -> Case {
let re = re.unwrap();
db.set_check_mode(CheckMode::OpenFiles);
db.project().set_open_files(&mut db, tomllib_files);
let re_path = re.path(&db).as_system_path().unwrap().to_owned();
@@ -238,7 +237,6 @@ fn setup_micro_case(code: &str) -> Case {
let mut db = ProjectDatabase::new(metadata, system).unwrap();
let file = system_path_to_file(&db, SystemPathBuf::from(file_path)).unwrap();
db.set_check_mode(CheckMode::OpenFiles);
db.project()
.set_open_files(&mut db, FxHashSet::from_iter([file]));
@@ -527,21 +525,14 @@ impl<'a> ProjectBenchmark<'a> {
#[track_caller]
fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
fn check_project(db: &mut ProjectDatabase, project_name: &str, max_diagnostics: usize) {
fn check_project(db: &mut ProjectDatabase, max_diagnostics: usize) {
let result = db.check();
let diagnostics = result.len();
if diagnostics > max_diagnostics {
let details = result
.into_iter()
.map(|diagnostic| diagnostic.concise_message().to_string())
.collect::<Vec<_>>()
.join("\n ");
assert!(
diagnostics <= max_diagnostics,
"{project_name}: Expected <={max_diagnostics} diagnostics but got {diagnostics}:\n {details}",
);
}
assert!(
diagnostics <= max_diagnostics,
"Expected <={max_diagnostics} diagnostics but got {diagnostics}"
);
}
setup_rayon();
@@ -551,7 +542,7 @@ fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
group.bench_function(benchmark.project.config.name, |b| {
b.iter_batched_ref(
|| benchmark.setup_iteration(),
|db| check_project(db, benchmark.project.config.name, benchmark.max_diagnostics),
|db| check_project(db, benchmark.max_diagnostics),
BatchSize::SmallInput,
);
});
@@ -619,7 +610,7 @@ fn datetype(criterion: &mut Criterion) {
max_dep_date: "2025-07-04",
python_version: PythonVersion::PY313,
},
2,
0,
);
bench_project(&benchmark, criterion);

View File

@@ -34,7 +34,6 @@ glob = { workspace = true }
ignore = { workspace = true, optional = true }
matchit = { workspace = true }
path-slash = { workspace = true }
quick-junit = { workspace = true, optional = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
schemars = { workspace = true, optional = true }
@@ -57,7 +56,6 @@ tempfile = { workspace = true }
[features]
cache = ["ruff_cache"]
junit = ["dep:quick-junit"]
os = ["ignore", "dep:etcetera"]
serde = ["camino/serde1", "dep:serde", "dep:serde_json", "ruff_diagnostics/serde"]
# Exposes testing utilities.

View File

@@ -1282,9 +1282,6 @@ pub enum DiagnosticFormat {
Rdjson,
/// Print diagnostics in the format emitted by Pylint.
Pylint,
/// Print diagnostics in the format expected by JUnit.
#[cfg(feature = "junit")]
Junit,
}
/// A representation of the kinds of messages inside a diagnostic.

View File

@@ -30,8 +30,6 @@ mod azure;
mod json;
#[cfg(feature = "serde")]
mod json_lines;
#[cfg(feature = "junit")]
mod junit;
mod pylint;
#[cfg(feature = "serde")]
mod rdjson;
@@ -158,8 +156,7 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
AnnotateRenderer::styled()
} else {
AnnotateRenderer::plain()
}
.cut_indicator("");
};
renderer = renderer
.error(stylesheet.error)
@@ -199,10 +196,6 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
DiagnosticFormat::Pylint => {
PylintRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
#[cfg(feature = "junit")]
DiagnosticFormat::Junit => {
junit::JunitRenderer::new(self.resolver).render(f, self.diagnostics)?;
}
}
Ok(())

View File

@@ -1,195 +0,0 @@
use std::{collections::BTreeMap, ops::Deref, path::Path};
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};
use ruff_source_file::LineColumn;
use crate::diagnostic::{Diagnostic, SecondaryCode, render::FileResolver};
/// A renderer for diagnostics in the [JUnit] format.
///
/// See [`junit.xsd`] for the specification in the JUnit repository and an annotated [version]
/// linked from the [`quick_junit`] docs.
///
/// [JUnit]: https://junit.org/
/// [`junit.xsd`]: https://github.com/junit-team/junit-framework/blob/2870b7d8fd5bf7c1efe489d3991d3ed3900e82bb/platform-tests/src/test/resources/jenkins-junit.xsd
/// [version]: https://llg.cubic.org/docs/junit/
/// [`quick_junit`]: https://docs.rs/quick-junit/latest/quick_junit/
pub struct JunitRenderer<'a> {
    // Maps diagnostic spans back to file paths and line/column positions.
    resolver: &'a dyn FileResolver,
}

impl<'a> JunitRenderer<'a> {
    pub fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }

    /// Serializes `diagnostics` as a JUnit XML report to `f`.
    ///
    /// An empty diagnostic list produces a single synthetic "No errors found"
    /// success case so the report is never empty. Otherwise one `<testsuite>`
    /// is emitted per file (in sorted filename order, via the `BTreeMap`
    /// returned by `group_diagnostics_by_filename`), containing one failing
    /// `<testcase>` per diagnostic.
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        let mut report = Report::new("ruff");
        if diagnostics.is_empty() {
            let mut test_suite = TestSuite::new("ruff");
            test_suite
                .extra
                .insert(XmlString::new("package"), XmlString::new("org.ruff"));
            let mut case = TestCase::new("No errors found", TestCaseStatus::success());
            case.set_classname("ruff");
            test_suite.add_test_case(case);
            report.add_test_suite(test_suite);
        } else {
            for (filename, diagnostics) in group_diagnostics_by_filename(diagnostics, self.resolver)
            {
                let mut test_suite = TestSuite::new(filename);
                test_suite
                    .extra
                    .insert(XmlString::new("package"), XmlString::new("org.ruff"));
                // The JUnit "classname" is the file path with its extension stripped.
                let classname = Path::new(filename).with_extension("");
                for diagnostic in diagnostics {
                    let DiagnosticWithLocation {
                        diagnostic,
                        start_location: location,
                    } = diagnostic;
                    let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
                    status.set_message(diagnostic.body());
                    // Include the line/column in the description only when a location
                    // was resolved (it is withheld for notebooks and spanless
                    // diagnostics; see `group_diagnostics_by_filename`).
                    if let Some(location) = location {
                        status.set_description(format!(
                            "line {row}, col {col}, {body}",
                            row = location.line,
                            col = location.column,
                            body = diagnostic.body()
                        ));
                    } else {
                        status.set_description(diagnostic.body());
                    }
                    // Prefer the secondary code (e.g. `F401`); diagnostics without
                    // one fall back to the diagnostic's name.
                    let code = diagnostic
                        .secondary_code()
                        .map_or_else(|| diagnostic.name(), SecondaryCode::as_str);
                    let mut case = TestCase::new(format!("org.ruff.{code}"), status);
                    case.set_classname(classname.to_str().unwrap());
                    if let Some(location) = location {
                        case.extra.insert(
                            XmlString::new("line"),
                            XmlString::new(location.line.to_string()),
                        );
                        case.extra.insert(
                            XmlString::new("column"),
                            XmlString::new(location.column.to_string()),
                        );
                    }
                    test_suite.add_test_case(case);
                }
                report.add_test_suite(test_suite);
            }
        }

        // `Report::serialize` wants `io::Write`, but we only have a
        // `fmt::Formatter`; `FmtAdapter` bridges the two.
        let adapter = FmtAdapter { fmt: f };
        report.serialize(adapter).map_err(|_| std::fmt::Error)
    }
}
// TODO(brent) this and `group_diagnostics_by_filename` are also used by the `grouped` output
// format. I think they'd make more sense in that file, but I started here first. I'll move them to
// that module when adding the `grouped` output format.

/// A diagnostic paired with the precomputed line/column of the start of its
/// primary span, when one could be resolved.
struct DiagnosticWithLocation<'a> {
    diagnostic: &'a Diagnostic,
    // `None` when the diagnostic has no primary span or range, or when the
    // file is a notebook (locations are deliberately withheld for notebooks
    // in `group_diagnostics_by_filename`).
    start_location: Option<LineColumn>,
}

impl Deref for DiagnosticWithLocation<'_> {
    type Target = Diagnostic;

    // Allows calling `Diagnostic` methods directly on the wrapper.
    fn deref(&self) -> &Self::Target {
        self.diagnostic
    }
}
/// Groups `diagnostics` by the path of their primary file.
///
/// Returns a `BTreeMap` so suites are iterated in a stable, sorted filename
/// order. A diagnostic without a primary span is grouped under the default
/// (empty) filename with no start location. Line/column resolution is skipped
/// for notebook files.
fn group_diagnostics_by_filename<'a>(
    diagnostics: &'a [Diagnostic],
    resolver: &'a dyn FileResolver,
) -> BTreeMap<&'a str, Vec<DiagnosticWithLocation<'a>>> {
    let mut grouped_diagnostics = BTreeMap::default();
    for diagnostic in diagnostics {
        let (filename, start_location) = diagnostic
            .primary_span_ref()
            .map(|span| {
                let file = span.file();
                // Skip the line/column lookup for notebooks, where plain
                // offsets would not map cleanly to a user-visible location.
                let start_location =
                    span.range()
                        .filter(|_| !resolver.is_notebook(file))
                        .map(|range| {
                            file.diagnostic_source(resolver)
                                .as_source_code()
                                .line_column(range.start())
                        });
                (span.file().path(resolver), start_location)
            })
            .unwrap_or_default();
        grouped_diagnostics
            .entry(filename)
            // `or_default()` is the idiomatic form of `or_insert_with(Vec::new)`.
            .or_default()
            .push(DiagnosticWithLocation {
                diagnostic,
                start_location,
            });
    }
    grouped_diagnostics
}
/// Adapts a `std::fmt::Write` sink to the `std::io::Write` interface
/// required by `quick_junit`'s serializer.
struct FmtAdapter<'a> {
    fmt: &'a mut dyn std::fmt::Write,
}

impl std::io::Write for FmtAdapter<'_> {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        // The underlying sink is text-only, so reject byte sequences that are
        // not valid UTF-8 instead of writing lossy output.
        let text = std::str::from_utf8(buf).map_err(|_| {
            std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Invalid UTF-8 in JUnit report",
            )
        })?;
        self.fmt.write_str(text).map_err(std::io::Error::other)?;
        Ok(buf.len())
    }

    fn flush(&mut self) -> std::io::Result<()> {
        // Nothing is buffered, so flushing is a no-op.
        Ok(())
    }

    fn write_fmt(&mut self, args: std::fmt::Arguments<'_>) -> std::io::Result<()> {
        // Forward formatted writes straight through, avoiding an intermediate
        // byte buffer and the UTF-8 round trip in `write`.
        self.fmt.write_fmt(args).map_err(std::io::Error::other)
    }
}
#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    /// Snapshot of the JUnit report for ordinary lint diagnostics.
    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Junit);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }

    /// Snapshot of the JUnit report for syntax-error diagnostics.
    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Junit);
        insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
    }
}

View File

@@ -232,7 +232,7 @@ impl Files {
let roots = inner.roots.read().unwrap();
for root in roots.all() {
if path.starts_with(root.path(db)) {
if root.path(db).starts_with(&path) {
root.set_revision(db).to(FileRevision::now());
}
}
@@ -375,25 +375,12 @@ impl File {
}
/// Refreshes the file metadata by querying the file system if needed.
///
/// This also "touches" the file root associated with the given path.
/// This means that any Salsa queries that depend on the corresponding
/// root's revision will become invalidated.
pub fn sync_path(db: &mut dyn Db, path: &SystemPath) {
let absolute = SystemPath::absolute(path, db.system().current_directory());
Files::touch_root(db, &absolute);
Self::sync_system_path(db, &absolute, None);
}
/// Refreshes *only* the file metadata by querying the file system if needed.
///
/// This specifically does not touch any file root associated with the
/// given file path.
pub fn sync_path_only(db: &mut dyn Db, path: &SystemPath) {
let absolute = SystemPath::absolute(path, db.system().current_directory());
Self::sync_system_path(db, &absolute, None);
}
/// Increments the revision for the virtual file at `path`.
pub fn sync_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath) {
if let Some(virtual_file) = db.files().try_virtual_file(path) {
@@ -499,7 +486,7 @@ impl fmt::Debug for File {
///
/// This is a wrapper around a [`File`] that provides additional methods to interact with a virtual
/// file.
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone)]
pub struct VirtualFile(File);
impl VirtualFile {

View File

@@ -23,7 +23,7 @@ pub struct FileRoot {
pub path: SystemPathBuf,
/// The kind of the root at the time of its creation.
pub kind_at_time_of_creation: FileRootKind,
kind_at_time_of_creation: FileRootKind,
/// A revision that changes when the contents of the source root change.
///

View File

@@ -87,7 +87,7 @@ impl SourceDb for ModuleDb {
#[salsa::db]
impl Db for ModuleDb {
fn should_check_file(&self, file: File) -> bool {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.12.4"
version = "0.12.3"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -15,7 +15,7 @@ license = { workspace = true }
[dependencies]
ruff_annotate_snippets = { workspace = true }
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["junit", "serde"] }
ruff_db = { workspace = true, features = ["serde"] }
ruff_diagnostics = { workspace = true, features = ["serde"] }
ruff_notebook = { workspace = true }
ruff_macros = { workspace = true }
@@ -55,6 +55,7 @@ path-absolutize = { workspace = true, features = [
pathdiff = { workspace = true }
pep440_rs = { workspace = true }
pyproject-toml = { workspace = true }
quick-junit = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }

View File

@@ -104,6 +104,3 @@ os.chmod(x)
os.replace("src", "dst", src_dir_fd=1, dst_dir_fd=2)
os.replace("src", "dst", src_dir_fd=1)
os.replace("src", "dst", dst_dir_fd=2)
os.getcwd()
os.getcwdb()

View File

@@ -1044,6 +1044,7 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
Rule::OsMakedirs,
Rule::OsRename,
Rule::OsReplace,
Rule::OsGetcwd,
Rule::OsStat,
Rule::OsPathJoin,
Rule::OsPathSamefile,
@@ -1109,9 +1110,6 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
if checker.is_rule_enabled(Rule::OsReadlink) {
flake8_use_pathlib::rules::os_readlink(checker, call, segments);
}
if checker.is_rule_enabled(Rule::OsGetcwd) {
flake8_use_pathlib::rules::os_getcwd(checker, call, segments);
}
if checker.is_rule_enabled(Rule::PathConstructorCurrentDirectory) {
flake8_use_pathlib::rules::path_constructor_current_directory(
checker, call, segments,

View File

@@ -928,7 +928,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Flake8UsePathlib, "106") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRmdir),
(Flake8UsePathlib, "107") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsRemove),
(Flake8UsePathlib, "108") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsUnlink),
(Flake8UsePathlib, "109") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsGetcwd),
(Flake8UsePathlib, "109") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsGetcwd),
(Flake8UsePathlib, "110") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathExists),
(Flake8UsePathlib, "111") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathExpanduser),
(Flake8UsePathlib, "112") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsPathIsdir),

View File

@@ -0,0 +1,117 @@
use std::io::Write;
use std::path::Path;
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};
use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::LineColumn;
use crate::message::{Emitter, EmitterContext, MessageWithLocation, group_diagnostics_by_filename};
/// An [`Emitter`] that serializes diagnostics as a JUnit XML report using the
/// `quick_junit` crate.
#[derive(Default)]
pub struct JunitEmitter;

impl Emitter for JunitEmitter {
    /// Writes all `diagnostics` to `writer` as a single JUnit report.
    ///
    /// An empty diagnostic list yields one synthetic "No errors found" success
    /// case; otherwise one `<testsuite>` is emitted per file with one failing
    /// `<testcase>` per diagnostic.
    fn emit(
        &mut self,
        writer: &mut dyn Write,
        diagnostics: &[Diagnostic],
        context: &EmitterContext,
    ) -> anyhow::Result<()> {
        let mut report = Report::new("ruff");
        if diagnostics.is_empty() {
            // Always produce a non-empty report so downstream JUnit consumers
            // have something to parse.
            let mut test_suite = TestSuite::new("ruff");
            test_suite
                .extra
                .insert(XmlString::new("package"), XmlString::new("org.ruff"));
            let mut case = TestCase::new("No errors found", TestCaseStatus::success());
            case.set_classname("ruff");
            test_suite.add_test_case(case);
            report.add_test_suite(test_suite);
        } else {
            for (filename, messages) in group_diagnostics_by_filename(diagnostics) {
                let mut test_suite = TestSuite::new(&filename);
                test_suite
                    .extra
                    .insert(XmlString::new("package"), XmlString::new("org.ruff"));
                for message in messages {
                    let MessageWithLocation {
                        message,
                        start_location,
                    } = message;
                    let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
                    status.set_message(message.body());
                    let location = if context.is_notebook(&message.expect_ruff_filename()) {
                        // We can't give a reasonable location for the structured formats,
                        // so we show one that's clearly a fallback
                        LineColumn::default()
                    } else {
                        start_location
                    };
                    status.set_description(format!(
                        "line {row}, col {col}, {body}",
                        row = location.line,
                        col = location.column,
                        body = message.body()
                    ));
                    // Use the secondary code when present (e.g. `org.ruff.F401`);
                    // diagnostics without one fall back to plain `org.ruff`.
                    let mut case = TestCase::new(
                        if let Some(code) = message.secondary_code() {
                            format!("org.ruff.{code}")
                        } else {
                            "org.ruff".to_string()
                        },
                        status,
                    );
                    // The "classname" is the file path with its extension stripped.
                    // NOTE(review): `file_stem()`/`parent()` are unwrapped here —
                    // presumably diagnostic filenames always have both; confirm
                    // against the callers.
                    let file_path = Path::new(&*filename);
                    let file_stem = file_path.file_stem().unwrap().to_str().unwrap();
                    let classname = file_path.parent().unwrap().join(file_stem);
                    case.set_classname(classname.to_str().unwrap());
                    case.extra.insert(
                        XmlString::new("line"),
                        XmlString::new(location.line.to_string()),
                    );
                    case.extra.insert(
                        XmlString::new("column"),
                        XmlString::new(location.column.to_string()),
                    );
                    test_suite.add_test_case(case);
                }
                report.add_test_suite(test_suite);
            }
        }
        report.serialize(writer)?;

        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;

    use crate::message::JunitEmitter;
    use crate::message::tests::{
        capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
    };

    /// Snapshot of the JUnit report for ordinary lint diagnostics.
    #[test]
    fn output() {
        let mut emitter = JunitEmitter;
        let content = capture_emitter_output(&mut emitter, &create_diagnostics());
        assert_snapshot!(content);
    }

    /// Snapshot of the JUnit report for syntax-error diagnostics.
    #[test]
    fn syntax_errors() {
        let mut emitter = JunitEmitter;
        let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
        assert_snapshot!(content);
    }
}

View File

@@ -14,6 +14,7 @@ use ruff_db::files::File;
pub use github::GithubEmitter;
pub use gitlab::GitlabEmitter;
pub use grouped::GroupedEmitter;
pub use junit::JunitEmitter;
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, SourceFile};
use ruff_text_size::{Ranged, TextRange, TextSize};
@@ -27,6 +28,7 @@ mod diff;
mod github;
mod gitlab;
mod grouped;
mod junit;
mod sarif;
mod text;

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_db/src/diagnostic/render/junit.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/junit.rs
expression: content
snapshot_kind: text
---
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="3" failures="3" errors="0">

View File

@@ -1,14 +1,15 @@
---
source: crates/ruff_db/src/diagnostic/render/junit.rs
expression: env.render_diagnostics(&diagnostics)
source: crates/ruff_linter/src/message/junit.rs
expression: content
snapshot_kind: text
---
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="2" failures="2" errors="0">
<testsuite name="syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="1" column="15">
<testcase name="org.ruff" classname="syntax_errors" line="1" column="15">
<failure message="SyntaxError: Expected one or more symbol names after import">line 1, col 15, SyntaxError: Expected one or more symbol names after import</failure>
</testcase>
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="3" column="12">
<testcase name="org.ruff" classname="syntax_errors" line="3" column="12">
<failure message="SyntaxError: Expected &apos;)&apos;, found newline">line 3, col 12, SyntaxError: Expected &apos;)&apos;, found newline</failure>
</testcase>
</testsuite>

View File

@@ -134,11 +134,6 @@ pub(crate) const fn is_fix_os_path_dirname_enabled(settings: &LinterSettings) ->
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/19245
pub(crate) const fn is_fix_os_getcwd_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/11436
// https://github.com/astral-sh/ruff/pull/11168
pub(crate) const fn is_dunder_init_fix_unused_import_enabled(settings: &LinterSettings) -> bool {

View File

@@ -1,6 +1,5 @@
pub(crate) use glob_rule::*;
pub(crate) use invalid_pathlib_with_suffix::*;
pub(crate) use os_getcwd::*;
pub(crate) use os_path_abspath::*;
pub(crate) use os_path_basename::*;
pub(crate) use os_path_dirname::*;
@@ -24,7 +23,6 @@ pub(crate) use replaceable_by_pathlib::*;
mod glob_rule;
mod invalid_pathlib_with_suffix;
mod os_getcwd;
mod os_path_abspath;
mod os_path_basename;
mod os_path_dirname;

View File

@@ -1,100 +0,0 @@
use crate::checkers::ast::Checker;
use crate::importer::ImportRequest;
use crate::preview::is_fix_os_getcwd_enabled;
use crate::{FixAvailability, Violation};
use ruff_diagnostics::{Applicability, Edit, Fix};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::ExprCall;
use ruff_text_size::Ranged;
/// ## What it does
/// Checks for uses of `os.getcwd` and `os.getcwdb`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os`. When possible, using `Path` object
/// methods such as `Path.cwd()` can improve readability over the `os`
/// module's counterparts (e.g., `os.getcwd()`).
///
/// ## Examples
/// ```python
/// import os
///
/// cwd = os.getcwd()
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// cwd = Path.cwd()
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## Fix Safety
/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the original expression.
///
/// ## References
/// - [Python documentation: `Path.cwd`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.cwd)
/// - [Python documentation: `os.getcwd`](https://docs.python.org/3/library/os.html#os.getcwd)
/// - [Python documentation: `os.getcwdb`](https://docs.python.org/3/library/os.html#os.getcwdb)
/// - [PEP 428 – The pathlib module – object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsGetcwd;

impl Violation for OsGetcwd {
    // "Sometimes": a fix is only attached for argument-free calls when the
    // preview-gated autofix is enabled; see `os_getcwd` below.
    const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

    #[derive_message_formats]
    fn message(&self) -> String {
        "`os.getcwd()` should be replaced by `Path.cwd()`".to_string()
    }

    fn fix_title(&self) -> Option<String> {
        Some("Replace with `Path.cwd()`".to_string())
    }
}
/// PTH109
pub(crate) fn os_getcwd(checker: &Checker, call: &ExprCall, segments: &[&str]) {
    // Only `os.getcwd` and `os.getcwdb` are replaceable by `Path.cwd()`.
    let ["os", "getcwd" | "getcwdb"] = segments else {
        return;
    };

    let call_range = call.range();
    // Report on the callee (`os.getcwd`) rather than the whole call expression.
    let mut diagnostic = checker.report_diagnostic(OsGetcwd, call.func.range());

    // `os.getcwd()` / `os.getcwdb()` take no arguments; if any are present,
    // keep the diagnostic but offer no fix.
    if !call.arguments.is_empty() {
        return;
    }

    if !is_fix_os_getcwd_enabled(checker.settings()) {
        return;
    }

    diagnostic.try_set_fix(|| {
        let (import_edit, binding) = checker.importer().get_or_import_symbol(
            &ImportRequest::import("pathlib", "Path"),
            call.start(),
            checker.semantic(),
        )?;
        // Replacing the call would drop any comments inside its range, so
        // downgrade to an unsafe fix when a comment overlaps it.
        let applicability = if checker.comment_ranges().intersects(call_range) {
            Applicability::Unsafe
        } else {
            Applicability::Safe
        };
        Ok(Fix::applicable_edits(
            Edit::range_replacement(format!("{binding}.cwd()"), call_range),
            [import_edit],
            applicability,
        ))
    });
}

View File

@@ -7,8 +7,8 @@ use crate::checkers::ast::Checker;
use crate::rules::flake8_use_pathlib::helpers::is_keyword_only_argument_non_default;
use crate::rules::flake8_use_pathlib::rules::Glob;
use crate::rules::flake8_use_pathlib::violations::{
BuiltinOpen, Joiner, OsChmod, OsListdir, OsMakedirs, OsMkdir, OsPathJoin, OsPathSamefile,
OsPathSplitext, OsRename, OsReplace, OsStat, OsSymlink, PyPath,
BuiltinOpen, Joiner, OsChmod, OsGetcwd, OsListdir, OsMakedirs, OsMkdir, OsPathJoin,
OsPathSamefile, OsPathSplitext, OsRename, OsReplace, OsStat, OsSymlink, PyPath,
};
pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
@@ -83,6 +83,10 @@ pub(crate) fn replaceable_by_pathlib(checker: &Checker, call: &ExprCall) {
}
checker.report_diagnostic_if_enabled(OsReplace, range)
}
// PTH109
["os", "getcwd"] => checker.report_diagnostic_if_enabled(OsGetcwd, range),
["os", "getcwdb"] => checker.report_diagnostic_if_enabled(OsGetcwd, range),
// PTH116
["os", "stat"] => {
// `dir_fd` is not supported by pathlib, so check if it's set to non-default values.

View File

@@ -103,7 +103,6 @@ full_name.py:16:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
17 | b = os.path.exists(p)
18 | bb = os.path.expanduser(p)
|
= help: Replace with `Path.cwd()`
full_name.py:17:5: PTH110 `os.path.exists()` should be replaced by `Path.exists()`
|
@@ -293,7 +292,6 @@ full_name.py:35:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
36 | os.path.join(p, *q)
37 | os.sep.join(p, *q)
|
= help: Replace with `Path.cwd()`
full_name.py:36:1: PTH118 `os.path.join()` should be replaced by `Path.joinpath()`
|
@@ -362,21 +360,3 @@ full_name.py:71:1: PTH123 `open()` should be replaced by `Path.open()`
72 |
73 | # https://github.com/astral-sh/ruff/issues/17693
|
full_name.py:108:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
|
106 | os.replace("src", "dst", dst_dir_fd=2)
107 |
108 | os.getcwd()
| ^^^^^^^^^ PTH109
109 | os.getcwdb()
|
= help: Replace with `Path.cwd()`
full_name.py:109:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
|
108 | os.getcwd()
109 | os.getcwdb()
| ^^^^^^^^^^ PTH109
|
= help: Replace with `Path.cwd()`

View File

@@ -103,7 +103,6 @@ import_as.py:16:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
17 | b = foo_p.exists(p)
18 | bb = foo_p.expanduser(p)
|
= help: Replace with `Path.cwd()`
import_as.py:17:5: PTH110 `os.path.exists()` should be replaced by `Path.exists()`
|

View File

@@ -103,7 +103,6 @@ import_from.py:18:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
19 | b = exists(p)
20 | bb = expanduser(p)
|
= help: Replace with `Path.cwd()`
import_from.py:19:5: PTH110 `os.path.exists()` should be replaced by `Path.exists()`
|

View File

@@ -103,7 +103,6 @@ import_from_as.py:23:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
24 | b = xexists(p)
25 | bb = xexpanduser(p)
|
= help: Replace with `Path.cwd()`
import_from_as.py:24:5: PTH110 `os.path.exists()` should be replaced by `Path.exists()`
|

View File

@@ -168,7 +168,6 @@ full_name.py:16:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
17 | b = os.path.exists(p)
18 | bb = os.path.expanduser(p)
|
= help: Replace with `Path.cwd()`
full_name.py:17:5: PTH110 [*] `os.path.exists()` should be replaced by `Path.exists()`
|
@@ -511,7 +510,6 @@ full_name.py:35:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
36 | os.path.join(p, *q)
37 | os.sep.join(p, *q)
|
= help: Replace with `Path.cwd()`
full_name.py:36:1: PTH118 `os.path.join()` should be replaced by `Path.joinpath()`
|
@@ -580,50 +578,3 @@ full_name.py:71:1: PTH123 `open()` should be replaced by `Path.open()`
72 |
73 | # https://github.com/astral-sh/ruff/issues/17693
|
full_name.py:108:1: PTH109 [*] `os.getcwd()` should be replaced by `Path.cwd()`
|
106 | os.replace("src", "dst", dst_dir_fd=2)
107 |
108 | os.getcwd()
| ^^^^^^^^^ PTH109
109 | os.getcwdb()
|
= help: Replace with `Path.cwd()`
Safe fix
1 1 | import os
2 2 | import os.path
3 |+import pathlib
3 4 |
4 5 | p = "/foo"
5 6 | q = "bar"
--------------------------------------------------------------------------------
105 106 | os.replace("src", "dst", src_dir_fd=1)
106 107 | os.replace("src", "dst", dst_dir_fd=2)
107 108 |
108 |-os.getcwd()
109 |+pathlib.Path.cwd()
109 110 | os.getcwdb()
full_name.py:109:1: PTH109 [*] `os.getcwd()` should be replaced by `Path.cwd()`
|
108 | os.getcwd()
109 | os.getcwdb()
| ^^^^^^^^^^ PTH109
|
= help: Replace with `Path.cwd()`
Safe fix
1 1 | import os
2 2 | import os.path
3 |+import pathlib
3 4 |
4 5 | p = "/foo"
5 6 | q = "bar"
--------------------------------------------------------------------------------
106 107 | os.replace("src", "dst", dst_dir_fd=2)
107 108 |
108 109 | os.getcwd()
109 |-os.getcwdb()
110 |+pathlib.Path.cwd()

View File

@@ -168,7 +168,6 @@ import_as.py:16:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
17 | b = foo_p.exists(p)
18 | bb = foo_p.expanduser(p)
|
= help: Replace with `Path.cwd()`
import_as.py:17:5: PTH110 [*] `os.path.exists()` should be replaced by `Path.exists()`
|

View File

@@ -172,7 +172,6 @@ import_from.py:18:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
19 | b = exists(p)
20 | bb = expanduser(p)
|
= help: Replace with `Path.cwd()`
import_from.py:19:5: PTH110 [*] `os.path.exists()` should be replaced by `Path.exists()`
|

View File

@@ -172,7 +172,6 @@ import_from_as.py:23:1: PTH109 `os.getcwd()` should be replaced by `Path.cwd()`
24 | b = xexists(p)
25 | bb = xexpanduser(p)
|
= help: Replace with `Path.cwd()`
import_from_as.py:24:5: PTH110 [*] `os.path.exists()` should be replaced by `Path.exists()`
|

View File

@@ -230,6 +230,52 @@ impl Violation for OsReplace {
}
}
/// ## What it does
/// Checks for uses of `os.getcwd` and `os.getcwdb`.
///
/// ## Why is this bad?
/// `pathlib` offers a high-level API for path manipulation, as compared to
/// the lower-level API offered by `os`. When possible, using `Path` object
/// methods such as `Path.cwd()` can improve readability over the `os`
/// module's counterparts (e.g., `os.getcwd()`).
///
/// ## Examples
/// ```python
/// import os
///
/// cwd = os.getcwd()
/// ```
///
/// Use instead:
/// ```python
/// from pathlib import Path
///
/// cwd = Path.cwd()
/// ```
///
/// ## Known issues
/// While using `pathlib` can improve the readability and type safety of your code,
/// it can be less performant than the lower-level alternatives that work directly with strings,
/// especially on older versions of Python.
///
/// ## References
/// - [Python documentation: `Path.cwd`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.cwd)
/// - [Python documentation: `os.getcwd`](https://docs.python.org/3/library/os.html#os.getcwd)
/// - [Python documentation: `os.getcwdb`](https://docs.python.org/3/library/os.html#os.getcwdb)
/// - [PEP 428 The pathlib module object-oriented filesystem paths](https://peps.python.org/pep-0428/)
/// - [Correspondence between `os` and `pathlib`](https://docs.python.org/3/library/pathlib.html#correspondence-to-tools-in-the-os-module)
/// - [Why you should be using pathlib](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
/// - [No really, pathlib is great](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
#[derive(ViolationMetadata)]
pub(crate) struct OsGetcwd;
impl Violation for OsGetcwd {
#[derive_message_formats]
fn message(&self) -> String {
"`os.getcwd()` should be replaced by `Path.cwd()`".to_string()
}
}
/// ## What it does
/// Checks for uses of `os.stat`.
///

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_wasm"
version = "0.12.4"
version = "0.12.3"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -77,31 +77,10 @@ struct ExpandedEdit {
content: Option<String>,
}
/// Perform global constructor initialization.
#[cfg(target_family = "wasm")]
#[expect(unsafe_code)]
pub fn before_main() {
unsafe extern "C" {
fn __wasm_call_ctors();
}
// Salsa uses the `inventory` crate, which registers global constructors that may need to be
// called explicitly on WASM. See <https://github.com/dtolnay/inventory/blob/master/src/lib.rs#L105>
// for details.
unsafe {
__wasm_call_ctors();
}
}
#[cfg(not(target_family = "wasm"))]
pub fn before_main() {}
#[wasm_bindgen(start)]
pub fn run() {
use log::Level;
before_main();
// When the `console_error_panic_hook` feature is enabled, we can call the
// `set_panic_hook` function at least once during initialization, and then
// we will get better error messages if our code ever panics.

View File

@@ -21,8 +21,6 @@ macro_rules! check {
#[wasm_bindgen_test]
fn empty_config() {
ruff_wasm::before_main();
check!(
"if (1, 2):\n pass",
r#"{}"#,
@@ -44,8 +42,6 @@ fn empty_config() {
#[wasm_bindgen_test]
fn syntax_error() {
ruff_wasm::before_main();
check!(
"x =\ny = 1\n",
r#"{}"#,
@@ -67,8 +63,6 @@ fn syntax_error() {
#[wasm_bindgen_test]
fn unsupported_syntax_error() {
ruff_wasm::before_main();
check!(
"match 2:\n case 1: ...",
r#"{"target-version": "py39"}"#,
@@ -90,15 +84,11 @@ fn unsupported_syntax_error() {
#[wasm_bindgen_test]
fn partial_config() {
ruff_wasm::before_main();
check!("if (1, 2):\n pass", r#"{"ignore": ["F"]}"#, []);
}
#[wasm_bindgen_test]
fn partial_nested_config() {
ruff_wasm::before_main();
let config = r#"{
"select": ["Q"],
"flake8-quotes": {

141
crates/ty/docs/rules.md generated
View File

@@ -36,7 +36,7 @@ def test(): -> "int":
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20call-non-callable) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L100)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L99)
</small>
**What it does**
@@ -58,7 +58,7 @@ Calling a non-callable object will raise a `TypeError` at runtime.
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-argument-forms) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L144)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L143)
</small>
**What it does**
@@ -88,7 +88,7 @@ f(int) # error
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-declarations) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L170)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L169)
</small>
**What it does**
@@ -117,7 +117,7 @@ a = 1
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-metaclass) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L195)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L194)
</small>
**What it does**
@@ -147,7 +147,7 @@ class C(A, B): ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-class-definition) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L221)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L220)
</small>
**What it does**
@@ -177,7 +177,7 @@ class B(A): ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-base) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L286)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L264)
</small>
**What it does**
@@ -202,7 +202,7 @@ class B(A, A): ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-kw-only) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L307)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L285)
</small>
**What it does**
@@ -306,7 +306,7 @@ def test(): -> "Literal[5]":
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20inconsistent-mro) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L449)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L427)
</small>
**What it does**
@@ -334,7 +334,7 @@ class C(A, B): ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20index-out-of-bounds) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L473)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L451)
</small>
**What it does**
@@ -358,7 +358,7 @@ t[3] # IndexError: tuple index out of range
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20instance-layout-conflict) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L339)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L317)
</small>
**What it does**
@@ -445,7 +445,7 @@ an atypical memory layout.
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-argument-type) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L493)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L471)
</small>
**What it does**
@@ -470,7 +470,7 @@ func("foo") # error: [invalid-argument-type]
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-assignment) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L533)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L511)
</small>
**What it does**
@@ -496,7 +496,7 @@ a: int = ''
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-attribute-access) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1537)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1515)
</small>
**What it does**
@@ -528,7 +528,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-base) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L555)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L533)
</small>
**What it does**
@@ -550,7 +550,7 @@ class A(42): ... # error: [invalid-base]
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-context-manager) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L606)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L584)
</small>
**What it does**
@@ -575,7 +575,7 @@ with 1:
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-declaration) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L627)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L605)
</small>
**What it does**
@@ -602,7 +602,7 @@ a: str
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-exception-caught) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L650)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L628)
</small>
**What it does**
@@ -644,7 +644,7 @@ except ZeroDivisionError:
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-generic-class) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L686)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L664)
</small>
**What it does**
@@ -675,7 +675,7 @@ class C[U](Generic[T]): ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-legacy-type-variable) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L712)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L690)
</small>
**What it does**
@@ -708,7 +708,7 @@ def f(t: TypeVar("U")): ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-metaclass) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L761)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L739)
</small>
**What it does**
@@ -740,7 +740,7 @@ class B(metaclass=f): ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-overload) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L788)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L766)
</small>
**What it does**
@@ -788,7 +788,7 @@ def foo(x: int) -> int: ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-parameter-default) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L831)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L809)
</small>
**What it does**
@@ -812,7 +812,7 @@ def f(a: int = ''): ...
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-protocol) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L421)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L399)
</small>
**What it does**
@@ -844,7 +844,7 @@ TypeError: Protocols can only inherit from other protocols, got <class 'int'>
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-raise) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L851)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L829)
</small>
Checks for `raise` statements that raise non-exceptions or use invalid
@@ -891,7 +891,7 @@ def g():
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-return-type) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L514)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L492)
</small>
**What it does**
@@ -914,7 +914,7 @@ def func() -> int:
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-super-argument) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L894)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L872)
</small>
**What it does**
@@ -968,7 +968,7 @@ TODO #14889
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-alias-type) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L740)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L718)
</small>
**What it does**
@@ -993,7 +993,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-checking-constant) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L933)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L911)
</small>
**What it does**
@@ -1021,7 +1021,7 @@ TYPE_CHECKING = ''
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-form) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L957)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L935)
</small>
**What it does**
@@ -1049,7 +1049,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-call) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1009)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L987)
</small>
**What it does**
@@ -1081,7 +1081,7 @@ f(10) # Error
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-definition) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L981)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L959)
</small>
**What it does**
@@ -1113,7 +1113,7 @@ class C:
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-variable-constraints) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1037)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1015)
</small>
**What it does**
@@ -1146,7 +1146,7 @@ T = TypeVar('T', bound=str) # valid bound TypeVar
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-argument) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1066)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1044)
</small>
**What it does**
@@ -1169,7 +1169,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x'
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20no-matching-overload) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1085)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1063)
</small>
**What it does**
@@ -1196,7 +1196,7 @@ func("string") # error: [no-matching-overload]
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20non-subscriptable) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1108)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1086)
</small>
**What it does**
@@ -1218,7 +1218,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20not-iterable) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1126)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1104)
</small>
**What it does**
@@ -1242,7 +1242,7 @@ for i in 34: # TypeError: 'int' object is not iterable
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20parameter-already-assigned) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1177)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1155)
</small>
**What it does**
@@ -1296,7 +1296,7 @@ def test(): -> "int":
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20static-assert-error) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1513)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1491)
</small>
**What it does**
@@ -1324,7 +1324,7 @@ static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known tr
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20subclass-of-final-class) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1268)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1246)
</small>
**What it does**
@@ -1351,7 +1351,7 @@ class B(A): ... # Error raised here
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20too-many-positional-arguments) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1313)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1291)
</small>
**What it does**
@@ -1376,7 +1376,7 @@ f("foo") # Error raised here
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20type-assertion-failure) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1291)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1269)
</small>
**What it does**
@@ -1402,7 +1402,7 @@ def _(x: int):
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unavailable-implicit-super-arguments) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1334)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1312)
</small>
**What it does**
@@ -1446,7 +1446,7 @@ class A:
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unknown-argument) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1391)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1369)
</small>
**What it does**
@@ -1471,7 +1471,7 @@ f(x=1, y=2) # Error raised here
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-attribute) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1412)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1390)
</small>
**What it does**
@@ -1497,7 +1497,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo'
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-import) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1434)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1412)
</small>
**What it does**
@@ -1520,7 +1520,7 @@ import foo # ModuleNotFoundError: No module named 'foo'
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-reference) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1453)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1431)
</small>
**What it does**
@@ -1543,7 +1543,7 @@ print(x) # NameError: name 'x' is not defined
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-bool-conversion) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1146)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1124)
</small>
**What it does**
@@ -1578,7 +1578,7 @@ b1 < b2 < b1 # exception raised here
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-operator) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1472)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1450)
</small>
**What it does**
@@ -1604,7 +1604,7 @@ A() + A() # TypeError: unsupported operand type(s) for +: 'A' and 'A'
<small>
Default level: [`error`](../rules.md#rule-levels "This lint has a default level of 'error'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20zero-stepsize-in-slice) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1494)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1472)
</small>
**What it does**
@@ -1622,31 +1622,6 @@ l = list(range(10))
l[1:10:0] # ValueError: slice step cannot be zero
```
## `deprecated`
<small>
Default level: [`warn`](../rules.md#rule-levels "This lint has a default level of 'warn'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20deprecated) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L265)
</small>
**What it does**
Checks for uses of deprecated items
**Why is this bad?**
Deprecated items should no longer be used.
**Examples**
```python
@warnings.deprecated("use new_func instead")
def old_func(): ...
old_func() # emits [deprecated] diagnostic
```
## `invalid-ignore-comment`
<small>
@@ -1680,7 +1655,7 @@ a = 20 / 0 # type: ignore
<small>
Default level: [`warn`](../rules.md#rule-levels "This lint has a default level of 'warn'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unbound-attribute) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1198)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1176)
</small>
**What it does**
@@ -1706,7 +1681,7 @@ A.c # AttributeError: type object 'A' has no attribute 'c'
<small>
Default level: [`warn`](../rules.md#rule-levels "This lint has a default level of 'warn'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unbound-implicit-call) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L118)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L117)
</small>
**What it does**
@@ -1736,7 +1711,7 @@ A()[0] # TypeError: 'A' object is not subscriptable
<small>
Default level: [`warn`](../rules.md#rule-levels "This lint has a default level of 'warn'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unbound-import) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1220)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1198)
</small>
**What it does**
@@ -1766,7 +1741,7 @@ from module import a # ImportError: cannot import name 'a' from 'module'
<small>
Default level: [`warn`](../rules.md#rule-levels "This lint has a default level of 'warn'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20redundant-cast) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1565)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1543)
</small>
**What it does**
@@ -1791,7 +1766,7 @@ cast(int, f()) # Redundant
<small>
Default level: [`warn`](../rules.md#rule-levels "This lint has a default level of 'warn'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20undefined-reveal) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1373)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1351)
</small>
**What it does**
@@ -1842,7 +1817,7 @@ a = 20 / 0 # ty: ignore[division-by-zero]
<small>
Default level: [`warn`](../rules.md#rule-levels "This lint has a default level of 'warn'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-global) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1586)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1564)
</small>
**What it does**
@@ -1896,7 +1871,7 @@ def g():
<small>
Default level: [`warn`](../rules.md#rule-levels "This lint has a default level of 'warn'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-base) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L573)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L551)
</small>
**What it does**
@@ -1933,7 +1908,7 @@ class D(C): ... # error: [unsupported-base]
<small>
Default level: [`ignore`](../rules.md#rule-levels "This lint has a default level of 'ignore'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20division-by-zero) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L247)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L246)
</small>
**What it does**
@@ -1955,7 +1930,7 @@ Dividing by zero raises a `ZeroDivisionError` at runtime.
<small>
Default level: [`ignore`](../rules.md#rule-levels "This lint has a default level of 'ignore'.") ·
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unresolved-reference) ·
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1246)
[View source](https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1224)
</small>
**What it does**

View File

@@ -660,7 +660,7 @@ fn can_handle_large_binop_expressions() -> anyhow::Result<()> {
--> test.py:4:13
|
2 | from typing_extensions import reveal_type
3 | total = 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 +…
3 | total = 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1...
4 | reveal_type(total)
| ^^^^^ `Literal[2000]`
|

View File

@@ -15,7 +15,7 @@ use ty_project::metadata::pyproject::{PyProject, Tool};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ProjectWatcher, directory_watcher};
use ty_project::{Db, ProjectDatabase, ProjectMetadata};
use ty_python_semantic::{Module, ModuleName, PythonPlatform, resolve_module};
use ty_python_semantic::{ModuleName, PythonPlatform, resolve_module};
struct TestCase {
db: ProjectDatabase,
@@ -40,14 +40,6 @@ impl TestCase {
&self.db
}
/// Stops file-watching and returns the collected change events.
///
/// The caller must pass a `MatchEvent` filter that is applied to
/// the change events returned. To get all change events, use `|_:
/// &ChangeEvent| true`. If possible, callers should pass a filter for a
/// specific file name, e.g., `event_for_file("foo.py")`. When done this
/// way, the watcher will specifically try to wait for a change event
/// matching the filter. This can help avoid flakes.
#[track_caller]
fn stop_watch<M>(&mut self, matcher: M) -> Vec<ChangeEvent>
where
@@ -1885,156 +1877,3 @@ fn rename_files_casing_only() -> anyhow::Result<()> {
Ok(())
}
/// This tests that retrieving submodules from a module has its cache
/// appropriately invalidated after a file is created.
#[test]
fn submodule_cache_invalidation_created() -> anyhow::Result<()> {
let mut case = setup([("lib.py", ""), ("bar/__init__.py", ""), ("bar/foo.py", "")])?;
let module = resolve_module(case.db(), &ModuleName::new("bar").unwrap()).expect("`bar` module");
let get_submodules = |db: &dyn Db, module: &Module| {
let mut names = module
.all_submodules(db)
.iter()
.map(|name| name.as_str().to_string())
.collect::<Vec<String>>();
names.sort();
names.join("\n")
};
insta::assert_snapshot!(
get_submodules(case.db(), &module),
@"foo",
);
std::fs::write(case.project_path("bar/wazoo.py").as_std_path(), "")?;
let changes = case.stop_watch(event_for_file("wazoo.py"));
case.apply_changes(changes, None);
insta::assert_snapshot!(
get_submodules(case.db(), &module),
@r"
foo
wazoo
",
);
Ok(())
}
/// This tests that retrieving submodules from a module has its cache
/// appropriately invalidated after a file is deleted.
#[test]
fn submodule_cache_invalidation_deleted() -> anyhow::Result<()> {
let mut case = setup([
("lib.py", ""),
("bar/__init__.py", ""),
("bar/foo.py", ""),
("bar/wazoo.py", ""),
])?;
let module = resolve_module(case.db(), &ModuleName::new("bar").unwrap()).expect("`bar` module");
let get_submodules = |db: &dyn Db, module: &Module| {
let mut names = module
.all_submodules(db)
.iter()
.map(|name| name.as_str().to_string())
.collect::<Vec<String>>();
names.sort();
names.join("\n")
};
insta::assert_snapshot!(
get_submodules(case.db(), &module),
@r"
foo
wazoo
",
);
std::fs::remove_file(case.project_path("bar/wazoo.py").as_std_path())?;
let changes = case.stop_watch(event_for_file("wazoo.py"));
case.apply_changes(changes, None);
insta::assert_snapshot!(
get_submodules(case.db(), &module),
@"foo",
);
Ok(())
}
/// This tests that retrieving submodules from a module has its cache
/// appropriately invalidated after a file is created and then deleted.
#[test]
fn submodule_cache_invalidation_created_then_deleted() -> anyhow::Result<()> {
let mut case = setup([("lib.py", ""), ("bar/__init__.py", ""), ("bar/foo.py", "")])?;
let module = resolve_module(case.db(), &ModuleName::new("bar").unwrap()).expect("`bar` module");
let get_submodules = |db: &dyn Db, module: &Module| {
let mut names = module
.all_submodules(db)
.iter()
.map(|name| name.as_str().to_string())
.collect::<Vec<String>>();
names.sort();
names.join("\n")
};
insta::assert_snapshot!(
get_submodules(case.db(), &module),
@"foo",
);
std::fs::write(case.project_path("bar/wazoo.py").as_std_path(), "")?;
let changes = case.take_watch_changes(event_for_file("wazoo.py"));
case.apply_changes(changes, None);
std::fs::remove_file(case.project_path("bar/wazoo.py").as_std_path())?;
let changes = case.stop_watch(event_for_file("wazoo.py"));
case.apply_changes(changes, None);
insta::assert_snapshot!(
get_submodules(case.db(), &module),
@"foo",
);
Ok(())
}
/// This tests that retrieving submodules from a module has its cache
/// appropriately invalidated after a file is created *after* a project
/// configuration change.
#[test]
fn submodule_cache_invalidation_after_pyproject_created() -> anyhow::Result<()> {
let mut case = setup([("lib.py", ""), ("bar/__init__.py", ""), ("bar/foo.py", "")])?;
let module = resolve_module(case.db(), &ModuleName::new("bar").unwrap()).expect("`bar` module");
let get_submodules = |db: &dyn Db, module: &Module| {
let mut names = module
.all_submodules(db)
.iter()
.map(|name| name.as_str().to_string())
.collect::<Vec<String>>();
names.sort();
names.join("\n")
};
insta::assert_snapshot!(
get_submodules(case.db(), &module),
@"foo",
);
case.update_options(Options::default())?;
std::fs::write(case.project_path("bar/wazoo.py").as_std_path(), "")?;
let changes = case.take_watch_changes(event_for_file("wazoo.py"));
case.apply_changes(changes, None);
insta::assert_snapshot!(
get_submodules(case.db(), &module),
@r"
foo
wazoo
",
);
Ok(())
}

View File

@@ -96,7 +96,7 @@ pub(crate) mod tests {
#[salsa::db]
impl SemanticDb for TestDb {
fn should_check_file(&self, file: File) -> bool {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}

View File

@@ -4,13 +4,12 @@ pub use crate::goto_type_definition::goto_type_definition;
use crate::find_node::covering_node;
use crate::stub_mapping::StubMapper;
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
use ruff_db::parsed::ParsedModuleRef;
use ruff_python_ast::{self as ast, AnyNodeRef};
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange, TextSize};
use ty_python_semantic::types::Type;
use ty_python_semantic::types::definitions_for_keyword_argument;
use ty_python_semantic::{HasType, SemanticModel, definitions_for_name};
use ty_python_semantic::{HasType, SemanticModel};
#[derive(Clone, Copy, Debug)]
pub(crate) enum GotoTarget<'a> {
@@ -151,19 +150,15 @@ impl GotoTarget<'_> {
use ruff_python_ast as ast;
match self {
GotoTarget::Expression(expression) => match expression {
ast::ExprRef::Name(name) => definitions_to_navigation_targets(
db,
stub_mapper,
definitions_for_name(db, file, name),
),
ast::ExprRef::Attribute(attribute) => definitions_to_navigation_targets(
db,
stub_mapper,
ty_python_semantic::definitions_for_attribute(db, file, attribute),
),
_ => None,
},
// For names, find the definitions of the symbol
GotoTarget::Expression(expression) => {
if let ast::ExprRef::Name(name) = expression {
Self::get_name_definition_targets(name, file, db, stub_mapper)
} else {
// For other expressions, we can't find definitions
None
}
}
// For already-defined symbols, they are their own definitions
GotoTarget::FunctionDef(function) => {
@@ -200,31 +195,41 @@ impl GotoTarget<'_> {
None
}
// Handle keyword arguments in call expressions
GotoTarget::KeywordArgument(keyword) => {
// Find the call expression that contains this keyword
let module = parsed_module(db, file).load(db);
// Use the keyword's range to find the containing call expression
let covering_node = covering_node(module.syntax().into(), keyword.range())
.find_first(|node| matches!(node, AnyNodeRef::ExprCall(_)))
.ok()?;
if let AnyNodeRef::ExprCall(call_expr) = covering_node.node() {
let definitions =
definitions_for_keyword_argument(db, file, keyword, call_expr);
return definitions_to_navigation_targets(db, stub_mapper, definitions);
}
None
}
// TODO: Handle attribute and method accesses (y in `x.y` expressions)
// TODO: Handle keyword arguments in call expression
// TODO: Handle multi-part module names in import statements
// TODO: Handle imported symbol in y in `from x import y as z` statement
// TODO: Handle string literals that map to TypedDict fields
_ => None,
}
}
/// Get navigation targets for definitions associated with a name expression
fn get_name_definition_targets(
name: &ruff_python_ast::ExprName,
file: ruff_db::files::File,
db: &dyn crate::Db,
stub_mapper: Option<&StubMapper>,
) -> Option<crate::NavigationTargets> {
use ty_python_semantic::definitions_for_name;
// Get all definitions for this name
let mut definitions = definitions_for_name(db, file, name);
// Apply stub mapping if a mapper is provided
if let Some(mapper) = stub_mapper {
definitions = mapper.map_definitions(definitions);
}
if definitions.is_empty() {
return None;
}
// Convert definitions to navigation targets
let targets = convert_resolved_definitions_to_targets(db, definitions);
Some(crate::NavigationTargets::unique(targets))
}
}
impl Ranged for GotoTarget<'_> {
@@ -274,35 +279,18 @@ fn convert_resolved_definitions_to_targets(
full_range: full_range.range(),
}
}
ty_python_semantic::ResolvedDefinition::FileWithRange(file_range) => {
// For file ranges, navigate to the specific range within the file
ty_python_semantic::ResolvedDefinition::ModuleFile(module_file) => {
// For module files, navigate to the beginning of the file
crate::NavigationTarget {
file: file_range.file(),
focus_range: file_range.range(),
full_range: file_range.range(),
file: module_file,
focus_range: ruff_text_size::TextRange::default(), // Start of file
full_range: ruff_text_size::TextRange::default(), // Start of file
}
}
})
.collect()
}
/// Shared helper to map and convert resolved definitions into navigation targets.
fn definitions_to_navigation_targets<'db>(
db: &dyn crate::Db,
stub_mapper: Option<&StubMapper<'db>>,
mut definitions: Vec<ty_python_semantic::ResolvedDefinition<'db>>,
) -> Option<crate::NavigationTargets> {
if let Some(mapper) = stub_mapper {
definitions = mapper.map_definitions(definitions);
}
if definitions.is_empty() {
None
} else {
let targets = convert_resolved_definitions_to_targets(db, definitions);
Some(crate::NavigationTargets::unique(targets))
}
}
pub(crate) fn find_goto_target(
parsed: &ParsedModuleRef,
offset: TextSize,

View File

@@ -611,107 +611,7 @@ def another_helper():
}
#[test]
fn goto_declaration_instance_attribute() {
let test = cursor_test(
"
class C:
def __init__(self):
self.x: int = 1
c = C()
y = c.x<CURSOR>
",
);
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> main.py:4:21
|
2 | class C:
3 | def __init__(self):
4 | self.x: int = 1
| ^^^^^^
5 |
6 | c = C()
|
info: Source
--> main.py:7:17
|
6 | c = C()
7 | y = c.x
| ^^^
|
");
}
#[test]
fn goto_declaration_instance_attribute_no_annotation() {
let test = cursor_test(
"
class C:
def __init__(self):
self.x = 1
c = C()
y = c.x<CURSOR>
",
);
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> main.py:4:21
|
2 | class C:
3 | def __init__(self):
4 | self.x = 1
| ^^^^^^
5 |
6 | c = C()
|
info: Source
--> main.py:7:17
|
6 | c = C()
7 | y = c.x
| ^^^
|
");
}
#[test]
fn goto_declaration_method_call_to_definition() {
let test = cursor_test(
"
class C:
def foo(self):
return 42
c = C()
res = c.foo<CURSOR>()
",
);
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> main.py:3:21
|
2 | class C:
3 | def foo(self):
| ^^^
4 | return 42
|
info: Source
--> main.py:7:19
|
6 | c = C()
7 | res = c.foo()
| ^^^^^
|
");
}
#[test]
fn goto_declaration_module_attribute() {
fn goto_declaration_builtin_type() {
let test = cursor_test(
r#"
x: i<CURSOR>nt = 42
@@ -821,152 +721,6 @@ def function():
"#);
}
#[test]
fn goto_declaration_inherited_attribute() {
let test = cursor_test(
"
class A:
x = 10
class B(A):
pass
b = B()
y = b.x<CURSOR>
",
);
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> main.py:3:17
|
2 | class A:
3 | x = 10
| ^
4 |
5 | class B(A):
|
info: Source
--> main.py:9:17
|
8 | b = B()
9 | y = b.x
| ^^^
|
");
}
#[test]
fn goto_declaration_property_getter_setter() {
let test = cursor_test(
"
class C:
def __init__(self):
self._value = 0
@property
def value(self):
return self._value
c = C()
c.value<CURSOR> = 42
",
);
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> main.py:7:21
|
6 | @property
7 | def value(self):
| ^^^^^
8 | return self._value
|
info: Source
--> main.py:11:13
|
10 | c = C()
11 | c.value = 42
| ^^^^^^^
|
");
}
#[test]
fn goto_declaration_function_doc_attribute() {
let test = cursor_test(
r#"
def my_function():
"""This is a docstring."""
return 42
doc = my_function.__doc<CURSOR>__
"#,
);
// Should navigate to the __doc__ property in the FunctionType class in typeshed
let result = test.goto_declaration();
assert!(
!result.contains("No goto target found"),
"Should find builtin __doc__ attribute"
);
assert!(
!result.contains("No declarations found"),
"Should find builtin __doc__ declarations"
);
// Should navigate to a typeshed file containing the __doc__ attribute
assert!(
result.contains("types.pyi") || result.contains("builtins.pyi"),
"Should navigate to typeshed file with __doc__ definition"
);
assert!(
result.contains("__doc__"),
"Should find the __doc__ attribute definition"
);
assert!(
result.contains("info[goto-declaration]: Declaration"),
"Should be a goto-declaration result"
);
}
#[test]
fn goto_declaration_protocol_instance_attribute() {
let test = cursor_test(
"
from typing import Protocol
class Drawable(Protocol):
def draw(self) -> None: ...
name: str
def use_drawable(obj: Drawable):
obj.na<CURSOR>me
",
);
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> main.py:6:17
|
4 | class Drawable(Protocol):
5 | def draw(self) -> None: ...
6 | name: str
| ^^^^
7 |
8 | def use_drawable(obj: Drawable):
|
info: Source
--> main.py:9:17
|
8 | def use_drawable(obj: Drawable):
9 | obj.name
| ^^^^^^^^
|
");
}
#[test]
fn goto_declaration_generic_method_class_type() {
let test = cursor_test(
@@ -1002,94 +756,6 @@ class MyClass:
");
}
#[test]
fn goto_declaration_keyword_argument_simple() {
let test = cursor_test(
"
def my_function(x, y, z=10):
return x + y + z
result = my_function(1, y<CURSOR>=2, z=3)
",
);
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> main.py:2:32
|
2 | def my_function(x, y, z=10):
| ^
3 | return x + y + z
|
info: Source
--> main.py:5:37
|
3 | return x + y + z
4 |
5 | result = my_function(1, y=2, z=3)
| ^
|
");
}
#[test]
fn goto_declaration_keyword_argument_overloaded() {
let test = cursor_test(
r#"
from typing import overload
@overload
def process(data: str, format: str) -> str: ...
@overload
def process(data: int, format: int) -> int: ...
def process(data, format):
return data
# Call the overloaded function
result = process("hello", format<CURSOR>="json")
"#,
);
// Should navigate to the parameter in both matching overloads
assert_snapshot!(test.goto_declaration(), @r#"
info[goto-declaration]: Declaration
--> main.py:5:36
|
4 | @overload
5 | def process(data: str, format: str) -> str: ...
| ^^^^^^
6 |
7 | @overload
|
info: Source
--> main.py:14:39
|
13 | # Call the overloaded function
14 | result = process("hello", format="json")
| ^^^^^^
|
info[goto-declaration]: Declaration
--> main.py:8:36
|
7 | @overload
8 | def process(data: int, format: int) -> int: ...
| ^^^^^^
9 |
10 | def process(data, format):
|
info: Source
--> main.py:14:39
|
13 | # Call the overloaded function
14 | result = process("hello", format="json")
| ^^^^^^
|
"#);
}
impl CursorTest {
fn goto_declaration(&self) -> String {
let Some(targets) = goto_declaration(&self.db, self.cursor.file, self.cursor.offset)

View File

@@ -105,11 +105,11 @@ pub struct NavigationTargets(smallvec::SmallVec<[NavigationTarget; 1]>);
impl NavigationTargets {
fn single(target: NavigationTarget) -> Self {
Self(smallvec::smallvec_inline![target])
Self(smallvec::smallvec![target])
}
fn empty() -> Self {
Self(smallvec::SmallVec::new_const())
Self(smallvec::SmallVec::new())
}
fn unique(targets: impl IntoIterator<Item = NavigationTarget>) -> Self {

View File

@@ -482,40 +482,25 @@ impl<'db> SemanticTokenVisitor<'db> {
parameters: &ast::Parameters,
func: Option<&ast::StmtFunctionDef>,
) {
let mut param_index = 0;
for any_param in parameters {
let parameter = any_param.as_parameter();
let token_type = match any_param {
ast::AnyParameterRef::NonVariadic(_) => {
// For non-variadic parameters (positional-only, regular, keyword-only),
// check if this should be classified as self/cls parameter
if let Some(func) = func {
let result = self.classify_parameter(parameter, param_index == 0, func);
param_index += 1;
result
} else {
// For lambdas, all parameters are just parameters (no self/cls)
param_index += 1;
SemanticTokenType::Parameter
}
}
ast::AnyParameterRef::Variadic(_) => {
// Variadic parameters (*args, **kwargs) are always just parameters
param_index += 1;
SemanticTokenType::Parameter
}
// Parameters
for (i, param) in parameters.args.iter().enumerate() {
let token_type = if let Some(func) = func {
// For function definitions, use the classification logic to determine
// whether this is a self/cls parameter or just a regular parameter
self.classify_parameter(&param.parameter, i == 0, func)
} else {
// For lambdas, all parameters are just parameters (no self/cls)
SemanticTokenType::Parameter
};
self.add_token(
parameter.name.range(),
param.parameter.name.range(),
token_type,
SemanticTokenModifier::empty(),
);
// Handle parameter type annotations
if let Some(annotation) = &parameter.annotation {
if let Some(annotation) = &param.parameter.annotation {
self.visit_type_annotation(annotation);
}
}
@@ -992,8 +977,7 @@ class MyClass:
class MyClass:
def method(instance, x): pass
@classmethod
def other(klass, y): pass
def complex_method(instance, posonly, /, regular, *args, kwonly, **kwargs): pass<CURSOR>
def other(klass, y): pass<CURSOR>
",
);
@@ -1008,13 +992,6 @@ class MyClass:
"other" @ 75..80: Method [definition]
"klass" @ 81..86: ClsParameter
"y" @ 88..89: Parameter
"complex_method" @ 105..119: Method [definition]
"instance" @ 120..128: SelfParameter
"posonly" @ 130..137: Parameter
"regular" @ 142..149: Parameter
"args" @ 152..156: Parameter
"kwonly" @ 158..164: Parameter
"kwargs" @ 168..174: Parameter
"#);
}
@@ -1688,12 +1665,6 @@ class BoundedContainer[T: int, U = str]:
"P" @ 324..325: Variable
"str" @ 327..330: Class
"wrapper" @ 341..348: Function [definition]
"args" @ 350..354: Parameter
"P" @ 356..357: Variable
"args" @ 358..362: Variable
"kwargs" @ 366..372: Parameter
"P" @ 374..375: Variable
"kwargs" @ 376..382: Variable
"str" @ 387..390: Class
"str" @ 407..410: Class
"func" @ 411..415: Variable

View File

@@ -12,8 +12,8 @@ use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::{File, Files};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use salsa::Event;
use salsa::plumbing::ZalsaDatabase;
use salsa::{Event, Setter};
use ty_ide::Db as IdeDb;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{Db as SemanticDb, Program};
@@ -82,25 +82,22 @@ impl ProjectDatabase {
Ok(db)
}
/// Checks the files in the project and its dependencies as per the project's check mode.
///
/// Use [`set_check_mode`] to update the check mode.
///
/// [`set_check_mode`]: ProjectDatabase::set_check_mode
/// Checks all open files in the project and its dependencies.
pub fn check(&self) -> Vec<Diagnostic> {
let mut reporter = DummyReporter;
let reporter = AssertUnwindSafe(&mut reporter as &mut dyn ProgressReporter);
self.project().check(self, reporter)
self.check_with_mode(CheckMode::OpenFiles)
}
/// Checks the files in the project and its dependencies, using the given reporter.
///
/// Use [`set_check_mode`] to update the check mode.
///
/// [`set_check_mode`]: ProjectDatabase::set_check_mode
/// Checks all open files in the project and its dependencies, using the given reporter.
pub fn check_with_reporter(&self, reporter: &mut dyn ProgressReporter) -> Vec<Diagnostic> {
let reporter = AssertUnwindSafe(reporter);
self.project().check(self, reporter)
self.project().check(self, CheckMode::OpenFiles, reporter)
}
/// Check the project with the given mode.
pub fn check_with_mode(&self, mode: CheckMode) -> Vec<Diagnostic> {
let mut reporter = DummyReporter;
let reporter = AssertUnwindSafe(&mut reporter as &mut dyn ProgressReporter);
self.project().check(self, mode, reporter)
}
#[tracing::instrument(level = "debug", skip(self))]
@@ -108,12 +105,6 @@ impl ProjectDatabase {
self.project().check_file(self, file)
}
/// Set the check mode for the project.
pub fn set_check_mode(&mut self, mode: CheckMode) {
tracing::debug!("Updating project to check {mode}");
self.project().set_check_mode(self).to(mode);
}
/// Returns a mutable reference to the system.
///
/// WARNING: Triggers a new revision, canceling other database handles. This can lead to deadlock.
@@ -172,28 +163,17 @@ impl std::fmt::Debug for ProjectDatabase {
}
}
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum CheckMode {
/// Checks the open files in the project.
/// Checks only the open files in the project.
OpenFiles,
/// Checks all files in the project, ignoring the open file set.
///
/// This includes virtual files, such as those opened in an editor.
#[default]
/// This includes virtual files, such as those created by the language server.
AllFiles,
}
impl fmt::Display for CheckMode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
CheckMode::OpenFiles => write!(f, "open files"),
CheckMode::AllFiles => write!(f, "all files"),
}
}
}
/// Stores memory usage information.
pub struct SalsaMemoryDump {
total_fields: usize,
@@ -409,9 +389,12 @@ impl IdeDb for ProjectDatabase {}
#[salsa::db]
impl SemanticDb for ProjectDatabase {
fn should_check_file(&self, file: File) -> bool {
self.project
.is_some_and(|project| project.should_check_file(self, file))
fn is_file_open(&self, file: File) -> bool {
let Some(project) = &self.project else {
return false;
};
project.is_file_open(self, file)
}
fn rule_selection(&self, file: File) -> &RuleSelection {
@@ -560,7 +543,7 @@ pub(crate) mod tests {
#[salsa::db]
impl ty_python_semantic::Db for TestDb {
fn should_check_file(&self, file: ruff_db::files::File) -> bool {
fn is_file_open(&self, file: ruff_db::files::File) -> bool {
!file.path(self).is_vendored_path()
}

View File

@@ -6,8 +6,7 @@ use std::collections::BTreeSet;
use crate::walk::ProjectFilesWalker;
use ruff_db::Db as _;
use ruff_db::file_revision::FileRevision;
use ruff_db::files::{File, FileRootKind, Files};
use ruff_db::files::{File, Files};
use ruff_db::system::SystemPath;
use rustc_hash::FxHashSet;
use salsa::Setter;
@@ -58,6 +57,12 @@ impl ProjectDatabase {
let mut synced_files = FxHashSet::default();
let mut sync_recursively = BTreeSet::default();
let mut sync_path = |db: &mut ProjectDatabase, path: &SystemPath| {
if synced_files.insert(path.to_path_buf()) {
File::sync_path(db, path);
}
};
for change in changes {
tracing::trace!("Handle change: {:?}", change);
@@ -87,49 +92,12 @@ impl ProjectDatabase {
match change {
ChangeEvent::Changed { path, kind: _ } | ChangeEvent::Opened(path) => {
if synced_files.insert(path.to_path_buf()) {
let absolute =
SystemPath::absolute(&path, self.system().current_directory());
File::sync_path_only(self, &absolute);
if let Some(root) = self.files().root(self, &absolute) {
match root.kind_at_time_of_creation(self) {
// When a file inside the root of
// the project is changed, we don't
// want to mark the entire root as
// having changed too. In theory it
// might make sense to, but at time
// of writing, the file root revision
// on a project is used to invalidate
// the submodule files found within a
// directory. If we bumped the revision
// on every change within a project,
// then this caching technique would be
// effectively useless.
//
// It's plausible we should explore
// a more robust cache invalidation
// strategy that models more directly
// what we care about. For example, by
// keeping track of directories and
// their direct children explicitly,
// and then keying the submodule cache
// off of that instead. ---AG
FileRootKind::Project => {}
FileRootKind::LibrarySearchPath => {
root.set_revision(self).to(FileRevision::now());
}
}
}
}
sync_path(self, &path);
}
ChangeEvent::Created { kind, path } => {
match kind {
CreatedKind::File => {
if synced_files.insert(path.to_path_buf()) {
File::sync_path(self, &path);
}
}
CreatedKind::File => sync_path(self, &path),
CreatedKind::Directory | CreatedKind::Any => {
sync_recursively.insert(path.clone());
}
@@ -170,9 +138,7 @@ impl ProjectDatabase {
};
if is_file {
if synced_files.insert(path.to_path_buf()) {
File::sync_path(self, &path);
}
sync_path(self, &path);
if let Some(file) = self.files().try_system(self, &path) {
project.remove_file(self, file);

View File

@@ -18,7 +18,7 @@ use crate::{IOErrorDiagnostic, Project};
/// The implementation uses internal mutability to transition between the lazy and indexed state
/// without triggering a new salsa revision. This is safe because the initial indexing happens on first access,
/// so no query can be depending on the contents of the indexed files before that. All subsequent mutations to
/// the indexed files must go through `IndexedMut`, which uses the Salsa setter `project.set_file_set` to
/// the indexed files must go through `IndexedMut`, which uses the Salsa setter `package.set_file_set` to
/// ensure that Salsa always knows when the set of indexed files have changed.
#[derive(Debug)]
pub struct IndexedFiles {
@@ -280,7 +280,7 @@ mod tests {
// Calling files a second time should not dead-lock.
// This can e.g. happen when `check_file` iterates over all files and
// `should_check_file` queries the open files.
// `is_file_open` queries the open files.
let files_2 = project.file_set(&db).get();
match files_2 {

View File

@@ -6,7 +6,7 @@ use files::{Index, Indexed, IndexedFiles};
use metadata::settings::Settings;
pub use metadata::{ProjectMetadata, ProjectMetadataError};
use ruff_db::diagnostic::{Annotation, Diagnostic, DiagnosticId, Severity, Span, SubDiagnostic};
use ruff_db::files::{File, FileRootKind};
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_db::source::{SourceTextError, source_text};
use ruff_db::system::{SystemPath, SystemPathBuf};
@@ -14,8 +14,6 @@ use rustc_hash::FxHashSet;
use salsa::Durability;
use salsa::Setter;
use std::backtrace::BacktraceStatus;
use std::collections::hash_set;
use std::iter::FusedIterator;
use std::panic::{AssertUnwindSafe, UnwindSafe};
use std::sync::Arc;
use thiserror::Error;
@@ -56,10 +54,13 @@ pub fn default_lints_registry() -> LintRegistry {
#[salsa::input]
#[derive(Debug)]
pub struct Project {
/// The files that are open in the project, [`None`] if there are no open files.
#[returns(ref)]
/// The files that are open in the project.
///
/// Setting the open files to a non-`None` value changes `check` to only check the
/// open files rather than all files in the project.
#[returns(as_deref)]
#[default]
open_fileset: FxHashSet<File>,
open_fileset: Option<Arc<FxHashSet<File>>>,
/// The first-party files of this project.
#[default]
@@ -109,13 +110,6 @@ pub struct Project {
/// Diagnostics that were generated when resolving the project settings.
#[returns(deref)]
settings_diagnostics: Vec<OptionDiagnostic>,
/// The mode in which the project should be checked.
///
/// This changes the behavior of `check` to either check only the open files or all files in
/// the project including the virtual files that might exists in the editor.
#[default]
check_mode: CheckMode,
}
/// A progress reporter.
@@ -141,13 +135,6 @@ impl Project {
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Result<Self, ToSettingsError> {
let (settings, diagnostics) = metadata.options().to_settings(db, metadata.root())?;
// This adds a file root for the project itself. This enables
// tracking of when changes are made to the files in a project
// at the directory level. At time of writing (2025-07-17),
// this is used for caching completions for submodules.
db.files()
.try_add_root(db, metadata.root(), FileRootKind::Project);
let project = Project::builder(Box::new(metadata), Box::new(settings), diagnostics)
.durability(Durability::MEDIUM)
.open_fileset_durability(Durability::LOW)
@@ -220,20 +207,17 @@ impl Project {
self.reload_files(db);
}
/// Checks the project and its dependencies according to the project's check mode.
/// Checks all open files in the project and its dependencies.
pub(crate) fn check(
self,
db: &ProjectDatabase,
mode: CheckMode,
mut reporter: AssertUnwindSafe<&mut dyn ProgressReporter>,
) -> Vec<Diagnostic> {
let project_span = tracing::debug_span!("Project::check");
let _span = project_span.enter();
tracing::debug!(
"Checking {} in project '{name}'",
self.check_mode(db),
name = self.name(db)
);
tracing::debug!("Checking project '{name}'", name = self.name(db));
let mut diagnostics: Vec<Diagnostic> = Vec::new();
diagnostics.extend(
@@ -242,7 +226,11 @@ impl Project {
.map(OptionDiagnostic::to_diagnostic),
);
let files = ProjectFiles::new(db, self);
let files = match mode {
CheckMode::OpenFiles => ProjectFiles::new(db, self),
// TODO: Consider open virtual files as well
CheckMode::AllFiles => ProjectFiles::Indexed(self.files(db)),
};
reporter.set_files(files.len());
diagnostics.extend(
@@ -296,7 +284,7 @@ impl Project {
}
pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Diagnostic> {
if !self.should_check_file(db, file) {
if !self.is_file_open(db, file) {
return Vec::new();
}
@@ -304,6 +292,8 @@ impl Project {
}
/// Opens a file in the project.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
pub fn open_file(self, db: &mut dyn Db, file: File) {
tracing::debug!("Opening file `{}`", file.path(db));
@@ -350,40 +340,45 @@ impl Project {
}
}
/// Returns the open files in the project or `None` if there are no open files.
pub fn open_files(self, db: &dyn Db) -> &FxHashSet<File> {
/// Returns the open files in the project or `None` if the entire project should be checked.
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
self.open_fileset(db)
}
/// Sets the open files in the project.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
#[tracing::instrument(level = "debug", skip(self, db))]
pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
tracing::debug!("Set open project files (count: {})", open_files.len());
self.set_open_fileset(db).to(open_files);
self.set_open_fileset(db).to(Some(Arc::new(open_files)));
}
/// This takes the open files from the project and returns them.
///
/// This changes the behavior of `check` to check all files in the project instead of just the open files.
fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
tracing::debug!("Take open project files");
// Salsa will cancel any pending queries and remove its own reference to `open_files`
// so that the reference counter to `open_files` now drops to 1.
self.set_open_fileset(db).to(FxHashSet::default())
let open_files = self.set_open_fileset(db).to(None);
if let Some(open_files) = open_files {
Arc::try_unwrap(open_files).unwrap()
} else {
FxHashSet::default()
}
}
/// Returns `true` if the file should be checked.
/// Returns `true` if the file is open in the project.
///
/// This depends on the project's check mode:
/// * For [`OpenFiles`], it checks if the file is either explicitly set as an open file using
/// [`open_file`] or a system virtual path
/// * For [`AllFiles`], it checks if the file is either a system virtual path or a part of the
/// indexed files in the project
///
/// [`open_file`]: Self::open_file
/// [`OpenFiles`]: CheckMode::OpenFiles
/// [`AllFiles`]: CheckMode::AllFiles
pub fn should_check_file(self, db: &dyn Db, file: File) -> bool {
/// A file is considered open when:
/// * explicitly set as an open file using [`open_file`](Self::open_file)
/// * It has a [`SystemPath`] and belongs to a package's `src` files
/// * It has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
let path = file.path(db);
// Try to return early to avoid adding a dependency on `open_files` or `file_set` which
@@ -392,12 +387,12 @@ impl Project {
return false;
}
match self.check_mode(db) {
CheckMode::OpenFiles => self.open_files(db).contains(&file),
CheckMode::AllFiles => {
// Virtual files are always checked.
path.is_system_virtual_path() || self.files(db).contains(&file)
}
if let Some(open_files) = self.open_files(db) {
open_files.contains(&file)
} else if file.path(db).is_system_path() {
self.files(db).contains(&file)
} else {
file.path(db).is_system_virtual_path()
}
}
@@ -474,14 +469,6 @@ impl Project {
self.set_file_set(db).to(IndexedFiles::lazy());
}
}
/// Check if the project's settings have any issues
pub fn check_settings(&self, db: &dyn Db) -> Vec<Diagnostic> {
self.settings_diagnostics(db)
.iter()
.map(OptionDiagnostic::to_diagnostic)
.collect()
}
}
#[salsa::tracked(returns(deref), heap_size=get_size2::GetSize::get_heap_size)]
@@ -529,7 +516,11 @@ pub(crate) fn check_file_impl(db: &dyn Db, file: File) -> Box<[Diagnostic]> {
}
}
if !db.project().open_fileset(db).contains(&file) {
if db
.project()
.open_fileset(db)
.is_none_or(|files| !files.contains(&file))
{
// Drop the AST now that we are done checking this file. It is not currently open,
// so it is unlikely to be accessed again soon. If any queries need to access the AST
// from across files, it will be re-parsed.
@@ -555,23 +546,24 @@ enum ProjectFiles<'a> {
impl<'a> ProjectFiles<'a> {
fn new(db: &'a dyn Db, project: Project) -> Self {
match project.check_mode(db) {
CheckMode::OpenFiles => ProjectFiles::OpenFiles(project.open_files(db)),
CheckMode::AllFiles => ProjectFiles::Indexed(project.files(db)),
if let Some(open_files) = project.open_files(db) {
ProjectFiles::OpenFiles(open_files)
} else {
ProjectFiles::Indexed(project.files(db))
}
}
fn diagnostics(&self) -> &[IOErrorDiagnostic] {
match self {
ProjectFiles::OpenFiles(_) => &[],
ProjectFiles::Indexed(files) => files.diagnostics(),
ProjectFiles::Indexed(indexed) => indexed.diagnostics(),
}
}
fn len(&self) -> usize {
match self {
ProjectFiles::OpenFiles(open_files) => open_files.len(),
ProjectFiles::Indexed(files) => files.len(),
ProjectFiles::Indexed(indexed) => indexed.len(),
}
}
}
@@ -583,14 +575,16 @@ impl<'a> IntoIterator for &'a ProjectFiles<'a> {
fn into_iter(self) -> Self::IntoIter {
match self {
ProjectFiles::OpenFiles(files) => ProjectFilesIter::OpenFiles(files.iter()),
ProjectFiles::Indexed(files) => ProjectFilesIter::Indexed(files.into_iter()),
ProjectFiles::Indexed(indexed) => ProjectFilesIter::Indexed {
files: indexed.into_iter(),
},
}
}
}
enum ProjectFilesIter<'db> {
OpenFiles(hash_set::Iter<'db, File>),
Indexed(files::IndexedIter<'db>),
OpenFiles(std::collections::hash_set::Iter<'db, File>),
Indexed { files: files::IndexedIter<'db> },
}
impl Iterator for ProjectFilesIter<'_> {
@@ -599,13 +593,11 @@ impl Iterator for ProjectFilesIter<'_> {
fn next(&mut self) -> Option<Self::Item> {
match self {
ProjectFilesIter::OpenFiles(files) => files.next().copied(),
ProjectFilesIter::Indexed(files) => files.next(),
ProjectFilesIter::Indexed { files } => files.next(),
}
}
}
impl FusedIterator for ProjectFilesIter<'_> {}
#[derive(Debug, Clone)]
pub struct IOErrorDiagnostic {
file: Option<File>,

View File

@@ -372,11 +372,11 @@ mod tests {
with_escaped_paths(|| {
assert_ron_snapshot!(&project, @r#"
ProjectMetadata(
name: Name("app"),
root: "/app",
options: Options(),
)
ProjectMetadata(
name: Name("app"),
root: "/app",
options: Options(),
)
"#);
});
@@ -410,11 +410,11 @@ mod tests {
with_escaped_paths(|| {
assert_ron_snapshot!(&project, @r#"
ProjectMetadata(
name: Name("backend"),
root: "/app",
options: Options(),
)
ProjectMetadata(
name: Name("backend"),
root: "/app",
options: Options(),
)
"#);
});
@@ -552,16 +552,16 @@ unclosed table, expected `]`
with_escaped_paths(|| {
assert_ron_snapshot!(root, @r#"
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
src: Some(SrcOptions(
root: Some("src"),
)),
),
)
"#);
ProjectMetadata(
name: Name("project-root"),
root: "/app",
options: Options(
src: Some(SrcOptions(
root: Some("src"),
)),
),
)
"#);
});
Ok(())

View File

@@ -1806,7 +1806,7 @@ class Frozen:
raise AttributeError("Attributes can not be modified")
instance = Frozen()
instance.non_existing = 2 # error: [invalid-assignment] "Can not assign to unresolved attribute `non_existing` on type `Frozen`"
instance.non_existing = 2 # error: [invalid-assignment] "Cannot assign to attribute `non_existing` on type `Frozen` whose `__setattr__` method returns `Never`/`NoReturn`"
instance.existing = 2 # error: [invalid-assignment] "Cannot assign to attribute `existing` on type `Frozen` whose `__setattr__` method returns `Never`/`NoReturn`"
```

View File

@@ -415,7 +415,8 @@ frozen_instance = MyFrozenGeneric[int](1)
frozen_instance.x = 2 # error: [invalid-assignment]
```
Attempting to mutate an unresolved attribute on a frozen dataclass:
When attempting to mutate an unresolved attribute on a frozen dataclass, only `unresolved-attribute`
is emitted:
```py
from dataclasses import dataclass
@@ -424,39 +425,7 @@ from dataclasses import dataclass
class MyFrozenClass: ...
frozen = MyFrozenClass()
frozen.x = 2 # error: [invalid-assignment] "Can not assign to unresolved attribute `x` on type `MyFrozenClass`"
```
A diagnostic is also emitted if a frozen dataclass is inherited, and an attempt is made to mutate an
attribute in the child class:
```py
from dataclasses import dataclass
@dataclass(frozen=True)
class MyFrozenClass:
x: int = 1
class MyFrozenChildClass(MyFrozenClass): ...
frozen = MyFrozenChildClass()
frozen.x = 2 # error: [invalid-assignment]
```
The same diagnostic is emitted if a frozen dataclass is inherited, and an attempt is made to delete
an attribute:
```py
from dataclasses import dataclass
@dataclass(frozen=True)
class MyFrozenClass:
x: int = 1
class MyFrozenChildClass(MyFrozenClass): ...
frozen = MyFrozenChildClass()
del frozen.x # TODO this should emit an [invalid-assignment]
frozen.x = 2 # error: [unresolved-attribute]
```
### `match_args`

View File

@@ -40,92 +40,22 @@ else:
# error: [possibly-unresolved-reference]
reveal_type(c) # revealed: Literal[2]
d = [1, 2, 3]
d = 1
def delete():
del d # error: [unresolved-reference] "Name `d` used when not defined"
# TODO: this results in `UnboundLocalError`; we should emit `unresolved-reference`
del d
delete()
reveal_type(d) # revealed: list[Unknown]
def delete_element():
# When the `del` target isn't a name, it doesn't force local resolution.
del d[0]
print(d)
reveal_type(d) # revealed: Literal[1]
def delete_global():
global d
del d
# We could lint that `d` is unbound in this trivial case, but because it's global we'd need to
# be careful about false positives if `d` got reinitialized somehow in between the two `del`s.
del d
delete_global()
# Again, the variable should have been removed, but we don't check it.
reveal_type(d) # revealed: list[Unknown]
def delete_nonlocal():
e = 2
def delete_nonlocal_bad():
del e # error: [unresolved-reference] "Name `e` used when not defined"
def delete_nonlocal_ok():
nonlocal e
del e
# As with `global` above, we don't track that the nonlocal `e` is unbound.
del e
```
## `del` forces local resolution even if it's unreachable
Without a `global x` or `nonlocal x` declaration in `foo`, `del x` in `foo` causes `print(x)` in an
inner function `bar` to resolve to `foo`'s binding, in this case an unresolved reference / unbound
local error:
```py
x = 1
def foo():
print(x) # error: [unresolved-reference] "Name `x` used when not defined"
if False:
# Assigning to `x` would have the same effect here.
del x
def bar():
print(x) # error: [unresolved-reference] "Name `x` used when not defined"
```
## But `del` doesn't force local resolution of `global` or `nonlocal` variables
However, with `global x` in `foo`, `print(x)` in `bar` resolves in the global scope, despite the
`del` in `foo`:
```py
x = 1
def foo():
global x
def bar():
# allowed, refers to `x` in the global scope
reveal_type(x) # revealed: Unknown | Literal[1]
bar()
del x # allowed, deletes `x` in the global scope (though we don't track that)
```
`nonlocal x` has a similar effect, if we add an extra `enclosing` scope to give it something to
refer to:
```py
def enclosing():
x = 2
def foo():
nonlocal x
def bar():
# allowed, refers to `x` in `enclosing`
reveal_type(x) # revealed: Unknown | Literal[2]
bar()
del x # allowed, deletes `x` in `enclosing` (though we don't track that)
# The variable should have been removed, but we won't check it for now.
reveal_type(d) # revealed: Literal[1]
```
## Delete attributes

View File

@@ -1,355 +0,0 @@
# Tests for the `@deprecated` decorator
## Introduction
<!-- snapshot-diagnostics -->
The decorator `@deprecated("some message")` can be applied to functions, methods, overloads, and
classes. Uses of these items should subsequently produce a warning.
```py
from typing_extensions import deprecated
@deprecated("use OtherClass")
def myfunc(): ...
myfunc() # error: [deprecated] "use OtherClass"
```
```py
from typing_extensions import deprecated
@deprecated("use BetterClass")
class MyClass: ...
MyClass() # error: [deprecated] "use BetterClass"
```
```py
from typing_extensions import deprecated
class MyClass:
@deprecated("use something else")
def afunc(): ...
@deprecated("don't use this!")
def amethod(self): ...
MyClass.afunc() # error: [deprecated] "use something else"
MyClass().amethod() # error: [deprecated] "don't use this!"
```
## Syntax
<!-- snapshot-diagnostics -->
The typeshed declaration of the decorator is as follows:
```ignore
class deprecated:
message: LiteralString
category: type[Warning] | None
stacklevel: int
def __init__(self, message: LiteralString, /, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> None: ...
def __call__(self, arg: _T, /) -> _T: ...
```
Only the mandatory message string is of interest to static analysis, the other two affect only
runtime behaviour.
```py
from typing_extensions import deprecated
@deprecated # error: [invalid-argument-type] "LiteralString"
def invalid_deco(): ...
invalid_deco() # error: [missing-argument]
```
```py
from typing_extensions import deprecated
@deprecated() # error: [missing-argument] "message"
def invalid_deco(): ...
invalid_deco()
```
The argument is supposed to be a LiteralString, and we can handle simple constant propagations like
this:
```py
from typing_extensions import deprecated
x = "message"
@deprecated(x)
def invalid_deco(): ...
invalid_deco() # error: [deprecated] "message"
```
However sufficiently opaque LiteralStrings we can't resolve, and so we lose the message:
```py
from typing_extensions import deprecated, LiteralString
def opaque() -> LiteralString:
return "message"
@deprecated(opaque())
def valid_deco(): ...
valid_deco() # error: [deprecated]
```
Fully dynamic strings are technically allowed at runtime, but typeshed mandates that the input is a
LiteralString, so we can/should emit a diagnostic for this:
```py
from typing_extensions import deprecated
def opaque() -> str:
return "message"
@deprecated(opaque()) # error: [invalid-argument-type] "LiteralString"
def dubious_deco(): ...
dubious_deco()
```
Although we have no use for the other arguments, we should still error if they're wrong.
```py
from typing_extensions import deprecated
@deprecated("some message", dsfsdf="whatever") # error: [unknown-argument] "dsfsdf"
def invalid_deco(): ...
invalid_deco()
```
And we should always handle correct ones fine.
```py
from typing_extensions import deprecated
@deprecated("some message", category=DeprecationWarning, stacklevel=1)
def valid_deco(): ...
valid_deco() # error: [deprecated] "some message"
```
## Different Versions
There are 2 different sources of `@deprecated`: `warnings` and `typing_extensions`. The version in
`warnings` was added in 3.13, the version in `typing_extensions` is a compatibility shim.
```toml
[environment]
python-version = "3.13"
```
`main.py`:
```py
import warnings
import typing_extensions
@warnings.deprecated("nope")
def func1(): ...
@typing_extensions.deprecated("nada")
def func2(): ...
func1() # error: [deprecated] "nope"
func2() # error: [deprecated] "nada"
```
## Imports
### Direct Import Deprecated
Importing a deprecated item should produce a warning. Subsequent uses of the deprecated item
shouldn't produce a warning.
`module.py`:
```py
from typing_extensions import deprecated
@deprecated("Use OtherType instead")
class DeprType: ...
@deprecated("Use other_func instead")
def depr_func(): ...
```
`main.py`:
```py
# error: [deprecated] "Use OtherType instead"
# error: [deprecated] "Use other_func instead"
from module import DeprType, depr_func
# TODO: these diagnostics ideally shouldn't fire since we warn on the import
DeprType() # error: [deprecated] "Use OtherType instead"
depr_func() # error: [deprecated] "Use other_func instead"
def higher_order(x): ...
# TODO: these diagnostics ideally shouldn't fire since we warn on the import
higher_order(DeprType) # error: [deprecated] "Use OtherType instead"
higher_order(depr_func) # error: [deprecated] "Use other_func instead"
# TODO: these diagnostics ideally shouldn't fire since we warn on the import
DeprType.__str__ # error: [deprecated] "Use OtherType instead"
depr_func.__str__ # error: [deprecated] "Use other_func instead"
```
### Non-Import Deprecated
If the items aren't imported and instead referenced using `module.item` then each use should produce
a warning.
`module.py`:
```py
from typing_extensions import deprecated
@deprecated("Use OtherType instead")
class DeprType: ...
@deprecated("Use other_func instead")
def depr_func(): ...
```
`main.py`:
```py
import module
module.DeprType() # error: [deprecated] "Use OtherType instead"
module.depr_func() # error: [deprecated] "Use other_func instead"
def higher_order(x): ...
higher_order(module.DeprType) # error: [deprecated] "Use OtherType instead"
higher_order(module.depr_func) # error: [deprecated] "Use other_func instead"
module.DeprType.__str__ # error: [deprecated] "Use OtherType instead"
module.depr_func.__str__ # error: [deprecated] "Use other_func instead"
```
### Star Import Deprecated
If the items are instead star-imported, then the actual uses should warn.
`module.py`:
```py
from typing_extensions import deprecated
@deprecated("Use OtherType instead")
class DeprType: ...
@deprecated("Use other_func instead")
def depr_func(): ...
```
`main.py`:
```py
from module import *
DeprType() # error: [deprecated] "Use OtherType instead"
depr_func() # error: [deprecated] "Use other_func instead"
def higher_order(x): ...
higher_order(DeprType) # error: [deprecated] "Use OtherType instead"
higher_order(depr_func) # error: [deprecated] "Use other_func instead"
DeprType.__str__ # error: [deprecated] "Use OtherType instead"
depr_func.__str__ # error: [deprecated] "Use other_func instead"
```
## Aliases
Ideally a deprecated warning shouldn't transitively follow assignments, as you already had to "name"
the deprecated symbol to assign it to something else. These kinds of diagnostics would therefore be
redundant and annoying.
```py
from typing_extensions import deprecated
@deprecated("Use OtherType instead")
class DeprType: ...
@deprecated("Use other_func instead")
def depr_func(): ...
alias_func = depr_func # error: [deprecated] "Use other_func instead"
AliasClass = DeprType # error: [deprecated] "Use OtherType instead"
# TODO: these diagnostics ideally shouldn't fire
alias_func() # error: [deprecated] "Use other_func instead"
AliasClass() # error: [deprecated] "Use OtherType instead"
```
## Dunders
If a dunder like `__add__` is deprecated, then the equivalent syntactic sugar like `+` should fire a
diagnostic.
```py
from typing_extensions import deprecated
class MyInt:
def __init__(self, val):
self.val = val
@deprecated("MyInt `+` support is broken")
def __add__(self, other):
return MyInt(self.val + other.val)
x = MyInt(1)
y = MyInt(2)
z = x + y # TODO error: [deprecated] "MyInt `+` support is broken"
```
## Overloads
Overloads can be deprecated, but only trigger warnings when invoked.
```py
from typing_extensions import deprecated
from typing_extensions import overload
@overload
@deprecated("strings are no longer supported")
def f(x: str): ...
@overload
def f(x: int): ...
def f(x):
print(x)
f(1)
f("hello") # TODO: error: [deprecated] "strings are no longer supported"
```
If the actual impl is deprecated, the deprecation always fires.
```py
from typing_extensions import deprecated
from typing_extensions import overload
@overload
def f(x: str): ...
@overload
def f(x: int): ...
@deprecated("unusable")
def f(x):
print(x)
f(1) # error: [deprecated] "unusable"
f("hello") # error: [deprecated] "unusable"
```

View File

@@ -134,6 +134,7 @@ since these functions will never actually be called.
```py
from typing import TYPE_CHECKING
import typing
if TYPE_CHECKING:
def f() -> int: ...
@@ -199,6 +200,9 @@ if get_bool():
if TYPE_CHECKING:
if not TYPE_CHECKING:
def n() -> str: ...
if typing.TYPE_CHECKING:
def o() -> str: ...
```
## Conditional return type

View File

@@ -3,27 +3,37 @@
## `typing.TYPE_CHECKING`
This constant is `True` when in type-checking mode, `False` otherwise. The symbol is defined to be
`False` at runtime. In typeshed, it is annotated as `bool`. This test makes sure that we infer
`Literal[True]` for it anyways.
`False` at runtime. In typeshed, it is annotated as `bool`.
### Basic
```py
from typing import TYPE_CHECKING
import typing
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
reveal_type(typing.TYPE_CHECKING) # revealed: Literal[True]
if TYPE_CHECKING:
type_checking = True
if not TYPE_CHECKING:
runtime = True
# type_checking is treated as unconditionally assigned.
reveal_type(type_checking) # revealed: Literal[True]
# error: [unresolved-reference]
reveal_type(runtime) # revealed: Unknown
```
### Aliased
Make sure that we still infer the correct type if the constant has been given a different name:
### As module attribute
```py
from typing import TYPE_CHECKING as TC
import typing
reveal_type(TC) # revealed: Literal[True]
if typing.TYPE_CHECKING:
type_checking = True
if not typing.TYPE_CHECKING:
runtime = True
reveal_type(type_checking) # revealed: Literal[True]
# error: [unresolved-reference]
reveal_type(runtime) # revealed: Unknown
```
### `typing_extensions` re-export
@@ -33,7 +43,14 @@ This should behave in the same way as `typing.TYPE_CHECKING`:
```py
from typing_extensions import TYPE_CHECKING
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
if TYPE_CHECKING:
type_checking = True
if not TYPE_CHECKING:
runtime = True
reveal_type(type_checking) # revealed: Literal[True]
# error: [unresolved-reference]
reveal_type(runtime) # revealed: Unknown
```
## User-defined `TYPE_CHECKING`
@@ -46,7 +63,7 @@ type checkers, e.g. mypy and pyright.
```py
TYPE_CHECKING = False
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
if TYPE_CHECKING:
type_checking = True
if not TYPE_CHECKING:
@@ -61,11 +78,11 @@ reveal_type(runtime) # revealed: Unknown
### With a type annotation
We can also define `TYPE_CHECKING` with a type annotation. The type must be one to which `bool` can
be assigned. Even in this case, the type of `TYPE_CHECKING` is still inferred to be `Literal[True]`.
be assigned.
```py
TYPE_CHECKING: bool = False
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
if TYPE_CHECKING:
type_checking = True
if not TYPE_CHECKING:
@@ -84,6 +101,21 @@ reveal_type(runtime) # revealed: Unknown
TYPE_CHECKING = False
```
```py
from constants import TYPE_CHECKING
if TYPE_CHECKING:
type_checking = True
if not TYPE_CHECKING:
runtime = True
reveal_type(type_checking) # revealed: Literal[True]
# error: [unresolved-reference]
reveal_type(runtime) # revealed: Unknown
```
### Importing user-defined `TYPE_CHECKING` from stub
`stub.pyi`:
```pyi
@@ -93,13 +125,16 @@ TYPE_CHECKING: bool = ...
```
```py
from constants import TYPE_CHECKING
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
from stub import TYPE_CHECKING
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
if TYPE_CHECKING:
type_checking = True
if not TYPE_CHECKING:
runtime = True
reveal_type(type_checking) # revealed: Literal[True]
# error: [unresolved-reference]
reveal_type(runtime) # revealed: Unknown
```
### Invalid assignment to `TYPE_CHECKING`
@@ -122,12 +157,14 @@ TYPE_CHECKING: int = 1
# error: [invalid-type-checking-constant]
TYPE_CHECKING: str = "str"
# error: [invalid-assignment]
# error: [invalid-type-checking-constant]
TYPE_CHECKING: str = False
# error: [invalid-type-checking-constant]
TYPE_CHECKING: Literal[False] = False
# error: [invalid-assignment]
# error: [invalid-type-checking-constant]
TYPE_CHECKING: Literal[True] = False
```
@@ -140,6 +177,7 @@ from typing import Literal
# error: [invalid-type-checking-constant]
TYPE_CHECKING: str
# error: [invalid-assignment]
# error: [invalid-type-checking-constant]
TYPE_CHECKING: str = False

View File

@@ -84,52 +84,6 @@ def f():
x = "hello" # error: [invalid-assignment] "Object of type `Literal["hello"]` is not assignable to `int`"
```
## The types of `nonlocal` binding get unioned
Without a type declaration, we union the bindings in enclosing scopes to infer a type. But name
resolution stops at the closest binding that isn't declared `nonlocal`, and we ignore bindings
outside of that one:
```py
def a():
# This binding is shadowed in `b`, so we ignore it in inner scopes.
x = 1
def b():
x = 2
def c():
nonlocal x
x = 3
def d():
nonlocal x
reveal_type(x) # revealed: Unknown | Literal[3, 2]
x = 4
reveal_type(x) # revealed: Literal[4]
def e():
reveal_type(x) # revealed: Unknown | Literal[4, 3, 2]
```
However, currently the union of types that we build is incomplete. We walk parent scopes, but not
sibling scopes, child scopes, second-cousin-once-removed scopes, etc:
```py
def a():
x = 1
def b():
nonlocal x
x = 2
def c():
def d():
nonlocal x
x = 3
# TODO: This should include 2 and 3.
reveal_type(x) # revealed: Unknown | Literal[1]
```
## Local variable bindings "look ahead" to any assignment in the current scope
The binding `x = 2` in `g` causes the earlier read of `x` to refer to `g`'s not-yet-initialized
@@ -436,13 +390,3 @@ def f():
nonlocal x
x = 1
```
## Narrowing nonlocal types to `Never` doesn't make them unbound
```py
def foo():
x: int = 1
def bar():
if isinstance(x, str):
reveal_type(x) # revealed: Never
```

View File

@@ -1,93 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: deprecated.md - Tests for the `@deprecated` decorator - Introduction
mdtest path: crates/ty_python_semantic/resources/mdtest/deprecated.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import deprecated
2 |
3 | @deprecated("use OtherClass")
4 | def myfunc(): ...
5 |
6 | myfunc() # error: [deprecated] "use OtherClass"
7 | from typing_extensions import deprecated
8 |
9 | @deprecated("use BetterClass")
10 | class MyClass: ...
11 |
12 | MyClass() # error: [deprecated] "use BetterClass"
13 | from typing_extensions import deprecated
14 |
15 | class MyClass:
16 | @deprecated("use something else")
17 | def afunc(): ...
18 | @deprecated("don't use this!")
19 | def amethod(self): ...
20 |
21 | MyClass.afunc() # error: [deprecated] "use something else"
22 | MyClass().amethod() # error: [deprecated] "don't use this!"
```
# Diagnostics
```
warning[deprecated]: The function `myfunc` is deprecated
--> src/mdtest_snippet.py:6:1
|
4 | def myfunc(): ...
5 |
6 | myfunc() # error: [deprecated] "use OtherClass"
| ^^^^^^ use OtherClass
7 | from typing_extensions import deprecated
|
info: rule `deprecated` is enabled by default
```
```
warning[deprecated]: The class `MyClass` is deprecated
--> src/mdtest_snippet.py:12:1
|
10 | class MyClass: ...
11 |
12 | MyClass() # error: [deprecated] "use BetterClass"
| ^^^^^^^ use BetterClass
13 | from typing_extensions import deprecated
|
info: rule `deprecated` is enabled by default
```
```
warning[deprecated]: The function `afunc` is deprecated
--> src/mdtest_snippet.py:21:9
|
19 | def amethod(self): ...
20 |
21 | MyClass.afunc() # error: [deprecated] "use something else"
| ^^^^^ use something else
22 | MyClass().amethod() # error: [deprecated] "don't use this!"
|
info: rule `deprecated` is enabled by default
```
```
warning[deprecated]: The function `amethod` is deprecated
--> src/mdtest_snippet.py:22:11
|
21 | MyClass.afunc() # error: [deprecated] "use something else"
22 | MyClass().amethod() # error: [deprecated] "don't use this!"
| ^^^^^^^ don't use this!
|
info: rule `deprecated` is enabled by default
```

View File

@@ -1,178 +0,0 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: deprecated.md - Tests for the `@deprecated` decorator - Syntax
mdtest path: crates/ty_python_semantic/resources/mdtest/deprecated.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing_extensions import deprecated
2 |
3 | @deprecated # error: [invalid-argument-type] "LiteralString"
4 | def invalid_deco(): ...
5 |
6 | invalid_deco() # error: [missing-argument]
7 | from typing_extensions import deprecated
8 |
9 | @deprecated() # error: [missing-argument] "message"
10 | def invalid_deco(): ...
11 |
12 | invalid_deco()
13 | from typing_extensions import deprecated
14 |
15 | x = "message"
16 |
17 | @deprecated(x)
18 | def invalid_deco(): ...
19 |
20 | invalid_deco() # error: [deprecated] "message"
21 | from typing_extensions import deprecated, LiteralString
22 |
23 | def opaque() -> LiteralString:
24 | return "message"
25 |
26 | @deprecated(opaque())
27 | def valid_deco(): ...
28 |
29 | valid_deco() # error: [deprecated]
30 | from typing_extensions import deprecated
31 |
32 | def opaque() -> str:
33 | return "message"
34 |
35 | @deprecated(opaque()) # error: [invalid-argument-type] "LiteralString"
36 | def dubious_deco(): ...
37 |
38 | dubious_deco()
39 | from typing_extensions import deprecated
40 |
41 | @deprecated("some message", dsfsdf="whatever") # error: [unknown-argument] "dsfsdf"
42 | def invalid_deco(): ...
43 |
44 | invalid_deco()
45 | from typing_extensions import deprecated
46 |
47 | @deprecated("some message", category=DeprecationWarning, stacklevel=1)
48 | def valid_deco(): ...
49 |
50 | valid_deco() # error: [deprecated] "some message"
```
# Diagnostics
```
error[invalid-argument-type]: Argument to class `deprecated` is incorrect
--> src/mdtest_snippet.py:3:1
|
1 | from typing_extensions import deprecated
2 |
3 | @deprecated # error: [invalid-argument-type] "LiteralString"
| ^^^^^^^^^^^ Expected `LiteralString`, found `def invalid_deco() -> Unknown`
4 | def invalid_deco(): ...
|
info: rule `invalid-argument-type` is enabled by default
```
```
error[missing-argument]: No argument provided for required parameter `arg` of bound method `__call__`
--> src/mdtest_snippet.py:6:1
|
4 | def invalid_deco(): ...
5 |
6 | invalid_deco() # error: [missing-argument]
| ^^^^^^^^^^^^^^
7 | from typing_extensions import deprecated
|
info: rule `missing-argument` is enabled by default
```
```
error[missing-argument]: No argument provided for required parameter `message` of class `deprecated`
--> src/mdtest_snippet.py:9:2
|
7 | from typing_extensions import deprecated
8 |
9 | @deprecated() # error: [missing-argument] "message"
| ^^^^^^^^^^^^
10 | def invalid_deco(): ...
|
info: rule `missing-argument` is enabled by default
```
```
warning[deprecated]: The function `invalid_deco` is deprecated
--> src/mdtest_snippet.py:20:1
|
18 | def invalid_deco(): ...
19 |
20 | invalid_deco() # error: [deprecated] "message"
| ^^^^^^^^^^^^ message
21 | from typing_extensions import deprecated, LiteralString
|
info: rule `deprecated` is enabled by default
```
```
warning[deprecated]: The function `valid_deco` is deprecated
--> src/mdtest_snippet.py:29:1
|
27 | def valid_deco(): ...
28 |
29 | valid_deco() # error: [deprecated]
| ^^^^^^^^^^
30 | from typing_extensions import deprecated
|
info: rule `deprecated` is enabled by default
```
```
error[invalid-argument-type]: Argument to class `deprecated` is incorrect
--> src/mdtest_snippet.py:35:13
|
33 | return "message"
34 |
35 | @deprecated(opaque()) # error: [invalid-argument-type] "LiteralString"
| ^^^^^^^^ Expected `LiteralString`, found `str`
36 | def dubious_deco(): ...
|
info: rule `invalid-argument-type` is enabled by default
```
```
error[unknown-argument]: Argument `dsfsdf` does not match any known parameter of class `deprecated`
--> src/mdtest_snippet.py:41:29
|
39 | from typing_extensions import deprecated
40 |
41 | @deprecated("some message", dsfsdf="whatever") # error: [unknown-argument] "dsfsdf"
| ^^^^^^^^^^^^^^^^^
42 | def invalid_deco(): ...
|
info: rule `unknown-argument` is enabled by default
```
```
warning[deprecated]: The function `valid_deco` is deprecated
--> src/mdtest_snippet.py:50:1
|
48 | def valid_deco(): ...
49 |
50 | valid_deco() # error: [deprecated] "some message"
| ^^^^^^^^^^ some message
|
info: rule `deprecated` is enabled by default
```

View File

@@ -36,7 +36,7 @@ error[invalid-syntax]
--> src/mdtest_snippet.py:6:19
|
4 | async def f():
5 | # error: 19 [invalid-syntax] "cannot use an asynchronous comprehension inside of a synchronous comprehension on Python 3.10 (syntax…
5 | # error: 19 [invalid-syntax] "cannot use an asynchronous comprehension inside of a synchronous comprehension on Python 3.10 (synt...
6 | return {n: [x async for x in elements(n)] for n in range(3)}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot use an asynchronous comprehension inside of a synchronous comprehension on Python 3.10 (syntax was added in 3.11)
7 | async def test():

View File

@@ -41,7 +41,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/with/sync.md
error[invalid-context-manager]: Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` and `__exit__`
--> src/mdtest_snippet.py:6:6
|
5 | # error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` and `…
5 | # error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` and...
6 | with Manager():
| ^^^^^^^^^
7 | ...
@@ -57,7 +57,7 @@ info: rule `invalid-context-manager` is enabled by default
error[invalid-context-manager]: Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` and `__exit__`
--> src/mdtest_snippet.py:13:6
|
12 | # error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` and …
12 | # error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` an...
13 | with Manager():
| ^^^^^^^^^
14 | ...
@@ -73,7 +73,7 @@ info: rule `invalid-context-manager` is enabled by default
error[invalid-context-manager]: Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` and `__exit__`
--> src/mdtest_snippet.py:20:6
|
19 | # error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` and …
19 | # error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` an...
20 | with Manager():
| ^^^^^^^^^
21 | ...

View File

@@ -154,7 +154,6 @@ the expression is not of statically known truthiness.
```py
from ty_extensions import static_assert
from typing import TYPE_CHECKING
import sys
static_assert(True)
@@ -174,8 +173,6 @@ static_assert("d" in "abc") # error: "Static assertion error: argument evaluate
n = None
static_assert(n is None)
static_assert(TYPE_CHECKING)
static_assert(sys.version_info >= (3, 6))
```

View File

@@ -5,8 +5,7 @@ use ruff_db::files::File;
/// Database giving access to semantic information about a Python program.
#[salsa::db]
pub trait Db: SourceDb {
/// Returns `true` if the file should be checked.
fn should_check_file(&self, file: File) -> bool;
fn is_file_open(&self, file: File) -> bool;
/// Resolves the rule selection for a given file.
fn rule_selection(&self, file: File) -> &RuleSelection;
@@ -115,7 +114,7 @@ pub(crate) mod tests {
#[salsa::db]
impl Db for TestDb {
fn should_check_file(&self, file: File) -> bool {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}

View File

@@ -17,8 +17,8 @@ pub use program::{
pub use python_platform::PythonPlatform;
pub use semantic_model::{Completion, CompletionKind, HasType, NameKind, SemanticModel};
pub use site_packages::{PythonEnvironment, SitePackagesPaths, SysPrefixPathOrigin};
pub use types::definitions_for_name;
pub use types::ide_support::ResolvedDefinition;
pub use types::{definitions_for_attribute, definitions_for_name};
pub use util::diagnostics::add_inferred_python_version_hint_to_diagnostic;
pub mod ast_node_ref;

View File

@@ -17,7 +17,6 @@ pub struct Module {
inner: Arc<ModuleInner>,
}
#[salsa::tracked]
impl Module {
pub(crate) fn file_module(
name: ModuleName,
@@ -98,16 +97,11 @@ impl Module {
///
/// The names returned correspond to the "base" name of the module.
/// That is, `{self.name}.{basename}` should give the full module name.
pub fn all_submodules<'db>(&self, db: &'db dyn Db) -> &'db [Name] {
self.clone()
.all_submodules_inner(db, ())
.as_deref()
.unwrap_or_default()
pub fn all_submodules(&self, db: &dyn Db) -> Vec<Name> {
self.all_submodules_inner(db).unwrap_or_default()
}
#[allow(clippy::ref_option, clippy::used_underscore_binding)]
#[salsa::tracked(returns(ref))]
fn all_submodules_inner(self, db: &dyn Db, _dummy: ()) -> Option<Vec<Name>> {
fn all_submodules_inner(&self, db: &dyn Db) -> Option<Vec<Name>> {
fn is_submodule(
is_dir: bool,
is_file: bool,
@@ -142,42 +136,32 @@ impl Module {
);
Some(match path.parent()? {
SystemOrVendoredPathRef::System(parent_directory) => {
// Read the revision on the corresponding file root to
// register an explicit dependency on this directory
// tree. When the revision gets bumped, the cache
// that Salsa creates for this routine will be
// invalidated.
if let Some(root) = db.files().root(db, parent_directory) {
let _ = root.revision(db);
}
db.system()
.read_directory(parent_directory)
.inspect_err(|err| {
tracing::debug!(
"Failed to read {parent_directory:?} when looking for \
its possible submodules: {err}"
);
})
.ok()?
.flatten()
.filter(|entry| {
let ty = entry.file_type();
let path = entry.path();
is_submodule(
ty.is_directory(),
ty.is_file(),
path.file_name(),
path.extension(),
)
})
.filter_map(|entry| {
let stem = entry.path().file_stem()?;
is_identifier(stem).then(|| Name::from(stem))
})
.collect()
}
SystemOrVendoredPathRef::System(parent_directory) => db
.system()
.read_directory(parent_directory)
.inspect_err(|err| {
tracing::debug!(
"Failed to read {parent_directory:?} when looking for \
its possible submodules: {err}"
);
})
.ok()?
.flatten()
.filter(|entry| {
let ty = entry.file_type();
let path = entry.path();
is_submodule(
ty.is_directory(),
ty.is_file(),
path.file_name(),
path.extension(),
)
})
.filter_map(|entry| {
let stem = entry.path().file_stem()?;
is_identifier(stem).then(|| Name::from(stem))
})
.collect(),
SystemOrVendoredPathRef::Vendored(parent_directory) => db
.vendored()
.read_directory(parent_directory)
@@ -275,7 +259,6 @@ pub enum KnownModule {
UnittestMock,
#[cfg(test)]
Uuid,
Warnings,
}
impl KnownModule {
@@ -295,7 +278,6 @@ impl KnownModule {
Self::TypeCheckerInternals => "_typeshed._type_checker_internals",
Self::TyExtensions => "ty_extensions",
Self::ImportLib => "importlib",
Self::Warnings => "warnings",
#[cfg(test)]
Self::UnittestMock => "unittest.mock",
#[cfg(test)]

View File

@@ -754,14 +754,10 @@ fn place_by_id<'db>(
// a diagnostic if we see it being modified externally. In type inference, we
// can assign a "narrow" type to it even if it is not *declared*. This means, we
// do not have to call [`widen_type_for_undeclared_public_symbol`].
//
// `TYPE_CHECKING` is a special variable that should only be assigned `False`
// at runtime, but is always considered `True` in type checking.
// See mdtest/known_constants.md#user-defined-type_checking for details.
let is_considered_non_modifiable = place_table(db, scope)
.place_expr(place_id)
.expr
.is_name_and(|name| matches!(name, "__slots__" | "TYPE_CHECKING"));
.is_name_and(|name| matches!(name, "__slots__"));
if scope.file(db).is_stub(db) {
// We generally trust module-level undeclared places in stubs and do not union

View File

@@ -549,14 +549,20 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
}
fn build_predicate(&mut self, predicate_node: &ast::Expr) -> PredicateOrLiteral<'db> {
// Some commonly used test expressions are eagerly evaluated as `true`
// or `false` here for performance reasons. This list does not need to
// be exhaustive. More complex expressions will still evaluate to the
// correct value during type-checking.
// Some commonly used test expressions are eagerly evaluated as `true` or `false` here for
// performance reasons. This list does not need to be exhaustive. More complex expressions
// will still evaluate to the correct value during type-checking. (The one exception is
// `TYPE_CHECKING`; we need to detect it here in order to handle it correctly in
// conditions; in type inference it will resolve to its runtime value.)
fn resolve_to_literal(node: &ast::Expr) -> Option<bool> {
match node {
ast::Expr::BooleanLiteral(ast::ExprBooleanLiteral { value, .. }) => Some(*value),
ast::Expr::Name(ast::ExprName { id, .. }) if id == "TYPE_CHECKING" => Some(true),
ast::Expr::Attribute(ast::ExprAttribute { attr, .. })
if attr == "TYPE_CHECKING" =>
{
Some(true)
}
ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
value: ast::Number::Int(n),
..
@@ -1994,26 +2000,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
walk_stmt(self, stmt);
for target in targets {
if let Ok(target) = PlaceExpr::try_from(target) {
let is_name = target.is_name();
let place_id = self.add_place(PlaceExprWithFlags::new(target));
let place_table = self.current_place_table_mut();
if is_name {
// `del x` behaves like an assignment in that it forces all references
// to `x` in the current scope (including *prior* references) to refer
// to the current scope's binding (unless `x` is declared `global` or
// `nonlocal`). For example, this is an UnboundLocalError at runtime:
//
// ```py
// x = 1
// def foo():
// print(x) # can't refer to global `x`
// if False:
// del x
// foo()
// ```
place_table.mark_place_bound(place_id);
}
place_table.mark_place_used(place_id);
self.current_place_table_mut().mark_place_used(place_id);
self.delete_binding(place_id);
}
}
@@ -2540,7 +2528,7 @@ impl SemanticSyntaxContext for SemanticIndexBuilder<'_, '_> {
}
fn report_semantic_error(&self, error: SemanticSyntaxError) {
if self.db.should_check_file(self.file) {
if self.db.is_file_open(self.file) {
self.semantic_syntax_errors.borrow_mut().push(error);
}
}
@@ -2771,14 +2759,12 @@ impl ExpressionsScopeMapBuilder {
/// Returns `true` if the expression is a `TYPE_CHECKING` test, either as a bare
/// name (`if TYPE_CHECKING:`) or as an attribute access on any object
/// (e.g. `if typing.TYPE_CHECKING:`).
///
/// NOTE(review): the attribute arm matches on the attribute name alone, so any
/// `<expr>.TYPE_CHECKING` access qualifies regardless of the base object —
/// presumably an intentional heuristic; confirm against callers.
fn is_if_type_checking(expr: &ast::Expr) -> bool {
    matches!(expr, ast::Expr::Name(ast::ExprName { id, .. }) if id == "TYPE_CHECKING")
        || matches!(expr, ast::Expr::Attribute(ast::ExprAttribute { attr, .. }) if attr == "TYPE_CHECKING")
}
/// Returns if the expression is a `not TYPE_CHECKING` expression.
fn is_if_not_type_checking(expr: &ast::Expr) -> bool {
matches!(expr, ast::Expr::UnaryOp(ast::ExprUnaryOp { op, operand, .. }) if *op == ruff_python_ast::UnaryOp::Not
&& matches!(
&**operand,
ast::Expr::Name(ast::ExprName { id, .. }) if id == "TYPE_CHECKING"
)
&& is_if_type_checking(operand)
)
}

View File

@@ -107,7 +107,7 @@ pub struct Definitions<'db> {
impl<'db> Definitions<'db> {
pub(crate) fn single(definition: Definition<'db>) -> Self {
Self {
definitions: smallvec::smallvec_inline![definition],
definitions: smallvec::smallvec![definition],
}
}

View File

@@ -10,7 +10,7 @@ use ruff_index::{IndexVec, newtype_index};
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use rustc_hash::FxHasher;
use smallvec::SmallVec;
use smallvec::{SmallVec, smallvec};
use crate::Db;
use crate::ast_node_ref::AstNodeRef;
@@ -162,10 +162,10 @@ impl TryFrom<ast::ExprRef<'_>> for PlaceExpr {
}
impl PlaceExpr {
pub(crate) const fn name(name: Name) -> Self {
pub(crate) fn name(name: Name) -> Self {
Self {
root_name: name,
sub_segments: SmallVec::new_const(),
sub_segments: smallvec![],
}
}

View File

@@ -334,9 +334,7 @@ pub(crate) struct ReachabilityConstraintsBuilder {
}
impl ReachabilityConstraintsBuilder {
pub(crate) fn build(mut self) -> ReachabilityConstraints {
self.interiors.shrink_to_fit();
pub(crate) fn build(self) -> ReachabilityConstraints {
ReachabilityConstraints {
interiors: self.interiors,
}

View File

@@ -71,12 +71,16 @@ impl ScopedDefinitionId {
}
}
/// The number of live bindings or declarations per place that can be kept inline at a given
/// time; any more spill to the heap.
const INLINE_DEFINITIONS_PER_PLACE: usize = 4;
/// Live declarations for a single place at some point in control flow, with their
/// corresponding reachability constraints.
#[derive(Clone, Debug, Default, PartialEq, Eq, salsa::Update, get_size2::GetSize)]
pub(super) struct Declarations {
/// A list of live declarations for this place, sorted by their `ScopedDefinitionId`
live_declarations: SmallVec<[LiveDeclaration; 2]>,
live_declarations: SmallVec<[LiveDeclaration; INLINE_DEFINITIONS_PER_PLACE]>,
}
/// One of the live declarations for a single place at some point in control flow.
@@ -195,7 +199,7 @@ pub(super) struct Bindings {
/// "unbound" binding.
unbound_narrowing_constraint: Option<ScopedNarrowingConstraint>,
/// A list of live bindings for this place, sorted by their `ScopedDefinitionId`
live_bindings: SmallVec<[LiveBinding; 2]>,
live_bindings: SmallVec<[LiveBinding; INLINE_DEFINITIONS_PER_PLACE]>,
}
impl Bindings {

View File

@@ -86,7 +86,7 @@ impl<'db> SemanticModel<'db> {
};
let ty = Type::module_literal(self.db, self.file, &submodule);
completions.push(Completion {
name: submodule_basename.clone(),
name: submodule_basename,
ty,
builtin,
});

View File

@@ -508,7 +508,7 @@ impl<'a> SuppressionsBuilder<'a> {
lint_registry,
seen_non_trivia_token: false,
line: Vec::new(),
file: SmallVec::new_const(),
file: SmallVec::new(),
unknown: Vec::new(),
invalid: Vec::new(),
}

View File

@@ -49,7 +49,7 @@ use crate::types::generics::{
};
pub use crate::types::ide_support::{
CallSignatureDetails, Member, all_members, call_signature_details, definition_kind_for_name,
definitions_for_attribute, definitions_for_keyword_argument, definitions_for_name,
definitions_for_name,
};
use crate::types::infer::infer_unpack_types;
use crate::types::mro::{Mro, MroError, MroIterator};
@@ -4187,45 +4187,6 @@ impl<'db> Type<'db> {
.into()
}
Some(KnownClass::Deprecated) => {
// ```py
// class deprecated:
// def __new__(
// cls,
// message: LiteralString,
// /,
// *,
// category: type[Warning] | None = ...,
// stacklevel: int = 1
// ) -> Self: ...
// ```
Binding::single(
self,
Signature::new(
Parameters::new([
Parameter::positional_only(Some(Name::new_static("message")))
.with_annotated_type(Type::LiteralString),
Parameter::keyword_only(Name::new_static("category"))
.with_annotated_type(UnionType::from_elements(
db,
[
// TODO: should be `type[Warning]`
Type::any(),
KnownClass::NoneType.to_instance(db),
],
))
// TODO: should be `type[Warning]`
.with_default_type(Type::any()),
Parameter::keyword_only(Name::new_static("stacklevel"))
.with_annotated_type(KnownClass::Int.to_instance(db))
.with_default_type(Type::IntLiteral(1)),
]),
Some(KnownClass::Deprecated.to_instance(db)),
),
)
.into()
}
Some(KnownClass::TypeAliasType) => {
// ```py
// def __new__(
@@ -4489,11 +4450,8 @@ impl<'db> Type<'db> {
Type::EnumLiteral(enum_literal) => enum_literal.enum_class_instance(db).bindings(db),
Type::KnownInstance(known_instance) => {
known_instance.instance_fallback(db).bindings(db)
}
Type::PropertyInstance(_)
| Type::KnownInstance(_)
| Type::AlwaysFalsy
| Type::AlwaysTruthy
| Type::IntLiteral(_)
@@ -5049,27 +5007,21 @@ impl<'db> Type<'db> {
| Type::ProtocolInstance(_)
| Type::PropertyInstance(_)
| Type::TypeIs(_) => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
InvalidTypeExpression::InvalidType(*self, scope_id)
],
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::InvalidType(
*self, scope_id
)],
fallback_type: Type::unknown(),
}),
Type::KnownInstance(known_instance) => match known_instance {
KnownInstanceType::TypeAliasType(alias) => Ok(alias.value_type(db)),
KnownInstanceType::TypeVar(typevar) => Ok(Type::TypeVar(*typevar)),
KnownInstanceType::Deprecated(_) => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::Deprecated],
fallback_type: Type::unknown(),
}),
KnownInstanceType::SubscriptedProtocol(_) => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
InvalidTypeExpression::Protocol
],
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::Protocol],
fallback_type: Type::unknown(),
}),
KnownInstanceType::SubscriptedGeneric(_) => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![InvalidTypeExpression::Generic],
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::Generic],
fallback_type: Type::unknown(),
}),
},
@@ -5105,7 +5057,7 @@ impl<'db> Type<'db> {
let Some(class) = nearest_enclosing_class(db, index, scope_id, &module) else {
return Err(InvalidTypeExpressionError {
fallback_type: Type::unknown(),
invalid_expressions: smallvec::smallvec_inline![
invalid_expressions: smallvec::smallvec![
InvalidTypeExpression::InvalidType(*self, scope_id)
],
});
@@ -5129,20 +5081,18 @@ impl<'db> Type<'db> {
SpecialFormType::Literal
| SpecialFormType::Union
| SpecialFormType::Intersection => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
invalid_expressions: smallvec::smallvec![
InvalidTypeExpression::RequiresArguments(*self)
],
fallback_type: Type::unknown(),
}),
SpecialFormType::Protocol => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
InvalidTypeExpression::Protocol
],
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::Protocol],
fallback_type: Type::unknown(),
}),
SpecialFormType::Generic => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![InvalidTypeExpression::Generic],
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::Generic],
fallback_type: Type::unknown(),
}),
@@ -5153,7 +5103,7 @@ impl<'db> Type<'db> {
| SpecialFormType::TypeGuard
| SpecialFormType::Unpack
| SpecialFormType::CallableTypeOf => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
invalid_expressions: smallvec::smallvec![
InvalidTypeExpression::RequiresOneArgument(*self)
],
fallback_type: Type::unknown(),
@@ -5161,7 +5111,7 @@ impl<'db> Type<'db> {
SpecialFormType::Annotated | SpecialFormType::Concatenate => {
Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
invalid_expressions: smallvec::smallvec![
InvalidTypeExpression::RequiresTwoArguments(*self)
],
fallback_type: Type::unknown(),
@@ -5170,7 +5120,7 @@ impl<'db> Type<'db> {
SpecialFormType::ClassVar | SpecialFormType::Final => {
Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
invalid_expressions: smallvec::smallvec![
InvalidTypeExpression::TypeQualifier(*special_form)
],
fallback_type: Type::unknown(),
@@ -5180,7 +5130,7 @@ impl<'db> Type<'db> {
SpecialFormType::ReadOnly
| SpecialFormType::NotRequired
| SpecialFormType::Required => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
invalid_expressions: smallvec::smallvec![
InvalidTypeExpression::TypeQualifierRequiresOneArgument(*special_form)
],
fallback_type: Type::unknown(),
@@ -5234,9 +5184,9 @@ impl<'db> Type<'db> {
"Support for `types.UnionType` instances in type expressions"
)),
_ => Err(InvalidTypeExpressionError {
invalid_expressions: smallvec::smallvec_inline![
InvalidTypeExpression::InvalidType(*self, scope_id)
],
invalid_expressions: smallvec::smallvec![InvalidTypeExpression::InvalidType(
*self, scope_id
)],
fallback_type: Type::unknown(),
}),
},
@@ -5907,9 +5857,6 @@ pub enum KnownInstanceType<'db> {
/// A single instance of `typing.TypeAliasType` (PEP 695 type alias)
TypeAliasType(TypeAliasType<'db>),
/// A single instance of `warnings.deprecated` or `typing_extensions.deprecated`
Deprecated(DeprecatedInstance<'db>),
}
fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
@@ -5928,9 +5875,6 @@ fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
KnownInstanceType::TypeAliasType(type_alias) => {
visitor.visit_type_alias_type(db, type_alias);
}
KnownInstanceType::Deprecated(_) => {
// Nothing to visit
}
}
}
@@ -5947,10 +5891,6 @@ impl<'db> KnownInstanceType<'db> {
Self::TypeAliasType(type_alias) => {
Self::TypeAliasType(type_alias.normalized_impl(db, visitor))
}
Self::Deprecated(deprecated) => {
// Nothing to normalize
Self::Deprecated(deprecated)
}
}
}
@@ -5959,7 +5899,6 @@ impl<'db> KnownInstanceType<'db> {
Self::SubscriptedProtocol(_) | Self::SubscriptedGeneric(_) => KnownClass::SpecialForm,
Self::TypeVar(_) => KnownClass::TypeVar,
Self::TypeAliasType(_) => KnownClass::TypeAliasType,
Self::Deprecated(_) => KnownClass::Deprecated,
}
}
@@ -6004,7 +5943,6 @@ impl<'db> KnownInstanceType<'db> {
// it as an instance of `typing.TypeVar`. Inside of a generic class or function, we'll
// have a `Type::TypeVar(_)`, which is rendered as the typevar's name.
KnownInstanceType::TypeVar(_) => f.write_str("typing.TypeVar"),
KnownInstanceType::Deprecated(_) => f.write_str("warnings.deprecated"),
}
}
}
@@ -6193,8 +6131,6 @@ enum InvalidTypeExpression<'db> {
Protocol,
/// Same for `Generic`
Generic,
/// Same for `@deprecated`
Deprecated,
/// Type qualifiers are always invalid in *type expressions*,
/// but these ones are okay with 0 arguments in *annotation expressions*
TypeQualifier(SpecialFormType),
@@ -6236,9 +6172,6 @@ impl<'db> InvalidTypeExpression<'db> {
InvalidTypeExpression::Generic => {
f.write_str("`typing.Generic` is not allowed in type expressions")
}
InvalidTypeExpression::Deprecated => {
f.write_str("`warnings.deprecated` is not allowed in type expressions")
}
InvalidTypeExpression::TypeQualifier(qualifier) => write!(
f,
"Type qualifier `{qualifier}` is not allowed in type expressions \
@@ -6294,17 +6227,6 @@ impl<'db> InvalidTypeExpression<'db> {
}
}
/// Data regarding a `warnings.deprecated` or `typing_extensions.deprecated` decorator.
#[salsa::interned(debug)]
#[derive(PartialOrd, Ord)]
pub struct DeprecatedInstance<'db> {
/// The message for the deprecation
pub message: Option<StringLiteralType<'db>>,
}
// The Salsa heap is tracked separately.
impl get_size2::GetSize for DeprecatedInstance<'_> {}
/// Whether this typevar was created via the legacy `TypeVar` constructor, or using PEP 695 syntax.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum TypeVarKind {

View File

@@ -8,7 +8,7 @@ use std::fmt;
use itertools::Itertools;
use ruff_db::parsed::parsed_module;
use smallvec::{SmallVec, smallvec, smallvec_inline};
use smallvec::{SmallVec, smallvec};
use super::{Argument, CallArguments, CallError, CallErrorKind, InferContext, Signature, Type};
use crate::db::Db;
@@ -848,7 +848,6 @@ impl<'db> Bindings<'db> {
class_literal.name(db),
class_literal.body_scope(db),
class_literal.known(db),
class_literal.deprecated(db),
Some(params),
class_literal.dataclass_transformer_params(db),
)));
@@ -1020,7 +1019,7 @@ impl<'db> From<CallableBinding<'db>> for Bindings<'db> {
fn from(from: CallableBinding<'db>) -> Bindings<'db> {
Bindings {
callable_type: from.callable_type,
elements: smallvec_inline![from],
elements: smallvec![from],
argument_forms: Box::from([]),
conflicting_forms: Box::from([]),
}
@@ -1038,11 +1037,11 @@ impl<'db> From<Binding<'db>> for Bindings<'db> {
bound_type: None,
overload_call_return_type: None,
matching_overload_index: None,
overloads: smallvec_inline![from],
overloads: smallvec![from],
};
Bindings {
callable_type,
elements: smallvec_inline![callable_binding],
elements: smallvec![callable_binding],
argument_forms: Box::from([]),
conflicting_forms: Box::from([]),
}

View File

@@ -22,9 +22,8 @@ use crate::types::signatures::{CallableSignature, Parameter, Parameters, Signatu
use crate::types::tuple::TupleType;
use crate::types::{
BareTypeAliasType, Binding, BoundSuperError, BoundSuperType, CallableType, DataclassParams,
DeprecatedInstance, DynamicType, KnownInstanceType, TypeAliasType, TypeMapping, TypeRelation,
TypeTransformer, TypeVarBoundOrConstraints, TypeVarInstance, TypeVarKind,
infer_definition_types,
DynamicType, KnownInstanceType, TypeAliasType, TypeMapping, TypeRelation, TypeTransformer,
TypeVarBoundOrConstraints, TypeVarInstance, TypeVarKind, infer_definition_types,
};
use crate::{
Db, FxOrderSet, KnownModule, Program,
@@ -800,9 +799,6 @@ pub struct ClassLiteral<'db> {
pub(crate) known: Option<KnownClass>,
/// If this class is deprecated, this holds the deprecation message.
pub(crate) deprecated: Option<DeprecatedInstance<'db>>,
pub(crate) dataclass_params: Option<DataclassParams>,
pub(crate) dataclass_transformer_params: Option<DataclassTransformerParams>,
}
@@ -1604,25 +1600,6 @@ impl<'db> ClassLiteral<'db> {
.place
.ignore_possibly_unbound()
}
(CodeGeneratorKind::DataclassLike, "__setattr__") => {
if has_dataclass_param(DataclassParams::FROZEN) {
let signature = Signature::new(
Parameters::new([
Parameter::positional_or_keyword(Name::new_static("self"))
.with_annotated_type(Type::instance(
db,
self.apply_optional_specialization(db, specialization),
)),
Parameter::positional_or_keyword(Name::new_static("name")),
Parameter::positional_or_keyword(Name::new_static("value")),
]),
Some(Type::Never),
);
return Some(CallableType::function_like(db, signature));
}
None
}
_ => None,
}
}
@@ -2422,7 +2399,6 @@ pub enum KnownClass {
NoneType, // Part of `types` for Python >= 3.10
// Typing
Any,
Deprecated,
StdlibAlias,
SpecialForm,
TypeVar,
@@ -2540,7 +2516,6 @@ impl KnownClass {
| Self::NotImplementedType
| Self::Staticmethod
| Self::Classmethod
| Self::Deprecated
| Self::Field
| Self::KwOnly
| Self::NamedTupleFallback => Truthiness::Ambiguous,
@@ -2568,7 +2543,6 @@ impl KnownClass {
| Self::Property
| Self::Staticmethod
| Self::Classmethod
| Self::Deprecated
| Self::Type
| Self::ModuleType
| Self::Super
@@ -2655,7 +2629,6 @@ impl KnownClass {
| KnownClass::ExceptionGroup
| KnownClass::Staticmethod
| KnownClass::Classmethod
| KnownClass::Deprecated
| KnownClass::Super
| KnownClass::Enum
| KnownClass::Auto
@@ -2739,7 +2712,6 @@ impl KnownClass {
| Self::ExceptionGroup
| Self::Staticmethod
| Self::Classmethod
| Self::Deprecated
| Self::GenericAlias
| Self::GeneratorType
| Self::AsyncGeneratorType
@@ -2806,7 +2778,6 @@ impl KnownClass {
Self::ExceptionGroup => "ExceptionGroup",
Self::Staticmethod => "staticmethod",
Self::Classmethod => "classmethod",
Self::Deprecated => "deprecated",
Self::GenericAlias => "GenericAlias",
Self::ModuleType => "ModuleType",
Self::FunctionType => "FunctionType",
@@ -3081,7 +3052,6 @@ impl KnownClass {
| Self::ParamSpec
| Self::ParamSpecArgs
| Self::ParamSpecKwargs
| Self::Deprecated
| Self::NewType => KnownModule::TypingExtensions,
Self::NoDefaultType => {
let python_version = Program::get(db).python_version(db);
@@ -3150,7 +3120,6 @@ impl KnownClass {
| Self::ExceptionGroup
| Self::Staticmethod
| Self::Classmethod
| Self::Deprecated
| Self::GenericAlias
| Self::ModuleType
| Self::FunctionType
@@ -3238,7 +3207,6 @@ impl KnownClass {
| Self::ExceptionGroup
| Self::Staticmethod
| Self::Classmethod
| Self::Deprecated
| Self::TypeVar
| Self::ParamSpec
| Self::ParamSpecArgs
@@ -3291,7 +3259,6 @@ impl KnownClass {
"ExceptionGroup" => Self::ExceptionGroup,
"staticmethod" => Self::Staticmethod,
"classmethod" => Self::Classmethod,
"deprecated" => Self::Deprecated,
"GenericAlias" => Self::GenericAlias,
"NoneType" => Self::NoneType,
"ModuleType" => Self::ModuleType,
@@ -3411,8 +3378,6 @@ impl KnownClass {
| Self::NamedTuple
| Self::Iterable
| Self::NewType => matches!(module, KnownModule::Typing | KnownModule::TypingExtensions),
Self::Deprecated => matches!(module, KnownModule::Warnings | KnownModule::TypingExtensions),
}
}
@@ -3422,10 +3387,10 @@ impl KnownClass {
self,
context: &InferContext<'db, '_>,
index: &SemanticIndex<'db>,
overload: &mut Binding<'db>,
call_arguments: &CallArguments<'_, 'db>,
overload_binding: &Binding<'db>,
call_argument_types: &CallArguments<'_, 'db>,
call_expression: &ast::ExprCall,
) {
) -> Option<Type<'db>> {
let db = context.db();
let scope = context.scope();
let module = context.module();
@@ -3436,15 +3401,14 @@ impl KnownClass {
// In this case, we need to infer the two arguments:
// 1. The nearest enclosing class
// 2. The first parameter of the current function (typically `self` or `cls`)
match overload.parameter_types() {
match overload_binding.parameter_types() {
[] => {
let Some(enclosing_class) =
nearest_enclosing_class(db, index, scope, module)
else {
BoundSuperError::UnavailableImplicitArguments
.report_diagnostic(context, call_expression.into());
overload.set_return_type(Type::unknown());
return;
return Some(Type::unknown());
};
// The type of the first parameter if the given scope is function-like (i.e. function or lambda).
@@ -3466,8 +3430,7 @@ impl KnownClass {
let Some(first_param) = first_param else {
BoundSuperError::UnavailableImplicitArguments
.report_diagnostic(context, call_expression.into());
overload.set_return_type(Type::unknown());
return;
return Some(Type::unknown());
};
let definition = index.expect_single_definition(first_param);
@@ -3484,7 +3447,7 @@ impl KnownClass {
Type::unknown()
});
overload.set_return_type(bound_super);
Some(bound_super)
}
[Some(pivot_class_type), Some(owner_type)] => {
let bound_super = BoundSuperType::build(db, *pivot_class_type, *owner_type)
@@ -3492,37 +3455,13 @@ impl KnownClass {
err.report_diagnostic(context, call_expression.into());
Type::unknown()
});
overload.set_return_type(bound_super);
Some(bound_super)
}
_ => {}
_ => None,
}
}
KnownClass::Deprecated => {
// Parsing something of the form:
//
// @deprecated("message")
// @deprecated("message", category = DeprecationWarning, stacklevel = 1)
//
// "Static type checker behavior is not affected by the category and stacklevel arguments"
// so we only need the message and can ignore everything else. The message is mandatory,
// must be a LiteralString, and always comes first.
//
// We aren't guaranteed to know the static value of a LiteralString, so we need to
// accept that sometimes we will fail to include the message.
//
// We don't do any serious validation/diagnostics here, as the signature for this
// is included in `Type::bindings`.
//
// See: <https://typing.python.org/en/latest/spec/directives.html#deprecated>
let [Some(message), ..] = overload.parameter_types() else {
// Checking in Type::bindings will complain about this for us
return;
};
overload.set_return_type(Type::KnownInstance(KnownInstanceType::Deprecated(
DeprecatedInstance::new(db, message.into_string_literal()),
)));
}
KnownClass::TypeVar => {
let assigned_to = index
.try_expression(ast::ExprRef::from(call_expression))
@@ -3534,14 +3473,12 @@ impl KnownClass {
_ => None,
}
}) else {
if let Some(builder) =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)
{
builder.into_diagnostic(
"A legacy `typing.TypeVar` must be immediately assigned to a variable",
);
}
return;
let builder =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)?;
builder.into_diagnostic(
"A legacy `typing.TypeVar` must be immediately assigned to a variable",
);
return None;
};
let [
@@ -3552,9 +3489,9 @@ impl KnownClass {
contravariant,
covariant,
_infer_variance,
] = overload.parameter_types()
] = overload_binding.parameter_types()
else {
return;
return None;
};
let covariant = covariant
@@ -3567,37 +3504,30 @@ impl KnownClass {
let variance = match (contravariant, covariant) {
(Truthiness::Ambiguous, _) => {
if let Some(builder) =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)
{
builder.into_diagnostic(
"The `contravariant` parameter of a legacy `typing.TypeVar` \
let builder =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)?;
builder.into_diagnostic(
"The `contravariant` parameter of a legacy `typing.TypeVar` \
cannot have an ambiguous value",
);
}
return;
);
return None;
}
(_, Truthiness::Ambiguous) => {
if let Some(builder) =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)
{
builder.into_diagnostic(
"The `covariant` parameter of a legacy `typing.TypeVar` \
let builder =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)?;
builder.into_diagnostic(
"The `covariant` parameter of a legacy `typing.TypeVar` \
cannot have an ambiguous value",
);
}
return;
);
return None;
}
(Truthiness::AlwaysTrue, Truthiness::AlwaysTrue) => {
if let Some(builder) =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)
{
builder.into_diagnostic(
"A legacy `typing.TypeVar` cannot be both \
covariant and contravariant",
);
}
return;
let builder =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)?;
builder.into_diagnostic(
"A legacy `typing.TypeVar` cannot be both covariant and contravariant",
);
return None;
}
(Truthiness::AlwaysTrue, Truthiness::AlwaysFalse) => {
TypeVarVariance::Contravariant
@@ -3611,21 +3541,19 @@ impl KnownClass {
let name_param = name_param.into_string_literal().map(|name| name.value(db));
if name_param.is_none_or(|name_param| name_param != target.id) {
if let Some(builder) =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)
{
builder.into_diagnostic(format_args!(
"The name of a legacy `typing.TypeVar`{} must match \
let builder =
context.report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)?;
builder.into_diagnostic(format_args!(
"The name of a legacy `typing.TypeVar`{} must match \
the name of the variable it is assigned to (`{}`)",
if let Some(name_param) = name_param {
format!(" (`{name_param}`)")
} else {
String::new()
},
target.id,
));
}
return;
if let Some(name_param) = name_param {
format!(" (`{name_param}`)")
} else {
String::new()
},
target.id,
));
return None;
}
let bound_or_constraint = match (bound, constraints) {
@@ -3640,8 +3568,8 @@ impl KnownClass {
// typevar constraints.
let elements = UnionType::new(
db,
overload
.arguments_for_parameter(call_arguments, 1)
overload_binding
.arguments_for_parameter(call_argument_types, 1)
.map(|(_, ty)| ty)
.collect::<Box<_>>(),
);
@@ -3650,13 +3578,13 @@ impl KnownClass {
// TODO: Emit a diagnostic that TypeVar cannot be both bounded and
// constrained
(Some(_), Some(_)) => return,
(Some(_), Some(_)) => return None,
(None, None) => None,
};
let containing_assignment = index.expect_single_definition(target);
overload.set_return_type(Type::KnownInstance(KnownInstanceType::TypeVar(
Some(Type::KnownInstance(KnownInstanceType::TypeVar(
TypeVarInstance::new(
db,
&target.id,
@@ -3666,7 +3594,7 @@ impl KnownClass {
*default,
TypeVarKind::Legacy,
),
)));
)))
}
KnownClass::TypeAliasType => {
@@ -3681,31 +3609,32 @@ impl KnownClass {
}
});
let [Some(name), Some(value), ..] = overload.parameter_types() else {
return;
let [Some(name), Some(value), ..] = overload_binding.parameter_types() else {
return None;
};
let Some(name) = name.into_string_literal() else {
if let Some(builder) =
context.report_lint(&INVALID_TYPE_ALIAS_TYPE, call_expression)
{
name.into_string_literal()
.map(|name| {
Type::KnownInstance(KnownInstanceType::TypeAliasType(TypeAliasType::Bare(
BareTypeAliasType::new(
db,
ast::name::Name::new(name.value(db)),
containing_assignment,
value,
),
)))
})
.or_else(|| {
let builder =
context.report_lint(&INVALID_TYPE_ALIAS_TYPE, call_expression)?;
builder.into_diagnostic(
"The name of a `typing.TypeAlias` must be a string literal",
);
}
return;
};
overload.set_return_type(Type::KnownInstance(KnownInstanceType::TypeAliasType(
TypeAliasType::Bare(BareTypeAliasType::new(
db,
ast::name::Name::new(name.value(db)),
containing_assignment,
value,
)),
)));
None
})
}
_ => {}
_ => None,
}
}
}

View File

@@ -163,9 +163,7 @@ impl<'db> ClassBase<'db> {
Type::KnownInstance(known_instance) => match known_instance {
KnownInstanceType::SubscriptedGeneric(_) => Some(Self::Generic),
KnownInstanceType::SubscriptedProtocol(_) => Some(Self::Protocol),
KnownInstanceType::TypeAliasType(_)
| KnownInstanceType::TypeVar(_)
| KnownInstanceType::Deprecated(_) => None,
KnownInstanceType::TypeAliasType(_) | KnownInstanceType::TypeVar(_) => None,
},
Type::SpecialForm(special_form) => match special_form {

View File

@@ -288,6 +288,7 @@ impl LintDiagnosticGuard<'_, '_> {
///
/// Callers can add additional primary or secondary annotations via the
/// `DerefMut` trait implementation to a `Diagnostic`.
#[expect(dead_code)]
pub(super) fn add_primary_tag(&mut self, tag: DiagnosticTag) {
let ann = self.primary_annotation_mut().unwrap();
ann.push_tag(tag);
@@ -398,7 +399,7 @@ impl<'db, 'ctx> LintDiagnosticGuardBuilder<'db, 'ctx> {
// returns a rule selector for a given file that respects the package's settings,
// any global pragma comments in the file, and any per-file-ignores.
if !ctx.db.should_check_file(ctx.file) {
if !ctx.db.is_file_open(ctx.file) {
return None;
}
let lint_id = LintId::of(lint);
@@ -572,7 +573,7 @@ impl<'db, 'ctx> DiagnosticGuardBuilder<'db, 'ctx> {
id: DiagnosticId,
severity: Severity,
) -> Option<DiagnosticGuardBuilder<'db, 'ctx>> {
if !ctx.db.should_check_file(ctx.file) {
if !ctx.db.is_file_open(ctx.file) {
return None;
}
Some(DiagnosticGuardBuilder { ctx, id, severity })

View File

@@ -34,7 +34,6 @@ pub(crate) fn register_lints(registry: &mut LintRegistryBuilder) {
registry.register_lint(&CONFLICTING_DECLARATIONS);
registry.register_lint(&CONFLICTING_METACLASS);
registry.register_lint(&CYCLIC_CLASS_DEFINITION);
registry.register_lint(&DEPRECATED);
registry.register_lint(&DIVISION_BY_ZERO);
registry.register_lint(&DUPLICATE_BASE);
registry.register_lint(&DUPLICATE_KW_ONLY);
@@ -262,27 +261,6 @@ declare_lint! {
}
}
declare_lint! {
    /// ## What it does
    /// Checks for uses of items that have been marked as deprecated with the
    /// `@warnings.deprecated` (or `@typing_extensions.deprecated`) decorator.
    ///
    /// ## Why is this bad?
    /// Deprecated items should no longer be used; the deprecation message
    /// typically points at the intended replacement.
    ///
    /// ## Examples
    /// ```python
    /// @warnings.deprecated("use new_func instead")
    /// def old_func(): ...
    ///
    /// old_func() # emits [deprecated] diagnostic
    /// ```
    pub(crate) static DEPRECATED = {
        summary: "detects uses of deprecated items",
        status: LintStatus::preview("1.0.0"),
        default_level: Level::Warn,
    }
}
declare_lint! {
/// ## What it does
/// Checks for class definitions with duplicate bases.

View File

@@ -64,7 +64,6 @@ use crate::semantic_index::ast_ids::HasScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::place::ScopeId;
use crate::semantic_index::semantic_index;
use crate::types::call::{Binding, CallArguments};
use crate::types::context::InferContext;
use crate::types::diagnostic::{
REDUNDANT_CAST, STATIC_ASSERT_ERROR, TYPE_ASSERTION_FAILURE,
@@ -76,8 +75,8 @@ use crate::types::narrow::ClassInfoConstraintFunction;
use crate::types::signatures::{CallableSignature, Signature};
use crate::types::visitor::any_over_type;
use crate::types::{
BoundMethodType, CallableType, DeprecatedInstance, DynamicType, KnownClass, Type, TypeMapping,
TypeRelation, TypeTransformer, TypeVarInstance, UnionBuilder, walk_type_mapping,
BoundMethodType, CallableType, DynamicType, KnownClass, Type, TypeMapping, TypeRelation,
TypeTransformer, TypeVarInstance, walk_type_mapping,
};
use crate::{Db, FxOrderSet, ModuleName, resolve_module};
@@ -199,9 +198,6 @@ pub struct OverloadLiteral<'db> {
/// A set of special decorators that were applied to this function
pub(crate) decorators: FunctionDecorators,
/// If `Some` then contains the `@warnings.deprecated`
pub(crate) deprecated: Option<DeprecatedInstance<'db>>,
/// The arguments to `dataclass_transformer`, if this function was annotated
/// with `@dataclass_transformer(...)`.
pub(crate) dataclass_transformer_params: Option<DataclassTransformerParams>,
@@ -223,7 +219,6 @@ impl<'db> OverloadLiteral<'db> {
self.known(db),
self.body_scope(db),
self.decorators(db),
self.deprecated(db),
Some(params),
)
}
@@ -469,14 +464,6 @@ impl<'db> FunctionLiteral<'db> {
.any(|overload| overload.decorators(db).contains(decorator))
}
/// Returns the `@warnings.deprecated` decoration on this function's
/// implementation, if there is one.
///
/// Only the implementation definition is consulted here; determining whether
/// an individual overload is deprecated requires deeper call analysis.
fn implementation_deprecated(self, db: &'db dyn Db) -> Option<DeprecatedInstance<'db>> {
    let (_overloads, implementation) = self.overloads_and_implementation(db);
    match implementation {
        Some(overload) => overload.deprecated(db),
        None => None,
    }
}
fn definition(self, db: &'db dyn Db) -> Definition<'db> {
self.last_definition(db).definition(db)
}
@@ -684,16 +671,6 @@ impl<'db> FunctionType<'db> {
self.literal(db).has_known_decorator(db, decorator)
}
/// If the implementation of this function is deprecated, returns the
/// `@warnings.deprecated` decorator data.
///
/// Checking if an overload is deprecated requires deeper call analysis;
/// this only inspects the implementation definition.
pub(crate) fn implementation_deprecated(
    self,
    db: &'db dyn Db,
) -> Option<DeprecatedInstance<'db>> {
    // Thin delegation to the interned `FunctionLiteral`.
    self.literal(db).implementation_deprecated(db)
}
/// Returns the [`Definition`] of the implementation or first overload of this function.
///
/// ## Warning
@@ -1062,90 +1039,86 @@ impl KnownFunction {
pub(super) fn check_call<'db>(
self,
context: &InferContext<'db, '_>,
overload: &mut Binding<'db>,
call_arguments: &CallArguments<'_, 'db>,
parameter_types: &[Option<Type<'db>>],
call_expression: &ast::ExprCall,
file: File,
) {
) -> Option<Type<'db>> {
let db = context.db();
let parameter_types = overload.parameter_types();
match self {
KnownFunction::RevealType => {
let revealed_type = overload
.arguments_for_parameter(call_arguments, 0)
.fold(UnionBuilder::new(db), |builder, (_, ty)| builder.add(ty))
.build();
if let Some(builder) =
context.report_diagnostic(DiagnosticId::RevealedType, Severity::Info)
{
let mut diag = builder.into_diagnostic("Revealed type");
let span = context.span(&call_expression.arguments.args[0]);
diag.annotate(
Annotation::primary(span)
.message(format_args!("`{}`", revealed_type.display(db))),
);
}
let [Some(revealed_type)] = parameter_types else {
return None;
};
let builder =
context.report_diagnostic(DiagnosticId::RevealedType, Severity::Info)?;
let mut diag = builder.into_diagnostic("Revealed type");
let span = context.span(&call_expression.arguments.args[0]);
diag.annotate(
Annotation::primary(span)
.message(format_args!("`{}`", revealed_type.display(db))),
);
None
}
KnownFunction::AssertType => {
let [Some(actual_ty), Some(asserted_ty)] = parameter_types else {
return;
return None;
};
if actual_ty.is_equivalent_to(db, *asserted_ty) {
return;
return None;
}
if let Some(builder) = context.report_lint(&TYPE_ASSERTION_FAILURE, call_expression)
{
let mut diagnostic = builder.into_diagnostic(format_args!(
"Argument does not have asserted type `{}`",
asserted_ty.display(db),
));
let builder = context.report_lint(&TYPE_ASSERTION_FAILURE, call_expression)?;
diagnostic.annotate(
Annotation::secondary(context.span(&call_expression.arguments.args[0]))
.message(format_args!(
"Inferred type of argument is `{}`",
actual_ty.display(db),
)),
);
let mut diagnostic = builder.into_diagnostic(format_args!(
"Argument does not have asserted type `{}`",
asserted_ty.display(db),
));
diagnostic.info(format_args!(
"`{asserted_type}` and `{inferred_type}` are not equivalent types",
asserted_type = asserted_ty.display(db),
inferred_type = actual_ty.display(db),
));
}
diagnostic.annotate(
Annotation::secondary(context.span(&call_expression.arguments.args[0]))
.message(format_args!(
"Inferred type of argument is `{}`",
actual_ty.display(db),
)),
);
diagnostic.info(format_args!(
"`{asserted_type}` and `{inferred_type}` are not equivalent types",
asserted_type = asserted_ty.display(db),
inferred_type = actual_ty.display(db),
));
None
}
KnownFunction::AssertNever => {
let [Some(actual_ty)] = parameter_types else {
return;
return None;
};
if actual_ty.is_equivalent_to(db, Type::Never) {
return;
return None;
}
if let Some(builder) = context.report_lint(&TYPE_ASSERTION_FAILURE, call_expression)
{
let mut diagnostic =
builder.into_diagnostic("Argument does not have asserted type `Never`");
diagnostic.annotate(
Annotation::secondary(context.span(&call_expression.arguments.args[0]))
.message(format_args!(
"Inferred type of argument is `{}`",
actual_ty.display(db)
)),
);
diagnostic.info(format_args!(
"`Never` and `{inferred_type}` are not equivalent types",
inferred_type = actual_ty.display(db),
));
}
}
let builder = context.report_lint(&TYPE_ASSERTION_FAILURE, call_expression)?;
let mut diagnostic =
builder.into_diagnostic("Argument does not have asserted type `Never`");
diagnostic.annotate(
Annotation::secondary(context.span(&call_expression.arguments.args[0]))
.message(format_args!(
"Inferred type of argument is `{}`",
actual_ty.display(db)
)),
);
diagnostic.info(format_args!(
"`Never` and `{inferred_type}` are not equivalent types",
inferred_type = actual_ty.display(db),
));
None
}
KnownFunction::StaticAssert => {
let [Some(parameter_ty), message] = parameter_types else {
return;
return None;
};
let truthiness = match parameter_ty.try_bool(db) {
Ok(truthiness) => truthiness,
@@ -1165,42 +1138,41 @@ impl KnownFunction {
err.report_diagnostic(context, condition);
return;
return None;
}
};
if let Some(builder) = context.report_lint(&STATIC_ASSERT_ERROR, call_expression) {
if truthiness.is_always_true() {
return;
}
if let Some(message) = message
.and_then(Type::into_string_literal)
.map(|s| s.value(db))
{
builder.into_diagnostic(format_args!("Static assertion error: {message}"));
} else if *parameter_ty == Type::BooleanLiteral(false) {
builder.into_diagnostic(
"Static assertion error: argument evaluates to `False`",
);
} else if truthiness.is_always_false() {
builder.into_diagnostic(format_args!(
"Static assertion error: argument of type `{parameter_ty}` \
is statically known to be falsy",
parameter_ty = parameter_ty.display(db)
));
} else {
builder.into_diagnostic(format_args!(
"Static assertion error: argument of type `{parameter_ty}` \
has an ambiguous static truthiness",
parameter_ty = parameter_ty.display(db)
));
}
let builder = context.report_lint(&STATIC_ASSERT_ERROR, call_expression)?;
if truthiness.is_always_true() {
return None;
}
if let Some(message) = message
.and_then(Type::into_string_literal)
.map(|s| s.value(db))
{
builder.into_diagnostic(format_args!("Static assertion error: {message}"));
} else if *parameter_ty == Type::BooleanLiteral(false) {
builder
.into_diagnostic("Static assertion error: argument evaluates to `False`");
} else if truthiness.is_always_false() {
builder.into_diagnostic(format_args!(
"Static assertion error: argument of type `{parameter_ty}` \
is statically known to be falsy",
parameter_ty = parameter_ty.display(db)
));
} else {
builder.into_diagnostic(format_args!(
"Static assertion error: argument of type `{parameter_ty}` \
has an ambiguous static truthiness",
parameter_ty = parameter_ty.display(db)
));
}
}
None
}
KnownFunction::Cast => {
let [Some(casted_type), Some(source_type)] = parameter_types else {
return;
return None;
};
let contains_unknown_or_todo =
|ty| matches!(ty, Type::Dynamic(dynamic) if dynamic != DynamicType::Any);
@@ -1208,34 +1180,31 @@ impl KnownFunction {
&& !any_over_type(db, *source_type, &contains_unknown_or_todo)
&& !any_over_type(db, *casted_type, &contains_unknown_or_todo)
{
if let Some(builder) = context.report_lint(&REDUNDANT_CAST, call_expression) {
builder.into_diagnostic(format_args!(
"Value is already of type `{}`",
casted_type.display(db),
));
}
let builder = context.report_lint(&REDUNDANT_CAST, call_expression)?;
builder.into_diagnostic(format_args!(
"Value is already of type `{}`",
casted_type.display(db),
));
}
None
}
KnownFunction::GetProtocolMembers => {
let [Some(Type::ClassLiteral(class))] = parameter_types else {
return;
return None;
};
if class.is_protocol(db) {
return;
return None;
}
report_bad_argument_to_get_protocol_members(context, call_expression, *class);
None
}
KnownFunction::IsInstance | KnownFunction::IsSubclass => {
let [_, Some(Type::ClassLiteral(class))] = parameter_types else {
return;
};
let Some(protocol_class) = class.into_protocol_class(db) else {
return;
return None;
};
let protocol_class = class.into_protocol_class(db)?;
if protocol_class.is_runtime_checkable(db) {
return;
return None;
}
report_runtime_check_against_non_runtime_checkable_protocol(
context,
@@ -1243,16 +1212,16 @@ impl KnownFunction {
protocol_class,
self,
);
None
}
known @ (KnownFunction::DunderImport | KnownFunction::ImportModule) => {
let [Some(Type::StringLiteral(full_module_name)), rest @ ..] = parameter_types
else {
return;
return None;
};
if rest.iter().any(Option::is_some) {
return;
return None;
}
let module_name = full_module_name.value(db);
@@ -1262,20 +1231,16 @@ impl KnownFunction {
// `importlib.import_module("collections.abc")` returns the `collections.abc` module.
// ty doesn't have a way to represent the return type of the former yet.
// https://github.com/astral-sh/ruff/pull/19008#discussion_r2173481311
return;
return None;
}
let Some(module_name) = ModuleName::new(module_name) else {
return;
};
let Some(module) = resolve_module(db, &module_name) else {
return;
};
let module_name = ModuleName::new(module_name)?;
let module = resolve_module(db, &module_name)?;
overload.set_return_type(Type::module_literal(db, file, &module));
Some(Type::module_literal(db, file, &module))
}
_ => {}
_ => None,
}
}
}

View File

@@ -13,16 +13,12 @@ use crate::types::call::CallArguments;
use crate::types::signatures::Signature;
use crate::types::{ClassBase, ClassLiteral, DynamicType, KnownClass, KnownInstanceType, Type};
use crate::{Db, HasType, NameKind, SemanticModel};
use ruff_db::files::{File, FileRange};
use ruff_db::parsed::parsed_module;
use ruff_db::files::File;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use ruff_text_size::{Ranged, TextRange};
use ruff_text_size::TextRange;
use rustc_hash::FxHashSet;
pub use resolve_definition::ResolvedDefinition;
use resolve_definition::{find_symbol_in_scope, resolve_definition};
pub(crate) fn all_declarations_and_bindings<'db>(
db: &'db dyn Db,
scope_id: ScopeId<'db>,
@@ -370,7 +366,7 @@ pub fn definition_kind_for_name<'db>(
let name_str = name.id.as_str();
// Get the scope for this name expression
let file_scope = index.expression_scope_id(&ast::ExprRef::from(name));
let file_scope = index.try_expression_scope_id(&ast::Expr::Name(name.clone()))?;
// Get the place table for this scope
let place_table = index.place_table(file_scope);
@@ -403,7 +399,9 @@ pub fn definitions_for_name<'db>(
let name_str = name.id.as_str();
// Get the scope for this name expression
let file_scope = index.expression_scope_id(&ast::ExprRef::from(name));
let Some(file_scope) = index.try_expression_scope_id(&ast::Expr::Name(name.clone())) else {
return Vec::new();
};
let mut all_definitions = Vec::new();
@@ -505,183 +503,6 @@ pub fn definitions_for_name<'db>(
}
}
/// Returns all resolved definitions for an attribute expression `x.y`.
///
/// This function duplicates much of the functionality in the semantic
/// analyzer, but it has somewhat different behavior so we've decided
/// to keep it separate for now. One key difference is that this function
/// doesn't model the descriptor protocol when accessing attributes.
/// For "go to definition", we want to get the type of the descriptor object
/// rather than "invoking" its `__get__` or `__set__` method.
/// If this becomes a maintenance burden in the future, it may be worth
/// changing the corresponding logic in the semantic analyzer to conditionally
/// handle this case through the use of mode flags.
pub fn definitions_for_attribute<'db>(
    db: &'db dyn Db,
    file: File,
    attribute: &ast::ExprAttribute,
) -> Vec<ResolvedDefinition<'db>> {
    let name_str = attribute.attr.as_str();
    let model = SemanticModel::new(db, file);
    let mut resolved = Vec::new();

    // Determine the type of the LHS of `x.y`. A union LHS is searched
    // member-by-member so definitions from every branch are collected.
    let lhs_ty = attribute.value.inferred_type(&model);
    let tys = match lhs_ty {
        Type::Union(union) => union.elements(db).to_vec(),
        _ => vec![lhs_ty],
    };

    // Expand intersections for each subtype into their (positive) components;
    // negative components cannot contribute attribute definitions.
    let expanded_tys = tys
        .into_iter()
        .flat_map(|ty| match ty {
            Type::Intersection(intersection) => intersection.positive(db).iter().copied().collect(),
            _ => vec![ty],
        })
        .collect::<Vec<_>>();

    for ty in expanded_tys {
        // Handle modules: look the attribute up as a symbol in the module's
        // global scope instead of going through class-based lookup.
        if let Type::ModuleLiteral(module_literal) = ty {
            if let Some(module_file) = module_literal.module(db).file() {
                let module_scope = global_scope(db, module_file);
                for def in find_symbol_in_scope(db, module_scope, name_str) {
                    resolved.extend(resolve_definition(db, def, Some(name_str)));
                }
            }
            continue;
        }

        // First, transform the type to its meta type, unless it's already a class-like type.
        let meta_type = match ty {
            Type::ClassLiteral(_) | Type::SubclassOf(_) | Type::GenericAlias(_) => ty,
            _ => ty.to_meta_type(db),
        };
        // Reduce the class-like meta type to a concrete class literal;
        // anything else (e.g. `type[Any]`) has no body scope to search.
        let class_literal = match meta_type {
            Type::ClassLiteral(class_literal) => class_literal,
            Type::SubclassOf(subclass) => match subclass.subclass_of().into_class() {
                Some(cls) => cls.class_literal(db).0,
                None => continue,
            },
            _ => continue,
        };

        // Walk the MRO: include class and its ancestors, but stop when we find a match.
        // The labeled break exits the whole ancestor walk as soon as any
        // declaration or binding resolves, so nearer classes shadow farther ones.
        'scopes: for ancestor in class_literal
            .iter_mro(db, None)
            .filter_map(ClassBase::into_class)
            .map(|cls| cls.class_literal(db).0)
        {
            let class_scope = ancestor.body_scope(db);
            let class_place_table = crate::semantic_index::place_table(db, class_scope);

            // Look for class-level declarations and bindings
            if let Some(place_id) = class_place_table.place_id_by_name(name_str) {
                let use_def = use_def_map(db, class_scope);

                // Check declarations first
                for decl in use_def.all_reachable_declarations(place_id) {
                    if let Some(def) = decl.declaration.definition() {
                        resolved.extend(resolve_definition(db, def, Some(name_str)));
                        break 'scopes;
                    }
                }

                // If no declarations found, check bindings
                for binding in use_def.all_reachable_bindings(place_id) {
                    if let Some(def) = binding.binding.definition() {
                        resolved.extend(resolve_definition(db, def, Some(name_str)));
                        break 'scopes;
                    }
                }
            }

            // Look for instance attributes in method scopes (e.g., self.x = 1)
            let file = class_scope.file(db);
            let index = semantic_index(db, file);
            for function_scope_id in attribute_scopes(db, class_scope) {
                let place_table = index.place_table(function_scope_id);
                if let Some(place_id) = place_table.place_id_by_instance_attribute_name(name_str) {
                    let use_def = index.use_def_map(function_scope_id);

                    // Check declarations first
                    for decl in use_def.all_reachable_declarations(place_id) {
                        if let Some(def) = decl.declaration.definition() {
                            resolved.extend(resolve_definition(db, def, Some(name_str)));
                            break 'scopes;
                        }
                    }

                    // If no declarations found, check bindings
                    for binding in use_def.all_reachable_bindings(place_id) {
                        if let Some(def) = binding.binding.definition() {
                            resolved.extend(resolve_definition(db, def, Some(name_str)));
                            break 'scopes;
                        }
                    }
                }
            }

            // TODO: Add support for metaclass attribute lookups
        }
    }

    resolved
}
/// Returns definitions for a keyword argument in a call expression.
/// This resolves the keyword argument to the corresponding parameter(s) in the callable's signature(s).
pub fn definitions_for_keyword_argument<'db>(
    db: &'db dyn Db,
    file: File,
    keyword: &ast::Keyword,
    call_expr: &ast::ExprCall,
) -> Vec<ResolvedDefinition<'db>> {
    let model = SemanticModel::new(db, file);
    let callee_ty = call_expr.func.inferred_type(&model);

    // An unnamed keyword (i.e. `**kwargs` unpacking) has no parameter name to resolve.
    let Some(keyword_name) = keyword.arg.as_ref() else {
        return Vec::new();
    };
    let name = keyword_name.as_str();

    let mut resolved_definitions = Vec::new();
    if let Some(Type::Callable(callable_type)) = callee_ty.into_callable(db) {
        // Walk every overload; each signature with a matching keyword parameter
        // contributes the source range of that parameter's name.
        for signature in callable_type.signatures(db) {
            if signature.parameters().keyword_by_name(name).is_none() {
                continue;
            }
            let Some(function_definition) = signature.definition() else {
                continue;
            };
            let function_file = function_definition.file(db);
            let module = parsed_module(db, function_file).load(db);
            let def_kind = function_definition.kind(db);
            if let DefinitionKind::Function(function_ast_ref) = def_kind {
                let function_node = function_ast_ref.node(&module);
                if let Some(parameter_range) =
                    find_parameter_range(&function_node.parameters, name)
                {
                    resolved_definitions.push(ResolvedDefinition::FileWithRange(
                        FileRange::new(function_file, parameter_range),
                    ));
                }
            }
        }
    }
    resolved_definitions
}
/// Details about a callable signature for IDE support.
#[derive(Debug, Clone)]
pub struct CallSignatureDetails<'db> {
@@ -705,7 +526,7 @@ pub struct CallSignatureDetails<'db> {
pub definition: Option<Definition<'db>>,
/// Mapping from argument indices to parameter indices. This helps
/// identify which argument corresponds to which parameter.
/// determine which parameter corresponds to which argument position.
pub argument_to_parameter_mapping: Vec<Option<usize>>,
}
@@ -752,27 +573,14 @@ pub fn call_signature_details<'db>(
}
}
/// Find the text range of a specific parameter in function parameters by name.
/// Only searches for parameters that can be addressed by name in keyword arguments.
fn find_parameter_range(parameters: &ast::Parameters, parameter_name: &str) -> Option<TextRange> {
    // Positional-only parameters are deliberately skipped: they can never be
    // targeted by a keyword argument. Regular and keyword-only parameters can.
    for param in parameters.args.iter().chain(&parameters.kwonlyargs) {
        let name = &param.parameter.name;
        if name.as_str() == parameter_name {
            return Some(name.range());
        }
    }
    None
}
mod resolve_definition {
//! Resolves an Import, `ImportFrom` or `StarImport` definition to one or more
//! "resolved definitions". This is done recursively to find the original
//! definition targeted by the import.
use ruff_db::files::{File, FileRange};
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast as ast;
use ruff_text_size::TextRange;
use rustc_hash::FxHashSet;
use crate::semantic_index::definition::{Definition, DefinitionKind};
@@ -780,17 +588,16 @@ mod resolve_definition {
use crate::semantic_index::{global_scope, place_table, use_def_map};
use crate::{Db, ModuleName, resolve_module};
/// Represents the result of resolving an import to either a specific definition or
/// a specific range within a file.
/// Represents the result of resolving an import to either a specific definition or a module file.
/// This enum helps distinguish between cases where an import resolves to:
/// - A specific definition within a module (e.g., `from os import path` -> definition of `path`)
/// - A specific range within a file, sometimes an empty range at the top of the file
/// - An entire module file (e.g., `import os` -> the `os` module file itself)
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolvedDefinition<'db> {
/// The import resolved to a specific definition within a module
Definition(Definition<'db>),
/// The import resolved to a file with a specific range
FileWithRange(FileRange),
/// The import resolved to an entire module file
ModuleFile(File),
}
/// Resolve import definitions to their targets.
@@ -850,10 +657,7 @@ mod resolve_definition {
// For simple imports like "import os", we want to navigate to the module itself.
// Return the module file directly instead of trying to find definitions within it.
vec![ResolvedDefinition::FileWithRange(FileRange::new(
module_file,
TextRange::default(),
))]
vec![ResolvedDefinition::ModuleFile(module_file)]
}
DefinitionKind::ImportFrom(import_from_def) => {
@@ -963,3 +767,6 @@ mod resolve_definition {
definitions
}
}
pub use resolve_definition::ResolvedDefinition;
use resolve_definition::{find_symbol_in_scope, resolve_definition};

View File

@@ -424,7 +424,6 @@ pub(crate) struct TypeInference<'db> {
diagnostics: TypeCheckDiagnostics,
/// The scope this region is part of.
#[cfg(debug_assertions)]
scope: ScopeId<'db>,
/// The fallback type for missing expressions/bindings/declarations.
@@ -435,30 +434,24 @@ pub(crate) struct TypeInference<'db> {
impl<'db> TypeInference<'db> {
pub(crate) fn empty(scope: ScopeId<'db>) -> Self {
let _ = scope;
Self {
expressions: FxHashMap::default(),
bindings: FxHashMap::default(),
declarations: FxHashMap::default(),
deferred: FxHashSet::default(),
diagnostics: TypeCheckDiagnostics::default(),
#[cfg(debug_assertions)]
scope,
cycle_fallback_type: None,
}
}
fn cycle_fallback(scope: ScopeId<'db>, cycle_fallback_type: Type<'db>) -> Self {
let _ = scope;
Self {
expressions: FxHashMap::default(),
bindings: FxHashMap::default(),
declarations: FxHashMap::default(),
deferred: FxHashSet::default(),
diagnostics: TypeCheckDiagnostics::default(),
#[cfg(debug_assertions)]
scope,
cycle_fallback_type: Some(cycle_fallback_type),
}
@@ -591,9 +584,6 @@ pub(super) struct TypeInferenceBuilder<'db, 'ast> {
index: &'db SemanticIndex<'db>,
region: InferenceRegion<'db>,
/// The scope of the current region.
scope: ScopeId<'db>,
/// The type inference results
types: TypeInference<'db>,
@@ -655,7 +645,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
Self {
context: InferContext::new(db, scope, module),
scope,
index,
region,
return_types_and_ranges: vec![],
@@ -666,13 +655,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn extend(&mut self, inference: &TypeInference<'db>) {
#[cfg(debug_assertions)]
assert_eq!(self.scope, inference.scope);
debug_assert_eq!(self.types.scope, inference.scope);
self.extend_unchecked(inference);
}
fn extend_unchecked(&mut self, inference: &TypeInference<'db>) {
self.types.bindings.extend(inference.bindings.iter());
self.types
.declarations
@@ -699,7 +683,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn scope(&self) -> ScopeId<'db> {
self.scope
self.types.scope
}
/// Are we currently inferring types in file with deferred types?
@@ -2314,7 +2298,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let mut decorator_types_and_nodes = Vec::with_capacity(decorator_list.len());
let mut function_decorators = FunctionDecorators::empty();
let mut deprecated = None;
let mut dataclass_transformer_params = None;
for decorator in decorator_list {
@@ -2332,9 +2315,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
continue;
}
}
Type::KnownInstance(KnownInstanceType::Deprecated(deprecated_inst)) => {
deprecated = Some(deprecated_inst);
}
Type::DataclassTransformer(params) => {
dataclass_transformer_params = Some(params);
}
@@ -2382,7 +2362,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
known_function,
body_scope,
function_decorators,
deprecated,
dataclass_transformer_params,
);
@@ -2645,7 +2624,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
body: _,
} = class_node;
let mut deprecated = None;
let mut dataclass_params = None;
let mut dataclass_transformer_params = None;
for decorator in decorator_list {
@@ -2663,13 +2641,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
continue;
}
if let Type::KnownInstance(KnownInstanceType::Deprecated(deprecated_inst)) =
decorator_ty
{
deprecated = Some(deprecated_inst);
continue;
}
if let Type::FunctionLiteral(f) = decorator_ty {
// We do not yet detect or flag `@dataclass_transform` applied to more than one
// overload, or an overload and the implementation both. Nevertheless, this is not
@@ -2702,7 +2673,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
name.id.clone(),
body_scope,
maybe_known_class,
deprecated,
dataclass_params,
dataclass_transformer_params,
));
@@ -3476,6 +3446,20 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
| Type::AlwaysTruthy
| Type::AlwaysFalsy
| Type::TypeIs(_) => {
let is_read_only = || {
let dataclass_params = match object_ty {
Type::NominalInstance(instance) => match instance.class {
ClassType::NonGeneric(cls) => cls.dataclass_params(self.db()),
ClassType::Generic(cls) => {
cls.origin(self.db()).dataclass_params(self.db())
}
},
_ => None,
};
dataclass_params.is_some_and(|params| params.contains(DataclassParams::FROZEN))
};
// First, try to call the `__setattr__` dunder method. If this is present/defined, overrides
// assigning the attributed by the normal mechanism.
let setattr_dunder_call_result = object_ty.try_call_dunder_with_policy(
@@ -3492,41 +3476,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
if let Some(builder) =
self.context.report_lint(&INVALID_ASSIGNMENT, target)
{
let is_setattr_synthesized = match object_ty
.class_member_with_policy(
db,
"__setattr__".into(),
MemberLookupPolicy::MRO_NO_OBJECT_FALLBACK,
) {
PlaceAndQualifiers {
place: Place::Type(attr_ty, _),
qualifiers: _,
} => attr_ty.is_callable_type(),
_ => false,
};
let member_exists =
!object_ty.member(db, attribute).place.is_unbound();
let msg = if !member_exists {
format!(
"Can not assign to unresolved attribute `{attribute}` on type `{}`",
object_ty.display(db)
)
} else if is_setattr_synthesized {
format!(
"Property `{attribute}` defined in `{}` is read-only",
object_ty.display(db)
)
} else {
format!(
"Cannot assign to attribute `{attribute}` on type `{}` \
whose `__setattr__` method returns `Never`/`NoReturn`",
object_ty.display(db)
)
};
builder.into_diagnostic(msg);
builder.into_diagnostic(format_args!(
"Cannot assign to attribute `{attribute}` on type `{}` \
whose `__setattr__` method returns `Never`/`NoReturn`",
object_ty.display(db)
));
}
}
false
@@ -3576,71 +3530,85 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
place: Place::Type(meta_attr_ty, meta_attr_boundness),
qualifiers: _,
} => {
let assignable_to_meta_attr =
if let Place::Type(meta_dunder_set, _) =
meta_attr_ty.class_member(db, "__set__".into()).place
{
let successful_call = meta_dunder_set
.try_call(
db,
&CallArguments::positional([
meta_attr_ty,
object_ty,
value_ty,
]),
)
.is_ok();
if is_read_only() {
if emit_diagnostics {
if let Some(builder) =
self.context.report_lint(&INVALID_ASSIGNMENT, target)
{
builder.into_diagnostic(format_args!(
"Property `{attribute}` defined in `{ty}` is read-only",
ty = object_ty.display(self.db()),
));
}
}
false
} else {
let assignable_to_meta_attr =
if let Place::Type(meta_dunder_set, _) =
meta_attr_ty.class_member(db, "__set__".into()).place
{
let successful_call = meta_dunder_set
.try_call(
db,
&CallArguments::positional([
meta_attr_ty,
object_ty,
value_ty,
]),
)
.is_ok();
if !successful_call && emit_diagnostics {
if let Some(builder) = self
.context
.report_lint(&INVALID_ASSIGNMENT, target)
{
// TODO: Here, it would be nice to emit an additional diagnostic that explains why the call failed
builder.into_diagnostic(format_args!(
if !successful_call && emit_diagnostics {
if let Some(builder) = self
.context
.report_lint(&INVALID_ASSIGNMENT, target)
{
// TODO: Here, it would be nice to emit an additional diagnostic that explains why the call failed
builder.into_diagnostic(format_args!(
"Invalid assignment to data descriptor attribute \
`{attribute}` on type `{}` with custom `__set__` method",
object_ty.display(db)
));
}
}
}
successful_call
} else {
ensure_assignable_to(meta_attr_ty)
};
let assignable_to_instance_attribute =
if meta_attr_boundness == Boundness::PossiblyUnbound {
let (assignable, boundness) = if let Place::Type(
instance_attr_ty,
instance_attr_boundness,
) =
object_ty.instance_member(db, attribute).place
{
(
ensure_assignable_to(instance_attr_ty),
instance_attr_boundness,
)
successful_call
} else {
(true, Boundness::PossiblyUnbound)
ensure_assignable_to(meta_attr_ty)
};
if boundness == Boundness::PossiblyUnbound {
report_possibly_unbound_attribute(
&self.context,
target,
attribute,
object_ty,
);
}
let assignable_to_instance_attribute =
if meta_attr_boundness == Boundness::PossiblyUnbound {
let (assignable, boundness) = if let Place::Type(
instance_attr_ty,
instance_attr_boundness,
) =
object_ty.instance_member(db, attribute).place
{
(
ensure_assignable_to(instance_attr_ty),
instance_attr_boundness,
)
} else {
(true, Boundness::PossiblyUnbound)
};
assignable
} else {
true
};
if boundness == Boundness::PossiblyUnbound {
report_possibly_unbound_attribute(
&self.context,
target,
attribute,
object_ty,
);
}
assignable_to_meta_attr && assignable_to_instance_attribute
assignable
} else {
true
};
assignable_to_meta_attr && assignable_to_instance_attribute
}
}
PlaceAndQualifiers {
@@ -3659,7 +3627,22 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
);
}
ensure_assignable_to(instance_attr_ty)
if is_read_only() {
if emit_diagnostics {
if let Some(builder) = self
.context
.report_lint(&INVALID_ASSIGNMENT, target)
{
builder.into_diagnostic(format_args!(
"Property `{attribute}` defined in `{ty}` is read-only",
ty = object_ty.display(self.db()),
));
}
}
false
} else {
ensure_assignable_to(instance_attr_ty)
}
} else {
if emit_diagnostics {
if let Some(builder) =
@@ -3917,7 +3900,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) {
report_invalid_type_checking_constant(&self.context, target.into());
}
Type::BooleanLiteral(true)
value_ty
} else if self.in_stub() && value.is_ellipsis_literal_expr() {
Type::unknown()
} else {
@@ -4006,7 +3989,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// otherwise, assigning something other than `False` is an error
report_invalid_type_checking_constant(&self.context, target.into());
}
declared_ty.inner = Type::BooleanLiteral(true);
}
// Handle various singletons.
@@ -4031,12 +4013,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
if let Some(value) = value {
let inferred_ty = self.infer_expression(value);
let inferred_ty = if target
.as_name_expr()
.is_some_and(|name| &name.id == "TYPE_CHECKING")
{
Type::BooleanLiteral(true)
} else if self.in_stub() && value.is_ellipsis_literal_expr() {
let inferred_ty = if self.in_stub() && value.is_ellipsis_literal_expr() {
declared_ty.inner_type()
} else {
inferred_ty
@@ -4388,14 +4365,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
for alias in names {
for definition in self.index.definitions(alias) {
let inferred = infer_definition_types(self.db(), *definition);
// Check non-star imports for deprecations
if definition.kind(self.db()).as_star_import().is_none() {
for ty in inferred.declarations.values() {
self.check_deprecated(alias, ty.inner);
}
}
self.extend(inferred);
self.extend(infer_definition_types(self.db(), *definition));
}
}
}
@@ -5358,7 +5328,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
if comprehension.is_first() && target.is_name_expr() {
result.expression_type(iterable)
} else {
self.extend_unchecked(result);
let scope = self.types.scope;
self.types.scope = result.scope;
self.extend(result);
self.types.scope = scope;
result.expression_type(iterable)
}
};
@@ -5596,7 +5569,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
| KnownClass::NamedTuple
| KnownClass::TypeAliasType
| KnownClass::Tuple
| KnownClass::Deprecated
)
)
// temporary special-casing for all subclasses of `enum.Enum`
@@ -5637,24 +5609,30 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
match binding_type {
Type::FunctionLiteral(function_literal) => {
if let Some(known_function) = function_literal.known(self.db()) {
known_function.check_call(
if let Some(return_type) = known_function.check_call(
&self.context,
overload,
&call_arguments,
overload.parameter_types(),
call_expression,
self.file(),
);
) {
overload.set_return_type(return_type);
}
}
}
Type::ClassLiteral(class) => {
if let Some(known_class) = class.known(self.db()) {
known_class.check_call(
&self.context,
self.index,
overload,
&call_arguments,
call_expression,
);
let Some(known_class) = class.known(self.db()) else {
continue;
};
let overridden_return = known_class.check_call(
&self.context,
self.index,
overload,
&call_arguments,
call_expression,
);
if let Some(overridden_return) = overridden_return {
overload.set_return_type(overridden_return);
}
}
_ => {}
@@ -5834,62 +5812,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
ty
}
/// Emit a `DEPRECATED` lint diagnostic at `ranged` when `ty` resolves to a class
/// or function carrying deprecation metadata (from the `@warnings.deprecated`
/// decorator). Types other than class literals, function literals, and bound
/// methods are ignored.
fn check_deprecated<T: Ranged>(&self, ranged: T, ty: Type) {
    // First handle classes: a deprecated class literal is flagged directly.
    if let Type::ClassLiteral(class_literal) = ty {
        let Some(deprecated) = class_literal.deprecated(self.db()) else {
            return;
        };
        // `report_lint` returns `None` when the lint is suppressed at this
        // location; in that case there is nothing to emit.
        let Some(builder) = self
            .context
            .report_lint(&crate::types::diagnostic::DEPRECATED, ranged)
        else {
            return;
        };
        let class_name = class_literal.name(self.db());
        let mut diag =
            builder.into_diagnostic(format_args!(r#"The class `{class_name}` is deprecated"#));
        // Attach the user-supplied deprecation message, if one was given.
        if let Some(message) = deprecated.message(self.db()) {
            diag.set_primary_message(message.value(self.db()));
        }
        // The `Deprecated` tag lets LSP clients render e.g. strikethrough.
        diag.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
        return;
    }
    // Next handle functions; bound methods are checked via their underlying function.
    let function = match ty {
        Type::FunctionLiteral(function) => function,
        Type::BoundMethod(bound) => bound.function(self.db()),
        _ => return,
    };
    // Currently we only check the final implementation for deprecation, as
    // that check can be done on any reference to the function. Analysis of
    // deprecated overloads needs to be done in places where we resolve the
    // actual overloads being used.
    let Some(deprecated) = function.implementation_deprecated(self.db()) else {
        return;
    };
    let Some(builder) = self
        .context
        .report_lint(&crate::types::diagnostic::DEPRECATED, ranged)
    else {
        return;
    };
    let func_name = function.name(self.db());
    let mut diag =
        builder.into_diagnostic(format_args!(r#"The function `{func_name}` is deprecated"#));
    if let Some(message) = deprecated.message(self.db()) {
        diag.set_primary_message(message.value(self.db()));
    }
    diag.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
}
fn infer_name_load(&mut self, name_node: &ast::ExprName) -> Type<'db> {
let ast::ExprName {
range: _,
@@ -5902,7 +5824,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let (resolved, constraint_keys) =
self.infer_place_load(&expr, ast::ExprRef::Name(name_node));
resolved
// Not found in the module's explicitly declared global symbols?
// Check the "implicit globals" such as `__doc__`, `__file__`, `__name__`, etc.
@@ -5984,7 +5905,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let use_id = expr_ref.scoped_use_id(db, scope);
let place = place_from_bindings(db, use_def.bindings_at_use(use_id));
(place, Some(use_id))
}
}
@@ -6071,8 +5991,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// definition of this name visible to us (would be `LOAD_DEREF` at runtime.)
// Note that we skip the scope containing the use that we are resolving, since we
// already looked for the place there up above.
let mut nonlocal_union_builder = UnionBuilder::new(db);
let mut found_some_definition = false;
for (enclosing_scope_file_id, _) in self.index.ancestor_scopes(file_scope_id).skip(1) {
// Class scopes are not visible to nested scopes, and we need to handle global
// scope differently (because an unbound name there falls back to builtins), so
@@ -6158,25 +6076,21 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let Some(enclosing_place) = enclosing_place_table.place_by_expr(expr) else {
continue;
};
// Reads of "free" variables terminate at any enclosing scope that marks the
// variable `global`, whether or not that scope actually binds the variable. If we
// see a `global` declaration, stop walking scopes and proceed to the global
// handling below. (If we're walking from a prior/inner scope where this variable
// is `nonlocal`, then this is a semantic syntax error, but we don't enforce that
// here. See `infer_nonlocal_statement`.)
if enclosing_place.is_marked_global() {
// Reads of "free" variables can terminate at an enclosing scope that marks the
// variable `global` but doesn't actually bind it. In that case, stop walking
// scopes and proceed to the global handling below. (But note that it's a
// semantic syntax error for the `nonlocal` keyword to do this. See
// `infer_nonlocal_statement`.)
break;
}
// If the name is declared or bound in this scope, figure out its type. This might
// resolve the name and end the walk. But if the name is declared `nonlocal` in
// this scope, we'll keep walking enclosing scopes and union this type with the
// other types we find. (It's a semantic syntax error to declare a type for a
// `nonlocal` variable, but we don't enforce that here. See the
// `ast::Stmt::AnnAssign` handling in `SemanticIndexBuilder::visit_stmt`.)
if enclosing_place.is_bound() || enclosing_place.is_declared() {
let local_place_and_qualifiers = place(
// We can return early here, because the nearest function-like scope that
// defines a name must be the only source for the nonlocal reference (at
// runtime, it is the scope that creates the cell for our closure.) If the name
// isn't bound in that scope, we should get an unbound name, not continue
// falling back to other scopes / globals / builtins.
return place(
db,
enclosing_scope_id,
expr,
@@ -6185,25 +6099,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
.map_type(|ty| {
self.narrow_place_with_applicable_constraints(expr, ty, &constraint_keys)
});
// We could have Place::Unbound here, despite the checks above, for example if
// this scope contains a `del` statement but no binding or declaration.
if let Place::Type(type_, boundness) = local_place_and_qualifiers.place {
nonlocal_union_builder.add_in_place(type_);
// `ConsideredDefinitions::AllReachable` never returns PossiblyUnbound
debug_assert_eq!(boundness, Boundness::Bound);
found_some_definition = true;
}
if !enclosing_place.is_marked_nonlocal() {
// We've reached a function-like scope that marks this name bound or
// declared but doesn't mark it `nonlocal`. The name is therefore resolved,
// and we won't consider any scopes outside of this one.
return if found_some_definition {
Place::Type(nonlocal_union_builder.build(), Boundness::Bound).into()
} else {
Place::Unbound.into()
};
}
}
}
@@ -6258,10 +6153,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
})
});
if let Some(ty) = place.place.ignore_possibly_unbound() {
self.check_deprecated(expr_ref, ty);
}
(place, constraint_keys)
}
@@ -6465,9 +6356,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
})
.inner_type();
self.check_deprecated(attr, resolved_type);
// Even if we can obtain the attribute type based on the assignments, we still perform default type inference
// (to report errors).
assigned_type.unwrap_or(resolved_type)
@@ -9394,15 +9282,6 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
}
Type::unknown()
}
KnownInstanceType::Deprecated(_) => {
self.infer_type_expression(&subscript.slice);
if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) {
builder.into_diagnostic(format_args!(
"`warnings.deprecated` is not allowed in type expressions",
));
}
Type::unknown()
}
KnownInstanceType::TypeVar(_) => {
self.infer_type_expression(&subscript.slice);
todo_type!("TypeVar annotations")

View File

@@ -13,7 +13,7 @@
use std::{collections::HashMap, slice::Iter};
use itertools::EitherOrBoth;
use smallvec::{SmallVec, smallvec_inline};
use smallvec::{SmallVec, smallvec};
use super::{DynamicType, Type, TypeTransformer, TypeVarVariance, definition_expression_type};
use crate::semantic_index::definition::Definition;
@@ -34,7 +34,7 @@ pub struct CallableSignature<'db> {
impl<'db> CallableSignature<'db> {
pub(crate) fn single(signature: Signature<'db>) -> Self {
Self {
overloads: smallvec_inline![signature],
overloads: smallvec![signature],
}
}

View File

@@ -244,7 +244,7 @@ impl ruff_db::Db for CorpusDb {
#[salsa::db]
impl ty_python_semantic::Db for CorpusDb {
fn should_check_file(&self, file: File) -> bool {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}

View File

@@ -23,7 +23,6 @@ ty_python_semantic = { workspace = true }
ty_vendored = { workspace = true }
anyhow = { workspace = true }
bitflags = { workspace = true }
crossbeam = { workspace = true }
jod-thread = { workspace = true }
lsp-server = { workspace = true }

View File

@@ -98,7 +98,7 @@ impl<S> tracing_subscriber::layer::Filter<S> for LogLevelFilter {
meta: &tracing::Metadata<'_>,
_: &tracing_subscriber::layer::Context<'_, S>,
) -> bool {
let filter = if meta.target().starts_with("ty") || meta.target().starts_with("ruff") {
let filter = if meta.target().starts_with("ty") {
self.filter.trace_level()
} else {
tracing::Level::WARN

View File

@@ -21,8 +21,8 @@ mod schedule;
use crate::session::client::Client;
pub(crate) use api::Error;
pub(crate) use api::publish_settings_diagnostics;
pub(crate) use main_loop::{Action, ConnectionSender, Event, MainLoopReceiver, MainLoopSender};
pub(crate) type Result<T> = std::result::Result<T, api::Error>;
pub(crate) struct Server {

View File

@@ -17,7 +17,6 @@ mod traits;
use self::traits::{NotificationHandler, RequestHandler};
use super::{Result, schedule::BackgroundSchedule};
use crate::session::client::Client;
pub(crate) use diagnostics::publish_settings_diagnostics;
use ruff_db::panic::PanicError;
/// Processes a request from the client to the server.

View File

@@ -8,13 +8,11 @@ use rustc_hash::FxHashMap;
use ruff_db::diagnostic::{Annotation, Severity, SubDiagnostic};
use ruff_db::files::FileRange;
use ruff_db::source::{line_index, source_text};
use ruff_db::system::SystemPathBuf;
use ty_project::{Db, ProjectDatabase};
use crate::document::{DocumentKey, FileRangeExt, ToRangeExt};
use crate::session::DocumentSnapshot;
use crate::session::client::Client;
use crate::system::{AnySystemPath, file_to_url};
use crate::{PositionEncoding, Session};
/// Represents the diagnostics for a text document or a notebook document.
@@ -66,7 +64,7 @@ pub(super) fn clear_diagnostics(key: &DocumentKey, client: &Client) {
///
/// [publish diagnostics notification]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_publishDiagnostics
pub(super) fn publish_diagnostics(session: &Session, key: &DocumentKey, client: &Client) {
if session.client_capabilities().supports_pull_diagnostics() {
if session.client_capabilities().pull_diagnostics {
return;
}
@@ -111,82 +109,6 @@ pub(super) fn publish_diagnostics(session: &Session, key: &DocumentKey, client:
}
}
/// Publishes settings diagnostics for the project at the given path
/// using the [publish diagnostics notification].
///
/// [publish diagnostics notification]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_publishDiagnostics
pub(crate) fn publish_settings_diagnostics(
    session: &mut Session,
    client: &Client,
    path: SystemPathBuf,
) {
    // Don't publish settings diagnostics for workspaces that are already doing full diagnostics.
    //
    // Note we DO NOT respect the fact that clients support pulls because these are
    // files they *specifically* won't pull diagnostics from us for, because we don't
    // claim to be an LSP for them.
    let has_workspace_diagnostics = session
        .workspaces()
        .for_path(&path)
        .map(|workspace| workspace.settings().diagnostic_mode().is_workspace())
        .unwrap_or(false);
    if has_workspace_diagnostics {
        return;
    }
    // Capture the encoding before taking a mutable borrow of the project state.
    let session_encoding = session.position_encoding();
    let state = session.project_state_mut(&AnySystemPath::System(path));
    let db = &state.db;
    let project = db.project();
    let settings_diagnostics = project.check_settings(db);
    // We need to send diagnostics if we have non-empty ones, or we have ones to clear.
    // These will both almost always be empty so this function will almost always be a no-op.
    if settings_diagnostics.is_empty() && state.untracked_files_with_pushed_diagnostics.is_empty() {
        return;
    }
    // Group diagnostics by URL; diagnostics without a primary span are dropped
    // here since there is no file to attach them to.
    let mut diagnostics_by_url: FxHashMap<Url, Vec<_>> = FxHashMap::default();
    for diagnostic in settings_diagnostics {
        if let Some(span) = diagnostic.primary_span() {
            let file = span.expect_ty_file();
            let Some(url) = file_to_url(db, file) else {
                tracing::debug!("Failed to convert file to URL at {}", file.path(db));
                continue;
            };
            diagnostics_by_url.entry(url).or_default().push(diagnostic);
        }
    }
    // Record the URLs we're sending non-empty diagnostics for, so we know to clear them
    // the next time we publish settings diagnostics!
    let old_untracked = std::mem::replace(
        &mut state.untracked_files_with_pushed_diagnostics,
        diagnostics_by_url.keys().cloned().collect(),
    );
    // Add empty diagnostics for any files that had diagnostics before but don't now.
    // This will clear them (either the file is no longer relevant to us or fixed!)
    for url in old_untracked {
        diagnostics_by_url.entry(url).or_default();
    }
    // Send the settings diagnostics!
    for (url, file_diagnostics) in diagnostics_by_url {
        // Convert diagnostics to LSP format
        let lsp_diagnostics = file_diagnostics
            .into_iter()
            .map(|diagnostic| to_lsp_diagnostic(db, &diagnostic, session_encoding))
            .collect::<Vec<_>>();
        client.send_notification::<PublishDiagnostics>(PublishDiagnosticsParams {
            uri: url,
            diagnostics: lsp_diagnostics,
            version: None,
        });
    }
}
pub(super) fn compute_diagnostics(
db: &ProjectDatabase,
snapshot: &DocumentSnapshot,

View File

@@ -1,5 +1,5 @@
use crate::server::Result;
use crate::server::api::diagnostics::{publish_diagnostics, publish_settings_diagnostics};
use crate::server::api::diagnostics::publish_diagnostics;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::session::Session;
use crate::session::client::Client;
@@ -88,8 +88,7 @@ impl SyncNotificationHandler for DidChangeWatchedFiles {
for (root, changes) in events_by_db {
tracing::debug!("Applying changes to `{root}`");
let result = session.apply_changes(&AnySystemPath::System(root.clone()), changes);
publish_settings_diagnostics(session, client, root);
let result = session.apply_changes(&AnySystemPath::System(root), changes);
project_changed |= result.project_changed();
}
@@ -97,7 +96,7 @@ impl SyncNotificationHandler for DidChangeWatchedFiles {
let client_capabilities = session.client_capabilities();
if project_changed {
if client_capabilities.supports_workspace_diagnostic_refresh() {
if client_capabilities.diagnostics_refresh {
client.send_request::<types::request::WorkspaceDiagnosticRefresh>(
session,
(),
@@ -108,10 +107,11 @@ impl SyncNotificationHandler for DidChangeWatchedFiles {
publish_diagnostics(session, &key, client);
}
}
// TODO: always publish diagnostics for notebook files (since they don't use pull diagnostics)
}
if client_capabilities.supports_inlay_hint_refresh() {
if client_capabilities.inlay_refresh {
client.send_request::<types::request::InlayHintRefreshRequest>(session, (), |_, ()| {});
}

View File

@@ -8,8 +8,7 @@ use crate::system::AnySystemPath;
use lsp_server::ErrorCode;
use lsp_types::notification::DidCloseTextDocument;
use lsp_types::{DidCloseTextDocumentParams, TextDocumentIdentifier};
use ruff_db::Db as _;
use ty_project::Db as _;
use ty_project::watch::ChangeEvent;
pub(crate) struct DidCloseTextDocumentHandler;
@@ -39,29 +38,11 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler {
.close_document(&key)
.with_failure_code(ErrorCode::InternalError)?;
let path = key.path();
let db = session.project_db_mut(path);
match path {
AnySystemPath::System(system_path) => {
if let Some(file) = db.files().try_system(db, system_path) {
db.project().close_file(db, file);
} else {
// This can only fail when the path is a directory or it doesn't exists but the
// file should exists for this handler in this branch. This is because every
// close call is preceded by an open call, which ensures that the file is
// interned in the lookup table (`Files`).
tracing::warn!("Salsa file does not exists for {}", system_path);
}
}
AnySystemPath::SystemVirtual(virtual_path) => {
if let Some(virtual_file) = db.files().try_virtual_file(virtual_path) {
db.project().close_file(db, virtual_file.file());
virtual_file.close(db);
} else {
tracing::warn!("Salsa virtual file does not exists for {}", virtual_path);
}
}
if let AnySystemPath::SystemVirtual(virtual_path) = key.path() {
session.apply_changes(
key.path(),
vec![ChangeEvent::DeletedVirtual(virtual_path.clone())],
);
}
if !session.global_settings().diagnostic_mode().is_workspace() {

View File

@@ -1,9 +1,5 @@
use lsp_types::notification::DidOpenTextDocument;
use lsp_types::{DidOpenTextDocumentParams, TextDocumentItem};
use ruff_db::Db as _;
use ruff_db::files::system_path_to_file;
use ty_project::Db as _;
use ty_project::watch::{ChangeEvent, CreatedKind};
use crate::TextDocument;
use crate::server::Result;
@@ -12,6 +8,8 @@ use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::session::Session;
use crate::session::client::Client;
use crate::system::AnySystemPath;
use ruff_db::Db;
use ty_project::watch::ChangeEvent;
pub(crate) struct DidOpenTextDocumentHandler;
@@ -48,38 +46,13 @@ impl SyncNotificationHandler for DidOpenTextDocumentHandler {
let path = key.path();
// This is a "maybe" because the `File` might've not been interned yet i.e., the
// `try_system` call will return `None` which doesn't mean that the file is new, it's just
// that the server didn't need the file yet.
let is_maybe_new_system_file = path.as_system().is_some_and(|system_path| {
let db = session.project_db(path);
db.files()
.try_system(db, system_path)
.is_none_or(|file| !file.exists(db))
});
match path {
AnySystemPath::System(system_path) => {
let event = if is_maybe_new_system_file {
ChangeEvent::Created {
path: system_path.clone(),
kind: CreatedKind::File,
}
} else {
ChangeEvent::Opened(system_path.clone())
};
session.apply_changes(path, vec![event]);
let db = session.project_db_mut(path);
match system_path_to_file(db, system_path) {
Ok(file) => db.project().open_file(db, file),
Err(err) => tracing::warn!("Failed to open file {system_path}: {err}"),
}
session.apply_changes(path, vec![ChangeEvent::Opened(system_path.clone())]);
}
AnySystemPath::SystemVirtual(virtual_path) => {
let db = session.project_db_mut(path);
let virtual_file = db.files().virtual_file(db, virtual_path);
db.project().open_file(db, virtual_file.file());
db.files().virtual_file(db, virtual_path);
}
}

View File

@@ -52,7 +52,7 @@ impl BackgroundDocumentRequestHandler for GotoDeclarationRequestHandler {
if snapshot
.resolved_client_capabilities()
.supports_declaration_link()
.type_definition_link_support
{
let src = Some(ranged.range);
let links: Vec<_> = ranged

View File

@@ -52,7 +52,7 @@ impl BackgroundDocumentRequestHandler for GotoDefinitionRequestHandler {
if snapshot
.resolved_client_capabilities()
.supports_definition_link()
.type_definition_link_support
{
let src = Some(ranged.range);
let links: Vec<_> = ranged

View File

@@ -52,7 +52,7 @@ impl BackgroundDocumentRequestHandler for GotoTypeDefinitionRequestHandler {
if snapshot
.resolved_client_capabilities()
.supports_type_definition_link()
.type_definition_link_support
{
let src = Some(ranged.range);
let links: Vec<_> = ranged

View File

@@ -52,7 +52,7 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler {
let (markup_kind, lsp_markup_kind) = if snapshot
.resolved_client_capabilities()
.prefers_markdown_in_hover()
.hover_prefer_markdown
{
(MarkupKind::Markdown, lsp_types::MarkupKind::Markdown)
} else {

View File

@@ -41,7 +41,7 @@ impl BackgroundDocumentRequestHandler for SemanticTokensRequestHandler {
snapshot.encoding(),
snapshot
.resolved_client_capabilities()
.supports_multiline_semantic_tokens(),
.semantic_tokens_multiline_support,
);
Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {

Some files were not shown because too many files have changed in this diff Show More