Compare commits

..

1 Commits

154 changed files with 2216 additions and 7866 deletions

10
.github/CODEOWNERS vendored
View File

@@ -20,11 +20,9 @@
# ty
/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ruff_db/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_ide/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager @Gankra
/crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_project/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_server/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty/ @carljm @MichaReiser @sharkdp @dcreager
/crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager @Gankra
/crates/ty_wasm/ @carljm @MichaReiser @sharkdp @dcreager
/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
/crates/ty_python_semantic/ @carljm @AlexWaygood @sharkdp @dcreager
/crates/ty_module_resolver/ @carljm @MichaReiser @AlexWaygood @Gankra
/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager

View File

@@ -10,12 +10,6 @@ Run all tests (using `nextest` for faster execution):
cargo nextest run
```
For faster test execution, use the `fast-test` profile which enables optimizations while retaining debug info:
```sh
cargo nextest run --cargo-profile fast-test
```
Run tests for a specific crate:
```sh

2
Cargo.lock generated
View File

@@ -4511,13 +4511,11 @@ dependencies = [
"regex-automata",
"ruff_cache",
"ruff_db",
"ruff_diagnostics",
"ruff_macros",
"ruff_memory_usage",
"ruff_options_metadata",
"ruff_python_ast",
"ruff_python_formatter",
"ruff_python_trivia",
"ruff_text_size",
"rustc-hash",
"salsa",

View File

@@ -335,11 +335,6 @@ strip = false
debug = "full"
lto = false
# Profile for faster iteration: applies minimal optimizations for faster tests.
[profile.fast-test]
inherits = "dev"
opt-level = 1
# The profile that 'cargo dist' will build with.
[profile.dist]
inherits = "release"

View File

@@ -221,7 +221,7 @@ fn setup_micro_case(code: &str) -> Case {
let file_path = "src/test.py";
fs.write_file_all(
SystemPathBuf::from(file_path),
&*ruff_python_trivia::textwrap::dedent(code),
ruff_python_trivia::textwrap::dedent(code),
)
.unwrap();

View File

@@ -1,4 +1,3 @@
use std::fmt::Formatter;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
@@ -50,15 +49,3 @@ impl CancellationToken {
self.cancelled.load(std::sync::atomic::Ordering::Relaxed)
}
}
/// The operation was canceled by the provided [`CancellationToken`].
#[derive(Debug)]
pub struct Canceled;
impl std::error::Error for Canceled {}
impl std::fmt::Display for Canceled {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str("operation was canceled")
}
}

View File

@@ -98,44 +98,6 @@ impl Diagnostic {
diag
}
/// Adds sub diagnostics that tell the user that this is a bug in ty
/// and asks them to open an issue on GitHub.
pub fn add_bug_sub_diagnostics(&mut self, url_encoded_title: &str) {
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
"This indicates a bug in ty.",
));
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format_args!(
"If you could open an issue at https://github.com/astral-sh/ty/issues/new?title={url_encoded_title}, we'd be very appreciative!"
),
));
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Platform: {os} {arch}",
os = std::env::consts::OS,
arch = std::env::consts::ARCH
),
));
if let Some(version) = crate::program_version() {
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!("Version: {version}"),
));
}
self.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Args: {args:?}",
args = std::env::args().collect::<Vec<_>>()
),
));
}
/// Add an annotation to this diagnostic.
///
/// Annotations for a diagnostic are optional, but if any are added,
@@ -1057,13 +1019,6 @@ impl DiagnosticId {
matches!(self, DiagnosticId::Lint(_))
}
pub const fn as_lint(&self) -> Option<LintName> {
match self {
DiagnosticId::Lint(name) => Some(*name),
_ => None,
}
}
/// Returns `true` if this `DiagnosticId` represents a lint with the given name.
pub fn is_lint_named(&self, name: &str) -> bool {
matches!(self, DiagnosticId::Lint(self_name) if self_name == name)

View File

@@ -14,7 +14,6 @@ use crate::diagnostic::{Span, UnifiedFile};
use crate::file_revision::FileRevision;
use crate::files::file_root::FileRoots;
use crate::files::private::FileStatus;
use crate::source::SourceText;
use crate::system::{SystemPath, SystemPathBuf, SystemVirtualPath, SystemVirtualPathBuf};
use crate::vendored::{VendoredPath, VendoredPathBuf};
use crate::{Db, FxDashMap, vendored};
@@ -324,17 +323,6 @@ pub struct File {
/// the file has been deleted is to change the status to `Deleted`.
#[default]
status: FileStatus,
/// Overrides the result of [`source_text`](crate::source::source_text).
///
/// This is useful when running queries after modifying a file's content but
/// before the content is written to disk. For example, to verify that the applied fixes
/// didn't introduce any new errors.
///
/// The override gets automatically removed the next time the file changes.
#[default]
#[returns(ref)]
pub source_text_override: Option<SourceText>,
}
// The Salsa heap is tracked separately.
@@ -456,28 +444,20 @@ impl File {
_ => (FileStatus::NotFound, FileRevision::zero(), None),
};
let mut clear_override = false;
if file.status(db) != status {
tracing::debug!("Updating the status of `{}`", file.path(db));
file.set_status(db).to(status);
clear_override = true;
}
if file.revision(db) != revision {
tracing::debug!("Updating the revision of `{}`", file.path(db));
file.set_revision(db).to(revision);
clear_override = true;
}
if file.permissions(db) != permission {
tracing::debug!("Updating the permissions of `{}`", file.path(db));
file.set_permissions(db).to(permission);
}
if clear_override && file.source_text_override(db).is_some() {
file.set_source_text_override(db).to(None);
}
}
/// Returns `true` if the file exists.
@@ -546,7 +526,7 @@ impl VirtualFile {
}
/// Increments the revision of the underlying [`File`].
pub fn sync(&self, db: &mut dyn Db) {
fn sync(&self, db: &mut dyn Db) {
let file = self.0;
tracing::debug!("Updating the revision of `{}`", file.path(db));
let current_revision = file.revision(db);

View File

@@ -85,13 +85,6 @@ pub fn max_parallelism() -> NonZeroUsize {
})
}
// Use a reasonably large stack size to avoid running into stack overflows too easily. The
// size was chosen in such a way as to still be able to handle large expressions involving
// binary operators (x + x + … + x) both during the AST walk in semantic index building as
// well as during type checking. Using this stack size, we can handle expressions
// that are several times larger than the corresponding limits in existing type checkers.
pub const STACK_SIZE: usize = 16 * 1024 * 1024;
/// Trait for types that can provide Rust documentation.
///
/// Use `derive(RustDoc)` to automatically implement this trait for types that have a static string documentation.

View File

@@ -1,8 +1,6 @@
use std::borrow::Cow;
use std::ops::Deref;
use std::sync::Arc;
use ruff_diagnostics::SourceMap;
use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
use ruff_source_file::LineIndex;
@@ -18,10 +16,6 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
let _span = tracing::trace_span!("source_text", file = %path).entered();
let mut read_error = None;
if let Some(source) = file.source_text_override(db) {
return source.clone();
}
let kind = if is_notebook(db.system(), path) {
file.read_to_notebook(db)
.unwrap_or_else(|error| {
@@ -96,45 +90,6 @@ impl SourceText {
pub fn read_error(&self) -> Option<&SourceTextError> {
self.inner.read_error.as_ref()
}
/// Returns a new instance for this file with the updated source text (Python code).
///
/// Uses the `source_map` to preserve the cell-boundaries.
#[must_use]
pub fn with_text(&self, new_text: String, source_map: &SourceMap) -> Self {
let new_kind = match &self.inner.kind {
SourceTextKind::Text(_) => SourceTextKind::Text(new_text),
SourceTextKind::Notebook { notebook } => {
let mut new_notebook = notebook.as_ref().clone();
new_notebook.update(source_map, new_text);
SourceTextKind::Notebook {
notebook: new_notebook.into(),
}
}
};
Self {
inner: Arc::new(SourceTextInner {
kind: new_kind,
read_error: self.inner.read_error.clone(),
}),
}
}
pub fn to_bytes(&self) -> Cow<'_, [u8]> {
match &self.inner.kind {
SourceTextKind::Text(source) => Cow::Borrowed(source.as_bytes()),
SourceTextKind::Notebook { notebook } => {
let mut output: Vec<u8> = Vec::new();
notebook
.write(&mut output)
.expect("writing to a Vec should never fail");
Cow::Owned(output)
}
}
}
}
impl Deref for SourceText {
@@ -162,13 +117,13 @@ impl std::fmt::Debug for SourceText {
}
}
#[derive(Eq, PartialEq, get_size2::GetSize, Clone)]
#[derive(Eq, PartialEq, get_size2::GetSize)]
struct SourceTextInner {
kind: SourceTextKind,
read_error: Option<SourceTextError>,
}
#[derive(Eq, PartialEq, get_size2::GetSize, Clone)]
#[derive(Eq, PartialEq, get_size2::GetSize)]
enum SourceTextKind {
Text(String),
Notebook {

View File

@@ -271,12 +271,7 @@ pub trait WritableSystem: System {
fn create_new_file(&self, path: &SystemPath) -> Result<()>;
/// Writes the given content to the file at the given path.
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
self.write_file_bytes(path, content.as_bytes())
}
/// Writes the given content to the file at the given path.
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()>;
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()>;
/// Creates a directory at `path` as well as any intermediate directories.
fn create_directory_all(&self, path: &SystemPath) -> Result<()>;
@@ -316,8 +311,6 @@ pub trait WritableSystem: System {
Ok(Some(cache_path))
}
fn dyn_clone(&self) -> Box<dyn WritableSystem>;
}
#[derive(Clone, Debug, Eq, PartialEq)]

View File

@@ -122,9 +122,7 @@ impl MemoryFileSystem {
let entry = by_path.get(&normalized).ok_or_else(not_found)?;
match entry {
Entry::File(file) => {
String::from_utf8(file.content.to_vec()).map_err(|_| invalid_utf8())
}
Entry::File(file) => Ok(file.content.clone()),
Entry::Directory(_) => Err(is_a_directory()),
}
}
@@ -141,7 +139,7 @@ impl MemoryFileSystem {
.get(&path.as_ref().to_path_buf())
.ok_or_else(not_found)?;
String::from_utf8(file.content.to_vec()).map_err(|_| invalid_utf8())
Ok(file.content.clone())
}
pub fn exists(&self, path: &SystemPath) -> bool {
@@ -163,7 +161,7 @@ impl MemoryFileSystem {
match by_path.entry(normalized) {
btree_map::Entry::Vacant(entry) => {
entry.insert(Entry::File(File {
content: Box::default(),
content: String::new(),
last_modified: file_time_now(),
}));
@@ -179,17 +177,13 @@ impl MemoryFileSystem {
/// Stores a new file in the file system.
///
/// The operation overrides the content for an existing file with the same normalized `path`.
pub fn write_file(
&self,
path: impl AsRef<SystemPath>,
content: impl AsRef<[u8]>,
) -> Result<()> {
pub fn write_file(&self, path: impl AsRef<SystemPath>, content: impl ToString) -> Result<()> {
let mut by_path = self.inner.by_path.write().unwrap();
let normalized = self.normalize_path(path.as_ref());
let file = get_or_create_file(&mut by_path, &normalized)?;
file.content = content.as_ref().to_vec().into_boxed_slice();
file.content = content.to_string();
file.last_modified = file_time_now();
Ok(())
@@ -220,7 +214,7 @@ impl MemoryFileSystem {
pub fn write_file_all(
&self,
path: impl AsRef<SystemPath>,
content: impl AsRef<[u8]>,
content: impl ToString,
) -> Result<()> {
let path = path.as_ref();
@@ -234,24 +228,19 @@ impl MemoryFileSystem {
/// Stores a new virtual file in the file system.
///
/// The operation overrides the content for an existing virtual file with the same `path`.
pub fn write_virtual_file(
&self,
path: impl AsRef<SystemVirtualPath>,
content: impl AsRef<[u8]>,
) {
pub fn write_virtual_file(&self, path: impl AsRef<SystemVirtualPath>, content: impl ToString) {
let path = path.as_ref();
let mut virtual_files = self.inner.virtual_files.write().unwrap();
let content = content.as_ref().to_vec().into_boxed_slice();
match virtual_files.entry(path.to_path_buf()) {
std::collections::hash_map::Entry::Vacant(entry) => {
entry.insert(File {
content,
content: content.to_string(),
last_modified: file_time_now(),
});
}
std::collections::hash_map::Entry::Occupied(mut entry) => {
entry.get_mut().content = content;
entry.get_mut().content = content.to_string();
}
}
}
@@ -479,7 +468,7 @@ impl Entry {
#[derive(Debug)]
struct File {
content: Box<[u8]>,
content: String,
last_modified: FileTime,
}
@@ -508,13 +497,6 @@ fn directory_not_empty() -> std::io::Error {
std::io::Error::other("directory not empty")
}
fn invalid_utf8() -> std::io::Error {
std::io::Error::new(
std::io::ErrorKind::InvalidData,
"stream did not contain valid UTF-8",
)
}
fn create_dir_all(
paths: &mut RwLockWriteGuard<BTreeMap<Utf8PathBuf, Entry>>,
normalized: &Utf8Path,
@@ -551,7 +533,7 @@ fn get_or_create_file<'a>(
let entry = paths.entry(normalized.to_path_buf()).or_insert_with(|| {
Entry::File(File {
content: Box::default(),
content: String::new(),
last_modified: file_time_now(),
})
});
@@ -862,7 +844,7 @@ mod tests {
let fs = with_files(["c.py"]);
let error = fs
.write_file(SystemPath::new("a/b.py"), "content")
.write_file(SystemPath::new("a/b.py"), "content".to_string())
.unwrap_err();
assert_eq!(error.kind(), ErrorKind::NotFound);
@@ -873,7 +855,7 @@ mod tests {
let fs = with_files(["a/b.py"]);
let error = fs
.write_file_all(SystemPath::new("a/b.py/c"), "content")
.write_file_all(SystemPath::new("a/b.py/c"), "content".to_string())
.unwrap_err();
assert_eq!(error.kind(), ErrorKind::Other);
@@ -896,7 +878,7 @@ mod tests {
let fs = MemoryFileSystem::new();
let path = SystemPath::new("a.py");
fs.write_file_all(path, "Test content")?;
fs.write_file_all(path, "Test content".to_string())?;
assert_eq!(fs.read_to_string(path)?, "Test content");
@@ -933,7 +915,9 @@ mod tests {
fs.create_directory_all("a")?;
let error = fs.write_file(SystemPath::new("a"), "content").unwrap_err();
let error = fs
.write_file(SystemPath::new("a"), "content".to_string())
.unwrap_err();
assert_eq!(error.kind(), ErrorKind::Other);

View File

@@ -361,17 +361,13 @@ impl WritableSystem for OsSystem {
std::fs::File::create_new(path).map(drop)
}
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
std::fs::write(path.as_std_path(), content)
}
fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
std::fs::create_dir_all(path.as_std_path())
}
fn dyn_clone(&self) -> Box<dyn WritableSystem> {
Box::new(self.clone())
}
}
impl Default for OsSystem {

View File

@@ -205,17 +205,13 @@ impl WritableSystem for TestSystem {
self.system().create_new_file(path)
}
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
self.system().write_file_bytes(path, content)
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
self.system().write_file(path, content)
}
fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
self.system().create_directory_all(path)
}
fn dyn_clone(&self) -> Box<dyn WritableSystem> {
Box::new(self.clone())
}
}
/// Extension trait for databases that use a [`WritableSystem`].
@@ -287,11 +283,7 @@ pub trait DbWithTestSystem: Db + Sized {
///
/// ## Panics
/// If the db isn't using the [`InMemorySystem`].
fn write_virtual_file(
&mut self,
path: impl AsRef<SystemVirtualPath>,
content: impl AsRef<[u8]>,
) {
fn write_virtual_file(&mut self, path: impl AsRef<SystemVirtualPath>, content: impl ToString) {
let path = path.as_ref();
self.test_system()
.memory_file_system()
@@ -330,23 +322,23 @@ where
}
}
#[derive(Clone, Default, Debug)]
#[derive(Default, Debug)]
pub struct InMemorySystem {
user_config_directory: Arc<Mutex<Option<SystemPathBuf>>>,
user_config_directory: Mutex<Option<SystemPathBuf>>,
memory_fs: MemoryFileSystem,
}
impl InMemorySystem {
pub fn new(cwd: SystemPathBuf) -> Self {
Self {
user_config_directory: Mutex::new(None).into(),
user_config_directory: Mutex::new(None),
memory_fs: MemoryFileSystem::with_current_directory(cwd),
}
}
pub fn from_memory_fs(memory_fs: MemoryFileSystem) -> Self {
Self {
user_config_directory: Mutex::new(None).into(),
user_config_directory: Mutex::new(None),
memory_fs,
}
}
@@ -448,7 +440,10 @@ impl System for InMemorySystem {
}
fn dyn_clone(&self) -> Box<dyn System> {
Box::new(self.clone())
Box::new(Self {
user_config_directory: Mutex::new(self.user_config_directory.lock().unwrap().clone()),
memory_fs: self.memory_fs.clone(),
})
}
}
@@ -457,15 +452,11 @@ impl WritableSystem for InMemorySystem {
self.memory_fs.create_new_file(path)
}
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
self.memory_fs.write_file(path, content)
}
fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
self.memory_fs.create_directory_all(path)
}
fn dyn_clone(&self) -> Box<dyn WritableSystem> {
Box::new(self.clone())
}
}

View File

@@ -26,7 +26,6 @@ use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
use crate::fix::{FixResult, fix_file};
use crate::noqa::add_noqa;
use crate::package::PackageRoot;
use crate::preview::is_py315_support_enabled;
use crate::registry::Rule;
#[cfg(any(feature = "test-rules", test))]
use crate::rules::ruff::rules::test_rules::{self, TEST_RULES, TestRule};
@@ -34,7 +33,7 @@ use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, TargetVersion, flags};
use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::{Locator, directives, fs, warn_user_once};
use crate::{Locator, directives, fs};
pub(crate) mod float;
@@ -451,14 +450,6 @@ pub fn lint_only(
) -> LinterResult {
let target_version = settings.resolve_target_version(path);
if matches!(target_version.linter_version(), PythonVersion::PY315)
&& !is_py315_support_enabled(settings)
{
warn_user_once!(
"Support for Python 3.15 is under development and may be unstable. Enable `preview` to remove this warning."
);
}
let parsed = source.into_parsed(source_kind, source_type, target_version.parser_version());
// Map row and column locations to byte slices (lazily).
@@ -564,14 +555,6 @@ pub fn lint_fix<'a>(
let target_version = settings.resolve_target_version(path);
if matches!(target_version.linter_version(), PythonVersion::PY315)
&& !is_py315_support_enabled(settings)
{
warn_user_once!(
"Support for Python 3.15 is under development and may be unstable. Enable `preview` to remove this warning."
);
}
// Continuously fix until the source code stabilizes.
loop {
// Parse once.

View File

@@ -296,8 +296,3 @@ pub(crate) const fn is_s310_resolve_string_literal_bindings_enabled(
pub(crate) const fn is_range_suppressions_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/22419
pub(crate) const fn is_py315_support_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

View File

@@ -36,16 +36,13 @@ use crate::{Fix, FixAvailability, Violation};
/// ```python
/// import logging
///
/// logging.basicConfig(level=logging.INFO)
/// logger = logging.getLogger(__name__)
///
///
/// def sum_less_than_four(a, b):
/// logger.debug("Calling sum_less_than_four")
/// return a + b < 4
///
///
/// if __name__ == "__main__":
/// logging.basicConfig(level=logging.INFO)
/// ```
///
/// ## Fix safety

View File

@@ -5,7 +5,7 @@ use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::fix::edits::add_argument;
use crate::{Fix, FixAvailability, Violation};
use crate::{AlwaysFixableViolation, Applicability, Fix};
/// ## What it does
/// Checks for uses of `subprocess.run` without an explicit `check` argument.
@@ -39,12 +39,9 @@ use crate::{Fix, FixAvailability, Violation};
/// ```
///
/// ## Fix safety
///
/// This rule's fix is marked as display-only because it's not clear whether the
/// potential exception was meant to be ignored by setting `check=False` or if
/// the author simply forgot to include `check=True`. The fix adds
/// `check=False`, making the existing behavior explicit but possibly masking
/// the original intention.
/// This rule's fix is marked as unsafe for function calls that contain
/// `**kwargs`, as adding a `check` keyword argument to such a call may lead
/// to a duplicate keyword argument error.
///
/// ## References
/// - [Python documentation: `subprocess.run`](https://docs.python.org/3/library/subprocess.html#subprocess.run)
@@ -52,18 +49,14 @@ use crate::{Fix, FixAvailability, Violation};
#[violation_metadata(stable_since = "v0.0.285")]
pub(crate) struct SubprocessRunWithoutCheck;
impl Violation for SubprocessRunWithoutCheck {
// The fix is always set on the diagnostic, but display-only fixes aren't
// considered "fixable" in the tests.
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
impl AlwaysFixableViolation for SubprocessRunWithoutCheck {
#[derive_message_formats]
fn message(&self) -> String {
"`subprocess.run` without explicit `check` argument".to_string()
}
fn fix_title(&self) -> Option<String> {
Some("Add explicit `check=False`".to_string())
fn fix_title(&self) -> String {
"Add explicit `check=False`".to_string()
}
}
@@ -81,11 +74,20 @@ pub(crate) fn subprocess_run_without_check(checker: &Checker, call: &ast::ExprCa
if call.arguments.find_keyword("check").is_none() {
let mut diagnostic =
checker.report_diagnostic(SubprocessRunWithoutCheck, call.func.range());
diagnostic.set_fix(Fix::display_only_edit(add_argument(
"check=False",
&call.arguments,
checker.tokens(),
)));
diagnostic.set_fix(Fix::applicable_edit(
add_argument("check=False", &call.arguments, checker.tokens()),
// If the function call contains `**kwargs`, mark the fix as unsafe.
if call
.arguments
.keywords
.iter()
.any(|keyword| keyword.arg.is_none())
{
Applicability::Unsafe
} else {
Applicability::Safe
},
));
}
}
}

View File

@@ -19,7 +19,6 @@ help: Add explicit `check=False`
5 | subprocess.run("ls", shell=True)
6 | subprocess.run(
7 | ["ls"],
note: This is a display-only fix and is likely to be incorrect
PLW1510 [*] `subprocess.run` without explicit `check` argument
--> subprocess_run_without_check.py:5:1
@@ -40,7 +39,6 @@ help: Add explicit `check=False`
6 | subprocess.run(
7 | ["ls"],
8 | shell=False,
note: This is a display-only fix and is likely to be incorrect
PLW1510 [*] `subprocess.run` without explicit `check` argument
--> subprocess_run_without_check.py:6:1
@@ -61,7 +59,6 @@ help: Add explicit `check=False`
9 | )
10 | subprocess.run(["ls"], **kwargs)
11 |
note: This is a display-only fix and is likely to be incorrect
PLW1510 [*] `subprocess.run` without explicit `check` argument
--> subprocess_run_without_check.py:10:1
@@ -82,4 +79,4 @@ help: Add explicit `check=False`
11 |
12 | # Non-errors.
13 | subprocess.run("ls", check=True)
note: This is a display-only fix and is likely to be incorrect
note: This is an unsafe fix and may change runtime behavior

View File

@@ -7,7 +7,7 @@ source: crates/ruff_linter/src/rules/ruff/mod.rs
--- Summary ---
Removed: 15
Added: 20
Added: 23
--- Removed ---
E741 Ambiguous variable name: `I`
@@ -301,7 +301,6 @@ RUF100 [*] Unused suppression (non-enabled: `E501`)
| ^^^^^^^^^^^^^^^^^^^^^
47 | I = 1
48 | # ruff: enable[E501]
| --------------------
|
help: Remove unused suppression
43 | def f():
@@ -309,10 +308,26 @@ help: Remove unused suppression
45 | # logged to user
- # ruff: disable[E501]
46 | I = 1
- # ruff: enable[E501]
47 |
47 | # ruff: enable[E501]
48 |
49 | def f():
RUF100 [*] Unused suppression (non-enabled: `E501`)
--> suppressions.py:48:5
|
46 | # ruff: disable[E501]
47 | I = 1
48 | # ruff: enable[E501]
| ^^^^^^^^^^^^^^^^^^^^
|
help: Remove unused suppression
45 | # logged to user
46 | # ruff: disable[E501]
47 | I = 1
- # ruff: enable[E501]
48 |
49 |
50 | def f():
RUF100 [*] Unused `noqa` directive (unused: `E741`, `F841`)
@@ -548,9 +563,6 @@ RUF102 [*] Invalid rule code in suppression: YF829
| ^^^^^
94 | # ruff: disable[F841, RQW320]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
| -----
|
help: Remove the rule code
90 |
@@ -560,10 +572,6 @@ help: Remove the rule code
93 | # ruff: disable[F841, RQW320]
94 | value = 0
95 | # ruff: enable[F841, RQW320]
- # ruff: enable[YF829]
96 |
97 |
98 | def f():
RUF102 [*] Invalid rule code in suppression: RQW320
@@ -575,8 +583,6 @@ RUF102 [*] Invalid rule code in suppression: RQW320
| ^^^^^^
95 | value = 0
96 | # ruff: enable[F841, RQW320]
| ------
97 | # ruff: enable[YF829]
|
help: Remove the rule code
91 | def f():
@@ -584,6 +590,23 @@ help: Remove the rule code
93 | # ruff: disable[YF829]
- # ruff: disable[F841, RQW320]
94 + # ruff: disable[F841]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
RUF102 [*] Invalid rule code in suppression: RQW320
--> suppressions.py:96:26
|
94 | # ruff: disable[F841, RQW320]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
| ^^^^^^
97 | # ruff: enable[YF829]
|
help: Remove the rule code
93 | # ruff: disable[YF829]
94 | # ruff: disable[F841, RQW320]
95 | value = 0
- # ruff: enable[F841, RQW320]
96 + # ruff: enable[F841]
@@ -592,6 +615,24 @@ help: Remove the rule code
99 |
RUF102 [*] Invalid rule code in suppression: YF829
--> suppressions.py:97:20
|
95 | value = 0
96 | # ruff: enable[F841, RQW320]
97 | # ruff: enable[YF829]
| ^^^^^
|
help: Remove the rule code
94 | # ruff: disable[F841, RQW320]
95 | value = 0
96 | # ruff: enable[F841, RQW320]
- # ruff: enable[YF829]
97 |
98 |
99 | def f():
RUF103 [*] Invalid suppression comment: missing suppression codes like `[E501, ...]`
--> suppressions.py:109:5
|

View File

@@ -36,7 +36,6 @@ pub enum PythonVersion {
Py312,
Py313,
Py314,
Py315,
}
impl Default for PythonVersion {
@@ -59,7 +58,6 @@ impl TryFrom<ast::PythonVersion> for PythonVersion {
ast::PythonVersion::PY312 => Ok(Self::Py312),
ast::PythonVersion::PY313 => Ok(Self::Py313),
ast::PythonVersion::PY314 => Ok(Self::Py314),
ast::PythonVersion::PY315 => Ok(Self::Py315),
_ => Err(format!("unrecognized python version {value}")),
}
}
@@ -90,7 +88,6 @@ impl PythonVersion {
Self::Py312 => (3, 12),
Self::Py313 => (3, 13),
Self::Py314 => (3, 14),
Self::Py315 => (3, 15),
}
}
}

View File

@@ -13,6 +13,7 @@ use ruff_python_trivia::Cursor;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize, TextSlice};
use smallvec::{SmallVec, smallvec};
use crate::Locator;
use crate::checkers::ast::LintContext;
use crate::codes::Rule;
use crate::fix::edits::delete_comment;
@@ -23,7 +24,6 @@ use crate::rules::ruff::rules::{
UnmatchedSuppressionComment, UnusedCodes, UnusedNOQA, UnusedNOQAKind, code_is_valid,
};
use crate::settings::LinterSettings;
use crate::{Locator, Violation};
#[derive(Clone, Debug, Eq, PartialEq)]
enum SuppressionAction {
@@ -85,33 +85,11 @@ pub(crate) struct Suppression {
/// Range for which the suppression applies
range: TextRange,
/// Any comments associated with the suppression
comments: SmallVec<[SuppressionComment; 2]>,
/// Whether this suppression actually suppressed a diagnostic
used: Cell<bool>,
comments: DisableEnableComments,
}
#[derive(Debug)]
pub(crate) enum DisableEnableComments {
/// An implicitly closed disable comment without a matching enable comment.
Disable(SuppressionComment),
/// A matching pair of disable and enable comments.
DisableEnable(SuppressionComment, SuppressionComment),
}
impl DisableEnableComments {
pub(crate) fn disable_comment(&self) -> &SuppressionComment {
match self {
DisableEnableComments::Disable(comment) => comment,
DisableEnableComments::DisableEnable(disable, _) => disable,
}
}
pub(crate) fn enable_comment(&self) -> Option<&SuppressionComment> {
match self {
DisableEnableComments::Disable(_) => None,
DisableEnableComments::DisableEnable(_, enable) => Some(enable),
}
}
}
#[derive(Copy, Clone, Debug)]
@@ -193,16 +171,23 @@ impl Suppressions {
if !code_is_valid(&suppression.code, &context.settings().external) {
// InvalidRuleCode
if context.is_rule_enabled(Rule::InvalidRuleCode) {
Suppressions::report_suppression(
context,
locator,
suppression,
true,
InvalidRuleCode {
rule_code: suppression.code.to_string(),
kind: InvalidRuleCodeKind::Suppression,
},
);
for comment in &suppression.comments {
let (range, edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
comment,
true,
);
context
.report_diagnostic(
InvalidRuleCode {
rule_code: suppression.code.to_string(),
kind: InvalidRuleCodeKind::Suppression,
},
range,
)
.set_fix(Fix::safe_edit(edit));
}
}
} else if !suppression.used.get() {
// UnusedNOQA
@@ -212,37 +197,42 @@ impl Suppressions {
) else {
continue; // "external" lint code, don't treat it as unused
};
for comment in &suppression.comments {
let (range, edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
comment,
false,
);
let codes = if context.is_rule_enabled(rule) {
UnusedCodes {
unmatched: vec![suppression.code.to_string()],
..Default::default()
}
} else {
UnusedCodes {
disabled: vec![suppression.code.to_string()],
..Default::default()
}
};
let codes = if context.is_rule_enabled(rule) {
UnusedCodes {
unmatched: vec![suppression.code.to_string()],
..Default::default()
}
} else {
UnusedCodes {
disabled: vec![suppression.code.to_string()],
..Default::default()
}
};
Suppressions::report_suppression(
context,
locator,
suppression,
false,
UnusedNOQA {
codes: Some(codes),
kind: UnusedNOQAKind::Suppression,
},
);
context
.report_diagnostic(
UnusedNOQA {
codes: Some(codes),
kind: UnusedNOQAKind::Suppression,
},
range,
)
.set_fix(Fix::safe_edit(edit));
}
}
} else if let DisableEnableComments::Disable(comment) = &suppression.comments {
} else if suppression.comments.len() == 1 {
// UnmatchedSuppressionComment
if unmatched_ranges.insert(comment.range) {
context.report_diagnostic_if_enabled(
UnmatchedSuppressionComment {},
comment.range,
);
let range = suppression.comments[0].range;
if unmatched_ranges.insert(range) {
context.report_diagnostic_if_enabled(UnmatchedSuppressionComment {}, range);
}
}
}
@@ -277,35 +267,6 @@ impl Suppressions {
}
}
fn report_suppression<T: Violation>(
context: &LintContext,
locator: &Locator,
suppression: &Suppression,
highlight_only_code: bool,
kind: T,
) {
let disable_comment = suppression.comments.disable_comment();
let (range, edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
disable_comment,
highlight_only_code,
);
let mut diagnostic = context.report_diagnostic(kind, range);
if let Some(enable_comment) = suppression.comments.enable_comment() {
let (enable_range, enable_range_edit) = Suppressions::delete_code_or_comment(
locator,
suppression,
enable_comment,
highlight_only_code,
);
diagnostic.secondary_annotation("", enable_range);
diagnostic.set_fix(Fix::safe_edits(edit, [enable_range_edit]));
} else {
diagnostic.set_fix(Fix::safe_edit(edit));
}
}
fn delete_code_or_comment(
locator: &Locator<'_>,
suppression: &Suppression,
@@ -463,10 +424,7 @@ impl<'a> SuppressionsBuilder<'a> {
self.valid.push(Suppression {
code: code.into(),
range: combined_range,
comments: DisableEnableComments::DisableEnable(
comment.comment.clone(),
other.comment.clone(),
),
comments: smallvec![comment.comment.clone(), other.comment.clone()],
used: false.into(),
});
}
@@ -483,7 +441,7 @@ impl<'a> SuppressionsBuilder<'a> {
self.valid.push(Suppression {
code: code.into(),
range: implicit_range,
comments: DisableEnableComments::Disable(comment.comment.clone()),
comments: smallvec![comment.comment.clone()],
used: false.into(),
});
}
@@ -685,7 +643,7 @@ mod tests {
use insta::assert_debug_snapshot;
use itertools::Itertools;
use ruff_python_parser::{Mode, ParseOptions, parse};
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_text_size::{TextRange, TextSize};
use similar::DiffableStr;
use crate::{
@@ -747,22 +705,24 @@ print('hello')
Suppression {
covered_source: "# ruff: disable[foo]\nprint('hello')\n# ruff: enable[foo]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
],
},
],
invalid: [],
@@ -791,28 +751,30 @@ def foo():
Suppression {
covered_source: "# ruff: disable[bar]\n print('hello')\n\n",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo]\nprint('hello')\n\ndef foo():\n # ruff: disable[bar]\n print('hello')\n\n",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
],
},
],
invalid: [],
@@ -841,42 +803,46 @@ class Foo:
Suppression {
covered_source: "# ruff: disable[bar]\n print('hello')\n # ruff: enable[bar]",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[bar]",
action: Enable,
codes: [
"bar",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[bar]",
action: Enable,
codes: [
"bar",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo]\n def bar(self):\n # ruff: disable[bar]\n print('hello')\n # ruff: enable[bar]\n # ruff: enable[foo]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
],
},
],
invalid: [],
@@ -906,42 +872,46 @@ def foo():
Suppression {
covered_source: "# ruff: disable[foo]\n print('hello')\n # ruff: disable[bar]\n print('hello')\n # ruff: enable[foo]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[bar]\n print('hello')\n # ruff: enable[foo]\n print('hello')\n # ruff: enable[bar]",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[bar]",
action: Enable,
codes: [
"bar",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[bar]",
action: Disable,
codes: [
"bar",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[bar]",
action: Enable,
codes: [
"bar",
],
reason: "",
},
],
},
],
invalid: [],
@@ -966,46 +936,50 @@ print('hello')
Suppression {
covered_source: "# ruff: disable[foo, bar]\nprint('hello')\n# ruff: enable[foo, bar]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo, bar]",
action: Enable,
codes: [
"foo",
"bar",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo, bar]",
action: Enable,
codes: [
"foo",
"bar",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo, bar]\nprint('hello')\n# ruff: enable[foo, bar]",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo, bar]",
action: Enable,
codes: [
"foo",
"bar",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[foo, bar]",
action: Enable,
codes: [
"foo",
"bar",
],
reason: "",
},
],
},
],
invalid: [],
@@ -1031,15 +1005,16 @@ print('world')
Suppression {
covered_source: "# ruff: disable[foo]\nprint('hello')\n# ruff: enable[bar]\nprint('world')\n",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo]",
action: Disable,
codes: [
"foo",
],
reason: "",
},
],
},
],
invalid: [
@@ -1076,30 +1051,32 @@ print('hello')
Suppression {
covered_source: "# ruff: disable[foo, bar]\nprint('hello')\n# ruff: enable[bar, foo]\n",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo, bar]\nprint('hello')\n# ruff: enable[bar, foo]\n",
code: "bar",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo, bar]",
action: Disable,
codes: [
"foo",
"bar",
],
reason: "",
},
],
},
],
invalid: [
@@ -1139,35 +1116,38 @@ print('hello')
Suppression {
covered_source: "# ruff: disable[foo] first\nprint('hello')\n# ruff: disable[foo] second\nprint('hello')\n# ruff: enable[foo]",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo] first",
action: Disable,
codes: [
"foo",
],
reason: "first",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[foo] first",
action: Disable,
codes: [
"foo",
],
reason: "first",
},
SuppressionComment {
text: "# ruff: enable[foo]",
action: Enable,
codes: [
"foo",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[foo] second\nprint('hello')\n# ruff: enable[foo]\n",
code: "foo",
disable_comment: SuppressionComment {
text: "# ruff: disable[foo] second",
action: Disable,
codes: [
"foo",
],
reason: "second",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[foo] second",
action: Disable,
codes: [
"foo",
],
reason: "second",
},
],
},
],
invalid: [],
@@ -1209,92 +1189,100 @@ def bar():
Suppression {
covered_source: "# ruff: disable[delta] unmatched\n pass\n # ruff: enable[beta,gamma]\n# ruff: enable[alpha]\n\n# ruff: disable # parse error!\n",
code: "delta",
disable_comment: SuppressionComment {
text: "# ruff: disable[delta] unmatched",
action: Disable,
codes: [
"delta",
],
reason: "unmatched",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[delta] unmatched",
action: Disable,
codes: [
"delta",
],
reason: "unmatched",
},
],
},
Suppression {
covered_source: "# ruff: disable[beta,gamma]\n if True:\n # ruff: disable[delta] unmatched\n pass\n # ruff: enable[beta,gamma]",
code: "beta",
disable_comment: SuppressionComment {
text: "# ruff: disable[beta,gamma]",
action: Disable,
codes: [
"beta",
"gamma",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[beta,gamma]",
action: Enable,
codes: [
"beta",
"gamma",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[beta,gamma]",
action: Disable,
codes: [
"beta",
"gamma",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[beta,gamma]",
action: Enable,
codes: [
"beta",
"gamma",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[beta,gamma]\n if True:\n # ruff: disable[delta] unmatched\n pass\n # ruff: enable[beta,gamma]",
code: "gamma",
disable_comment: SuppressionComment {
text: "# ruff: disable[beta,gamma]",
action: Disable,
codes: [
"beta",
"gamma",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[beta,gamma]",
action: Enable,
codes: [
"beta",
"gamma",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[beta,gamma]",
action: Disable,
codes: [
"beta",
"gamma",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[beta,gamma]",
action: Enable,
codes: [
"beta",
"gamma",
],
reason: "",
},
],
},
Suppression {
covered_source: "# ruff: disable[zeta] unmatched\n pass\n# ruff: enable[zeta] underindented\n pass\n",
code: "zeta",
disable_comment: SuppressionComment {
text: "# ruff: disable[zeta] unmatched",
action: Disable,
codes: [
"zeta",
],
reason: "unmatched",
},
enable_comment: None,
comments: [
SuppressionComment {
text: "# ruff: disable[zeta] unmatched",
action: Disable,
codes: [
"zeta",
],
reason: "unmatched",
},
],
},
Suppression {
covered_source: "# ruff: disable[alpha]\ndef foo():\n # ruff: disable[beta,gamma]\n if True:\n # ruff: disable[delta] unmatched\n pass\n # ruff: enable[beta,gamma]\n# ruff: enable[alpha]",
code: "alpha",
disable_comment: SuppressionComment {
text: "# ruff: disable[alpha]",
action: Disable,
codes: [
"alpha",
],
reason: "",
},
enable_comment: SuppressionComment {
text: "# ruff: enable[alpha]",
action: Enable,
codes: [
"alpha",
],
reason: "",
},
comments: [
SuppressionComment {
text: "# ruff: disable[alpha]",
action: Disable,
codes: [
"alpha",
],
reason: "",
},
SuppressionComment {
text: "# ruff: enable[alpha]",
action: Enable,
codes: [
"alpha",
],
reason: "",
},
],
},
],
invalid: [
@@ -1544,8 +1532,10 @@ def bar():
#[test]
fn comment_attributes() {
let source = "# ruff: disable[foo, bar] hello world";
let mut parser =
SuppressionParser::new(source, TextRange::new(0.into(), source.text_len()));
let mut parser = SuppressionParser::new(
source,
TextRange::new(0.into(), TextSize::try_from(source.len()).unwrap()),
);
let comment = parser.parse_comment().unwrap();
assert_eq!(comment.action, SuppressionAction::Disable);
assert_eq!(
@@ -1564,12 +1554,12 @@ def bar():
source: &'_ str,
) -> Result<DebugSuppressionComment<'_>, ParseError> {
let offset = TextSize::new(source.find('#').unwrap_or(0).try_into().unwrap());
let mut parser = SuppressionParser::new(source, TextRange::new(offset, source.text_len()));
let mut parser = SuppressionParser::new(
source,
TextRange::new(offset, TextSize::try_from(source.len()).unwrap()),
);
match parser.parse_comment() {
Ok(comment) => Ok(DebugSuppressionComment {
source,
comment: Some(comment),
}),
Ok(comment) => Ok(DebugSuppressionComment { source, comment }),
Err(error) => Err(error),
}
}
@@ -1649,18 +1639,16 @@ def bar():
.field("covered_source", &&self.source[self.suppression.range])
.field("code", &self.suppression.code)
.field(
"disable_comment",
&DebugSuppressionComment {
source: self.source,
comment: Some(self.suppression.comments.disable_comment().clone()),
},
)
.field(
"enable_comment",
&DebugSuppressionComment {
source: self.source,
comment: self.suppression.comments.enable_comment().cloned(),
},
"comments",
&self
.suppression
.comments
.iter()
.map(|comment| DebugSuppressionComment {
source: self.source,
comment: comment.clone(),
})
.collect_vec(),
)
.finish()
}
@@ -1679,7 +1667,7 @@ def bar():
"comment",
&DebugSuppressionComment {
source: self.source,
comment: Some(self.invalid.comment.clone()),
comment: self.invalid.comment.clone(),
},
)
.finish()
@@ -1702,27 +1690,23 @@ def bar():
struct DebugSuppressionComment<'a> {
source: &'a str,
comment: Option<SuppressionComment>,
comment: SuppressionComment,
}
impl fmt::Debug for DebugSuppressionComment<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match &self.comment {
Some(comment) => f
.debug_struct("SuppressionComment")
.field("text", &&self.source[comment.range])
.field("action", &comment.action)
.field(
"codes",
&DebugCodes {
source: self.source,
codes: &comment.codes,
},
)
.field("reason", &&self.source[comment.reason])
.finish(),
None => f.debug_tuple("None").finish(),
}
f.debug_struct("SuppressionComment")
.field("text", &&self.source[self.comment.range])
.field("action", &self.comment.action)
.field(
"codes",
&DebugCodes {
source: self.source,
codes: &self.comment.codes,
},
)
.field("reason", &&self.source[self.comment.reason])
.finish()
}
}

View File

@@ -35,10 +35,6 @@ impl PythonVersion {
major: 3,
minor: 14,
};
pub const PY315: PythonVersion = PythonVersion {
major: 3,
minor: 15,
};
pub fn iter() -> impl Iterator<Item = PythonVersion> {
[
@@ -50,7 +46,6 @@ impl PythonVersion {
PythonVersion::PY312,
PythonVersion::PY313,
PythonVersion::PY314,
PythonVersion::PY315,
]
.into_iter()
}
@@ -66,7 +61,7 @@ impl PythonVersion {
/// The latest Python version supported in preview
pub fn latest_preview() -> Self {
let latest_preview = Self::PY315;
let latest_preview = Self::PY314;
debug_assert!(latest_preview >= Self::latest());
latest_preview
}

View File

@@ -91,22 +91,20 @@ def example(session):
.all()
# fmt: on
def off_and_on_without_data():
"""Test that comment-only fmt:off/on blocks preserve formatting."""
# fmt: off
#should not be formatted
# fmt: on
"""All comments here are technically on the same prefix.
The comments between will be formatted. This is a known limitation.
"""
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
#hey, that won't work
# fmt: on
pass
def on_and_off_with_comment_only_blocks():
"""Test that fmt:off/on works with multiple directives and comment-only blocks."""
def on_and_off_broken():
"""Another known limitation."""
# fmt: on
# fmt: off
this=should.not_be.formatted()
@@ -115,16 +113,7 @@ def on_and_off_with_comment_only_blocks():
now . considers . multiple . fmt . directives . within . one . prefix
# fmt: on
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
# ...but comments still get reformatted even though they should not be
# fmt: on
def long_lines():
if True:
@@ -189,50 +178,6 @@ cfg.rule(
# fmt: on
xxxxxxxxxx_xxxxxxxxxxx_xxxxxxx_xxxxxxxxx=5
)
# Test comment-only blocks at file level with various spacing patterns
# fmt: off
#nospace
# twospaces
# fmt: on
# fmt: off
#nospaceatall
#extraspaces
#evenmorespaces
# fmt: on
# fmt: off
# fmt: on
# fmt: off
#SBATCH --job-name=test
#SBATCH --output=test.out
# fmt: on
# fmt: off
#first
#second
# fmt: on
# fmt: off
#!@#$%^&*()
#<=>+-*/
# fmt: on
# fmt: off
#x=1+2
#y = 3
#z = 4
# fmt: on
# fmt: off
yield 'hello'
# No formatting to the end of the file

View File

@@ -112,42 +112,29 @@ def example(session):
def off_and_on_without_data():
"""Test that comment-only fmt:off/on blocks preserve formatting."""
# fmt: off
#should not be formatted
# fmt: on
"""All comments here are technically on the same prefix.
The comments between will be formatted. This is a known limitation.
"""
# fmt: off
#should not be formatted
# hey, that won't work
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
# fmt: on
pass
def on_and_off_with_comment_only_blocks():
"""Test that fmt:off/on works with multiple directives and comment-only blocks."""
def on_and_off_broken():
"""Another known limitation."""
# fmt: on
# fmt: off
this=should.not_be.formatted()
and_=indeed . it is not formatted
because . the . handling . inside . generate_ignored_nodes()
now . considers . multiple . fmt . directives . within . one . prefix
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
# ...but comments still get reformatted even though they should not be
# fmt: on
@@ -224,50 +211,6 @@ cfg.rule(
# fmt: on
xxxxxxxxxx_xxxxxxxxxxx_xxxxxxx_xxxxxxxxx=5,
)
# Test comment-only blocks at file level with various spacing patterns
# fmt: off
#nospace
# twospaces
# fmt: on
# fmt: off
#nospaceatall
#extraspaces
#evenmorespaces
# fmt: on
# fmt: off
# fmt: on
# fmt: off
#SBATCH --job-name=test
#SBATCH --output=test.out
# fmt: on
# fmt: off
#first
#second
# fmt: on
# fmt: off
#!@#$%^&*()
#<=>+-*/
# fmt: on
# fmt: off
#x=1+2
#y = 3
#z = 4
# fmt: on
# fmt: off
yield 'hello'
# No formatting to the end of the file

View File

@@ -1,21 +1,8 @@
def foo(): return "mock" # fmt: skip
if True: print("yay") # fmt: skip
for i in range(10): print(i) # fmt: skip
if True: print("this"); print("that") # fmt: skip
while True: print("loop"); break # fmt: skip
for x in [1, 2]: print(x); print("done") # fmt: skip
def f(x: int): return x # fmt: skip
j = 1 # fmt: skip
while j < 10: j += 1 # fmt: skip
b = [c for c in "A very long string that would normally generate some kind of collapse, since it is this long"] # fmt: skip
v = (
foo_dict # fmt: skip
.setdefault("a", {})
.setdefault("b", {})
.setdefault("c", {})
.setdefault("d", {})
.setdefault("e", {})
)

View File

@@ -1,21 +1,8 @@
def foo(): return "mock" # fmt: skip
if True: print("yay") # fmt: skip
for i in range(10): print(i) # fmt: skip
if True: print("this"); print("that") # fmt: skip
while True: print("loop"); break # fmt: skip
for x in [1, 2]: print(x); print("done") # fmt: skip
def f(x: int): return x # fmt: skip
j = 1 # fmt: skip
while j < 10: j += 1 # fmt: skip
b = [c for c in "A very long string that would normally generate some kind of collapse, since it is this long"] # fmt: skip
v = (
foo_dict # fmt: skip
.setdefault("a", {})
.setdefault("b", {})
.setdefault("c", {})
.setdefault("d", {})
.setdefault("e", {})
)

View File

@@ -4,84 +4,3 @@ def foo():
# comment 1 # fmt: skip
# comment 2
[
(1, 2),
# # fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3,
4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3,
4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3,
4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3,
4),
# fmt: on
(5, 6),
]
if False:
# fmt: off # some other comment
pass

View File

@@ -4,84 +4,3 @@ def foo():
# comment 1 # fmt: skip
# comment 2
[
(1, 2),
# # fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3,
4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3,
4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3,
4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3,
4),
# fmt: on
(5, 6),
]
if False:
# fmt: off # some other comment
pass

View File

@@ -1 +0,0 @@
{"preview": "enabled"}

View File

@@ -1,8 +0,0 @@
with open("file.txt") as f: content = f.read() # fmt: skip
# Ideally, only the last line would be ignored
# But ignoring only part of the asexpr_test causes a parse error
# Same with ignoring the asexpr_test without also ignoring the entire with_stmt
with open (
"file.txt" ,
) as f: content = f.read() # fmt: skip

View File

@@ -1,8 +0,0 @@
with open("file.txt") as f: content = f.read() # fmt: skip
# Ideally, only the last line would be ignored
# But ignoring only part of the asexpr_test causes a parse error
# Same with ignoring the asexpr_test without also ignoring the entire with_stmt
with open (
"file.txt" ,
) as f: content = f.read() # fmt: skip

View File

@@ -1 +0,0 @@
{"preview": "enabled"}

View File

@@ -1,28 +0,0 @@
t = (
{"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{"foo": "bar"},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{"foo": "bar"},
)
t = (
{"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{"foo": "bar",},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{"foo": "bar",},
)

View File

@@ -1,32 +0,0 @@
t = (
{"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{"foo": "bar"},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{"foo": "bar"},
)
t = (
{"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{
"foo": "bar",
},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{
"foo": "bar",
},
)

View File

@@ -1,4 +1,4 @@
a = "this is some code"
b = 5 # fmt:skip
b = 5 #fmt:skip
c = 9 #fmt: skip
d = "thisisasuperlongstringthisisasuperlongstringthisisasuperlongstringthisisasuperlongstring" # fmt:skip
d = "thisisasuperlongstringthisisasuperlongstringthisisasuperlongstringthisisasuperlongstring" #fmt:skip

View File

@@ -1,4 +1,4 @@
a = "this is some code"
b = 5 # fmt:skip
c = 9 #fmt: skip
d = "thisisasuperlongstringthisisasuperlongstringthisisasuperlongstringthisisasuperlongstring" # fmt:skip
b = 5 # fmt:skip
c = 9 # fmt: skip
d = "thisisasuperlongstringthisisasuperlongstringthisisasuperlongstringthisisasuperlongstring" # fmt:skip

View File

@@ -1,19 +0,0 @@
# Multiple fmt: skip in multi-part if-clause
class ClassWithALongName:
Constant1 = 1
Constant2 = 2
Constant3 = 3
def test():
if (
"cond1" == "cond1"
and "cond2" == "cond2"
and 1 in (
ClassWithALongName.Constant1,
ClassWithALongName.Constant2,
ClassWithALongName.Constant3, # fmt: skip
) # fmt: skip
):
return True
return False

View File

@@ -1,19 +0,0 @@
# Multiple fmt: skip in multi-part if-clause
class ClassWithALongName:
Constant1 = 1
Constant2 = 2
Constant3 = 3
def test():
if (
"cond1" == "cond1"
and "cond2" == "cond2"
and 1 in (
ClassWithALongName.Constant1,
ClassWithALongName.Constant2,
ClassWithALongName.Constant3, # fmt: skip
) # fmt: skip
):
return True
return False

View File

@@ -1,35 +0,0 @@
# Multiple fmt: skip on string literals
a = (
"this should " # fmt: skip
"be fine"
)
b = (
"this is " # fmt: skip
"not working" # fmt: skip
)
c = (
"and neither " # fmt: skip
"is this " # fmt: skip
"working"
)
d = (
"nor "
"is this " # fmt: skip
"working" # fmt: skip
)
e = (
"and this " # fmt: skip
"is definitely "
"not working" # fmt: skip
)
# Dictionary entries with fmt: skip (covers issue with long lines)
hotkeys = {
"editor:swap-line-down": [{"key": "ArrowDown", "modifiers": ["Alt", "Mod"]}], # fmt: skip
"editor:swap-line-up": [{"key": "ArrowUp", "modifiers": ["Alt", "Mod"]}], # fmt: skip
"editor:toggle-source": [{"key": "S", "modifiers": ["Alt", "Mod"]}], # fmt: skip
}

View File

@@ -1,35 +0,0 @@
# Multiple fmt: skip on string literals
a = (
"this should " # fmt: skip
"be fine"
)
b = (
"this is " # fmt: skip
"not working" # fmt: skip
)
c = (
"and neither " # fmt: skip
"is this " # fmt: skip
"working"
)
d = (
"nor "
"is this " # fmt: skip
"working" # fmt: skip
)
e = (
"and this " # fmt: skip
"is definitely "
"not working" # fmt: skip
)
# Dictionary entries with fmt: skip (covers issue with long lines)
hotkeys = {
"editor:swap-line-down": [{"key": "ArrowDown", "modifiers": ["Alt", "Mod"]}], # fmt: skip
"editor:swap-line-up": [{"key": "ArrowUp", "modifiers": ["Alt", "Mod"]}], # fmt: skip
"editor:toggle-source": [{"key": "S", "modifiers": ["Alt", "Mod"]}], # fmt: skip
}

View File

@@ -1,24 +0,0 @@
# Test that Jupytext markdown comments are preserved before fmt:off/on blocks
# %% [markdown]
# fmt: off
# fmt: on
# Also test with other comments
# Some comment
# %% [markdown]
# Another comment
# fmt: off
x = 1
# fmt: on
# Test multiple markdown comments
# %% [markdown]
# First markdown
# %% [code]
# Code cell
# fmt: off
y = 2
# fmt: on

View File

@@ -1,24 +0,0 @@
# Test that Jupytext markdown comments are preserved before fmt:off/on blocks
# %% [markdown]
# fmt: off
# fmt: on
# Also test with other comments
# Some comment
# %% [markdown]
# Another comment
# fmt: off
x = 1
# fmt: on
# Test multiple markdown comments
# %% [markdown]
# First markdown
# %% [code]
# Code cell
# fmt: off
y = 2
# fmt: on

View File

@@ -1 +0,0 @@
{"target_version": "3.14"}

View File

@@ -1,40 +0,0 @@
x = t"foo"
x = t'foo {{ {2 + 2}bar {{ baz'
x = t"foo {f'abc'} bar"
x = t"""foo {{ a
foo {2 + 2}bar {{ baz
x = f"foo {{ {
2 + 2 # comment
}bar"
{{ baz
}} buzz
{print("abc" + "def"
)}
abc"""
t'{(abc:=10)}'
t'''This is a really long string, but just make sure that you reflow tstrings {
2+2:d
}'''
t'This is a really long string, but just make sure that you reflow tstrings correctly {2+2:d}'
t"{ 2 + 2 = }"
t'{
X
!r
}'
tr'\{{\}}'
t'''
WITH {f'''
{1}_cte AS ()'''}
'''

View File

@@ -1,40 +0,0 @@
x = t"foo"
x = t"foo {{ {2 + 2}bar {{ baz"
x = t"foo {f'abc'} bar"
x = t"""foo {{ a
foo {2 + 2}bar {{ baz
x = f"foo {{ {
2 + 2 # comment
}bar"
{{ baz
}} buzz
{print("abc" + "def"
)}
abc"""
t"{(abc:=10)}"
t"""This is a really long string, but just make sure that you reflow tstrings {
2+2:d
}"""
t"This is a really long string, but just make sure that you reflow tstrings correctly {2+2:d}"
t"{ 2 + 2 = }"
t"{
X
!r
}"
rt"\{{\}}"
t"""
WITH {f'''
{1}_cte AS ()'''}
"""

View File

@@ -1,19 +0,0 @@
# Regression test for https://github.com/psf/black/issues/3438
import ast
import collections # fmt: skip
import dataclasses
# fmt: off
import os
# fmt: on
import pathlib
import re # fmt: skip
import secrets
# fmt: off
import sys
# fmt: on
import tempfile
import zoneinfo

View File

@@ -1,19 +0,0 @@
# Regression test for https://github.com/psf/black/issues/3438
import ast
import collections # fmt: skip
import dataclasses
# fmt: off
import os
# fmt: on
import pathlib
import re # fmt: skip
import secrets
# fmt: off
import sys
# fmt: on
import tempfile
import zoneinfo

View File

@@ -156,6 +156,24 @@ Please use `--build-option` instead,
`--global-option` is reserved to flags like `--verbose` or `--quiet`.
"""
this_will_become_one_line = (
"a"
"b"
"c"
)
this_will_stay_on_three_lines = (
"a" # comment
"b"
"c"
)
this_will_also_become_one_line = ( # comment
"a"
"b"
"c"
)
assert some_var == expected_result, """
test
"""

View File

@@ -198,6 +198,16 @@ Please use `--build-option` instead,
`--global-option` is reserved to flags like `--verbose` or `--quiet`.
"""
this_will_become_one_line = "abc"
this_will_stay_on_three_lines = (
"a" # comment
"b"
"c"
)
this_will_also_become_one_line = "abc" # comment
assert some_var == expected_result, """
test
"""

View File

@@ -1,10 +0,0 @@
def foo(
a, #type:int
b, #type: str
c, # type: List[int]
d, # type: Dict[int, str]
e, # type: ignore
f, # type : ignore
g, # type : ignore
):
pass

View File

@@ -1,10 +0,0 @@
def foo(
a, # type: int
b, # type: str
c, # type: List[int]
d, # type: Dict[int, str]
e, # type: ignore
f, # type : ignore
g, # type : ignore
):
pass

View File

@@ -1,16 +0,0 @@
# Remove unnecessary parentheses from LHS of assignments
def a():
return [1, 2, 3]
# Single variable with unnecessary parentheses
(b) = a()[0]
# Tuple unpacking with unnecessary parentheses
(c, *_) = a()
# These should not be changed - parentheses are necessary
(d,) = a() # single-element tuple
e = (1 + 2) * 3 # RHS has precedence needs

View File

@@ -1,16 +0,0 @@
# Remove unnecessary parentheses from LHS of assignments
def a():
return [1, 2, 3]
# Single variable with unnecessary parentheses
b = a()[0]
# Tuple unpacking with unnecessary parentheses
c, *_ = a()
# These should not be changed - parentheses are necessary
(d,) = a() # single-element tuple
e = (1 + 2) * 3 # RHS has precedence needs

View File

@@ -1,5 +1,6 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/black/cases/fmtonoff.py
---
## Input
@@ -97,22 +98,20 @@ def example(session):
.all()
# fmt: on
def off_and_on_without_data():
"""Test that comment-only fmt:off/on blocks preserve formatting."""
# fmt: off
#should not be formatted
# fmt: on
"""All comments here are technically on the same prefix.
The comments between will be formatted. This is a known limitation.
"""
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
#hey, that won't work
# fmt: on
pass
def on_and_off_with_comment_only_blocks():
"""Test that fmt:off/on works with multiple directives and comment-only blocks."""
def on_and_off_broken():
"""Another known limitation."""
# fmt: on
# fmt: off
this=should.not_be.formatted()
@@ -121,16 +120,7 @@ def on_and_off_with_comment_only_blocks():
now . considers . multiple . fmt . directives . within . one . prefix
# fmt: on
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
# ...but comments still get reformatted even though they should not be
# fmt: on
def long_lines():
if True:
@@ -195,50 +185,6 @@ cfg.rule(
# fmt: on
xxxxxxxxxx_xxxxxxxxxxx_xxxxxxx_xxxxxxxxx=5
)
# Test comment-only blocks at file level with various spacing patterns
# fmt: off
#nospace
# twospaces
# fmt: on
# fmt: off
#nospaceatall
#extraspaces
#evenmorespaces
# fmt: on
# fmt: off
# fmt: on
# fmt: off
#SBATCH --job-name=test
#SBATCH --output=test.out
# fmt: on
# fmt: off
#first
#second
# fmt: on
# fmt: off
#!@#$%^&*()
#<=>+-*/
# fmt: on
# fmt: off
#x=1+2
#y = 3
#z = 4
# fmt: on
# fmt: off
yield 'hello'
# No formatting to the end of the file
@@ -279,16 +225,28 @@ d={'a':1,
# fmt: on
goes + here,
andhere,
@@ -136,7 +137,7 @@
and_=indeed . it is not formatted
because . the . handling . inside . generate_ignored_nodes()
now . considers . multiple . fmt . directives . within . one . prefix
-
+ # fmt: on
@@ -118,8 +119,10 @@
"""
# fmt: off
#should not be formatted
- # hey, that won't work
+ #hey, that won't work
+
+
# fmt: on
@@ -187,14 +188,18 @@
pass
@@ -134,7 +137,7 @@
now . considers . multiple . fmt . directives . within . one . prefix
# fmt: on
# fmt: off
- # ...but comments still get reformatted even though they should not be
+ # ...but comments still get reformatted even though they should not be
# fmt: on
@@ -174,14 +177,18 @@
$
""",
# fmt: off
@@ -429,24 +387,22 @@ def example(session):
def off_and_on_without_data():
"""Test that comment-only fmt:off/on blocks preserve formatting."""
# fmt: off
#should not be formatted
# fmt: on
"""All comments here are technically on the same prefix.
The comments between will be formatted. This is a known limitation.
"""
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
#hey, that won't work
# fmt: on
pass
def on_and_off_with_comment_only_blocks():
"""Test that fmt:off/on works with multiple directives and comment-only blocks."""
def on_and_off_broken():
"""Another known limitation."""
# fmt: on
# fmt: off
this=should.not_be.formatted()
@@ -455,16 +411,7 @@ def on_and_off_with_comment_only_blocks():
now . considers . multiple . fmt . directives . within . one . prefix
# fmt: on
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
# ...but comments still get reformatted even though they should not be
# fmt: on
@@ -545,50 +492,6 @@ cfg.rule(
# fmt: on
xxxxxxxxxx_xxxxxxxxxxx_xxxxxxx_xxxxxxxxx=5,
)
# Test comment-only blocks at file level with various spacing patterns
# fmt: off
#nospace
# twospaces
# fmt: on
# fmt: off
#nospaceatall
#extraspaces
#evenmorespaces
# fmt: on
# fmt: off
# fmt: on
# fmt: off
#SBATCH --job-name=test
#SBATCH --output=test.out
# fmt: on
# fmt: off
#first
#second
# fmt: on
# fmt: off
#!@#$%^&*()
#<=>+-*/
# fmt: on
# fmt: off
#x=1+2
#y = 3
#z = 4
# fmt: on
# fmt: off
yield 'hello'
# No formatting to the end of the file
@@ -714,42 +617,29 @@ def example(session):
def off_and_on_without_data():
"""Test that comment-only fmt:off/on blocks preserve formatting."""
# fmt: off
#should not be formatted
# fmt: on
"""All comments here are technically on the same prefix.
The comments between will be formatted. This is a known limitation.
"""
# fmt: off
#should not be formatted
# hey, that won't work
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
# fmt: on
pass
def on_and_off_with_comment_only_blocks():
"""Test that fmt:off/on works with multiple directives and comment-only blocks."""
def on_and_off_broken():
"""Another known limitation."""
# fmt: on
# fmt: off
this=should.not_be.formatted()
and_=indeed . it is not formatted
because . the . handling . inside . generate_ignored_nodes()
now . considers . multiple . fmt . directives . within . one . prefix
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
# fmt: on
# fmt: off
#should not be formatted
#should not be formatted #also should not be formatted
# ...but comments still get reformatted even though they should not be
# fmt: on
@@ -826,50 +716,6 @@ cfg.rule(
# fmt: on
xxxxxxxxxx_xxxxxxxxxxx_xxxxxxx_xxxxxxxxx=5,
)
# Test comment-only blocks at file level with various spacing patterns
# fmt: off
#nospace
# twospaces
# fmt: on
# fmt: off
#nospaceatall
#extraspaces
#evenmorespaces
# fmt: on
# fmt: off
# fmt: on
# fmt: off
#SBATCH --job-name=test
#SBATCH --output=test.out
# fmt: on
# fmt: off
#first
#second
# fmt: on
# fmt: off
#!@#$%^&*()
#<=>+-*/
# fmt: on
# fmt: off
#x=1+2
#y = 3
#z = 4
# fmt: on
# fmt: off
yield 'hello'
# No formatting to the end of the file

View File

@@ -1,5 +1,6 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/black/cases/fmtskip10.py
---
## Input
@@ -7,24 +8,11 @@ source: crates/ruff_python_formatter/tests/fixtures.rs
def foo(): return "mock" # fmt: skip
if True: print("yay") # fmt: skip
for i in range(10): print(i) # fmt: skip
if True: print("this"); print("that") # fmt: skip
while True: print("loop"); break # fmt: skip
for x in [1, 2]: print(x); print("done") # fmt: skip
def f(x: int): return x # fmt: skip
j = 1 # fmt: skip
while j < 10: j += 1 # fmt: skip
b = [c for c in "A very long string that would normally generate some kind of collapse, since it is this long"] # fmt: skip
v = (
foo_dict # fmt: skip
.setdefault("a", {})
.setdefault("b", {})
.setdefault("c", {})
.setdefault("d", {})
.setdefault("e", {})
)
```
## Black Differences
@@ -32,30 +20,19 @@ v = (
```diff
--- Black
+++ Ruff
@@ -1,15 +1,20 @@
@@ -1,8 +1,10 @@
def foo(): return "mock" # fmt: skip
+
+
if True: print("yay") # fmt: skip
for i in range(10): print(i) # fmt: skip
if True: print("this"); print("that") # fmt: skip
while True: print("loop"); break # fmt: skip
for x in [1, 2]: print(x); print("done") # fmt: skip
-def f(x: int): return x # fmt: skip
-j = 1 # fmt: skip
+
+def f(x: int): return x # fmt: skip
+
+
+j = 1 # fmt: skip
while j < 10: j += 1 # fmt: skip
-b = [c for c in "A very long string that would normally generate some kind of collapse, since it is this long"] # fmt: skip
+b = [c for c in "A very long string that would normally generate some kind of collapse, since it is this long"] # fmt: skip
v = (
foo_dict # fmt: skip
```
## Ruff Output
@@ -66,27 +43,11 @@ def foo(): return "mock" # fmt: skip
if True: print("yay") # fmt: skip
for i in range(10): print(i) # fmt: skip
if True: print("this"); print("that") # fmt: skip
while True: print("loop"); break # fmt: skip
for x in [1, 2]: print(x); print("done") # fmt: skip
def f(x: int): return x # fmt: skip
j = 1 # fmt: skip
while j < 10: j += 1 # fmt: skip
b = [c for c in "A very long string that would normally generate some kind of collapse, since it is this long"] # fmt: skip
v = (
foo_dict # fmt: skip
.setdefault("a", {})
.setdefault("b", {})
.setdefault("c", {})
.setdefault("d", {})
.setdefault("e", {})
)
```
## Black Output
@@ -95,22 +56,9 @@ v = (
def foo(): return "mock" # fmt: skip
if True: print("yay") # fmt: skip
for i in range(10): print(i) # fmt: skip
if True: print("this"); print("that") # fmt: skip
while True: print("loop"); break # fmt: skip
for x in [1, 2]: print(x); print("done") # fmt: skip
def f(x: int): return x # fmt: skip
j = 1 # fmt: skip
while j < 10: j += 1 # fmt: skip
b = [c for c in "A very long string that would normally generate some kind of collapse, since it is this long"] # fmt: skip
v = (
foo_dict # fmt: skip
.setdefault("a", {})
.setdefault("b", {})
.setdefault("c", {})
.setdefault("d", {})
.setdefault("e", {})
)
```

View File

@@ -1,321 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
def foo():
pass
# comment 1 # fmt: skip
# comment 2
[
(1, 2),
# # fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3,
4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3,
4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3,
4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3,
4),
# fmt: on
(5, 6),
]
if False:
# fmt: off # some other comment
pass
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -46,8 +46,7 @@
[
(1, 2),
# # fmt: off
- (3,
- 4),
+ (3, 4),
# # fmt: on
(5, 6),
]
@@ -55,8 +54,7 @@
[
(1, 2),
# # fmt: off
- (3,
- 4),
+ (3, 4),
# fmt: on
(5, 6),
]
@@ -65,8 +63,7 @@
[
(1, 2),
# fmt: off
- (3,
- 4),
+ (3, 4),
# # fmt: on
(5, 6),
]
@@ -75,8 +72,7 @@
[
(1, 2),
# fmt: off
- (3,
- 4),
+ (3, 4),
# fmt: on
(5, 6),
]
```
## Ruff Output
```python
def foo():
pass
# comment 1 # fmt: skip
# comment 2
[
(1, 2),
# # fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3, 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3, 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3, 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3, 4),
# fmt: on
(5, 6),
]
if False:
# fmt: off # some other comment
pass
```
## Black Output
```python
def foo():
pass
# comment 1 # fmt: skip
# comment 2
[
(1, 2),
# # fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
# (3,
# 4),
# fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3,
4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# # fmt: off
(3,
4),
# fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3,
4),
# # fmt: on
(5, 6),
]
[
(1, 2),
# fmt: off
(3,
4),
# fmt: on
(5, 6),
]
if False:
# fmt: off # some other comment
pass
```

View File

@@ -1,59 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
with open("file.txt") as f: content = f.read() # fmt: skip
# Ideally, only the last line would be ignored
# But ignoring only part of the asexpr_test causes a parse error
# Same with ignoring the asexpr_test without also ignoring the entire with_stmt
with open (
"file.txt" ,
) as f: content = f.read() # fmt: skip
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -1,8 +1,8 @@
-with open("file.txt") as f: content = f.read() # fmt: skip
+with open("file.txt") as f: content = f.read() # fmt: skip
# Ideally, only the last line would be ignored
# But ignoring only part of the asexpr_test causes a parse error
# Same with ignoring the asexpr_test without also ignoring the entire with_stmt
with open (
"file.txt" ,
-) as f: content = f.read() # fmt: skip
+) as f: content = f.read() # fmt: skip
```
## Ruff Output
```python
with open("file.txt") as f: content = f.read() # fmt: skip
# Ideally, only the last line would be ignored
# But ignoring only part of the asexpr_test causes a parse error
# Same with ignoring the asexpr_test without also ignoring the entire with_stmt
with open (
"file.txt" ,
) as f: content = f.read() # fmt: skip
```
## Black Output
```python
with open("file.txt") as f: content = f.read() # fmt: skip
# Ideally, only the last line would be ignored
# But ignoring only part of the asexpr_test causes a parse error
# Same with ignoring the asexpr_test without also ignoring the entire with_stmt
with open (
"file.txt" ,
) as f: content = f.read() # fmt: skip
```

View File

@@ -1,149 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
t = (
{"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{"foo": "bar"},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{"foo": "bar"},
)
t = (
{"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{"foo": "bar",},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{"foo": "bar",},
)
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -1,5 +1,9 @@
t = (
- {"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
+ {
+ "foo": "very long string",
+ "bar": "another very long string",
+ "baz": "we should run out of space by now",
+ }, # fmt: skip
{"foo": "bar"},
)
@@ -14,8 +18,12 @@
t = (
- {"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{
+ "foo": "very long string",
+ "bar": "another very long string",
+ "baz": "we should run out of space by now",
+ }, # fmt: skip
+ {
"foo": "bar",
},
)
```
## Ruff Output
```python
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{"foo": "bar"},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{"foo": "bar"},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{
"foo": "bar",
},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{
"foo": "bar",
},
)
```
## Black Output
```python
t = (
{"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{"foo": "bar"},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{"foo": "bar"},
)
t = (
{"foo": "very long string", "bar": "another very long string", "baz": "we should run out of space by now"}, # fmt: skip
{
"foo": "bar",
},
)
t = (
{
"foo": "very long string",
"bar": "another very long string",
"baz": "we should run out of space by now",
}, # fmt: skip
{
"foo": "bar",
},
)
```

View File

@@ -1,43 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
a = "this is some code"
b = 5 # fmt:skip
c = 9 #fmt: skip
d = "thisisasuperlongstringthisisasuperlongstringthisisasuperlongstringthisisasuperlongstring" # fmt:skip
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -1,4 +1,4 @@
a = "this is some code"
-b = 5 # fmt:skip
-c = 9 #fmt: skip
+b = 5 # fmt:skip
+c = 9 # fmt: skip
d = "thisisasuperlongstringthisisasuperlongstringthisisasuperlongstringthisisasuperlongstring" # fmt:skip
```
## Ruff Output
```python
a = "this is some code"
b = 5 # fmt:skip
c = 9 # fmt: skip
d = "thisisasuperlongstringthisisasuperlongstringthisisasuperlongstringthisisasuperlongstring" # fmt:skip
```
## Black Output
```python
a = "this is some code"
b = 5 # fmt:skip
c = 9 #fmt: skip
d = "thisisasuperlongstringthisisasuperlongstringthisisasuperlongstringthisisasuperlongstring" # fmt:skip
```

View File

@@ -1,98 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
# Multiple fmt: skip in multi-part if-clause
class ClassWithALongName:
Constant1 = 1
Constant2 = 2
Constant3 = 3
def test():
if (
"cond1" == "cond1"
and "cond2" == "cond2"
and 1 in (
ClassWithALongName.Constant1,
ClassWithALongName.Constant2,
ClassWithALongName.Constant3, # fmt: skip
) # fmt: skip
):
return True
return False
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -9,11 +9,12 @@
if (
"cond1" == "cond1"
and "cond2" == "cond2"
- and 1 in (
+ and 1
+ in (
ClassWithALongName.Constant1,
ClassWithALongName.Constant2,
- ClassWithALongName.Constant3, # fmt: skip
- ) # fmt: skip
+ ClassWithALongName.Constant3, # fmt: skip
+ ) # fmt: skip
):
return True
return False
```
## Ruff Output
```python
# Multiple fmt: skip in multi-part if-clause
class ClassWithALongName:
Constant1 = 1
Constant2 = 2
Constant3 = 3
def test():
if (
"cond1" == "cond1"
and "cond2" == "cond2"
and 1
in (
ClassWithALongName.Constant1,
ClassWithALongName.Constant2,
ClassWithALongName.Constant3, # fmt: skip
) # fmt: skip
):
return True
return False
```
## Black Output
```python
# Multiple fmt: skip in multi-part if-clause
class ClassWithALongName:
Constant1 = 1
Constant2 = 2
Constant3 = 3
def test():
if (
"cond1" == "cond1"
and "cond2" == "cond2"
and 1 in (
ClassWithALongName.Constant1,
ClassWithALongName.Constant2,
ClassWithALongName.Constant3, # fmt: skip
) # fmt: skip
):
return True
return False
```

View File

@@ -1,148 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
# Multiple fmt: skip on string literals
a = (
"this should " # fmt: skip
"be fine"
)
b = (
"this is " # fmt: skip
"not working" # fmt: skip
)
c = (
"and neither " # fmt: skip
"is this " # fmt: skip
"working"
)
d = (
"nor "
"is this " # fmt: skip
"working" # fmt: skip
)
e = (
"and this " # fmt: skip
"is definitely "
"not working" # fmt: skip
)
# Dictionary entries with fmt: skip (covers issue with long lines)
hotkeys = {
"editor:swap-line-down": [{"key": "ArrowDown", "modifiers": ["Alt", "Mod"]}], # fmt: skip
"editor:swap-line-up": [{"key": "ArrowUp", "modifiers": ["Alt", "Mod"]}], # fmt: skip
"editor:toggle-source": [{"key": "S", "modifiers": ["Alt", "Mod"]}], # fmt: skip
}
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -29,7 +29,11 @@
# Dictionary entries with fmt: skip (covers issue with long lines)
hotkeys = {
- "editor:swap-line-down": [{"key": "ArrowDown", "modifiers": ["Alt", "Mod"]}], # fmt: skip
- "editor:swap-line-up": [{"key": "ArrowUp", "modifiers": ["Alt", "Mod"]}], # fmt: skip
- "editor:toggle-source": [{"key": "S", "modifiers": ["Alt", "Mod"]}], # fmt: skip
+ "editor:swap-line-down": [
+ {"key": "ArrowDown", "modifiers": ["Alt", "Mod"]}
+ ], # fmt: skip
+ "editor:swap-line-up": [
+ {"key": "ArrowUp", "modifiers": ["Alt", "Mod"]}
+ ], # fmt: skip
+ "editor:toggle-source": [{"key": "S", "modifiers": ["Alt", "Mod"]}], # fmt: skip
}
```
## Ruff Output
```python
# Multiple fmt: skip on string literals
a = (
"this should " # fmt: skip
"be fine"
)
b = (
"this is " # fmt: skip
"not working" # fmt: skip
)
c = (
"and neither " # fmt: skip
"is this " # fmt: skip
"working"
)
d = (
"nor "
"is this " # fmt: skip
"working" # fmt: skip
)
e = (
"and this " # fmt: skip
"is definitely "
"not working" # fmt: skip
)
# Dictionary entries with fmt: skip (covers issue with long lines)
hotkeys = {
"editor:swap-line-down": [
{"key": "ArrowDown", "modifiers": ["Alt", "Mod"]}
], # fmt: skip
"editor:swap-line-up": [
{"key": "ArrowUp", "modifiers": ["Alt", "Mod"]}
], # fmt: skip
"editor:toggle-source": [{"key": "S", "modifiers": ["Alt", "Mod"]}], # fmt: skip
}
```
## Black Output
```python
# Multiple fmt: skip on string literals
a = (
"this should " # fmt: skip
"be fine"
)
b = (
"this is " # fmt: skip
"not working" # fmt: skip
)
c = (
"and neither " # fmt: skip
"is this " # fmt: skip
"working"
)
d = (
"nor "
"is this " # fmt: skip
"working" # fmt: skip
)
e = (
"and this " # fmt: skip
"is definitely "
"not working" # fmt: skip
)
# Dictionary entries with fmt: skip (covers issue with long lines)
hotkeys = {
"editor:swap-line-down": [{"key": "ArrowDown", "modifiers": ["Alt", "Mod"]}], # fmt: skip
"editor:swap-line-up": [{"key": "ArrowUp", "modifiers": ["Alt", "Mod"]}], # fmt: skip
"editor:toggle-source": [{"key": "S", "modifiers": ["Alt", "Mod"]}], # fmt: skip
}
```

View File

@@ -1,188 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
x = t"foo"
x = t'foo {{ {2 + 2}bar {{ baz'
x = t"foo {f'abc'} bar"
x = t"""foo {{ a
foo {2 + 2}bar {{ baz
x = f"foo {{ {
2 + 2 # comment
}bar"
{{ baz
}} buzz
{print("abc" + "def"
)}
abc"""
t'{(abc:=10)}'
t'''This is a really long string, but just make sure that you reflow tstrings {
2+2:d
}'''
t'This is a really long string, but just make sure that you reflow tstrings correctly {2+2:d}'
t"{ 2 + 2 = }"
t'{
X
!r
}'
tr'\{{\}}'
t'''
WITH {f'''
{1}_cte AS ()'''}
'''
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -7,34 +7,32 @@
foo {2 + 2}bar {{ baz
x = f"foo {{ {
- 2 + 2 # comment
- }bar"
+ 2 + 2 # comment
+}bar"
{{ baz
}} buzz
- {print("abc" + "def"
-)}
+ {print("abc" + "def")}
abc"""
-t"{(abc:=10)}"
+t"{(abc := 10)}"
t"""This is a really long string, but just make sure that you reflow tstrings {
- 2+2:d
+ 2 + 2:d
}"""
-t"This is a really long string, but just make sure that you reflow tstrings correctly {2+2:d}"
+t"This is a really long string, but just make sure that you reflow tstrings correctly {2 + 2:d}"
t"{ 2 + 2 = }"
-t"{
-X
-!r
-}"
+t"{X!r}"
rt"\{{\}}"
t"""
- WITH {f'''
- {1}_cte AS ()'''}
+ WITH {
+ f'''
+ {1}_cte AS ()'''
+}
"""
```
## Ruff Output
```python
x = t"foo"
x = t"foo {{ {2 + 2}bar {{ baz"
x = t"foo {f'abc'} bar"
x = t"""foo {{ a
foo {2 + 2}bar {{ baz
x = f"foo {{ {
2 + 2 # comment
}bar"
{{ baz
}} buzz
{print("abc" + "def")}
abc"""
t"{(abc := 10)}"
t"""This is a really long string, but just make sure that you reflow tstrings {
2 + 2:d
}"""
t"This is a really long string, but just make sure that you reflow tstrings correctly {2 + 2:d}"
t"{ 2 + 2 = }"
t"{X!r}"
rt"\{{\}}"
t"""
WITH {
f'''
{1}_cte AS ()'''
}
"""
```
## Black Output
```python
x = t"foo"
x = t"foo {{ {2 + 2}bar {{ baz"
x = t"foo {f'abc'} bar"
x = t"""foo {{ a
foo {2 + 2}bar {{ baz
x = f"foo {{ {
2 + 2 # comment
}bar"
{{ baz
}} buzz
{print("abc" + "def"
)}
abc"""
t"{(abc:=10)}"
t"""This is a really long string, but just make sure that you reflow tstrings {
2+2:d
}"""
t"This is a really long string, but just make sure that you reflow tstrings correctly {2+2:d}"
t"{ 2 + 2 = }"
t"{
X
!r
}"
rt"\{{\}}"
t"""
WITH {f'''
{1}_cte AS ()'''}
"""
```

View File

@@ -1,90 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
# Regression test for https://github.com/psf/black/issues/3438
import ast
import collections # fmt: skip
import dataclasses
# fmt: off
import os
# fmt: on
import pathlib
import re # fmt: skip
import secrets
# fmt: off
import sys
# fmt: on
import tempfile
import zoneinfo
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -3,6 +3,7 @@
import ast
import collections # fmt: skip
import dataclasses
+
# fmt: off
import os
# fmt: on
```
## Ruff Output
```python
# Regression test for https://github.com/psf/black/issues/3438
import ast
import collections # fmt: skip
import dataclasses
# fmt: off
import os
# fmt: on
import pathlib
import re # fmt: skip
import secrets
# fmt: off
import sys
# fmt: on
import tempfile
import zoneinfo
```
## Black Output
```python
# Regression test for https://github.com/psf/black/issues/3438
import ast
import collections # fmt: skip
import dataclasses
# fmt: off
import os
# fmt: on
import pathlib
import re # fmt: skip
import secrets
# fmt: off
import sys
# fmt: on
import tempfile
import zoneinfo
```

View File

@@ -1,5 +1,6 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/black/cases/preview_multiline_strings.py
---
## Input
@@ -162,6 +163,24 @@ Please use `--build-option` instead,
`--global-option` is reserved to flags like `--verbose` or `--quiet`.
"""
this_will_become_one_line = (
"a"
"b"
"c"
)
this_will_stay_on_three_lines = (
"a" # comment
"b"
"c"
)
this_will_also_become_one_line = ( # comment
"a"
"b"
"c"
)
assert some_var == expected_result, """
test
"""
@@ -402,7 +421,18 @@ a = b if """
[
"""cow
moos""",
@@ -214,10 +253,8 @@
@@ -206,7 +245,9 @@
"c"
)
-this_will_also_become_one_line = "abc" # comment
+this_will_also_become_one_line = ( # comment
+ "abc"
+)
assert some_var == expected_result, """
test
@@ -224,10 +265,8 @@
"""Sxxxxxxx xxxxxxxx, xxxxxxx xx xxxxxxxxx
xxxxxxxxxxxxx xxxxxxx xxxxxxxxx xxx-xxxxxxxxxx xxxxxx xx xxx-xxxxxx"""
),
@@ -415,7 +445,7 @@ a = b if """
},
}
@@ -236,14 +273,12 @@
@@ -246,14 +285,12 @@
a
a"""
),
@@ -676,6 +706,18 @@ Please use `--build-option` instead,
`--global-option` is reserved to flags like `--verbose` or `--quiet`.
"""
this_will_become_one_line = "abc"
this_will_stay_on_three_lines = (
"a" # comment
"b"
"c"
)
this_will_also_become_one_line = ( # comment
"abc"
)
assert some_var == expected_result, """
test
"""
@@ -986,6 +1028,16 @@ Please use `--build-option` instead,
`--global-option` is reserved to flags like `--verbose` or `--quiet`.
"""
this_will_become_one_line = "abc"
this_will_stay_on_three_lines = (
"a" # comment
"b"
"c"
)
this_will_also_become_one_line = "abc" # comment
assert some_var == expected_result, """
test
"""

View File

@@ -1,67 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
def foo(
a, #type:int
b, #type: str
c, # type: List[int]
d, # type: Dict[int, str]
e, # type: ignore
f, # type : ignore
g, # type : ignore
):
pass
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -1,9 +1,9 @@
def foo(
- a, # type: int
+ a, # type:int
b, # type: str
c, # type: List[int]
- d, # type: Dict[int, str]
- e, # type: ignore
+ d, # type: Dict[int, str]
+ e, # type: ignore
f, # type : ignore
g, # type : ignore
):
```
## Ruff Output
```python
def foo(
a, # type:int
b, # type: str
c, # type: List[int]
d, # type: Dict[int, str]
e, # type: ignore
f, # type : ignore
g, # type : ignore
):
pass
```
## Black Output
```python
def foo(
a, # type: int
b, # type: str
c, # type: List[int]
d, # type: Dict[int, str]
e, # type: ignore
f, # type : ignore
g, # type : ignore
):
pass
```

View File

@@ -1,85 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
---
## Input
```python
# Remove unnecessary parentheses from LHS of assignments
def a():
return [1, 2, 3]
# Single variable with unnecessary parentheses
(b) = a()[0]
# Tuple unpacking with unnecessary parentheses
(c, *_) = a()
# These should not be changed - parentheses are necessary
(d,) = a() # single-element tuple
e = (1 + 2) * 3 # RHS has precedence needs
```
## Black Differences
```diff
--- Black
+++ Ruff
@@ -6,10 +6,10 @@
# Single variable with unnecessary parentheses
-b = a()[0]
+(b) = a()[0]
# Tuple unpacking with unnecessary parentheses
-c, *_ = a()
+(c, *_) = a()
# These should not be changed - parentheses are necessary
(d,) = a() # single-element tuple
```
## Ruff Output
```python
# Remove unnecessary parentheses from LHS of assignments
def a():
return [1, 2, 3]
# Single variable with unnecessary parentheses
(b) = a()[0]
# Tuple unpacking with unnecessary parentheses
(c, *_) = a()
# These should not be changed - parentheses are necessary
(d,) = a() # single-element tuple
e = (1 + 2) * 3 # RHS has precedence needs
```
## Black Output
```python
# Remove unnecessary parentheses from LHS of assignments
def a():
return [1, 2, 3]
# Single variable with unnecessary parentheses
b = a()[0]
# Tuple unpacking with unnecessary parentheses
c, *_ = a()
# These should not be changed - parentheses are necessary
(d,) = a() # single-element tuple
e = (1 + 2) * 3 # RHS has precedence needs
```

3
crates/ty/docs/cli.md generated
View File

@@ -37,8 +37,7 @@ ty check [OPTIONS] [PATH]...
<h3 class="cli-reference">Options</h3>
<dl class="cli-reference"><dt id="ty-check--add-ignore"><a href="#ty-check--add-ignore"><code>--add-ignore</code></a></dt><dd><p>Adds <code>ty: ignore</code> comments to suppress all rule diagnostics</p>
</dd><dt id="ty-check--color"><a href="#ty-check--color"><code>--color</code></a> <i>when</i></dt><dd><p>Control when colored output is used</p>
<dl class="cli-reference"><dt id="ty-check--color"><a href="#ty-check--color"><code>--color</code></a> <i>when</i></dt><dd><p>Control when colored output is used</p>
<p>Possible values:</p>
<ul>
<li><code>auto</code>: Display colors if the output goes to an interactive terminal</li>

229
crates/ty/docs/rules.md generated
View File

@@ -8,7 +8,7 @@
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20ambiguous-protocol-member" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L540" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L538" target="_blank">View source</a>
</small>
@@ -80,7 +80,7 @@ def test(): -> "int":
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20call-non-callable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L139" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L137" target="_blank">View source</a>
</small>
@@ -104,7 +104,7 @@ Calling a non-callable object will raise a `TypeError` at runtime.
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.7">0.0.7</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20call-top-callable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L157" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L155" target="_blank">View source</a>
</small>
@@ -135,7 +135,7 @@ def f(x: object):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-argument-forms" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L208" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L206" target="_blank">View source</a>
</small>
@@ -167,7 +167,7 @@ f(int) # error
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-declarations" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L234" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L232" target="_blank">View source</a>
</small>
@@ -198,7 +198,7 @@ a = 1
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-metaclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L259" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L257" target="_blank">View source</a>
</small>
@@ -230,7 +230,7 @@ class C(A, B): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-class-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L285" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L283" target="_blank">View source</a>
</small>
@@ -262,7 +262,7 @@ class B(A): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-type-alias-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L311" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L309" target="_blank">View source</a>
</small>
@@ -290,7 +290,7 @@ type B = A
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.16">0.0.1-alpha.16</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20deprecated" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L355" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L353" target="_blank">View source</a>
</small>
@@ -317,7 +317,7 @@ old_func() # emits [deprecated] diagnostic
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20division-by-zero" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L333" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L331" target="_blank">View source</a>
</small>
@@ -346,7 +346,7 @@ false positives it can produce.
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L376" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L374" target="_blank">View source</a>
</small>
@@ -373,7 +373,7 @@ class B(A, A): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-kw-only" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L397" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L395" target="_blank">View source</a>
</small>
@@ -529,7 +529,7 @@ def test() -> "Literal[5]":
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20inconsistent-mro" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L623" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L621" target="_blank">View source</a>
</small>
@@ -559,7 +559,7 @@ class C(A, B): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20index-out-of-bounds" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L647" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L645" target="_blank">View source</a>
</small>
@@ -585,7 +585,7 @@ t[3] # IndexError: tuple index out of range
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20instance-layout-conflict" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L429" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L427" target="_blank">View source</a>
</small>
@@ -674,7 +674,7 @@ an atypical memory layout.
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-argument-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L701" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L699" target="_blank">View source</a>
</small>
@@ -701,7 +701,7 @@ func("foo") # error: [invalid-argument-type]
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-assignment" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L741" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L739" target="_blank">View source</a>
</small>
@@ -729,7 +729,7 @@ a: int = ''
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-attribute-access" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2044" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2042" target="_blank">View source</a>
</small>
@@ -763,7 +763,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-await" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L763" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L761" target="_blank">View source</a>
</small>
@@ -799,7 +799,7 @@ asyncio.run(main())
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L793" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L791" target="_blank">View source</a>
</small>
@@ -823,7 +823,7 @@ class A(42): ... # error: [invalid-base]
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-context-manager" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L844" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L842" target="_blank">View source</a>
</small>
@@ -850,7 +850,7 @@ with 1:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-declaration" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L865" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L863" target="_blank">View source</a>
</small>
@@ -879,7 +879,7 @@ a: str
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-exception-caught" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L888" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L886" target="_blank">View source</a>
</small>
@@ -923,7 +923,7 @@ except ZeroDivisionError:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.28">0.0.1-alpha.28</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-explicit-override" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1714" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1712" target="_blank">View source</a>
</small>
@@ -965,7 +965,7 @@ class D(A):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.35">0.0.1-alpha.35</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-frozen-dataclass-subclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2295" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2268" target="_blank">View source</a>
</small>
@@ -1009,7 +1009,7 @@ class NonFrozenChild(FrozenBase): # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-generic-class" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L924" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L922" target="_blank">View source</a>
</small>
@@ -1077,7 +1077,7 @@ a = 20 / 0 # type: ignore
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.17">0.0.1-alpha.17</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-key" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L668" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L666" target="_blank">View source</a>
</small>
@@ -1116,7 +1116,7 @@ carol = Person(name="Carol", age=25) # typo!
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-legacy-type-variable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L955" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L953" target="_blank">View source</a>
</small>
@@ -1151,7 +1151,7 @@ def f(t: TypeVar("U")): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-metaclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1052" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1050" target="_blank">View source</a>
</small>
@@ -1185,7 +1185,7 @@ class B(metaclass=f): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-method-override" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2197" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2170" target="_blank">View source</a>
</small>
@@ -1292,7 +1292,7 @@ Correct use of `@override` is enforced by ty's `invalid-explicit-override` rule.
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-named-tuple" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L575" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L573" target="_blank">View source</a>
</small>
@@ -1346,7 +1346,7 @@ AttributeError: Cannot overwrite NamedTuple attribute _asdict
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-newtype" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1028" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1026" target="_blank">View source</a>
</small>
@@ -1376,7 +1376,7 @@ Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType`
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-overload" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1079" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1077" target="_blank">View source</a>
</small>
@@ -1426,7 +1426,7 @@ def foo(x: int) -> int: ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-parameter-default" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1178" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1176" target="_blank">View source</a>
</small>
@@ -1452,7 +1452,7 @@ def f(a: int = ''): ...
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-paramspec" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L983" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L981" target="_blank">View source</a>
</small>
@@ -1483,7 +1483,7 @@ P2 = ParamSpec("S2") # error: ParamSpec name must match the variable it's assig
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-protocol" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L511" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L509" target="_blank">View source</a>
</small>
@@ -1517,7 +1517,7 @@ TypeError: Protocols can only inherit from other protocols, got <class 'int'>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-raise" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1198" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1196" target="_blank">View source</a>
</small>
@@ -1566,7 +1566,7 @@ def g():
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-return-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L722" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L720" target="_blank">View source</a>
</small>
@@ -1591,7 +1591,7 @@ def func() -> int:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-super-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1241" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1239" target="_blank">View source</a>
</small>
@@ -1681,59 +1681,13 @@ class C: ...
- [Typing spec: The meaning of annotations](https://typing.python.org/en/latest/spec/annotations.html#the-meaning-of-annotations)
- [Typing spec: String annotations](https://typing.python.org/en/latest/spec/annotations.html#string-annotations)
## `invalid-total-ordering`
<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.10">0.0.10</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-total-ordering" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2333" target="_blank">View source</a>
</small>
**What it does**
Checks for classes decorated with `@functools.total_ordering` that don't
define any ordering method (`__lt__`, `__le__`, `__gt__`, or `__ge__`).
**Why is this bad?**
The `@total_ordering` decorator requires the class to define at least one
ordering method. If none is defined, Python raises a `ValueError` at runtime.
**Example**
```python
from functools import total_ordering

@total_ordering
class MyClass:  # Error: no ordering method defined
    def __eq__(self, other: object) -> bool:
        return True
```
Use instead:
```python
from functools import total_ordering

@total_ordering
class MyClass:
    def __eq__(self, other: object) -> bool:
        return True

    def __lt__(self, other: "MyClass") -> bool:
        return True
```
## `invalid-type-alias-type`
<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.6">0.0.1-alpha.6</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-alias-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1007" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1005" target="_blank">View source</a>
</small>
@@ -1760,7 +1714,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1473" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1471" target="_blank">View source</a>
</small>
@@ -1807,7 +1761,7 @@ Bar[int] # error: too few arguments
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-checking-constant" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1280" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1278" target="_blank">View source</a>
</small>
@@ -1837,7 +1791,7 @@ TYPE_CHECKING = ''
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-form" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1304" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1302" target="_blank">View source</a>
</small>
@@ -1867,7 +1821,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-call" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1356" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1354" target="_blank">View source</a>
</small>
@@ -1901,7 +1855,7 @@ f(10) # Error
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1328" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1326" target="_blank">View source</a>
</small>
@@ -1935,7 +1889,7 @@ class C:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-variable-constraints" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1384" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1382" target="_blank">View source</a>
</small>
@@ -1964,44 +1918,13 @@ T = TypeVar('T', bound=str) # valid bound TypeVar
[type variables]: https://docs.python.org/3/library/typing.html#typing.TypeVar
## `invalid-typed-dict-statement`
<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.9">0.0.9</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-typed-dict-statement" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2172" target="_blank">View source</a>
</small>
**What it does**
Detects statements other than annotated declarations in `TypedDict` class bodies.
**Why is this bad?**
`TypedDict` class bodies aren't allowed to contain any other types of statements. For
example, method definitions and field values aren't allowed. None of these will be
available on "instances of the `TypedDict`" at runtime (as `dict` is the runtime class of
all "`TypedDict` instances").
**Example**
```python
from typing import TypedDict

class Foo(TypedDict):
    def bar(self):  # error: [invalid-typed-dict-statement]
        pass
```
## `missing-argument`
<small>
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1413" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1411" target="_blank">View source</a>
</small>
@@ -2026,7 +1949,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-typed-dict-key" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2145" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2143" target="_blank">View source</a>
</small>
@@ -2059,7 +1982,7 @@ alice["age"] # KeyError
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20no-matching-overload" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1432" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1430" target="_blank">View source</a>
</small>
@@ -2088,7 +2011,7 @@ func("string") # error: [no-matching-overload]
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20not-iterable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1514" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1512" target="_blank">View source</a>
</small>
@@ -2114,7 +2037,7 @@ for i in 34: # TypeError: 'int' object is not iterable
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20not-subscriptable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1455" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1453" target="_blank">View source</a>
</small>
@@ -2138,7 +2061,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20override-of-final-method" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1687" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1685" target="_blank">View source</a>
</small>
@@ -2171,7 +2094,7 @@ class B(A):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20parameter-already-assigned" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1565" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1563" target="_blank">View source</a>
</small>
@@ -2198,7 +2121,7 @@ f(1, x=2) # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20positional-only-parameter-as-kwarg" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1898" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1896" target="_blank">View source</a>
</small>
@@ -2225,7 +2148,7 @@ f(x=1) # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-attribute" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1586" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1584" target="_blank">View source</a>
</small>
@@ -2253,7 +2176,7 @@ A.c # AttributeError: type object 'A' has no attribute 'c'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-implicit-call" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L182" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L180" target="_blank">View source</a>
</small>
@@ -2285,7 +2208,7 @@ A()[0] # TypeError: 'A' object is not subscriptable
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-import" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1608" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1606" target="_blank">View source</a>
</small>
@@ -2322,7 +2245,7 @@ from module import a # ImportError: cannot import name 'a' from 'module'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unresolved-reference" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1638" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1636" target="_blank">View source</a>
</small>
@@ -2386,7 +2309,7 @@ def test(): -> "int":
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20redundant-cast" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2072" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2070" target="_blank">View source</a>
</small>
@@ -2413,7 +2336,7 @@ cast(int, f()) # Redundant
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20static-assert-error" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2020" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2018" target="_blank">View source</a>
</small>
@@ -2443,7 +2366,7 @@ static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known tr
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20subclass-of-final-class" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1664" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1662" target="_blank">View source</a>
</small>
@@ -2472,7 +2395,7 @@ class B(A): ... # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.30">0.0.1-alpha.30</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20super-call-in-named-tuple-method" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1832" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1830" target="_blank">View source</a>
</small>
@@ -2506,7 +2429,7 @@ class F(NamedTuple):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20too-many-positional-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1772" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1770" target="_blank">View source</a>
</small>
@@ -2533,7 +2456,7 @@ f("foo") # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20type-assertion-failure" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1750" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1748" target="_blank">View source</a>
</small>
@@ -2561,7 +2484,7 @@ def _(x: int):
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unavailable-implicit-super-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1793" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1791" target="_blank">View source</a>
</small>
@@ -2607,7 +2530,7 @@ class A:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20undefined-reveal" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1859" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1857" target="_blank">View source</a>
</small>
@@ -2631,7 +2554,7 @@ reveal_type(1) # NameError: name 'reveal_type' is not defined
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unknown-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1877" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1875" target="_blank">View source</a>
</small>
@@ -2658,7 +2581,7 @@ f(x=1, y=2) # Error raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-attribute" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1919" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1917" target="_blank">View source</a>
</small>
@@ -2686,7 +2609,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.15">0.0.1-alpha.15</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-global" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2093" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2091" target="_blank">View source</a>
</small>
@@ -2744,7 +2667,7 @@ def g():
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-import" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1941" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1939" target="_blank">View source</a>
</small>
@@ -2769,7 +2692,7 @@ import foo # ModuleNotFoundError: No module named 'foo'
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-reference" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1960" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1958" target="_blank">View source</a>
</small>
@@ -2794,7 +2717,7 @@ print(x) # NameError: name 'x' is not defined
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.7">0.0.1-alpha.7</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L811" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L809" target="_blank">View source</a>
</small>
@@ -2833,7 +2756,7 @@ class D(C): ... # error: [unsupported-base]
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-bool-conversion" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1534" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1532" target="_blank">View source</a>
</small>
@@ -2870,7 +2793,7 @@ b1 < b2 < b1 # exception raised here
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-operator" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1979" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1977" target="_blank">View source</a>
</small>
@@ -2929,7 +2852,7 @@ a = 20 / 2
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20useless-overload-body" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1122" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1120" target="_blank">View source</a>
</small>
@@ -2992,7 +2915,7 @@ def foo(x: int | str) -> int | str:
Default level: <a href="../../rules#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20zero-stepsize-in-slice" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2001" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1999" target="_blank">View source</a>
</small>

View File

@@ -54,10 +54,6 @@ pub(crate) struct CheckCommand {
)]
pub paths: Vec<SystemPathBuf>,
/// Adds `ty: ignore` comments to suppress all rule diagnostics.
#[arg(long)]
pub(crate) add_ignore: bool,
/// Run the command within the given project directory.
///
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,

View File

@@ -4,36 +4,37 @@ mod printer;
mod python_version;
mod version;
pub use args::Cli;
use ty_project::metadata::settings::TerminalSettings;
use ty_static::EnvVars;
use std::fmt::Write;
use std::process::{ExitCode, Termination};
use std::sync::Mutex;
use anyhow::Result;
use crate::args::{CheckCommand, Command, TerminalColor};
use crate::logging::{VerbosityLevel, setup_tracing};
use crate::printer::Printer;
use anyhow::{Context, anyhow};
use clap::{CommandFactory, Parser};
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use rayon::ThreadPoolBuilder;
use ruff_db::cancellation::{Canceled, CancellationToken, CancellationTokenSource};
use ruff_db::cancellation::{CancellationToken, CancellationTokenSource};
use ruff_db::diagnostic::{
Diagnostic, DiagnosticId, DisplayDiagnosticConfig, DisplayDiagnostics, Severity,
};
use ruff_db::files::File;
use ruff_db::max_parallelism;
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::{STACK_SIZE, max_parallelism};
use salsa::Database;
use ty_project::metadata::options::ProjectOptionsOverrides;
use ty_project::metadata::settings::TerminalSettings;
use ty_project::watch::ProjectWatcher;
use ty_project::{CollectReporter, Db, suppress_all_diagnostics, watch};
use ty_project::{CollectReporter, Db, watch};
use ty_project::{ProjectDatabase, ProjectMetadata};
use ty_server::run_server;
use ty_static::EnvVars;
use crate::args::{CheckCommand, Command, TerminalColor};
use crate::logging::{VerbosityLevel, setup_tracing};
use crate::printer::Printer;
pub use args::Cli;
pub fn run() -> anyhow::Result<ExitStatus> {
setup_rayon();
@@ -111,12 +112,6 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
.map(|path| SystemPath::absolute(path, &cwd))
.collect();
let mode = if args.add_ignore {
MainLoopMode::AddIgnore
} else {
MainLoopMode::Check
};
let system = OsSystem::new(&cwd);
let watch = args.watch;
let exit_zero = args.exit_zero;
@@ -149,7 +144,7 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
}
let (main_loop, main_loop_cancellation_token) =
MainLoop::new(mode, project_options_overrides, printer);
MainLoop::new(project_options_overrides, printer);
// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -220,8 +215,6 @@ impl Termination for ExitStatus {
}
struct MainLoop {
mode: MainLoopMode,
/// Sender that can be used to send messages to the main loop.
sender: crossbeam_channel::Sender<MainLoopMessage>,
@@ -244,7 +237,6 @@ struct MainLoop {
impl MainLoop {
fn new(
mode: MainLoopMode,
project_options_overrides: ProjectOptionsOverrides,
printer: Printer,
) -> (Self, MainLoopCancellationToken) {
@@ -255,7 +247,6 @@ impl MainLoop {
(
Self {
mode,
sender: sender.clone(),
receiver,
watcher: None,
@@ -334,78 +325,80 @@ impl MainLoop {
result,
revision: check_revision,
} => {
if check_revision != revision {
tracing::debug!(
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
);
continue;
}
let terminal_settings = db.project().settings(db).terminal();
let display_config = DisplayDiagnosticConfig::default()
.format(terminal_settings.output_format.into())
.color(colored::control::SHOULD_COLORIZE.should_colorize())
.with_cancellation_token(Some(self.cancellation_token.clone()))
.show_fix_diff(true);
if db.project().files(db).is_empty() {
tracing::warn!("No python files found under the given path(s)");
}
if check_revision == revision {
if db.project().files(db).is_empty() {
tracing::warn!("No python files found under the given path(s)");
}
let result = match self.mode {
MainLoopMode::Check => {
// TODO: We should have an official flag to silence workspace diagnostics.
if std::env::var("TY_MEMORY_REPORT").as_deref() == Ok("mypy_primer") {
// TODO: We should have an official flag to silence workspace diagnostics.
if std::env::var("TY_MEMORY_REPORT").as_deref() == Ok("mypy_primer") {
return Ok(ExitStatus::Success);
}
let is_human_readable = terminal_settings.output_format.is_human_readable();
if result.is_empty() {
if is_human_readable {
writeln!(
self.printer.stream_for_success_summary(),
"{}",
"All checks passed!".green().bold()
)?;
}
if self.watcher.is_none() {
return Ok(ExitStatus::Success);
}
} else {
let diagnostics_count = result.len();
self.write_diagnostics(db, &result)?;
let mut stdout = self.printer.stream_for_details().lock();
let exit_status =
exit_status_from_diagnostics(&result, terminal_settings);
if self.cancellation_token.is_cancelled() {
Err(Canceled)
} else {
Ok(result)
// Only render diagnostics if they're going to be displayed, since doing
// so is expensive.
if stdout.is_enabled() {
write!(
stdout,
"{}",
DisplayDiagnostics::new(db, &display_config, &result)
)?;
}
}
MainLoopMode::AddIgnore => {
if let Ok(result) =
suppress_all_diagnostics(db, result, &self.cancellation_token)
{
self.write_diagnostics(db, &result.diagnostics)?;
let terminal_settings = db.project().settings(db).terminal();
let is_human_readable =
terminal_settings.output_format.is_human_readable();
if !self.cancellation_token.is_cancelled() {
if is_human_readable {
writeln!(
self.printer.stream_for_failure_summary(),
"Added {} ignore comment{}",
result.count,
if result.count > 1 { "s" } else { "" }
"Found {} diagnostic{}",
diagnostics_count,
if diagnostics_count > 1 { "s" } else { "" }
)?;
}
Ok(result.diagnostics)
} else {
Err(Canceled)
if exit_status.is_internal_error() {
tracing::warn!(
"A fatal error occurred while checking some files. Not all project files were analyzed. See the diagnostics list above for details."
);
}
}
if self.watcher.is_none() {
return Ok(exit_status);
}
}
};
let exit_status = match result.as_deref() {
Ok([]) => ExitStatus::Success,
Ok(diagnostics) => {
let terminal_settings = db.project().settings(db).terminal();
exit_status_from_diagnostics(diagnostics, terminal_settings)
}
Err(Canceled) => ExitStatus::Success,
};
if exit_status.is_internal_error() {
tracing::warn!(
"A fatal error occurred while checking some files. Not all project files were analyzed. See the diagnostics list above for details."
} else {
tracing::debug!(
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
);
}
if self.watcher.is_some() {
continue;
}
return Ok(exit_status);
}
MainLoopMessage::ApplyChanges(changes) => {
@@ -432,65 +425,6 @@ impl MainLoop {
Ok(ExitStatus::Success)
}
fn write_diagnostics(
&self,
db: &ProjectDatabase,
diagnostics: &[Diagnostic],
) -> anyhow::Result<()> {
let terminal_settings = db.project().settings(db).terminal();
let is_human_readable = terminal_settings.output_format.is_human_readable();
match diagnostics {
[] => {
if is_human_readable {
writeln!(
self.printer.stream_for_success_summary(),
"{}",
"All checks passed!".green().bold()
)?;
}
}
diagnostics => {
let diagnostics_count = diagnostics.len();
let mut stdout = self.printer.stream_for_details().lock();
// Only render diagnostics if they're going to be displayed, since doing
// so is expensive.
if stdout.is_enabled() {
let display_config = DisplayDiagnosticConfig::default()
.format(terminal_settings.output_format.into())
.color(colored::control::SHOULD_COLORIZE.should_colorize())
.with_cancellation_token(Some(self.cancellation_token.clone()))
.show_fix_diff(true);
write!(
stdout,
"{}",
DisplayDiagnostics::new(db, &display_config, diagnostics)
)?;
}
if !self.cancellation_token.is_cancelled() && is_human_readable {
writeln!(
self.printer.stream_for_failure_summary(),
"Found {} diagnostic{}",
diagnostics_count,
if diagnostics_count > 1 { "s" } else { "" }
)?;
}
}
}
Ok(())
}
}
#[derive(Copy, Clone, Debug)]
enum MainLoopMode {
Check,
AddIgnore,
}
fn exit_status_from_diagnostics(
@@ -625,7 +559,12 @@ fn set_colored_override(color: Option<TerminalColor>) {
fn setup_rayon() {
ThreadPoolBuilder::default()
.num_threads(max_parallelism().get())
.stack_size(STACK_SIZE)
// Use a reasonably large stack size to avoid running into stack overflows too easily. The
// size was chosen in such a way as to still be able to handle large expressions involving
// binary operators (x + x + … + x) both during the AST walk in semantic index building as
well as during type checking. Using this stack size, we can handle expressions
// that are several times larger than the corresponding limits in existing type checkers.
.stack_size(16 * 1024 * 1024)
.build_global()
.unwrap();
}

View File

@@ -160,65 +160,6 @@ fn configuration_include() -> anyhow::Result<()> {
Ok(())
}
/// Files without extensions can be included by adding a literal glob to `include` that matches
/// the path exactly. A literal glob is a glob without any meta characters.
#[test]
fn configuration_include_no_extension() -> anyhow::Result<()> {
let case = CliTest::with_files([(
"src/main",
r#"
print(undefined_var) # error: unresolved-reference
"#,
)])?;
// By default, `src/main` is excluded because the file has no supported extension.
case.write_file(
"ty.toml",
r#"
[src]
include = ["src"]
"#,
)?;
assert_cmd_snapshot!(case.command(), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
WARN No python files found under the given path(s)
");
// The file can be included by adding an exactly matching pattern
case.write_file(
"ty.toml",
r#"
[src]
include = ["src", "src/main"]
"#,
)?;
assert_cmd_snapshot!(case.command(), @r"
success: false
exit_code: 1
----- stdout -----
error[unresolved-reference]: Name `undefined_var` used when not defined
--> src/main:2:7
|
2 | print(undefined_var) # error: unresolved-reference
| ^^^^^^^^^^^^^
|
info: rule `unresolved-reference` is enabled by default
Found 1 diagnostic
----- stderr -----
");
Ok(())
}
/// Test configuration file exclude functionality
#[test]
fn configuration_exclude() -> anyhow::Result<()> {

View File

@@ -1,114 +0,0 @@
use insta_cmd::assert_cmd_snapshot;
use crate::CliTest;
#[test]
fn add_ignore() -> anyhow::Result<()> {
let case = CliTest::with_file(
"different_violations.py",
r#"
import sys
x = 1 + a
if sys.does_not_exist:
...
def test(a, b): ...
test(x = 10, b = 12)
"#,
)?;
assert_cmd_snapshot!(case.command().arg("--add-ignore"), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
Added 4 ignore comments
----- stderr -----
");
// There should be no diagnostics when running ty again
assert_cmd_snapshot!(case.command(), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
");
Ok(())
}
#[test]
fn add_ignore_unfixable() -> anyhow::Result<()> {
let case = CliTest::with_files([
("has_syntax_error.py", r"print(x # [unresolved-reference]"),
(
"different_violations.py",
r#"
import sys
x = 1 + a
reveal_type(x)
if sys.does_not_exist:
...
"#,
),
(
"repeated_violations.py",
r#"
x = (
1 +
a * b
)
y = y # ty: ignore[unresolved-reference]
"#,
),
])?;
assert_cmd_snapshot!(case.command().arg("--add-ignore").env("RUST_BACKTRACE", "1"), @r"
success: false
exit_code: 1
----- stdout -----
info[revealed-type]: Revealed type
--> different_violations.py:6:13
|
4 | x = 1 + a # ty:ignore[unresolved-reference]
5 |
6 | reveal_type(x) # ty:ignore[undefined-reveal]
| ^ `Unknown`
7 |
8 | if sys.does_not_exist: # ty:ignore[unresolved-attribute]
|
error[unresolved-reference]: Name `x` used when not defined
--> has_syntax_error.py:1:7
|
1 | print(x # [unresolved-reference]
| ^
|
info: rule `unresolved-reference` is enabled by default
error[invalid-syntax]: unexpected EOF while parsing
--> has_syntax_error.py:1:34
|
1 | print(x # [unresolved-reference]
| ^
|
Found 3 diagnostics
Added 5 ignore comments
----- stderr -----
WARN Skipping file `<temp_dir>/has_syntax_error.py` with syntax errors
");
Ok(())
}

View File

@@ -2,7 +2,6 @@ mod analysis_options;
mod config_option;
mod exit_code;
mod file_selection;
mod fixes;
mod python_environment;
mod rule_selection;

View File

@@ -3,7 +3,6 @@ auto-import-includes-modules,main.py,0,1
auto-import-includes-modules,main.py,1,7
auto-import-includes-modules,main.py,2,1
auto-import-skips-current-module,main.py,0,1
class-arg-completion,main.py,0,1
fstring-completions,main.py,0,1
higher-level-symbols-preferred,main.py,0,
higher-level-symbols-preferred,main.py,1,1
1 name file index rank
3 auto-import-includes-modules main.py 1 7
4 auto-import-includes-modules main.py 2 1
5 auto-import-skips-current-module main.py 0 1
class-arg-completion main.py 0 1
6 fstring-completions main.py 0 1
7 higher-level-symbols-preferred main.py 0
8 higher-level-symbols-preferred main.py 1 1

View File

@@ -1,2 +0,0 @@
[settings]
auto-import = false

View File

@@ -1 +0,0 @@
class Foo(m<CURSOR: metaclass>)

View File

@@ -1,5 +0,0 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = []

View File

@@ -1,8 +0,0 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }

View File

@@ -5,8 +5,8 @@ use ruff_diagnostics::Edit;
use ruff_python_ast::find_node::covering_node;
use ruff_text_size::TextRange;
use ty_project::Db;
use ty_python_semantic::create_suppression_fix;
use ty_python_semantic::lint::LintId;
use ty_python_semantic::suppress_single;
use ty_python_semantic::types::{UNDEFINED_REVEAL, UNRESOLVED_REFERENCE};
/// A `QuickFix` Code Action
@@ -42,7 +42,7 @@ pub fn code_actions(
// Suggest just suppressing the lint (always a valid option, but never ideal)
actions.push(QuickFix {
title: format!("Ignore '{}' for this line", lint_id.name()),
edits: suppress_single(db, file, lint_id, diagnostic_range).into_edits(),
edits: create_suppression_fix(db, file, lint_id, diagnostic_range).into_edits(),
preferred: false,
});
@@ -437,38 +437,6 @@ mod tests {
"#);
}
#[test]
fn add_ignore_line_continuation_empty_lines() {
let test = CodeActionTest::with_source(
r#"b = bbbbb \
[ ccc # test
+ <START>ddd<END> \
] # test
"#,
);
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:4:11
|
2 | [ ccc # test
3 |
4 | + ddd \
| ^^^
5 |
6 | ] # test
|
2 | [ ccc # test
3 |
4 | + ddd \
-
5 + # ty:ignore[unresolved-reference]
6 | ] # test
");
}
#[test]
fn undefined_reveal_type() {
let test = CodeActionTest::with_source(

View File

@@ -193,16 +193,15 @@ impl<'db> Completions<'db> {
/// when the completion context determines that the given suggestion
/// is never valid.
fn add_skip_query(&mut self, mut completion: Completion<'db>) -> bool {
// Tags completions with context-specific if they are
// known to be usable in a `raise` context and we have
// determined a raisable type `raisable_ty`.
// Tags completions with whether they are known to be usable in
// a `raise` context.
//
// It's possible that some completions are usable in a `raise`
// but aren't marked here. That is, false negatives are
// possible but false positives are not.
if let Some(raisable_ty) = self.context.raisable_ty {
if let Some(ty) = completion.ty {
completion.is_context_specific |= ty.is_assignable_to(self.db, raisable_ty);
completion.is_definitively_raisable = ty.is_assignable_to(self.db, raisable_ty);
}
}
if self.context.exclude(self.db, &completion) {
@@ -286,13 +285,13 @@ pub struct Completion<'db> {
/// Whether this item only exists for type checking purposes and
/// will be missing at runtime
pub is_type_check_only: bool,
/// Whether this item can definitively be used in the current context.
/// Whether this item can definitively be used in a `raise` context.
///
/// Some completions are computed based on contextual information.
/// If that's the case, we know this is a very precise completion
/// that should always be valid and can be preferred when
/// ordering completions.
pub is_context_specific: bool,
/// Note that this may not always be computed. (i.e., Only computed
/// when we are in a `raise` context.) And also note that if this
/// is `true`, then it's definitively usable in `raise`, but if
/// it's `false`, it _may_ still be usable in `raise`.
pub is_definitively_raisable: bool,
/// The documentation associated with this item, if
/// available.
pub documentation: Option<Docstring>,
@@ -316,7 +315,7 @@ impl<'db> Completion<'db> {
import: None,
builtin: semantic.builtin,
is_type_check_only,
is_context_specific: false,
is_definitively_raisable: false,
documentation,
}
}
@@ -399,7 +398,7 @@ impl<'db> Completion<'db> {
import: None,
builtin: false,
is_type_check_only: false,
is_context_specific: false,
is_definitively_raisable: false,
documentation: None,
}
}
@@ -415,7 +414,7 @@ impl<'db> Completion<'db> {
import: None,
builtin: true,
is_type_check_only: false,
is_context_specific: false,
is_definitively_raisable: false,
documentation: None,
}
}
@@ -434,7 +433,7 @@ impl<'db> Completion<'db> {
import: None,
builtin: false,
is_type_check_only: false,
is_context_specific: true,
is_definitively_raisable: false,
documentation,
}
}
@@ -995,7 +994,7 @@ impl<'db> CollectionContext<'db> {
#[allow(clippy::unused_self)]
fn rank<'c>(&self, c: &'c Completion<'_>) -> Rank<'c> {
Rank {
definitively_usable: if c.is_context_specific {
definitively_usable: if c.is_definitively_raisable {
Sort::Higher
} else {
Sort::Even
@@ -1184,6 +1183,7 @@ fn add_function_arg_completions<'db>(
if p.is_positional_only || set_function_args.contains(&p.name.as_str()) {
continue;
}
completions.add(Completion::argument(
&p.name,
p.ty,
@@ -1374,7 +1374,7 @@ fn add_unimported_completions<'db>(
builtin: false,
// TODO: `is_type_check_only` requires inferring the type of the symbol
is_type_check_only: false,
is_context_specific: false,
is_definitively_raisable: false,
documentation: None,
});
}
@@ -3088,9 +3088,9 @@ class Foo(<CURSOR>):
);
assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"
metaclass=
Bar
Foo
metaclass=
");
}
@@ -3106,9 +3106,9 @@ class Bar: ...
);
assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"
metaclass=
Bar
Foo
metaclass=
");
}
@@ -3124,9 +3124,9 @@ class Bar: ...
);
assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"
metaclass=
Bar
Foo
metaclass=
");
}
@@ -3140,9 +3140,9 @@ class Foo(<CURSOR>",
);
assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"
metaclass=
Bar
Foo
metaclass=
");
}
@@ -3804,8 +3804,8 @@ bar(o<CURSOR>
assert_snapshot!(
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@"
okay=
foo
okay=
"
);
}
@@ -3825,8 +3825,8 @@ bar(o<CURSOR>
assert_snapshot!(
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@"
okay=
foo
okay=
"
);
}
@@ -3940,10 +3940,10 @@ bar(o<CURSOR>
assert_snapshot!(
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@"
foo
okay=
okay_abc=
okay_okay=
foo
"
);
}
@@ -3961,9 +3961,9 @@ bar(<CURSOR>
);
assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"
okay=
bar
foo
okay=
");
}

View File

@@ -218,7 +218,6 @@ fn render_markdown(docstring: &str) -> String {
output.push('\n');
}
}
first_line = false;
// If we're in a literal block and we find a non-empty dedented line, end the block
// TODO: we should remove all the trailing blank lines
@@ -274,22 +273,6 @@ fn render_markdown(docstring: &str) -> String {
block_indent = line_indent;
in_any_code = true;
in_markdown_with_fence = Some(fence.to_owned());
// Render the line verbatim without its indent and move on.
//
// If there's any indent this is really just Bad Syntax but it "makes sense"
// to someone writing docs like this:
//
// Returns:
// Some details...
// ```
// some_example()
// ```
// etc etc...
//
// We "make this work" by stripping the indent on the fences but preserving the
// full indent of the lines between the fences
output.push_str(line);
continue;
}
// If we're in a markdown code fence and this line seems to terminate it, end the block
} else if let Some(fence) = &in_markdown_with_fence
@@ -298,9 +281,6 @@ fn render_markdown(docstring: &str) -> String {
in_any_code = false;
block_indent = 0;
in_markdown_with_fence = None;
// Render the line without its indent and move on.
output.push_str(line);
continue;
}
// If we're not in a codeblock and we see something that signals a literal block, start one
@@ -466,6 +446,8 @@ fn render_markdown(docstring: &str) -> String {
// Print the line verbatim, it's in code
output.push_str(line);
}
first_line = false;
}
// Flush codeblock
if in_any_code {
@@ -1226,74 +1208,6 @@ mod tests {
");
}
// If an explicit markdown codefence is indented, eat the indent so it renders
// "the way the user expects" (as written this is basically invalid markdown,
// but it's nice if we handle it anyway because it makes visual sense).
#[test]
fn explicit_markdown_block_with_indent_tick() {
let docstring = r#"
My cool func...
Returns:
Some details
`````python
x_y = thing_do();
``` # this should't close the fence!
a_b = other_thing();
`````
And so on.
"#;
let docstring = Docstring::new(docstring.to_owned());
assert_snapshot!(docstring.render_markdown(), @r"
My cool func...
Returns:
&nbsp;&nbsp;&nbsp;&nbsp;Some details
`````python
x_y = thing_do();
``` # this should't close the fence!
a_b = other_thing();
`````
&nbsp;&nbsp;&nbsp;&nbsp;And so on.
");
}
// If an explicit markdown codefence is indented, eat the indent so it renders
// "the way the user expects" (as written this is basically invalid markdown,
// but it's nice if we handle it anyway because it makes visual sense).
#[test]
fn explicit_markdown_block_with_indent_tilde() {
let docstring = r#"
My cool func...
Returns:
Some details
~~~~~~python
x_y = thing_do();
~~~ # this should't close the fence!
a_b = other_thing();
~~~~~~
And so on.
"#;
let docstring = Docstring::new(docstring.to_owned());
assert_snapshot!(docstring.render_markdown(), @r"
My cool func...
Returns:
&nbsp;&nbsp;&nbsp;&nbsp;Some details
~~~~~~python
x_y = thing_do();
~~~ # this should't close the fence!
a_b = other_thing();
~~~~~~
&nbsp;&nbsp;&nbsp;&nbsp;And so on.
");
}
// What do we do when we hit the end of the docstring with an unclosed markdown block?
#[test]
fn explicit_markdown_block_with_unclosed_fence_tick() {
@@ -1353,7 +1267,7 @@ mod tests {
assert_snapshot!(docstring.render_markdown(), @r"
My cool func:
``````we still think this is a codefence```
``````we still think this is a codefence```
x_y = thing_do();
```````````` and are sloppy as heck with indentation and closing shrugggg
");
@@ -1376,7 +1290,7 @@ mod tests {
assert_snapshot!(docstring.render_markdown(), @r"
My cool func:
~~~~~~we still think this is a codefence~~~
~~~~~~we still think this is a codefence~~~
x_y = thing_do();
~~~~~~~~~~~~~ and are sloppy as heck with indentation and closing shrugggg
");

View File

@@ -619,7 +619,7 @@ mod tests {
list_snapshot(&db),
@r#"
[
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, Some(Functools)),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, None),
]
"#,
);
@@ -662,7 +662,7 @@ mod tests {
@r#"
[
Module::File("asyncio", "std-custom", "/typeshed/stdlib/asyncio/__init__.pyi", Package, None),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, Some(Functools)),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, None),
Module::File("random", "std-custom", "/typeshed/stdlib/random.pyi", Module, None),
]
"#,
@@ -755,7 +755,7 @@ mod tests {
[
Module::File("asyncio", "std-custom", "/typeshed/stdlib/asyncio/__init__.pyi", Package, None),
Module::File("collections", "std-custom", "/typeshed/stdlib/collections/__init__.pyi", Package, Some(Collections)),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, Some(Functools)),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, None),
]
"#,
);
@@ -1091,7 +1091,7 @@ mod tests {
list_snapshot(&db),
@r#"
[
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, Some(Functools)),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, None),
]
"#,
);
@@ -1107,7 +1107,7 @@ mod tests {
list_snapshot(&db),
@r#"
[
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, Some(Functools)),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, None),
]
"#,
);
@@ -1129,7 +1129,7 @@ mod tests {
list_snapshot(&db),
@r#"
[
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, Some(Functools)),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, None),
]
"#,
);
@@ -1191,7 +1191,7 @@ mod tests {
list_snapshot(&db),
@r#"
[
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, Some(Functools)),
Module::File("functools", "std-custom", "/typeshed/stdlib/functools.pyi", Module, None),
]
"#,
);

View File

@@ -320,7 +320,6 @@ pub enum KnownModule {
Abc,
Contextlib,
Dataclasses,
Functools,
Collections,
Inspect,
#[strum(serialize = "string.templatelib")]
@@ -352,7 +351,6 @@ impl KnownModule {
Self::Abc => "abc",
Self::Contextlib => "contextlib",
Self::Dataclasses => "dataclasses",
Self::Functools => "functools",
Self::Collections => "collections",
Self::Inspect => "inspect",
Self::TypeCheckerInternals => "_typeshed._type_checker_internals",
@@ -397,10 +395,6 @@ impl KnownModule {
pub const fn is_importlib(self) -> bool {
matches!(self, Self::ImportLib)
}
pub const fn is_functools(self) -> bool {
matches!(self, Self::Functools)
}
}
impl std::fmt::Display for KnownModule {

View File

@@ -325,10 +325,6 @@ impl ModulePath {
relative_path: relative_path.with_extension("py"),
})
}
pub(crate) fn into_search_path(self) -> SearchPath {
self.search_path
}
}
impl PartialEq<SystemPathBuf> for ModulePath {

View File

@@ -32,8 +32,11 @@ specifies ty's implementation of Python's import resolution algorithm.
*/
use std::borrow::Cow;
use std::fmt;
use std::iter::FusedIterator;
use std::str::Split;
use compact_str::format_compact;
use rustc_hash::{FxBuildHasher, FxHashSet};
use ruff_db::files::{File, FilePath, FileRootKind};
@@ -1099,79 +1102,6 @@ fn desperately_resolve_name(
resolve_name_impl(db, name, mode, search_paths.iter().flatten())
}
#[derive(Debug, Clone, Copy)]
enum ResolvedModule {
NamespacePackage,
LegacyNamespacePackage(File),
RegularPackage(File),
Module(File),
}
#[derive(Debug, Clone)]
struct ModuleResolutionCandidate {
path: ModulePath,
module: ResolvedModule,
py_typed: PyTyped,
}
impl ModuleResolutionCandidate {
// Is this some kind of namespace package?
fn is_any_namespace_package(&self) -> bool {
match self.module {
ResolvedModule::NamespacePackage => true,
ResolvedModule::LegacyNamespacePackage(_) => true,
ResolvedModule::RegularPackage(_) => false,
ResolvedModule::Module(_) => false,
}
}
// This is the module we were actually interested in resolving, complete the resolution
fn into_resolved_name(self) -> ResolvedName {
match self.module {
ResolvedModule::NamespacePackage => ResolvedName::NamespacePackage,
// legacy namespace packages behave like regular packages when they're the target of the resolution
ResolvedModule::LegacyNamespacePackage(file) => {
ResolvedName::FileModule(ResolvedFileModule {
kind: ModuleKind::Package,
search_path: self.path.into_search_path(),
file,
})
}
ResolvedModule::RegularPackage(file) => ResolvedName::FileModule(ResolvedFileModule {
kind: ModuleKind::Package,
search_path: self.path.into_search_path(),
file,
}),
ResolvedModule::Module(file) => ResolvedName::FileModule(ResolvedFileModule {
kind: ModuleKind::Module,
search_path: self.path.into_search_path(),
file,
}),
}
}
fn missing_submodule_is_terminal(&self) -> bool {
if matches!(self.py_typed, PyTyped::Partial) {
return false;
}
// Only regular packages are truly terminal, as a later `foo/__init__.py`
// can shadow `foo.py`. Both shadow namespace packages.
matches!(self.module, ResolvedModule::RegularPackage(_))
}
fn to_str<'a>(&self, db: &'a dyn Db) -> Cow<'a, str> {
match self.module {
ResolvedModule::NamespacePackage => {
Cow::Owned(self.path.to_system_path().unwrap_or_default().to_string())
}
ResolvedModule::LegacyNamespacePackage(file) => Cow::Borrowed(file.path(db).as_str()),
ResolvedModule::RegularPackage(file) => Cow::Borrowed(file.path(db).as_str()),
ResolvedModule::Module(file) => Cow::Borrowed(file.path(db).as_str()),
}
}
}
fn resolve_name_impl<'a>(
db: &dyn Db,
name: &ModuleName,
@@ -1179,250 +1109,109 @@ fn resolve_name_impl<'a>(
search_paths: impl Iterator<Item = &'a SearchPath>,
) -> Option<ResolvedName> {
let python_version = db.python_version();
let context = ResolverContext::new(db, python_version, mode);
let resolver_state = ResolverContext::new(db, python_version, mode);
let is_non_shadowable = mode.is_non_shadowable(python_version.minor, name.as_str());
let mut stub_name = None;
let mut cur_candidates = search_paths
.filter_map(|search_path| {
// When a builtin module is imported, standard module resolution is bypassed:
// the module name always resolves to the stdlib module,
// even if there's a module of the same name in the first-party root
// (which would normally result in the stdlib module being overridden).
// TODO: offer a diagnostic if there is a first-party module of the same name
if is_non_shadowable && !search_path.is_standard_library() {
return None;
}
let name = RelaxedModuleName::new(name);
let stub_name = name.to_stub_package();
let mut is_namespace_package = false;
Some(ModuleResolutionCandidate {
path: search_path.to_module_path(),
module: ResolvedModule::NamespacePackage,
py_typed: PyTyped::Untyped,
})
})
.collect::<Vec<_>>();
let mut next_candidates = vec![];
for search_path in search_paths {
// When a builtin module is imported, standard module resolution is bypassed:
// the module name always resolves to the stdlib module,
// even if there's a module of the same name in the first-party root
// (which would normally result in the stdlib module being overridden).
// TODO: offer a diagnostic if there is a first-party module of the same name
if is_non_shadowable && !search_path.is_standard_library() {
continue;
}
// FIXME?: because we have to search every candidate on each step of this loop,
// in theory we can search them all in parallel. However we need to join the parallelism
// at the end of each iteration, and after the first iteration in 99% of cases we will have
// reduced down to a single candidate, so maybe meh?
let mut is_root = true;
for component in name.components() {
// Search for the next component in every search-path
for mut candidate in cur_candidates.drain(..) {
// On the first iteration, look for `mypackage-stubs` as well
// Optimization: stdlib never has these `-stubs`
if is_root
&& context.mode.stubs_allowed()
&& !candidate.path.search_path().is_standard_library()
{
let stub_name = stub_name.get_or_insert_with(|| format!("{component}-stubs"));
let mut stub_candidate = candidate.clone();
if resolve_name_in_search_path(&context, &mut stub_candidate, stub_name).is_ok() {
// `mypackage-stubs.py(i)` is not a valid result
if matches!(stub_candidate.module, ResolvedModule::Module(_)) {
if !search_path.is_standard_library() && resolver_state.mode.stubs_allowed() {
match resolve_name_in_search_path(&resolver_state, &stub_name, search_path) {
Ok((package_kind, _, ResolvedName::FileModule(module))) => {
if package_kind.is_root() && module.kind.is_module() {
tracing::trace!(
"Search path `{}` contains a module \
named `{stub_name}` but a standalone module isn't a valid stub.",
candidate.path.search_path()
"Search path `{search_path}` contains a module \
named `{stub_name}` but a standalone module isn't a valid stub."
);
} else {
let shadows_all = stub_candidate.missing_submodule_is_terminal();
next_candidates.push(stub_candidate);
if shadows_all {
break;
}
return Some(ResolvedName::FileModule(module));
}
}
}
if resolve_name_in_search_path(&context, &mut candidate, component).is_err() {
if candidate.missing_submodule_is_terminal() {
// Everything after this package should be shadowed out by this failure
// But the previous results are still in play because they would have
// shadowed this one out anyway.
break;
Ok((_, _, ResolvedName::NamespacePackage)) => {
is_namespace_package = true;
}
continue;
}
let shadows_all = candidate.missing_submodule_is_terminal();
next_candidates.push(candidate);
if shadows_all {
break;
}
}
// Now that we have several candidates, we need to reject candidates that are shadowed.
// There are only two valid situations where we should proceed into the next iteration
// with multiple candidates:
//
// * All the candidates are namespace packages
// * `mypackage-stubs` is a candidate with `PyTyped::Partial`
//
// The existence of a single non-namespace package will shadow
// all namespace packages *regardless of search-path order*.
//
// Similarly, the existence of a single regular package will shadow
// all modules (mymod.py) *regardless of search-path order*.
//
// This is implemented with the `retain` that follows.
//
// We can't do this "delete all namespace packages" eagerly because we want a
// `PyTyped::Partial` regular package to shadow namespace packages after it.
// (FIXME: I guess we could just set a flag not to add them...)
// First record what kinds of things we found
let mut found_regular_package = None;
let mut found_module = None;
let mut found_legacy_namespace_package = None;
for candidate in &next_candidates {
match (candidate.module, candidate.py_typed) {
(ResolvedModule::LegacyNamespacePackage(file), _) => {
found_legacy_namespace_package = Some(file);
Err((PackageKind::Root, _)) => {
tracing::trace!(
"Search path `{search_path}` contains no stub package named `{stub_name}`."
);
}
(ResolvedModule::RegularPackage(file), PyTyped::Untyped | PyTyped::Full) => {
found_regular_package = Some(file);
Err((PackageKind::Regular, PyTyped::Partial)) => {
tracing::trace!(
"Stub-package in `{search_path}` doesn't contain module: \
`{name}` but it is a partial package, keep going."
);
// stub exists, but the module doesn't. But this is a partial package,
// fall through to looking for a non-stub package
}
(ResolvedModule::Module(file), PyTyped::Untyped | PyTyped::Full) => {
found_module = Some(file);
Err((PackageKind::Regular, _)) => {
tracing::trace!(
"Stub-package in `{search_path}` doesn't contain module: `{name}`"
);
// stub exists, but the module doesn't.
return None;
}
Err((PackageKind::Namespace, _)) => {
tracing::trace!(
"Stub-package in `{search_path}` doesn't contain module: \
`{name}` but it is a namespace package, keep going."
);
// stub exists, but the module doesn't. But this is a namespace package,
// fall through to looking for a non-stub package
}
_ => {}
}
}
next_candidates.retain(|candidate| {
if let Some(_legacy) = found_legacy_namespace_package && !matches!(candidate.module, ResolvedModule::LegacyNamespacePackage(_)) {
// TODO: it would be nice to emit a warning about this but we just assume it's fine
match resolve_name_in_search_path(&resolver_state, &name, search_path) {
Ok((_, _, ResolvedName::FileModule(module))) => {
return Some(ResolvedName::FileModule(module));
}
// Regular packages shadow anything that isn't a regular package independent of order
if let Some(package) = found_regular_package && !matches!(candidate.module, ResolvedModule::RegularPackage(_)) {
tracing::trace!("Discarding namespace package `{}` because a regular package of the same name was found: {}",
candidate.to_str(db),
package.path(db).as_str(),
);
return false;
}
// Modules shadow namespace packages independent of order
if let Some(module) = found_module && candidate.is_any_namespace_package() {
tracing::trace!("Discarding namespace package `{}` because a module of the same name was found: {}",
candidate.to_str(db),
module.path(db).as_str(),
);
return false;
}
true
});
if next_candidates.is_empty() {
return None;
}
// Advance to the next level of candidates while reusing allocations
// (we used `drain` so cur_candidates is empty)
std::mem::swap(&mut cur_candidates, &mut next_candidates);
is_root = false;
}
// We now have a list of candidates that are all correct answers, and we just need to take the
// Best one. Because of the filtering we've done in the loop, and sorting stub-packages to come
// first, this is in fact just "the first one".
cur_candidates
.into_iter()
.next()
.map(ModuleResolutionCandidate::into_resolved_name)
}
/// Attempts to resolve a module name in a particular search path.
///
/// `search_path` should be the directory to start looking for the module.
///
/// `name` should be a complete non-empty module name, e.g, `foo` or
/// `foo.bar.baz`.
///
/// Upon success, this returns the kind of the parent package (root, regular
/// package or namespace package) along with the resolved details of the
/// module: its kind (single-file module or package), the search path in
/// which it was found (guaranteed to be equal to the one given) and the
/// corresponding `File`.
///
/// Upon error, the kind of the parent package is returned.
fn resolve_name_in_search_path(
context: &ResolverContext,
candidate: &mut ModuleResolutionCandidate,
module_name: &str,
) -> Result<(), ()> {
if matches!(candidate.module, ResolvedModule::Module(_)) {
tracing::trace!(
"The non-package {} cannot have child",
candidate.to_str(context.db)
);
return Err(());
}
let package_path = &mut candidate.path;
package_path.push(module_name);
// Check for a regular package first (highest priority)
package_path.push("__init__");
if let Some(init) = resolve_file_module(package_path, context) {
// Remove the `__init__` component for any potential next step
package_path.pop();
candidate.py_typed = package_path
.py_typed(context)
.inherit_parent(candidate.py_typed);
if is_legacy_namespace_package(package_path, context, init) {
candidate.module = ResolvedModule::LegacyNamespacePackage(init);
} else {
candidate.module = ResolvedModule::RegularPackage(init);
}
return Ok(());
}
// Check for a file module next
package_path.pop();
if let Some(file_module) = resolve_file_module(package_path, context) {
candidate.module = ResolvedModule::Module(file_module);
return Ok(());
}
// Last resort, check if a folder with the given name exists. If so,
// then this is a namespace package. We need to skip this check for
// typeshed because the `resolve_file_module` can also return `None` if the
// `__init__.py` exists but isn't available for the current Python version.
// Let's assume that the `xml` module is only available on Python 3.11+ and
// we're resolving for Python 3.10:
//
// * `resolve_file_module("xml/__init__.pyi")` returns `None` even though
// the file exists but the module isn't available for the current Python
// version.
// * The check here would now return `true` because the `xml` directory
// exists, resulting in a false positive for a namespace package.
//
// Since typeshed doesn't use any namespace packages today (May 2025),
// simply skip this check which also helps performance. If typeshed
// ever uses namespace packages, ensure that this check also takes the
// `VERSIONS` file into consideration.
if !package_path.search_path().is_standard_library() && package_path.is_directory(context) {
if let Some(path) = package_path.to_system_path() {
let system = context.db.system();
if system.case_sensitivity().is_case_sensitive()
|| system.path_exists_case_sensitive(
&path,
package_path.search_path().as_system_path().unwrap(),
)
{
candidate.py_typed = package_path
.py_typed(context)
.inherit_parent(candidate.py_typed);
candidate.module = ResolvedModule::NamespacePackage;
return Ok(());
Ok((_, _, ResolvedName::NamespacePackage)) => {
is_namespace_package = true;
}
Err(kind) => match kind {
(PackageKind::Root, _) => {
tracing::trace!(
"Search path `{search_path}` contains no package named `{name}`."
);
}
(PackageKind::Regular, PyTyped::Partial) => {
tracing::trace!(
"Package in `{search_path}` doesn't contain module: \
`{name}` but it is a partial package, keep going."
);
}
(PackageKind::Regular, _) => {
// For regular packages, don't search the next search path. All files of that
// package must be in the same location
tracing::trace!("Package in `{search_path}` doesn't contain module: `{name}`");
return None;
}
(PackageKind::Namespace, _) => {
tracing::trace!(
"Package in `{search_path}` doesn't contain module: \
`{name}` but it is a namespace package, keep going."
);
}
},
}
}
Err(())
if is_namespace_package {
return Some(ResolvedName::NamespacePackage);
}
None
}
#[derive(Debug)]
@@ -1445,6 +1234,101 @@ struct ResolvedFileModule {
file: File,
}
/// Attempts to resolve a module name in a particular search path.
///
/// `search_path` should be the directory to start looking for the module.
///
/// `name` should be a complete non-empty module name, e.g, `foo` or
/// `foo.bar.baz`.
///
/// Upon success, this returns the kind of the parent package (root, regular
/// package or namespace package) along with the resolved details of the
/// module: its kind (single-file module or package), the search path in
/// which it was found (guaranteed to be equal to the one given) and the
/// corresponding `File`.
///
/// Upon error, the kind of the parent package is returned.
fn resolve_name_in_search_path(
    context: &ResolverContext,
    name: &RelaxedModuleName,
    search_path: &SearchPath,
) -> Result<(PackageKind, PyTyped, ResolvedName), (PackageKind, PyTyped)> {
    let mut components = name.components();
    // The last component is the module being resolved; everything before it
    // names the parent package, which must resolve first.
    let module_name = components.next_back().unwrap();
    let resolved_package = resolve_package(search_path, components, context)?;
    let mut package_path = resolved_package.path;

    package_path.push(module_name);

    // Check for a regular package first (highest priority)
    package_path.push("__init__");
    if let Some(regular_package) = resolve_file_module(&package_path, context) {
        return Ok((
            resolved_package.kind,
            resolved_package.typed,
            ResolvedName::FileModule(ResolvedFileModule {
                search_path: search_path.clone(),
                kind: ModuleKind::Package,
                file: regular_package,
            }),
        ));
    }

    // Check for a file module next
    package_path.pop();
    if let Some(file_module) = resolve_file_module(&package_path, context) {
        return Ok((
            resolved_package.kind,
            resolved_package.typed,
            ResolvedName::FileModule(ResolvedFileModule {
                file: file_module,
                kind: ModuleKind::Module,
                search_path: search_path.clone(),
            }),
        ));
    }

    // Last resort, check if a folder with the given name exists. If so,
    // then this is a namespace package. We need to skip this check for
    // typeshed because the `resolve_file_module` can also return `None` if the
    // `__init__.py` exists but isn't available for the current Python version.
    // Let's assume that the `xml` module is only available on Python 3.11+ and
    // we're resolving for Python 3.10:
    //
    // * `resolve_file_module("xml/__init__.pyi")` returns `None` even though
    //   the file exists but the module isn't available for the current Python
    //   version.
    // * The check here would now return `true` because the `xml` directory
    //   exists, resulting in a false positive for a namespace package.
    //
    // Since typeshed doesn't use any namespace packages today (May 2025),
    // simply skip this check which also helps performance. If typeshed
    // ever uses namespace packages, ensure that this check also takes the
    // `VERSIONS` file into consideration.
    if !search_path.is_standard_library() && package_path.is_directory(context) {
        if let Some(path) = package_path.to_system_path() {
            let system = context.db.system();
            // On case-insensitive file systems, additionally verify the
            // directory exists with this exact casing before treating it as a
            // namespace package.
            if system.case_sensitivity().is_case_sensitive()
                || system.path_exists_case_sensitive(
                    &path,
                    package_path.search_path().as_system_path().unwrap(),
                )
            {
                return Ok((
                    resolved_package.kind,
                    resolved_package.typed,
                    ResolvedName::NamespacePackage,
                ));
            }
        }
    }

    Err((resolved_package.kind, resolved_package.typed))
}
/// If `module` exists on disk with either a `.pyi` or `.py` extension,
/// return the [`File`] corresponding to that path.
///
@@ -1482,6 +1366,90 @@ pub(super) fn resolve_file_module(
Some(file)
}
/// Attempt to resolve the parent package of a module.
///
/// `module_search_path` should be the directory to start looking for the
/// parent package.
///
/// `components` should be the full module name of the parent package. This
/// specifically should not include the basename of the module. So e.g.,
/// for `foo.bar.baz`, `components` should be `[foo, bar]`. It follows that
/// `components` may be empty (in which case, the parent package is the root).
///
/// Upon success, the path to the package and its "kind" (root, regular or
/// namespace) is returned. Upon error, the kind of the package is still
/// returned based on how many components were found and whether `__init__.py`
/// is present.
fn resolve_package<'a, 'db, I>(
    module_search_path: &SearchPath,
    components: I,
    resolver_state: &ResolverContext<'db>,
) -> Result<ResolvedPackage, (PackageKind, PyTyped)>
where
    I: Iterator<Item = &'a str>,
{
    let mut path = module_search_path.to_module_path();

    // Set while inside a folder without an `__init__.py`, i.e. a namespace
    // package. Namespace packages are special because they can be spread
    // across multiple search paths. https://peps.python.org/pep-0420/
    let mut seen_namespace = false;

    // Set after the first component resolved, i.e. while resolving `bar` of
    // `foo.bar`.
    let mut seen_sub_package = false;

    let mut typed = path.py_typed(resolver_state);

    // For `foo.bar.baz`, verify that `foo` and `bar` both resolve to packages.
    for folder in components {
        path.push(folder);
        typed = path.py_typed(resolver_state).inherit_parent(typed);

        if path.is_regular_package(resolver_state) {
            // This is the only place where we need to consider the existence of legacy namespace
            // packages, as we are explicitly searching for the *parent* package of the module
            // we actually want. Here, such a package should be treated as a PEP-420 ("modern")
            // namespace package. In all other contexts it acts like a normal package and needs
            // no special handling.
            seen_namespace = is_legacy_namespace_package(&path, resolver_state);
        } else if path.is_directory(resolver_state)
            // Pure modules hide namespace packages with the same name
            && resolve_file_module(&path, resolver_state).is_none()
        {
            // A directory without an `__init__.py(i)` is a namespace package;
            // keep descending into the next folder.
            seen_namespace = true;
        } else {
            // This component could not be resolved; report how far we got so
            // the caller can decide whether to try the next search path.
            let kind = if seen_namespace {
                // The missing package is part of a namespace package.
                PackageKind::Namespace
            } else if seen_sub_package {
                // A regular sub package wasn't found.
                PackageKind::Regular
            } else {
                // We couldn't find `foo` for `foo.bar.baz`; search the next search path.
                PackageKind::Root
            };
            return Err((kind, typed));
        }

        seen_sub_package = true;
    }

    let kind = match (seen_namespace, seen_sub_package) {
        (true, _) => PackageKind::Namespace,
        (false, true) => PackageKind::Regular,
        (false, false) => PackageKind::Root,
    };

    Ok(ResolvedPackage { kind, path, typed })
}
/// Determines whether a package is a legacy namespace package.
///
/// Before PEP 420 introduced implicit namespace packages, the ecosystem developed
@@ -1511,14 +1479,19 @@ pub(super) fn resolve_file_module(
/// we will just get confused if you mess it up).
fn is_legacy_namespace_package(
package_path: &ModulePath,
context: &ResolverContext,
init: File,
resolver_state: &ResolverContext,
) -> bool {
// Just an optimization, the stdlib and typeshed are never legacy namespace packages
if package_path.search_path().is_standard_library() {
return false;
}
let mut package_path = package_path.clone();
package_path.push("__init__");
let Some(init) = resolve_file_module(&package_path, resolver_state) else {
return false;
};
// This is all syntax-only analysis so it *could* be fooled but it's really unlikely.
//
// The benefit of being syntax-only is speed and avoiding circular dependencies
@@ -1526,13 +1499,44 @@ fn is_legacy_namespace_package(
//
// The downside is if you write slightly different syntax we will fail to detect the idiom,
// but hey, this is better than nothing!
let parsed = ruff_db::parsed::parsed_module(context.db, init);
let parsed = ruff_db::parsed::parsed_module(resolver_state.db, init);
let mut visitor = LegacyNamespacePackageVisitor::default();
visitor.visit_body(parsed.load(context.db).suite());
visitor.visit_body(parsed.load(resolver_state.db).suite());
visitor.is_legacy_namespace_package
}
/// The outcome of successfully resolving a module's parent package
/// (see `resolve_package`).
#[derive(Debug)]
struct ResolvedPackage {
    /// Path to the resolved package directory.
    path: ModulePath,
    /// How the package was found (root, regular, or namespace).
    kind: PackageKind,
    /// `py.typed` information accumulated along the package chain.
    typed: PyTyped,
}

#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum PackageKind {
    /// A root package or module. E.g. `foo` in `foo.bar.baz` or just `foo`.
    Root,

    /// A regular sub-package where the parent contains an `__init__.py`.
    ///
    /// For example, `bar` in `foo.bar` when the `foo` directory contains an `__init__.py`.
    Regular,

    /// A sub-package in a namespace package. A namespace package is a package
    /// without an `__init__.py`.
    ///
    /// For example, `bar` in `foo.bar` if the `foo` directory contains no
    /// `__init__.py`.
    Namespace,
}

impl PackageKind {
    /// Returns `true` for [`PackageKind::Root`].
    pub(crate) const fn is_root(self) -> bool {
        matches!(self, PackageKind::Root)
    }
}
/// Info about the `py.typed` file for this package
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub(crate) enum PyTyped {
@@ -1583,6 +1587,34 @@ impl<'db> ResolverContext<'db> {
}
}
/// A [`ModuleName`] but with relaxed semantics to allow `<package>-stubs.path`
#[derive(Debug)]
struct RelaxedModuleName(compact_str::CompactString);

impl RelaxedModuleName {
    /// Wraps an already-validated [`ModuleName`].
    fn new(name: &ModuleName) -> Self {
        Self(name.as_str().into())
    }

    /// Iterates over the dot-separated components of the name.
    fn components(&self) -> Split<'_, char> {
        self.0.split('.')
    }

    /// Maps the name onto the corresponding stubs package:
    /// `foo.bar` becomes `foo-stubs.bar`, and `foo` becomes `foo-stubs`.
    fn to_stub_package(&self) -> Self {
        match self.0.split_once('.') {
            Some((package, rest)) => Self(format_compact!("{package}-stubs.{rest}")),
            None => Self(format_compact!("{package}-stubs", package = self.0)),
        }
    }
}

impl fmt::Display for RelaxedModuleName {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}
/// Detects if a module contains a statement of the form:
/// ```python
/// __path__ = pkgutil.extend_path(__path__, __name__)
@@ -1894,12 +1926,14 @@ mod tests {
asyncio: 3.8- # 'Regular' package on py38+
asyncio.tasks: 3.9-3.11 # Submodule on py39+ only
functools: 3.8- # Top-level single-file module
xml: 3.8-3.8 # Namespace package on py38 only
";
const STDLIB: &[FileSpec] = &[
("asyncio/__init__.pyi", ""),
("asyncio/tasks.pyi", ""),
("functools.pyi", ""),
("xml/etree.pyi", ""),
];
const TYPESHED: MockedTypeshed = MockedTypeshed {
@@ -1912,7 +1946,7 @@ mod tests {
.with_python_version(PythonVersion::PY38)
.build();
let existing_modules = create_module_names(&["asyncio", "functools"]);
let existing_modules = create_module_names(&["asyncio", "functools", "xml.etree"]);
for module_name in existing_modules {
let resolved_module =
resolve_module_confident(&db, &module_name).unwrap_or_else(|| {
@@ -1936,12 +1970,16 @@ mod tests {
asyncio: 3.8- # 'Regular' package on py38+
asyncio.tasks: 3.9-3.11 # Submodule on py39+ only
collections: 3.9- # 'Regular' package on py39+
importlib: 3.9- # Namespace package on py39+
xml: 3.8-3.8 # Namespace package on 3.8 only
";
const STDLIB: &[FileSpec] = &[
("collections/__init__.pyi", ""),
("asyncio/__init__.pyi", ""),
("asyncio/tasks.pyi", ""),
("importlib/abc.pyi", ""),
("xml/etree.pyi", ""),
];
const TYPESHED: MockedTypeshed = MockedTypeshed {
@@ -1954,7 +1992,13 @@ mod tests {
.with_python_version(PythonVersion::PY38)
.build();
let nonexisting_modules = create_module_names(&["collections", "asyncio.tasks"]);
let nonexisting_modules = create_module_names(&[
"collections",
"importlib",
"importlib.abc",
"xml",
"asyncio.tasks",
]);
for module_name in nonexisting_modules {
assert!(
@@ -1971,6 +2015,7 @@ mod tests {
asyncio.tasks: 3.9-3.11 # Submodule on py39+ only
collections: 3.9- # 'Regular' package on py39+
functools: 3.8- # Top-level single-file module
importlib: 3.9- # Namespace package on py39+
";
const STDLIB: &[FileSpec] = &[
@@ -1978,6 +2023,7 @@ mod tests {
("asyncio/tasks.pyi", ""),
("collections/__init__.pyi", ""),
("functools.pyi", ""),
("importlib/abc.pyi", ""),
];
const TYPESHED: MockedTypeshed = MockedTypeshed {
@@ -1990,8 +2036,13 @@ mod tests {
.with_python_version(PythonVersion::PY39)
.build();
let existing_modules =
create_module_names(&["asyncio", "functools", "collections", "asyncio.tasks"]);
let existing_modules = create_module_names(&[
"asyncio",
"functools",
"importlib.abc",
"collections",
"asyncio.tasks",
]);
for module_name in existing_modules {
let resolved_module =
@@ -2393,7 +2444,7 @@ mod tests {
fn adding_file_to_search_path_with_lower_priority_does_not_invalidate_query() {
const TYPESHED: MockedTypeshed = MockedTypeshed {
versions: "functools: 3.8-",
stdlib_files: &[("functools/__init__.pyi", "def update_wrapper(): ...")],
stdlib_files: &[("functools.pyi", "def update_wrapper(): ...")],
};
let TestCase {
@@ -2407,7 +2458,7 @@ mod tests {
.build();
let functools_module_name = ModuleName::new_static("functools").unwrap();
let stdlib_functools_path = stdlib.join("functools/__init__.pyi");
let stdlib_functools_path = stdlib.join("functools.pyi");
let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();
assert_eq!(functools_module.search_path(&db).unwrap(), &stdlib);
@@ -2419,7 +2470,7 @@ mod tests {
// Adding a file to site-packages does not invalidate the query,
// since site-packages takes lower priority in the module resolution
db.clear_salsa_events();
let site_packages_functools_path = site_packages.join("functools/__init__.py");
let site_packages_functools_path = site_packages.join("functools.py");
db.write_file(&site_packages_functools_path, "f: int")
.unwrap();
let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();

View File

@@ -14,7 +14,6 @@ license.workspace = true
[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["cache", "serde"] }
ruff_diagnostics = { workspace = true }
ruff_macros = { workspace = true }
ruff_memory_usage = { workspace = true }
ruff_options_metadata = { workspace = true }
@@ -31,7 +30,7 @@ anyhow = { workspace = true }
camino = { workspace = true }
colored = { workspace = true }
crossbeam = { workspace = true }
get-size2 = { workspace = true, features = ["ordermap"] }
get-size2 = { workspace = true }
globset = { workspace = true }
notify = { workspace = true }
ordermap = { workspace = true, features = ["serde"] }
@@ -49,10 +48,8 @@ toml = { workspace = true }
tracing = { workspace = true }
[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
ruff_python_trivia = { workspace = true }
insta = { workspace = true, features = ["redactions", "ron"] }
ruff_db = { workspace = true, features = ["testing"] }
[features]
default = ["zstd"]

View File

@@ -1,794 +0,0 @@
use ruff_db::cancellation::{Canceled, CancellationToken};
use ruff_db::diagnostic::{DisplayDiagnosticConfig, DisplayDiagnostics};
use ruff_db::parsed::parsed_module;
use ruff_db::source::SourceText;
use ruff_db::system::{SystemPath, WritableSystem};
use ruff_db::{
diagnostic::{Annotation, Diagnostic, DiagnosticId, Severity, Span},
files::File,
source::source_text,
};
use ruff_diagnostics::{Fix, IsolationLevel, SourceMap};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use rustc_hash::FxHashSet;
use salsa::Setter as _;
use std::collections::BTreeMap;
use thiserror::Error;
use ty_python_semantic::{UNUSED_IGNORE_COMMENT, suppress_all};
use crate::Db;
/// The outcome of `suppress_all_diagnostics`.
pub struct SuppressAllResult {
    /// The non-lint diagnostics that can't be suppressed or the diagnostics of files
    /// that couldn't be suppressed (because ty failed to write the result back to disk,
    /// or the file contains syntax errors).
    pub diagnostics: Vec<Diagnostic>,
    /// The number of diagnostics that were suppressed.
    pub count: usize,
}
/// Adds suppressions to all lint diagnostics and writes the changed files back to disk.
///
/// Returns how many diagnostics were suppressed along with the remaining,
/// non-suppressed diagnostics.
///
/// ## Panics
/// If the `db`'s system isn't [writable](WritableSystem).
pub fn suppress_all_diagnostics(
    db: &mut dyn Db,
    mut diagnostics: Vec<Diagnostic>,
    cancellation_token: &CancellationToken,
) -> Result<SuppressAllResult, Canceled> {
    let system = WritableSystem::dyn_clone(
        db.system()
            .as_writable()
            .expect("System should be writable"),
    );

    // A diagnostic is suppressable if it is a lint (other than an
    // unused-ignore-comment) with a resolvable primary range.
    let has_fixable = diagnostics.iter().any(|diagnostic| {
        diagnostic
            .primary_span()
            .and_then(|span| span.range())
            .is_some()
            && diagnostic.id().is_lint()
            && diagnostic.id() != DiagnosticId::Lint(UNUSED_IGNORE_COMMENT.name())
    });

    // Early return if there are no diagnostics that can be suppressed to avoid all the heavy work below.
    if !has_fixable {
        return Ok(SuppressAllResult {
            diagnostics,
            count: 0,
        });
    }

    let mut by_file: BTreeMap<File, Vec<_>> = BTreeMap::new();

    // Group the diagnostics by file, leave the file-agnostic diagnostics in `diagnostics`.
    for diagnostic in diagnostics.extract_if(.., |diagnostic| diagnostic.primary_span().is_some()) {
        let span = diagnostic
            .primary_span()
            .expect("should be set because `extract_if` only yields elements with a primary_span");
        by_file
            .entry(span.expect_ty_file())
            .or_default()
            .push(diagnostic);
    }

    let mut fixed_count = 0usize;
    let project = db.project();

    // Try to suppress all lint-diagnostics in the given file.
    for (&file, file_diagnostics) in &mut by_file {
        if cancellation_token.is_cancelled() {
            return Err(Canceled);
        }

        let Some(path) = file.path(db).as_system_path() else {
            tracing::debug!(
                "Skipping file `{}` with non-system path because vendored and system virtual file paths are read-only",
                file.path(db)
            );
            continue;
        };

        let parsed = parsed_module(db, file);

        if parsed.load(db).has_syntax_errors() {
            tracing::warn!("Skipping file `{path}` with syntax errors",);
            continue;
        }

        let fixable_diagnostics: Vec<_> = file_diagnostics
            .iter()
            .filter_map(|diagnostic| {
                let lint_id = diagnostic.id().as_lint()?;

                // Don't suppress unused ignore comments.
                if lint_id == UNUSED_IGNORE_COMMENT.name() {
                    return None;
                }

                // We can't suppress diagnostics without a corresponding file or range.
                let span = diagnostic.primary_span()?;
                let range = span.range()?;

                Some((lint_id, range))
            })
            .collect();

        if fixable_diagnostics.is_empty() {
            tracing::debug!(
                "Skipping file `{path}` because it contains no suppressable diagnostics"
            );
            continue;
        }

        tracing::debug!(
            "Suppressing {} diagnostics in `{path}`.",
            fixable_diagnostics.len()
        );

        // Required to work around borrow checker issues.
        let path = path.to_path_buf();

        let fixes = suppress_all(db, file, &fixable_diagnostics);
        let source = source_text(db, file);

        // TODO: Handle overlapping fixes when adding support for `--fix` by iterating until all fixes
        // were successfully applied. We don't need to do that for suppressions because suppression fixes
        // should never overlap (and, if they were, the worst outcome is that some suppressions are missing).
        let FixedCode {
            source: new_source,
            source_map,
        } = apply_fixes(&source, fixes).unwrap_or_else(|fixed| fixed);

        let new_source = source.with_text(new_source, &source_map);

        // Verify that the fix didn't introduce any syntax errors by overriding
        // the source text for `file`.
        let mut source_guard = WithUpdatedSourceGuard::new(db, file, &source, new_source.clone());
        let db = source_guard.db();

        let new_parsed = parsed_module(db, file);
        let new_parsed = new_parsed.load(db);

        if new_parsed.has_syntax_errors() {
            // The guard's drop reverts the override; report an internal error
            // carrying the syntax errors the fixes would have introduced.
            let mut diag = Diagnostic::new(
                DiagnosticId::InternalError,
                Severity::Fatal,
                format_args!(
                    "Adding suppressions introduced a syntax error. Reverting all changes."
                ),
            );
            let mut file_annotation = Annotation::primary(Span::from(file));
            file_annotation.hide_snippet(true);
            diag.annotate(file_annotation);

            let parse_diagnostics: Vec<_> = new_parsed
                .errors()
                .iter()
                .map(|error| {
                    Diagnostic::invalid_syntax(Span::from(file), &error.error, error.location)
                })
                .collect();

            diag.add_bug_sub_diagnostics("%5BFix%20error%5D");

            let file_db: &dyn ruff_db::Db = db;
            diag.info(format_args!(
                "Introduced syntax errors:\n\n{}",
                DisplayDiagnostics::new(
                    &file_db,
                    &DisplayDiagnosticConfig::default(),
                    &parse_diagnostics
                )
            ));

            file_diagnostics.push(diag);
            continue;
        }

        // Write the changes back to disk.
        if let Err(err) = write_changes(db, &*system, file, &path, &new_source) {
            let mut diag = Diagnostic::new(
                DiagnosticId::Io,
                Severity::Error,
                format_args!("Failed to write fixes to file: {err}"),
            );
            diag.annotate(Annotation::primary(Span::from(file)));
            diagnostics.push(diag);
            continue;
        }

        // If we got here then we've been successful. Re-check to get the diagnostics with the
        // update source, update the fix count.
        if fixable_diagnostics.len() == file_diagnostics.len() {
            file_diagnostics.clear();
        } else {
            // If there are any other file level diagnostics, call `check_file` to re-compute them
            // with updated ranges.
            let diagnostics = project.check_file(db, file);
            *file_diagnostics = diagnostics;
        }

        fixed_count += fixable_diagnostics.len();

        // Don't restore the source text or we risk a panic when rendering the diagnostics
        // if reading any of the fixed files fails (for whatever reason).
        // The override will get removed on the next `File::sync_path` call.
        source_guard.defuse();
    }

    // Stitch the remaining diagnostics back together.
    diagnostics.extend(by_file.into_values().flatten());

    diagnostics.sort_by(|left, right| {
        left.rendering_sort_key(db)
            .cmp(&right.rendering_sort_key(db))
    });

    Ok(SuppressAllResult {
        diagnostics,
        count: fixed_count,
    })
}
/// Writes `new_source` to `path`, refusing to clobber a file that changed on
/// disk since `file` was last read (its recorded revision no longer matches).
fn write_changes(
    db: &dyn Db,
    system: &dyn WritableSystem,
    file: File,
    path: &SystemPath,
    new_source: &SourceText,
) -> Result<(), WriteChangesError> {
    // Compare the on-disk revision against the one we loaded from.
    if system.path_metadata(path)?.revision() != file.revision(db) {
        return Err(WriteChangesError::FileWasModified);
    }

    system.write_file_bytes(path, &new_source.to_bytes())?;

    Ok(())
}
/// Failure modes when writing fixed source code back to disk.
#[derive(Debug, Error)]
enum WriteChangesError {
    /// Reading the file metadata or writing the new content failed.
    #[error("failed to write changes to disk: {0}")]
    Io(#[from] std::io::Error),
    /// The file changed on disk since it was read; writing would clobber those edits.
    #[error("the file has been modified")]
    FileWasModified,
}
/// Apply a series of fixes to `File` and returns the updated source code along with the source map.
///
/// Returns an error if not all fixes were applied because some fixes are overlapping.
fn apply_fixes(source: &str, mut fixes: Vec<Fix>) -> Result<FixedCode, FixedCode> {
    let mut output = String::with_capacity(source.len());
    // End offset (in the original source) of the last applied edit, if any.
    let mut last_pos: Option<TextSize> = None;
    let mut has_overlapping_fixes = false;
    // Isolation groups for which a fix has already been applied.
    let mut isolated: FxHashSet<u32> = FxHashSet::default();
    let mut source_map = SourceMap::default();

    fixes.sort_unstable_by_key(Fix::min_start);

    for fix in fixes {
        let mut edits = fix.edits().iter().peekable();

        // If the fix contains at least one new edit, enforce isolation and positional requirements.
        if let Some(first) = edits.peek() {
            // If this fix requires isolation, and we've already applied another fix in the
            // same isolation group, skip it.
            if let IsolationLevel::Group(id) = fix.isolation() {
                if !isolated.insert(id) {
                    has_overlapping_fixes = true;
                    continue;
                }
            }

            // If this fix overlaps with a fix we've already applied, skip it.
            if last_pos.is_some_and(|last_pos| last_pos >= first.start()) {
                has_overlapping_fixes = true;
                continue;
            }
        }

        // Note: the previous implementation also collected every applied edit
        // into an `applied_edits` vector that was never read; that dead
        // per-fix allocation has been removed.
        for edit in edits {
            // Add all contents from `last_pos` to `fix.location`.
            let slice = &source[TextRange::new(last_pos.unwrap_or_default(), edit.start())];
            output.push_str(slice);

            // Add the start source marker for the patch.
            source_map.push_start_marker(edit, output.text_len());

            // Add the patch itself.
            output.push_str(edit.content().unwrap_or_default());

            // Add the end source marker for the added patch.
            source_map.push_end_marker(edit, output.text_len());

            // Track that the edit was applied.
            last_pos = Some(edit.end());
        }
    }

    // Add the remaining content.
    let slice = &source[last_pos.unwrap_or_default().to_usize()..];
    output.push_str(slice);

    let fixed = FixedCode {
        source: output,
        source_map,
    };

    if has_overlapping_fixes {
        Err(fixed)
    } else {
        Ok(fixed)
    }
}
/// The result of applying fixes to a source file (see `apply_fixes`).
struct FixedCode {
    /// Source map that allows mapping positions in the fixed code back to positions in the original
    /// source code (useful for mapping fixed lines back to their original notebook cells).
    source_map: SourceMap,
    /// The fixed source code
    source: String,
}
/// Guard that sets [`File::set_source_text_override`] and guarantees to restore the original source
/// text unless the guard is explicitly defused.
struct WithUpdatedSourceGuard<'db> {
    db: &'db mut dyn Db,
    file: File,
    // `Some` while the guard is armed; a dropped armed guard restores this text.
    old_source: Option<SourceText>,
}

impl<'db> WithUpdatedSourceGuard<'db> {
    /// Overrides `file`'s source text with `new_source` and arms the guard so
    /// that `old_source` is restored on drop.
    fn new(
        db: &'db mut dyn Db,
        file: File,
        old_source: &SourceText,
        new_source: SourceText,
    ) -> Self {
        file.set_source_text_override(db).to(Some(new_source));
        Self {
            db,
            file,
            old_source: Some(old_source.clone()),
        }
    }

    /// Disarms the guard: the new source override stays in place on drop.
    fn defuse(&mut self) {
        self.old_source = None;
    }

    /// Access the database while the override is active.
    fn db(&mut self) -> &mut dyn Db {
        self.db
    }
}

impl Drop for WithUpdatedSourceGuard<'_> {
    fn drop(&mut self) {
        if let Some(old_source) = self.old_source.take() {
            // We don't set `source_text_override` to `None` here because setting the value
            // invalidates the `source_text` query and there's the chance that reading the file's content
            // will fail this time (e.g. because the file was deleted), resulting in ty panicking
            // when trying to render any diagnostic for that file (because all offsets now point nowhere).
            // The override will be cleared by `File::sync_path`, the next time the revision changes.
            self.file
                .set_source_text_override(self.db)
                .to(Some(old_source));
        }
    }
}
#[cfg(test)]
mod tests {
use std::collections::hash_map::Entry;
use std::hash::{DefaultHasher, Hash, Hasher};
use insta::assert_snapshot;
use ruff_db::cancellation::CancellationTokenSource;
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, DisplayDiagnostics};
use ruff_db::files::{File, system_path_to_file};
use ruff_db::parsed::parsed_module;
use ruff_db::source::source_text;
use ruff_db::system::{DbWithWritableSystem, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
use rustc_hash::FxHashMap;
use ty_python_semantic::UNUSED_IGNORE_COMMENT;
use ty_python_semantic::lint::Level;
use crate::db::tests::TestDb;
use crate::metadata::options::Rules;
use crate::metadata::value::RangedValue;
use crate::{Db, ProjectMetadata, suppress_all_diagnostics};
#[test]
fn simple_suppression() {
assert_snapshot!(
suppress_all_in(r#"
a = b + 10"#
),
@r"
Added 1 suppressions
## Fixed source
```py
a = b + 10 # ty:ignore[unresolved-reference]
```
");
}
#[test]
fn multiple_suppressions_same_code() {
assert_snapshot!(
suppress_all_in(r#"
a = b + 10 + c"#
),
@r"
Added 2 suppressions
## Fixed source
```py
a = b + 10 + c # ty:ignore[unresolved-reference]
```
");
}
#[test]
fn multiple_suppressions_different_codes() {
assert_snapshot!(
suppress_all_in(r#"
import sys
a = b + 10 + sys.veeersion"#
),
@r"
Added 2 suppressions
## Fixed source
```py
import sys
a = b + 10 + sys.veeersion # ty:ignore[unresolved-attribute, unresolved-reference]
```
");
}
#[test]
fn dont_fix_unused_ignore() {
assert_snapshot!(
suppress_all_in(r#"
import sys
a = 5 + 10 # ty: ignore[unresolved-reference]"#
),
@r"
Added 0 suppressions
## Fixed source
```py
import sys
a = 5 + 10 # ty: ignore[unresolved-reference]
```
## Diagnostics after applying fixes
warning[unused-ignore-comment]: Unused `ty: ignore` directive
--> test.py:2:13
|
1 | import sys
2 | a = 5 + 10 # ty: ignore[unresolved-reference]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Remove the unused suppression comment
");
}
#[test]
fn dont_fix_files_containing_syntax_errors() {
assert_snapshot!(
suppress_all_in(r#"
import sys
a = x +
"#
),
@r"
Added 0 suppressions
## Fixed source
```py
import sys
a = x +
```
## Diagnostics after applying fixes
error[unresolved-reference]: Name `x` used when not defined
--> test.py:2:5
|
1 | import sys
2 | a = x +
| ^
|
info: rule `unresolved-reference` is enabled by default
error[invalid-syntax]: Expected an expression
--> test.py:2:8
|
1 | import sys
2 | a = x +
| ^
|
");
}
#[test]
fn arguments() {
assert_snapshot!(
suppress_all_in(r#"
def test(a, b):
pass
test(
a = 10,
c = "unknown"
)
"#
),
@r#"
Added 2 suppressions
## Fixed source
```py
def test(a, b):
pass
test(
a = 10,
c = "unknown" # ty:ignore[unknown-argument]
) # ty:ignore[missing-argument]
```
"#);
}
#[test]
fn return_type() {
assert_snapshot!(
suppress_all_in(r#"class A:
def test(self, b: int) -> str:
return "test"
class B(A):
def test(
self,
b: str
) -> A.b:
pass"#
),
@r#"
Added 2 suppressions
## Fixed source
```py
class A:
def test(self, b: int) -> str:
return "test"
class B(A):
def test(
self,
b: str
) -> A.b: # ty:ignore[invalid-method-override, unresolved-attribute]
pass
```
"#);
}
#[test]
fn existing_ty_ignore() {
assert_snapshot!(
suppress_all_in(r#"class A:
def test(self, b: int) -> str:
return "test"
class B(A):
def test( # ty:ignore[unresolved-reference]
self,
b: str
) -> A.b:
pass"#
),
@r#"
Added 2 suppressions
## Fixed source
```py
class A:
def test(self, b: int) -> str:
return "test"
class B(A):
def test( # ty:ignore[unresolved-reference, invalid-method-override]
self,
b: str
) -> A.b: # ty:ignore[unresolved-attribute]
pass
```
## Diagnostics after applying fixes
warning[unused-ignore-comment]: Unused `ty: ignore` directive: 'unresolved-reference'
--> test.py:7:28
|
6 | class B(A):
7 | def test( # ty:ignore[unresolved-reference, invalid-method-override]
| ^^^^^^^^^^^^^^^^^^^^
8 | self,
9 | b: str
|
help: Remove the unused suppression code
"#);
}
#[track_caller]
fn suppress_all_in(source: &str) -> String {
use std::fmt::Write as _;
let mut metadata = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("."));
metadata.options.rules = Some(Rules::from_iter([(
RangedValue::cli(UNUSED_IGNORE_COMMENT.name.to_string()),
RangedValue::cli(Level::Warn),
)]));
let mut db = TestDb::new(metadata);
db.init_program().unwrap();
db.write_file(
"test.py",
ruff_python_trivia::textwrap::dedent(source).trim(),
)
.unwrap();
let file = system_path_to_file(&db, "test.py").unwrap();
let parsed_before = parsed_module(&db, file);
let had_syntax_errors = parsed_before.load(&db).has_syntax_errors();
let diagnostics = db.project().check_file(&db, file);
let total_diagnostics = diagnostics.len();
let cancellation_token_source = CancellationTokenSource::new();
let fixes =
suppress_all_diagnostics(&mut db, diagnostics, &cancellation_token_source.token())
.expect("operation never gets cancelled");
assert_eq!(fixes.count, total_diagnostics - fixes.diagnostics.len());
File::sync_path(&mut db, SystemPath::new("test.py"));
let fixed = source_text(&db, file);
let parsed = parsed_module(&db, file);
let parsed = parsed.load(&db);
let diagnostics_after_applying_fixes = db.project().check_file(&db, file);
let mut output = String::new();
writeln!(
output,
"Added {} suppressions\n\n## Fixed source\n\n```py\n{}\n```\n",
fixes.count,
fixed.as_str()
)
.unwrap();
if !fixes.diagnostics.is_empty() {
writeln!(
output,
"## Diagnostics after applying fixes\n\n{diagnostics}\n",
diagnostics = DisplayDiagnostics::new(
&db,
&DisplayDiagnosticConfig::default(),
&fixes.diagnostics
)
)
.unwrap();
}
assert!(
!parsed.has_syntax_errors() || had_syntax_errors,
"Fixed introduced syntax errors\n\n{output}"
);
let new_diagnostics =
diff_diagnostics(&fixes.diagnostics, &diagnostics_after_applying_fixes);
if !new_diagnostics.is_empty() {
writeln!(
&mut output,
"## New diagnostics after re-checking file\n\n{diagnostics}\n",
diagnostics = DisplayDiagnostics::new(
&db,
&DisplayDiagnosticConfig::default(),
&new_diagnostics
)
)
.unwrap();
}
output
}
/// Returns the diagnostics in `after` whose fingerprint never occurred in `before`.
fn diff_diagnostics<'a>(before: &'a [Diagnostic], after: &'a [Diagnostic]) -> Vec<Diagnostic> {
    let seen_before = DiagnosticFingerprint::group_diagnostics(before);

    DiagnosticFingerprint::group_diagnostics(after)
        .into_iter()
        .filter_map(|(fingerprint, diagnostic)| {
            (!seen_before.contains_key(&fingerprint)).then(|| diagnostic.clone())
        })
        .collect()
}
/// A hash-based identity for a diagnostic, used to compare diagnostic sets
/// across re-checks without comparing exact source ranges.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
struct DiagnosticFingerprint(u64);

impl DiagnosticFingerprint {
    /// Maps each diagnostic to a distinct fingerprint (duplicates of the same
    /// id are disambiguated), keyed for set-difference comparisons.
    fn group_diagnostics(diagnostics: &[Diagnostic]) -> FxHashMap<Self, &Diagnostic> {
        let mut result = FxHashMap::default();

        for diagnostic in diagnostics {
            Self::from_diagnostic(diagnostic, &mut result);
        }

        result
    }

    /// Inserts `diagnostic` into `seen` under a fingerprint hashed from its id
    /// and a disambiguator, bumping the disambiguator until a vacant slot is
    /// found so that repeated ids still get unique fingerprints.
    fn from_diagnostic<'a>(
        diagnostic: &'a Diagnostic,
        seen: &mut FxHashMap<DiagnosticFingerprint, &'a Diagnostic>,
    ) -> DiagnosticFingerprint {
        let mut disambiguator = 0u64;

        loop {
            let mut h = DefaultHasher::default();
            disambiguator.hash(&mut h);
            diagnostic.id().hash(&mut h);
            let key = DiagnosticFingerprint(h.finish());

            match seen.entry(key) {
                Entry::Occupied(_) => {
                    disambiguator += 1;
                }
                Entry::Vacant(entry) => {
                    entry.insert(diagnostic);
                    return key;
                }
            }
        }
    }
}
}

View File

@@ -1,6 +1,5 @@
use ruff_db::system::SystemPath;
use crate::glob::include::MatchFile;
pub(crate) use exclude::{ExcludeFilter, ExcludeFilterBuilder};
pub(crate) use include::{IncludeFilter, IncludeFilterBuilder};
pub(crate) use portable::{
@@ -40,9 +39,7 @@ impl IncludeExcludeFilter {
if self.exclude.match_directory(path, mode) {
IncludeResult::Excluded
} else if self.include.match_directory(path) {
IncludeResult::Included {
literal_match: None,
}
IncludeResult::Included
} else {
IncludeResult::NotIncluded
}
@@ -55,16 +52,10 @@ impl IncludeExcludeFilter {
) -> IncludeResult {
if self.exclude.match_file(path, mode) {
IncludeResult::Excluded
} else if self.include.match_file(path) {
IncludeResult::Included
} else {
match self.include.match_file(path) {
MatchFile::Literal => IncludeResult::Included {
literal_match: Some(true),
},
MatchFile::Pattern => IncludeResult::Included {
literal_match: Some(false),
},
MatchFile::No => IncludeResult::NotIncluded,
}
IncludeResult::NotIncluded
}
}
}
@@ -95,7 +86,7 @@ pub(crate) enum IncludeResult {
///
/// For directories: This isn't a guarantee that any file in this directory gets included
/// but we need to traverse it to make this decision.
Included { literal_match: Option<bool> },
Included,
/// The path matches an exclude pattern.
Excluded,

View File

@@ -33,8 +33,7 @@ const DFA_SIZE_LIMIT: usize = 1_000_000;
pub(crate) struct IncludeFilter {
#[get_size(ignore)]
glob_set: GlobSet,
original_patterns: Box<[Box<str>]>,
literal_pattern_indices: Box<[usize]>,
original_patterns: Box<[String]>,
#[get_size(size_fn = dfa_memory_usage)]
dfa: Option<dfa::dense::DFA<Vec<u32>>>,
}
@@ -46,29 +45,10 @@ fn dfa_memory_usage(dfa: &Option<dfa::dense::DFA<Vec<u32>>>) -> usize {
impl IncludeFilter {
/// Whether the file matches any of the globs.
pub(crate) fn match_file(&self, path: impl AsRef<SystemPath>) -> MatchFile {
pub(crate) fn match_file(&self, path: impl AsRef<SystemPath>) -> bool {
let path = path.as_ref();
if self.literal_pattern_indices.is_empty() {
return if self.glob_set.is_match(path) {
MatchFile::Pattern
} else {
MatchFile::No
};
}
let matches = self.glob_set.matches(path);
if matches.is_empty() {
MatchFile::No
} else {
for match_index in matches {
if self.literal_pattern_indices.contains(&match_index) {
return MatchFile::Literal;
}
}
MatchFile::Pattern
}
self.glob_set.is_match(path)
}
/// Check whether a directory or any of its children can be matched by any of the globs.
@@ -140,36 +120,18 @@ impl PartialEq for IncludeFilter {
impl Eq for IncludeFilter {}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum MatchFile {
No,
/// The file path matches the glob literally exactly. This is only the case for globs
/// that don't use any wildcards.
Literal,
/// The file path matches the glob pattern.
Pattern,
}
impl MatchFile {}
#[derive(Debug)]
pub(crate) struct IncludeFilterBuilder {
set: GlobSetBuilder,
set_len: usize,
original_patterns: Vec<Box<str>>,
original_pattern: Vec<String>,
regexes: Vec<String>,
/// Indices of literal patterns (contain no meta characters).
literal_pattern_indices: Vec<usize>,
}
impl IncludeFilterBuilder {
pub(crate) fn new() -> Self {
Self {
literal_pattern_indices: Vec::new(),
set: GlobSetBuilder::new(),
set_len: 0,
original_patterns: Vec::new(),
original_pattern: Vec::new(),
regexes: Vec::new(),
}
}
@@ -199,16 +161,13 @@ impl IncludeFilterBuilder {
// No need to support Windows-style paths, so the backslash can be used a escape.
.backslash_escape(true)
.build()?;
let is_literal_pattern = globset::escape(glob_pattern) == glob_pattern;
self.original_patterns.push(input.relative().into());
self.original_pattern.push(input.relative().to_string());
// `lib` is the same as `lib/**`
// Add a glob that matches `lib` exactly, change the glob to `lib/**`.
if glob_pattern.ends_with("**") {
self.push_prefix_regex(&glob);
self.add_glob(glob);
self.set.add(glob);
} else {
let prefix_glob = GlobBuilder::new(&format!("{glob_pattern}/**"))
.literal_separator(true)
@@ -217,28 +176,19 @@ impl IncludeFilterBuilder {
.build()?;
self.push_prefix_regex(&prefix_glob);
self.add_glob(prefix_glob);
self.set.add(prefix_glob);
// The reason we add the exact glob, e.g. `src` when the original pattern was `src/` is
// so that `match_file` returns true when matching against a file. However, we don't
// need to do this if this is a pattern that should only match a directory (specifically, its contents).
if !only_directory {
if is_literal_pattern {
self.literal_pattern_indices.push(self.set_len);
}
self.add_glob(glob);
self.set.add(glob);
}
}
Ok(self)
}
fn add_glob(&mut self, glob: Glob) {
self.set.add(glob);
self.set_len += 1;
}
fn push_prefix_regex(&mut self, glob: &Glob) {
let main_separator = regex::escape(MAIN_SEPARATOR_STR);
@@ -289,8 +239,7 @@ impl IncludeFilterBuilder {
Ok(IncludeFilter {
glob_set,
dfa,
literal_pattern_indices: self.literal_pattern_indices.into(),
original_patterns: self.original_patterns.into(),
original_patterns: self.original_pattern.into(),
})
}
}
@@ -299,7 +248,7 @@ impl IncludeFilterBuilder {
mod tests {
use std::path::{MAIN_SEPARATOR, MAIN_SEPARATOR_STR};
use crate::glob::include::{IncludeFilter, IncludeFilterBuilder, MatchFile};
use crate::glob::include::{IncludeFilter, IncludeFilterBuilder};
use crate::glob::{PortableGlobKind, PortableGlobPattern};
use ruff_db::system::{MemoryFileSystem, walk_directory::WalkState};
@@ -377,33 +326,33 @@ mod tests {
"files/*.py",
]);
assert_eq!(filter.match_file("lib"), MatchFile::Literal);
assert_eq!(filter.match_file("lib/more/test"), MatchFile::Pattern);
assert!(filter.match_file("lib"));
assert!(filter.match_file("lib/more/test"));
// Unlike `directory`, `directory/` only includes a directory with the given name and its contents
assert_eq!(filter.match_file("directory"), MatchFile::No);
assert_eq!(filter.match_file("directory/more/test"), MatchFile::Pattern);
assert!(!filter.match_file("directory"));
assert!(filter.match_file("directory/more/test"));
// Unlike `src`, `src/*` only includes a directory with the given name.
assert_eq!(filter.match_file("src"), MatchFile::No);
assert_eq!(filter.match_file("src/more/test"), MatchFile::Pattern);
assert!(!filter.match_file("src"));
assert!(filter.match_file("src/more/test"));
// Unlike `tests`, `tests/**` only includes files under `tests`, but not a file named tests
assert_eq!(filter.match_file("tests"), MatchFile::No);
assert_eq!(filter.match_file("tests/more/test"), MatchFile::Pattern);
assert!(!filter.match_file("tests"));
assert!(filter.match_file("tests/more/test"));
// Unlike `match_directory`, prefixes should not be included.
assert_eq!(filter.match_file("a"), MatchFile::No);
assert_eq!(filter.match_file("a/test-b"), MatchFile::No);
assert!(!filter.match_file("a"));
assert!(!filter.match_file("a/test-b"));
assert_eq!(filter.match_file("a/test-b/x"), MatchFile::No);
assert_eq!(filter.match_file("a/test"), MatchFile::No);
assert!(!filter.match_file("a/test-b/x"));
assert!(!filter.match_file("a/test"));
assert_eq!(filter.match_file("files/a.py"), MatchFile::Pattern);
assert_eq!(filter.match_file("files/a.py/bcd"), MatchFile::Pattern);
assert!(filter.match_file("files/a.py"));
assert!(filter.match_file("files/a.py/bcd"));
assert_eq!(filter.match_file("not_included"), MatchFile::No);
assert_eq!(filter.match_file("files/a.pi"), MatchFile::No);
assert!(!filter.match_file("not_included"));
assert!(!filter.match_file("files/a.pi"));
}
/// Check that we skip directories that can never match.

View File

@@ -9,7 +9,6 @@ use crate::walk::{ProjectFilesFilter, ProjectFilesWalker};
pub use db::tests::TestDb;
pub use db::{ChangeResult, CheckMode, Db, ProjectDatabase, SalsaMemoryDump};
use files::{Index, Indexed, IndexedFiles};
pub use fixes::suppress_all_diagnostics;
use metadata::settings::Settings;
pub use metadata::{ProjectMetadata, ProjectMetadataError};
use ruff_db::diagnostic::{
@@ -34,7 +33,6 @@ use ty_python_semantic::types::check_types;
mod db;
mod files;
mod fixes;
mod glob;
pub mod metadata;
mod walk;
@@ -216,19 +214,15 @@ impl Project {
/// This means, that this method is an over-approximation of `Self::files` and may return `true` for paths
/// that won't be included when checking the project because they're ignored in a `.gitignore` file.
pub fn is_file_included(self, db: &dyn Db, path: &SystemPath) -> bool {
matches!(
ProjectFilesFilter::from_project(db, self)
.is_file_included(path, GlobFilterCheckMode::Adhoc),
IncludeResult::Included { .. }
)
ProjectFilesFilter::from_project(db, self)
.is_file_included(path, GlobFilterCheckMode::Adhoc)
== IncludeResult::Included
}
pub fn is_directory_included(self, db: &dyn Db, path: &SystemPath) -> bool {
matches!(
ProjectFilesFilter::from_project(db, self)
.is_directory_included(path, GlobFilterCheckMode::Adhoc),
IncludeResult::Included { .. }
)
ProjectFilesFilter::from_project(db, self)
.is_directory_included(path, GlobFilterCheckMode::Adhoc)
== IncludeResult::Included
}
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
@@ -700,7 +694,38 @@ where
Err(error) => {
let message = error.to_diagnostic_message(Some(file.path(db)));
let mut diagnostic = Diagnostic::new(DiagnosticId::Panic, Severity::Fatal, message);
diagnostic.add_bug_sub_diagnostics("%5Bpanic%5D");
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
"This indicates a bug in ty.",
));
let report_message = "If you could open an issue at https://github.com/astral-sh/ty/issues/new?title=%5Bpanic%5D, we'd be very appreciative!";
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
report_message,
));
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Platform: {os} {arch}",
os = std::env::consts::OS,
arch = std::env::consts::ARCH
),
));
if let Some(version) = ruff_db::program_version() {
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!("Version: {version}"),
));
}
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Args: {args:?}",
args = std::env::args().collect::<Vec<_>>()
),
));
if let Some(backtrace) = error.backtrace {
match backtrace.status() {

View File

@@ -850,6 +850,7 @@ impl SrcOptions {
)]
#[serde(rename_all = "kebab-case", transparent)]
pub struct Rules {
#[get_size(ignore)] // TODO: Add `GetSize` support for `OrderMap`.
inner: OrderMap<RangedValue<String>, RangedValue<Level>, BuildHasherDefault<FxHasher>>,
}

View File

@@ -97,7 +97,7 @@ impl Override {
matches!(
self.files
.is_file_included(path, GlobFilterCheckMode::Adhoc),
IncludeResult::Included { .. }
IncludeResult::Included
)
}
}

View File

@@ -79,9 +79,7 @@ impl<'a> ProjectFilesFilter<'a> {
match self.match_included_paths(path, mode) {
None => IncludeResult::NotIncluded,
Some(CheckPathMatch::Partial) => self.src_filter.is_file_included(path, mode),
Some(CheckPathMatch::Full) => IncludeResult::Included {
literal_match: Some(true),
},
Some(CheckPathMatch::Full) => IncludeResult::Included,
}
}
@@ -95,9 +93,7 @@ impl<'a> ProjectFilesFilter<'a> {
Some(CheckPathMatch::Partial) => {
self.src_filter.is_directory_maybe_included(path, mode)
}
Some(CheckPathMatch::Full) => IncludeResult::Included {
literal_match: Some(true),
},
Some(CheckPathMatch::Full) => IncludeResult::Included,
}
}
}
@@ -193,59 +189,60 @@ impl<'a> ProjectFilesWalker<'a> {
let directory_included = filter
.is_directory_included(entry.path(), GlobFilterCheckMode::TopDown);
return match directory_included {
IncludeResult::Included { .. } => WalkState::Continue,
IncludeResult::Included => WalkState::Continue,
IncludeResult::Excluded => {
tracing::debug!(
"Skipping directory '{path}' because it is excluded by a default or `src.exclude` pattern",
path=entry.path()
);
WalkState::Skip
}
},
IncludeResult::NotIncluded => {
tracing::debug!(
"Skipping directory `{path}` because it doesn't match any `src.include` pattern or path specified on the CLI",
path=entry.path()
);
WalkState::Skip
}
},
};
}
} else {
// Ignore any non python files to avoid creating too many entries in `Files`.
// Unless the file is explicitly passed, we then always assume it's a python file.
let source_type = entry.path().extension().and_then(PySourceType::try_from_extension).or_else(|| {
if entry.depth() == 0 {
Some(PySourceType::Python)
} else {
db.system().source_type(entry.path())
}
});
if source_type.is_none()
{
return WalkState::Continue;
}
// For all files, except the ones that were explicitly passed to the walker (CLI),
// check if they're included in the project.
if entry.depth() > 0 || self.force_exclude {
match filter
.is_file_included(entry.path(), GlobFilterCheckMode::TopDown)
{
IncludeResult::Included { literal_match } => {
// Ignore any non python files to avoid creating too many entries in `Files`.
// Unless the file is explicitly passed on the CLI or a literal match in the `include`, we then always assume it's a file ty can analyze
let source_type = if literal_match == Some(true) || entry.depth() == 0 {
Some(PySourceType::Python)
} else {
entry.path().extension().and_then(PySourceType::try_from_extension).or_else(|| db.system().source_type(entry.path()))
};
if source_type.is_none()
{
return WalkState::Continue;
}
}
IncludeResult::Included => {},
IncludeResult::Excluded => {
tracing::debug!(
"Ignoring file `{path}` because it is excluded by a default or `src.exclude` pattern.",
path=entry.path()
);
return WalkState::Continue;
}
},
IncludeResult::NotIncluded => {
tracing::debug!(
"Ignoring file `{path}` because it doesn't match any `src.include` pattern or path specified on the CLI.",
path=entry.path()
);
return WalkState::Continue;
}
},
}
}

View File

@@ -168,56 +168,6 @@ on top of that:
Foo = NewType("Foo", 42)
```
## `NewType`s in arithmetic and comparison expressions might or might not act as their base
These expressions are valid because `Foo` acts as its base type, `int`:
```py
from typing import NewType
Foo = NewType("Foo", int)
reveal_type(Foo(42) + 1) # revealed: int
reveal_type(1 + Foo(42)) # revealed: int
reveal_type(Foo(42) + Foo(42)) # revealed: int
reveal_type(Foo(42) == 42) # revealed: bool
reveal_type(42 == Foo(42)) # revealed: bool
reveal_type(Foo(42) == Foo(42)) # revealed: bool
```
However, we can't always substitute `int` for `Foo` to evaluate expressions like these. In the
following cases, only `Foo` itself is valid:
```py
class Bar:
def __add__(self, other: Foo) -> Foo:
return other
def __radd__(self, other: Foo) -> Foo:
return other
def __lt__(self, other: Foo) -> bool:
return True
def __gt__(self, other: Foo) -> bool:
return True
def __contains__(self, key: Foo) -> bool:
return True
reveal_type(Foo(42) + Bar()) # revealed: Foo
reveal_type(Bar() + Foo(42)) # revealed: Foo
reveal_type(Foo(42) < Bar()) # revealed: bool
reveal_type(Bar() < Foo(42)) # revealed: bool
reveal_type(Foo(42) in Bar()) # revealed: bool
42 + Bar() # error: [unsupported-operator]
Bar() + 42 # error: [unsupported-operator]
42 < Bar() # error: [unsupported-operator]
Bar() < 42 # error: [unsupported-operator]
42 in Bar() # error: [unsupported-operator]
```
## `float` and `complex` special cases
`float` and `complex` are subject to a special case in the typing spec, which we currently interpret
@@ -228,7 +178,6 @@ and we accept the unions they expand into.
```py
from typing import NewType
from ty_extensions import static_assert, is_assignable_to
Foo = NewType("Foo", float)
Foo(3.14)
@@ -237,15 +186,6 @@ Foo("hello") # error: [invalid-argument-type] "Argument is incorrect: Expected
reveal_type(Foo(3.14).__class__) # revealed: type[int] | type[float]
reveal_type(Foo(42).__class__) # revealed: type[int] | type[float]
static_assert(is_assignable_to(Foo, float))
static_assert(is_assignable_to(Foo, int | float))
static_assert(is_assignable_to(Foo, int | float | None))
# The assignments above require treating `Foo` as its underlying union type. Each of its members is
# assignable to the union on the right, so `Foo` is assignable to the union, even though `Foo` as a
# whole isn't assignable to any one member. However, as in the previous section, we need to be sure
# that this treatment doesn't break cases like the assignment below, where `Foo` *is* assignable to
# the union on the right, even though its members *aren't*.
static_assert(is_assignable_to(Foo, Foo | None))
Bar = NewType("Bar", complex)
Bar(1 + 2j)
@@ -256,11 +196,6 @@ Bar("goodbye") # error: [invalid-argument-type]
reveal_type(Bar(1 + 2j).__class__) # revealed: type[int] | type[float] | type[complex]
reveal_type(Bar(3.14).__class__) # revealed: type[int] | type[float] | type[complex]
reveal_type(Bar(42).__class__) # revealed: type[int] | type[float] | type[complex]
static_assert(is_assignable_to(Bar, complex))
static_assert(is_assignable_to(Bar, int | float | complex))
static_assert(is_assignable_to(Bar, int | float | complex | None))
# See the `Foo | None` case above.
static_assert(is_assignable_to(Bar, Bar | None))
```
We don't currently try to distinguish between an implicit union (e.g. `float`) and the equivalent
@@ -288,52 +223,6 @@ def g(_: Callable[[int | float | complex], Bar]): ...
g(Bar)
```
The arithmetic and comparison test cases in the previous section used a `NewType` of `int`, but
`NewType`s of `float` and `complex` are more complicated, because their base type is a union, and
that union needs special handling in binary expressions. In these examples, we need to lower
`Foo` to `int | float` and then check each member of that union _individually_, as we would with an
explicit `Union` on the left side:
```py
reveal_type(Foo(3.14) < Foo(42)) # revealed: bool
reveal_type(Foo(3.14) == Foo(42)) # revealed: bool
reveal_type(Foo(3.14) + Foo(42)) # revealed: int | float
reveal_type(Foo(3.14) / Foo(42)) # revealed: int | float
```
But again as above, we can't _always_ lower `Foo` to `int | float`, because there are also binary
expressions where only `Foo` itself is valid:
```py
class Bing:
def __add__(self, other: Foo) -> Foo:
return other
def __radd__(self, other: Foo) -> Foo:
return other
def __lt__(self, other: Foo) -> bool:
return True
def __gt__(self, other: Foo) -> bool:
return True
def __contains__(self, key: Foo) -> bool:
return True
reveal_type(Foo(3.14) + Bing()) # revealed: Foo
reveal_type(Bing() + Foo(42)) # revealed: Foo
reveal_type(Foo(3.14) < Bing()) # revealed: bool
reveal_type(Bing() < Foo(42)) # revealed: bool
reveal_type(Foo(3.14) in Bing()) # revealed: bool
3.14 + Bing() # error: [unsupported-operator]
Bing() + 3.14 # error: [unsupported-operator]
3.14 < Bing() # error: [unsupported-operator]
Bing() < 3.14 # error: [unsupported-operator]
3.14 in Bing() # error: [unsupported-operator]
```
## A `NewType` definition must be a simple variable assignment
```py

View File

@@ -338,111 +338,7 @@ reveal_type(a is not c) # revealed: Literal[True]
For tuples like `tuple[int, ...]`, `tuple[Any, ...]`
### Unsupported Comparisons
<!-- snapshot-diagnostics -->
Comparisons between homogeneous tuples with incompatible element types should emit diagnostics for
ordering operators (`<`, `<=`, `>`, `>=`), but not for equality operators (`==`, `!=`).
```py
def f(
a: tuple[int, ...],
b: tuple[str, ...],
c: tuple[str],
):
# Equality comparisons are always valid
reveal_type(a == b) # revealed: bool
reveal_type(a != b) # revealed: bool
# Ordering comparisons between incompatible types should emit errors
# error: [unsupported-operator] "Operator `<` is not supported between objects of type `tuple[int, ...]` and `tuple[str, ...]`"
a < b
# error: [unsupported-operator] "Operator `<` is not supported between objects of type `tuple[str, ...]` and `tuple[int, ...]`"
b < a
# error: [unsupported-operator] "Operator `<` is not supported between objects of type `tuple[int, ...]` and `tuple[str]`"
a < c
# error: [unsupported-operator] "Operator `<` is not supported between objects of type `tuple[str]` and `tuple[int, ...]`"
c < a
```
When comparing fixed-length tuples with variable-length tuples, all element types that could
potentially be compared must be compatible.
```py
def _(
var_int: tuple[int, ...],
var_str: tuple[str, ...],
fixed_int_str: tuple[int, str],
):
# Fixed `tuple[int, str]` vs. variable `tuple[int, ...]`:
# Position 0: `int` vs. `int` are comparable.
# Position 1 (if `var_int` has 2+ elements): `str` vs. `int` are not comparable.
# error: [unsupported-operator]
fixed_int_str < var_int
# Variable `tuple[int, ...]` vs. fixed `tuple[int, str]`:
# Position 0: `int` vs. `int` are comparable.
# Position 1 (if `var_int` has 2+ elements): `int` vs. `str` are not comparable.
# error: [unsupported-operator]
var_int < fixed_int_str
# Variable `tuple[str, ...]` vs. fixed `tuple[int, str]`:
# Position 0: `str` vs. `int` are not comparable.
# error: [unsupported-operator]
var_str < fixed_int_str
```
### Supported Comparisons
Comparisons between homogeneous tuples with compatible element types should work.
```py
def _(a: tuple[int, ...], b: tuple[int, ...], c: tuple[bool, ...]):
# Same element types - always valid
reveal_type(a == b) # revealed: bool
reveal_type(a != b) # revealed: bool
reveal_type(a < b) # revealed: bool
reveal_type(a <= b) # revealed: bool
reveal_type(a > b) # revealed: bool
reveal_type(a >= b) # revealed: bool
# int and bool are compatible for comparison
reveal_type(a < c) # revealed: bool
reveal_type(c < a) # revealed: bool
```
### Tuples with Prefixes and Suffixes
<!-- snapshot-diagnostics -->
Variable-length tuples with prefixes and suffixes are also checked.
```toml
[environment]
python-version = "3.11"
```
```py
def _(
prefix_int_var_str: tuple[int, *tuple[str, ...]],
prefix_str_var_int: tuple[str, *tuple[int, ...]],
):
# Prefix `int` vs. prefix `str` are not comparable.
# error: [unsupported-operator]
prefix_int_var_str < prefix_str_var_int
```
Tuples with compatible prefixes/suffixes are allowed.
```py
def _(
prefix_int_var_int: tuple[int, *tuple[int, ...]],
prefix_int_var_bool: tuple[int, *tuple[bool, ...]],
):
# Prefix `int` vs. prefix `int`, variable `int` vs. variable `bool` are all comparable.
reveal_type(prefix_int_var_int < prefix_int_var_bool) # revealed: bool
```
// TODO
## Chained comparisons with elements that incorrectly implement `__bool__`

View File

@@ -583,7 +583,7 @@ from module import NotFrozenBase
@final
@dataclass(frozen=True)
@total_ordering # error: [invalid-total-ordering]
@total_ordering
class FrozenChild(NotFrozenBase): # error: [invalid-frozen-dataclass-subclass]
y: str
```

Some files were not shown because too many files have changed in this diff Show More