Compare commits

..

4 Commits

Author SHA1 Message Date
Aria Desires
f47b9f22f5 Consider from thispackage import y to re-export y in __init__.pyi
Fixes https://github.com/astral-sh/ty/issues/1487
2025-11-11 12:46:31 -05:00
Aria Desires
018febf444 cleanup implementation 2025-11-11 11:23:35 -05:00
Aria Desires
b159d43670 fixup 2025-11-10 20:13:19 -05:00
Aria Desires
260edcff57 support absolute imports 2025-11-10 20:06:13 -05:00
23 changed files with 119 additions and 1126 deletions

View File

@@ -415,13 +415,8 @@ pub struct CheckCommand {
)]
pub statistics: bool,
/// Enable automatic additions of `noqa` directives to failing lines.
/// Optionally provide a reason to append after the codes.
#[arg(
long,
value_name = "REASON",
default_missing_value = "",
num_args = 0..=1,
require_equals = true,
// conflicts_with = "add_noqa",
conflicts_with = "show_files",
conflicts_with = "show_settings",
@@ -433,7 +428,7 @@ pub struct CheckCommand {
conflicts_with = "fix",
conflicts_with = "diff",
)]
pub add_noqa: Option<String>,
pub add_noqa: bool,
/// See the files Ruff will be run against with the current settings.
#[arg(
long,
@@ -1062,7 +1057,7 @@ Possible choices:
/// etc.).
#[expect(clippy::struct_excessive_bools)]
pub struct CheckArguments {
pub add_noqa: Option<String>,
pub add_noqa: bool,
pub diff: bool,
pub exit_non_zero_on_fix: bool,
pub exit_zero: bool,

View File

@@ -21,7 +21,6 @@ pub(crate) fn add_noqa(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
config_arguments: &ConfigArguments,
reason: Option<&str>,
) -> Result<usize> {
// Collect all the files to check.
let start = Instant::now();
@@ -77,14 +76,7 @@ pub(crate) fn add_noqa(
return None;
}
};
match add_noqa_to_path(
path,
package,
&source_kind,
source_type,
&settings.linter,
reason,
) {
match add_noqa_to_path(path, package, &source_kind, source_type, &settings.linter) {
Ok(count) => Some(count),
Err(e) => {
error!("Failed to add noqa to {}: {e}", path.display());

View File

@@ -319,20 +319,12 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
warn_user!("Detected debug build without --no-cache.");
}
if let Some(reason) = &cli.add_noqa {
if cli.add_noqa {
if !fix_mode.is_generate() {
warn_user!("--fix is incompatible with --add-noqa.");
}
if reason.contains(['\n', '\r']) {
return Err(anyhow::anyhow!(
"--add-noqa <reason> cannot contain newline characters"
));
}
let reason_opt = (!reason.is_empty()).then_some(reason.as_str());
let modifications =
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments, reason_opt)?;
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
let s = if modifications == 1 { "" } else { "s" };
#[expect(clippy::print_stderr)]

View File

@@ -1760,64 +1760,6 @@ from foo import ( # noqa: F401
Ok(())
}
#[test]
fn add_noqa_with_reason() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file(
"test.py",
r#"import os
def foo():
x = 1
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command()
.arg("--add-noqa=TODO: fix")
.arg("--select=F401,F841")
.arg("test.py"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Added 2 noqa directives.
");
let content = fs::read_to_string(fixture.root().join("test.py"))?;
insta::assert_snapshot!(content, @r"
import os # noqa: F401 TODO: fix
def foo():
x = 1 # noqa: F841 TODO: fix
");
Ok(())
}
#[test]
fn add_noqa_with_newline_in_reason() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file("test.py", "import os\n")?;
assert_cmd_snapshot!(fixture
.check_command()
.arg("--add-noqa=line1\nline2")
.arg("--select=F401")
.arg("test.py"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: --add-noqa <reason> cannot contain newline characters
"###);
Ok(())
}
/// Infer `3.11` from `requires-python` in `pyproject.toml`.
#[test]
fn requires_python() -> Result<()> {

View File

@@ -112,16 +112,16 @@ impl std::fmt::Display for Diff<'_> {
// `None`, indicating a regular script file, all the lines will be in one "cell" under the
// `None` key.
let cells = if let Some(notebook_index) = &self.notebook_index {
let mut last_cell_index = OneIndexed::MIN;
let mut last_cell = OneIndexed::MIN;
let mut cells: Vec<(Option<OneIndexed>, TextSize)> = Vec::new();
for cell in notebook_index.iter() {
if cell.cell_index() != last_cell_index {
let offset = source_code.line_start(cell.start_row());
cells.push((Some(last_cell_index), offset));
last_cell_index = cell.cell_index();
for (row, cell) in notebook_index.iter() {
if cell != last_cell {
let offset = source_code.line_start(row);
cells.push((Some(last_cell), offset));
last_cell = cell;
}
}
cells.push((Some(last_cell_index), source_text.text_len()));
cells.push((Some(last_cell), source_text.text_len()));
cells
} else {
vec![(None, source_text.text_len())]

View File

@@ -377,7 +377,6 @@ pub fn add_noqa_to_path(
source_kind: &SourceKind,
source_type: PySourceType,
settings: &LinterSettings,
reason: Option<&str>,
) -> Result<usize> {
// Parse once.
let target_version = settings.resolve_target_version(path);
@@ -426,7 +425,6 @@ pub fn add_noqa_to_path(
&settings.external,
&directives.noqa_line_for,
stylist.line_ending(),
reason,
)
}

View File

@@ -39,7 +39,7 @@ pub fn generate_noqa_edits(
let exemption = FileExemption::from(&file_directives);
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
build_noqa_edits_by_diagnostic(comments, locator, line_ending, None)
build_noqa_edits_by_diagnostic(comments, locator, line_ending)
}
/// A directive to ignore a set of rules either for a given line of Python source code or an entire file (e.g.,
@@ -715,7 +715,6 @@ impl Display for LexicalError {
impl Error for LexicalError {}
/// Adds noqa comments to suppress all messages of a file.
#[expect(clippy::too_many_arguments)]
pub(crate) fn add_noqa(
path: &Path,
diagnostics: &[Diagnostic],
@@ -724,7 +723,6 @@ pub(crate) fn add_noqa(
external: &[String],
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
reason: Option<&str>,
) -> Result<usize> {
let (count, output) = add_noqa_inner(
path,
@@ -734,14 +732,12 @@ pub(crate) fn add_noqa(
external,
noqa_line_for,
line_ending,
reason,
);
fs::write(path, output)?;
Ok(count)
}
#[expect(clippy::too_many_arguments)]
fn add_noqa_inner(
path: &Path,
diagnostics: &[Diagnostic],
@@ -750,7 +746,6 @@ fn add_noqa_inner(
external: &[String],
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
reason: Option<&str>,
) -> (usize, String) {
let mut count = 0;
@@ -762,7 +757,7 @@ fn add_noqa_inner(
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason);
let edits = build_noqa_edits_by_line(comments, locator, line_ending);
let contents = locator.contents();
@@ -788,7 +783,6 @@ fn build_noqa_edits_by_diagnostic(
comments: Vec<Option<NoqaComment>>,
locator: &Locator,
line_ending: LineEnding,
reason: Option<&str>,
) -> Vec<Option<Edit>> {
let mut edits = Vec::default();
for comment in comments {
@@ -800,7 +794,6 @@ fn build_noqa_edits_by_diagnostic(
FxHashSet::from_iter([comment.code]),
locator,
line_ending,
reason,
) {
edits.push(Some(noqa_edit.into_edit()));
}
@@ -815,7 +808,6 @@ fn build_noqa_edits_by_line<'a>(
comments: Vec<Option<NoqaComment<'a>>>,
locator: &Locator,
line_ending: LineEnding,
reason: Option<&'a str>,
) -> BTreeMap<TextSize, NoqaEdit<'a>> {
let mut comments_by_line = BTreeMap::default();
for comment in comments.into_iter().flatten() {
@@ -839,7 +831,6 @@ fn build_noqa_edits_by_line<'a>(
.collect(),
locator,
line_ending,
reason,
) {
edits.insert(offset, edit);
}
@@ -936,7 +927,6 @@ struct NoqaEdit<'a> {
noqa_codes: FxHashSet<&'a SecondaryCode>,
codes: Option<&'a Codes<'a>>,
line_ending: LineEnding,
reason: Option<&'a str>,
}
impl NoqaEdit<'_> {
@@ -964,9 +954,6 @@ impl NoqaEdit<'_> {
push_codes(writer, self.noqa_codes.iter().sorted_unstable());
}
}
if let Some(reason) = self.reason {
write!(writer, " {reason}").unwrap();
}
write!(writer, "{}", self.line_ending.as_str()).unwrap();
}
}
@@ -983,7 +970,6 @@ fn generate_noqa_edit<'a>(
noqa_codes: FxHashSet<&'a SecondaryCode>,
locator: &Locator,
line_ending: LineEnding,
reason: Option<&'a str>,
) -> Option<NoqaEdit<'a>> {
let line_range = locator.full_line_range(offset);
@@ -1013,7 +999,6 @@ fn generate_noqa_edit<'a>(
noqa_codes,
codes,
line_ending,
reason,
})
}
@@ -2847,7 +2832,6 @@ mod tests {
&[],
&noqa_line_for,
LineEnding::Lf,
None,
);
assert_eq!(count, 0);
assert_eq!(output, format!("{contents}"));
@@ -2871,7 +2855,6 @@ mod tests {
&[],
&noqa_line_for,
LineEnding::Lf,
None,
);
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: F841\n");
@@ -2902,7 +2885,6 @@ mod tests {
&[],
&noqa_line_for,
LineEnding::Lf,
None,
);
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: E741, F841\n");
@@ -2933,7 +2915,6 @@ mod tests {
&[],
&noqa_line_for,
LineEnding::Lf,
None,
);
assert_eq!(count, 0);
assert_eq!(output, "x = 1 # noqa");

View File

@@ -8,40 +8,37 @@ use ruff_source_file::{LineColumn, OneIndexed, SourceLocation};
/// [`ruff_text_size::TextSize`] to jupyter notebook cell/row/column.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct NotebookIndex {
/// Stores the starting row and the absolute cell index for every Python (valid) cell.
///
/// The index in this vector corresponds to the Python cell index (valid cell index).
pub(super) cell_starts: Vec<CellStart>,
/// Enter a row (1-based), get back the cell (1-based)
pub(super) row_to_cell: Vec<OneIndexed>,
/// Enter a row (1-based), get back the row in cell (1-based)
pub(super) row_to_row_in_cell: Vec<OneIndexed>,
}
impl NotebookIndex {
fn find_cell(&self, row: OneIndexed) -> Option<CellStart> {
match self
.cell_starts
.binary_search_by_key(&row, |start| start.start_row)
{
Ok(cell_index) => Some(self.cell_starts[cell_index]),
Err(insertion_point) => Some(self.cell_starts[insertion_point.checked_sub(1)?]),
pub fn new(row_to_cell: Vec<OneIndexed>, row_to_row_in_cell: Vec<OneIndexed>) -> Self {
Self {
row_to_cell,
row_to_row_in_cell,
}
}
/// Returns the (raw) cell number (1-based) for the given row (1-based).
/// Returns the cell number (1-based) for the given row (1-based).
pub fn cell(&self, row: OneIndexed) -> Option<OneIndexed> {
self.find_cell(row).map(|start| start.raw_cell_index)
self.row_to_cell.get(row.to_zero_indexed()).copied()
}
/// Returns the row number (1-based) in the cell (1-based) for the
/// given row (1-based).
pub fn cell_row(&self, row: OneIndexed) -> Option<OneIndexed> {
self.find_cell(row)
.map(|start| OneIndexed::from_zero_indexed(row.get() - start.start_row.get()))
self.row_to_row_in_cell.get(row.to_zero_indexed()).copied()
}
/// Returns an iterator over the starting rows of each cell (1-based).
///
/// This yields one entry per Python cell (skipping over Markdown cells).
pub fn iter(&self) -> impl Iterator<Item = CellStart> + '_ {
self.cell_starts.iter().copied()
/// Returns an iterator over the row:cell-number pairs (both 1-based).
pub fn iter(&self) -> impl Iterator<Item = (OneIndexed, OneIndexed)> {
self.row_to_cell
.iter()
.enumerate()
.map(|(row, cell)| (OneIndexed::from_zero_indexed(row), *cell))
}
/// Translates the given [`LineColumn`] based on the indexing table.
@@ -70,23 +67,3 @@ impl NotebookIndex {
}
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct CellStart {
/// The row in the concatenated notebook source code at which
/// this cell starts.
pub(super) start_row: OneIndexed,
/// The absolute index of this cell in the notebook.
pub(super) raw_cell_index: OneIndexed,
}
impl CellStart {
pub fn start_row(&self) -> OneIndexed {
self.start_row
}
pub fn cell_index(&self) -> OneIndexed {
self.raw_cell_index
}
}

View File

@@ -18,7 +18,7 @@ use ruff_text_size::TextSize;
use crate::cell::CellOffsets;
use crate::index::NotebookIndex;
use crate::schema::{Cell, RawNotebook, SortAlphabetically, SourceValue};
use crate::{CellMetadata, CellStart, RawNotebookMetadata, schema};
use crate::{CellMetadata, RawNotebookMetadata, schema};
/// Run round-trip source code generation on a given Jupyter notebook file path.
pub fn round_trip(path: &Path) -> anyhow::Result<String> {
@@ -320,19 +320,11 @@ impl Notebook {
/// The index building is expensive as it needs to go through the content of
/// every valid code cell.
fn build_index(&self) -> NotebookIndex {
let mut cell_starts = Vec::with_capacity(self.valid_code_cells.len());
let mut current_row = OneIndexed::MIN;
let mut row_to_cell = Vec::new();
let mut row_to_row_in_cell = Vec::new();
for &cell_index in &self.valid_code_cells {
let raw_cell_index = cell_index as usize;
// Record the starting row of this cell
cell_starts.push(CellStart {
start_row: current_row,
raw_cell_index: OneIndexed::from_zero_indexed(raw_cell_index),
});
let line_count = match &self.raw.cells[raw_cell_index].source() {
let line_count = match &self.raw.cells[cell_index as usize].source() {
SourceValue::String(string) => {
if string.is_empty() {
1
@@ -350,11 +342,17 @@ impl Notebook {
}
}
};
current_row = current_row.saturating_add(line_count);
row_to_cell.extend(std::iter::repeat_n(
OneIndexed::from_zero_indexed(cell_index as usize),
line_count,
));
row_to_row_in_cell.extend((0..line_count).map(OneIndexed::from_zero_indexed));
}
NotebookIndex { cell_starts }
NotebookIndex {
row_to_cell,
row_to_row_in_cell,
}
}
/// Return the notebook content.
@@ -458,7 +456,7 @@ mod tests {
use ruff_source_file::OneIndexed;
use crate::{Cell, CellStart, Notebook, NotebookError, NotebookIndex};
use crate::{Cell, Notebook, NotebookError, NotebookIndex};
/// Construct a path to a Jupyter notebook in the `resources/test/fixtures/jupyter` directory.
fn notebook_path(path: impl AsRef<Path>) -> std::path::PathBuf {
@@ -550,27 +548,39 @@ print("after empty cells")
assert_eq!(
notebook.index(),
&NotebookIndex {
cell_starts: vec![
CellStart {
start_row: OneIndexed::MIN,
raw_cell_index: OneIndexed::MIN
},
CellStart {
start_row: OneIndexed::from_zero_indexed(6),
raw_cell_index: OneIndexed::from_zero_indexed(2)
},
CellStart {
start_row: OneIndexed::from_zero_indexed(11),
raw_cell_index: OneIndexed::from_zero_indexed(4)
},
CellStart {
start_row: OneIndexed::from_zero_indexed(12),
raw_cell_index: OneIndexed::from_zero_indexed(6)
},
CellStart {
start_row: OneIndexed::from_zero_indexed(14),
raw_cell_index: OneIndexed::from_zero_indexed(7)
}
row_to_cell: vec![
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(4),
OneIndexed::from_zero_indexed(6),
OneIndexed::from_zero_indexed(6),
OneIndexed::from_zero_indexed(7)
],
row_to_row_in_cell: vec![
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(1),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(3),
OneIndexed::from_zero_indexed(4),
OneIndexed::from_zero_indexed(5),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(1),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(3),
OneIndexed::from_zero_indexed(4),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(1),
OneIndexed::from_zero_indexed(0)
],
}
);

View File

@@ -1,8 +0,0 @@
[
{
"preview": "disabled"
},
{
"preview": "enabled"
}
]

View File

@@ -125,13 +125,6 @@ lambda a, /, c: a
*x: x
)
(
lambda
# comment
*x,
**y: x
)
(
lambda
# comment 1
@@ -142,17 +135,6 @@ lambda a, /, c: a
x
)
(
lambda
# comment 1
*
# comment 2
x,
**y:
# comment 3
x
)
(
lambda # comment 1
* # comment 2
@@ -160,14 +142,6 @@ lambda a, /, c: a
x
)
(
lambda # comment 1
* # comment 2
x,
y: # comment 3
x
)
lambda *x\
:x
@@ -222,17 +196,6 @@ lambda: ( # comment
x
)
(
lambda # 1
# 2
x, # 3
# 4
y
: # 5
# 6
x
)
(
lambda
x,
@@ -241,71 +204,6 @@ lambda: ( # comment
z
)
# Leading
lambda x: (
lambda y: lambda z: x
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ z # Trailing
) # Trailing
# Leading
lambda x: lambda y: lambda z: [
x,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
z
] # Trailing
# Trailing
lambda self, araa, kkkwargs=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs), e=1, f=2, g=2: d
# Regression tests for https://github.com/astral-sh/ruff/issues/8179

View File

@@ -4,7 +4,6 @@ use ruff_python_ast::ExprLambda;
use ruff_text_size::Ranged;
use crate::comments::dangling_comments;
use crate::comments::leading_comments;
use crate::expression::parentheses::{NeedsParentheses, OptionalParentheses};
use crate::other::parameters::ParametersParentheses;
use crate::prelude::*;
@@ -34,45 +33,24 @@ impl FormatNodeRule<ExprLambda> for FormatExprLambda {
if dangling_before_parameters.is_empty() {
write!(f, [space()])?;
} else {
write!(f, [dangling_comments(dangling_before_parameters)])?;
}
group(&format_with(|f: &mut PyFormatter| {
if f.context().node_level().is_parenthesized()
&& (parameters.len() > 1 || !dangling_before_parameters.is_empty())
{
let end_of_line_start = dangling_before_parameters
.partition_point(|comment| comment.line_position().is_end_of_line());
let (same_line_comments, own_line_comments) =
dangling_before_parameters.split_at(end_of_line_start);
write!(
f,
[parameters
.format()
.with_options(ParametersParentheses::Never)]
)?;
dangling_comments(same_line_comments).fmt(f)?;
write!(f, [token(":")])?;
write![
f,
[
soft_line_break(),
leading_comments(own_line_comments),
parameters
.format()
.with_options(ParametersParentheses::Never),
]
]
} else {
parameters
.format()
.with_options(ParametersParentheses::Never)
.fmt(f)
}?;
write!(f, [token(":")])?;
if dangling_after_parameters.is_empty() {
write!(f, [space()])
} else {
write!(f, [dangling_comments(dangling_after_parameters)])
}
}))
.fmt(f)?;
if dangling_after_parameters.is_empty() {
write!(f, [space()])?;
} else {
write!(f, [dangling_comments(dangling_after_parameters)])?;
}
} else {
write!(f, [token(":")])?;

View File

@@ -241,7 +241,7 @@ impl FormatNodeRule<Parameters> for FormatParameters {
let num_parameters = item.len();
if self.parentheses == ParametersParentheses::Never {
write!(f, [format_inner, dangling_comments(dangling)])
write!(f, [group(&format_inner), dangling_comments(dangling)])
} else if num_parameters == 0 {
let mut f = WithNodeLevel::new(NodeLevel::ParenthesizedExpression, f);
// No parameters, format any dangling comments between `()`

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/lambda.py
snapshot_kind: text
---
## Input
```python
@@ -131,13 +132,6 @@ lambda a, /, c: a
*x: x
)
(
lambda
# comment
*x,
**y: x
)
(
lambda
# comment 1
@@ -148,17 +142,6 @@ lambda a, /, c: a
x
)
(
lambda
# comment 1
*
# comment 2
x,
**y:
# comment 3
x
)
(
lambda # comment 1
* # comment 2
@@ -166,14 +149,6 @@ lambda a, /, c: a
x
)
(
lambda # comment 1
* # comment 2
x,
y: # comment 3
x
)
lambda *x\
:x
@@ -228,17 +203,6 @@ lambda: ( # comment
x
)
(
lambda # 1
# 2
x, # 3
# 4
y
: # 5
# 6
x
)
(
lambda
x,
@@ -247,71 +211,6 @@ lambda: ( # comment
z
)
# Leading
lambda x: (
lambda y: lambda z: x
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ z # Trailing
) # Trailing
# Leading
lambda x: lambda y: lambda z: [
x,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
z
] # Trailing
# Trailing
lambda self, araa, kkkwargs=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs), e=1, f=2, g=2: d
# Regression tests for https://github.com/astral-sh/ruff/issues/8179
@@ -338,22 +237,7 @@ def a():
```
## Outputs
### Output 1
```
indent-style = space
line-width = 88
indent-width = 4
quote-style = Double
line-ending = LineFeed
magic-trailing-comma = Respect
docstring-code = Disabled
docstring-code-line-width = "dynamic"
preview = Disabled
target_version = 3.10
source_type = Python
```
## Output
```python
# Leading
lambda x: x # Trailing
@@ -417,8 +301,7 @@ a = (
)
a = (
lambda
x, # Dangling
lambda x, # Dangling
y: 1
)
@@ -484,13 +367,6 @@ lambda a, /, c: a
*x: x
)
(
lambda
# comment
*x,
**y: x
)
(
lambda
# comment 1
@@ -500,16 +376,6 @@ lambda a, /, c: a
x
)
(
lambda
# comment 1
# comment 2
*x,
**y:
# comment 3
x
)
(
lambda # comment 1
# comment 2
@@ -517,14 +383,6 @@ lambda a, /, c: a
x
)
(
lambda # comment 1
# comment 2
*x,
y: # comment 3
x
)
lambda *x: x
(
@@ -577,87 +435,11 @@ lambda: ( # comment
)
(
lambda # 1
# 2
x, # 3
# 4
y: # 5
# 6
x
)
(
lambda
x,
lambda x,
# comment
y: z
)
# Leading
lambda x: (
lambda y: lambda z: x
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ z # Trailing
) # Trailing
# Leading
lambda x: lambda y: lambda z: [
x,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
z,
] # Trailing
# Trailing
lambda self, araa, kkkwargs=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(
*args, **kwargs
), e=1, f=2, g=2: d
@@ -669,8 +451,7 @@ def a():
c,
d,
e,
f=lambda
self,
f=lambda self,
*args,
**kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs),
)
@@ -681,365 +462,7 @@ def a():
c,
d,
e,
f=lambda
self,
araa,
kkkwargs,
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
args,
kwargs,
e=1,
f=2,
g=2: d,
g=10,
)
```
### Output 2
```
indent-style = space
line-width = 88
indent-width = 4
quote-style = Double
line-ending = LineFeed
magic-trailing-comma = Respect
docstring-code = Disabled
docstring-code-line-width = "dynamic"
preview = Enabled
target_version = 3.10
source_type = Python
```
```python
# Leading
lambda x: x # Trailing
# Trailing
# Leading
lambda x, y: x # Trailing
# Trailing
# Leading
lambda x, y: x, y # Trailing
# Trailing
# Leading
lambda x, /, y: x # Trailing
# Trailing
# Leading
lambda x: lambda y: lambda z: x # Trailing
# Trailing
# Leading
lambda x: lambda y: lambda z: (x, y, z) # Trailing
# Trailing
# Leading
lambda x: lambda y: lambda z: (x, y, z) # Trailing
# Trailing
# Leading
lambda x: lambda y: lambda z: (
x,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
z,
) # Trailing
# Trailing
a = (
lambda: # Dangling
1
)
a = (
lambda
x, # Dangling
y: 1
)
# Regression test: lambda empty arguments ranges were too long, leading to unstable
# formatting
(
lambda: ( #
),
)
# lambda arguments don't have parentheses, so we never add a magic trailing comma ...
def f(
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb = lambda x: y,
):
pass
# ...but we do preserve a trailing comma after the arguments
a = lambda b,: 0
lambda a,: 0
lambda *args,: 0
lambda **kwds,: 0
lambda a, *args,: 0
lambda a, **kwds,: 0
lambda *args, b,: 0
lambda *, b,: 0
lambda *args, **kwds,: 0
lambda a, *args, b,: 0
lambda a, *, b,: 0
lambda a, *args, **kwds,: 0
lambda *args, b, **kwds,: 0
lambda *, b, **kwds,: 0
lambda a, *args, b, **kwds,: 0
lambda a, *, b, **kwds,: 0
lambda a, /: a
lambda a, /, c: a
# Dangling comments without parameters.
(
lambda: # 3
None
)
(
lambda:
# 3
None
)
(
lambda: # 1
# 2
# 3
# 4
None # 5
)
(
lambda
# comment
*x: x
)
(
lambda
# comment
*x,
**y: x
)
(
lambda
# comment 1
# comment 2
*x:
# comment 3
x
)
(
lambda
# comment 1
# comment 2
*x,
**y:
# comment 3
x
)
(
lambda # comment 1
# comment 2
*x: # comment 3
x
)
(
lambda # comment 1
# comment 2
*x,
y: # comment 3
x
)
lambda *x: x
(
lambda
# comment
*x: x
)
lambda: ( # comment
x
)
(
lambda: # comment
x
)
(
lambda:
# comment
x
)
(
lambda: # comment
x
)
(
lambda:
# comment
x
)
(
lambda: # comment
( # comment
x
)
)
(
lambda # 1
# 2
x: # 3
# 4
# 5
# 6
x
)
(
lambda # 1
# 2
x, # 3
# 4
y: # 5
# 6
x
)
(
lambda
x,
# comment
y: z
)
# Leading
lambda x: (
lambda y: lambda z: x
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ z # Trailing
) # Trailing
# Leading
lambda x: lambda y: lambda z: [
x,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
z,
] # Trailing
# Trailing
lambda self, araa, kkkwargs=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(
*args, **kwargs
), e=1, f=2, g=2: d
# Regression tests for https://github.com/astral-sh/ruff/issues/8179
def a():
return b(
c,
d,
e,
f=lambda
self,
*args,
**kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs),
)
def a():
return b(
c,
d,
e,
f=lambda
self,
f=lambda self,
araa,
kkkwargs,
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,

View File

@@ -162,38 +162,3 @@ def _(x: A | B, y: list[int]):
reveal_type(x) # revealed: B & ~A
reveal_type(isinstance(x, B)) # revealed: Literal[True]
```
Certain special forms in the typing module are not instances of `type`, so are strictly-speaking
disallowed as the second argument to `isinstance()` according to typeshed's annotations. However, at
runtime they work fine as the second argument, and we implement that special case in ty:
```py
import typing as t
# no errors emitted for any of these:
isinstance("", t.Dict)
isinstance("", t.List)
isinstance("", t.Set)
isinstance("", t.FrozenSet)
isinstance("", t.Tuple)
isinstance("", t.ChainMap)
isinstance("", t.Counter)
isinstance("", t.Deque)
isinstance("", t.OrderedDict)
isinstance("", t.Callable)
isinstance("", t.Type)
isinstance("", t.Callable | t.Deque)
# `Any` is valid in `issubclass()` calls but not `isinstance()` calls
issubclass(list, t.Any)
issubclass(list, t.Any | t.Dict)
```
But for other special forms that are not permitted as the second argument, we still emit an error:
```py
isinstance("", t.TypeGuard) # error: [invalid-argument-type]
isinstance("", t.ClassVar) # error: [invalid-argument-type]
isinstance("", t.Final) # error: [invalid-argument-type]
isinstance("", t.Any) # error: [invalid-argument-type]
```

View File

@@ -272,54 +272,6 @@ def g(
): ...
```
## `|` unions in stubs and `TYPE_CHECKING` blocks
In runtime contexts, `|` unions are only permitted on Python 3.10+. But in suites of code that are
never executed at runtime (stub files, `if TYPE_CHECKING` blocks, and stringified annotations), they
are permitted even if the target version is set to Python 3.9 or earlier.
```toml
[environment]
python-version = "3.9"
```
`bar.pyi`:
```pyi
Z = int | str
GLOBAL_CONSTANT: Z
```
`foo.py`:
```py
from typing import TYPE_CHECKING
from bar import GLOBAL_CONSTANT
reveal_type(GLOBAL_CONSTANT) # revealed: int | str
if TYPE_CHECKING:
class ItsQuiteCloudyInManchester:
X = int | str
def f(obj: X):
reveal_type(obj) # revealed: int | str
# TODO: we currently only understand code as being inside a `TYPE_CHECKING` block
# if a whole *scope* is inside the `if TYPE_CHECKING` block
# (like the `ItsQuiteCloudyInManchester` class above); this is a false-positive
Y = int | str # error: [unsupported-operator]
def g(obj: Y):
# TODO: should be `int | str`
reveal_type(obj) # revealed: Unknown
Y = list["int | str"]
def g(obj: Y):
reveal_type(obj) # revealed: list[int | str]
```
## Generic types
Implicit type aliases can also refer to generic types:

View File

@@ -22,17 +22,15 @@ This file currently covers the following details:
- **dot re-exports**: `from . import a` in an `__init__.pyi` is considered a re-export of `a`
(equivalent to `from . import a as a`). This is required to properly handle many stubs in the
wild. Equivalent imports like `from whatever.thispackage import a` also introduce a re-export
(this has essentially zero ecosystem impact, we just felt it was more consistent). The only way
to opt out of this is to rename the import to something else (`from . import a as b`).
`from .a import b` and equivalent does *not* introduce a re-export.
wild. Currently it must be *exactly* `from . import ...`.
Note: almost all tests in here have a stub and non-stub version, because we're interested in both
defining symbols *at all* and re-exporting them.
## Relative `from` Import of Direct Submodule in `__init__`
We consider the `from . import submodule` idiom in an `__init__.pyi` an explicit re-export.
We consider the `from . import submodule` idiom in an `__init__.pyi` an explicit re-export. This
pattern is observed in the wild with various stub packages.
### In Stub

View File

@@ -1028,13 +1028,6 @@ impl<'db> Type<'db> {
any_over_type(db, self, &|ty| matches!(ty, Type::TypeVar(_)), false)
}
pub(crate) const fn as_special_form(self) -> Option<SpecialFormType> {
match self {
Type::SpecialForm(special_form) => Some(special_form),
_ => None,
}
}
pub(crate) const fn as_class_literal(self) -> Option<ClassLiteral<'db>> {
match self {
Type::ClassLiteral(class_type) => Some(class_type),

View File

@@ -3647,31 +3647,6 @@ impl<'db> BindingError<'db> {
expected_ty,
provided_ty,
} => {
// Certain special forms in the typing module are aliases for classes
// elsewhere in the standard library. These special forms are not instances of `type`,
// and you cannot use them in place of their aliased classes in *all* situations:
// for example, `dict()` succeeds at runtime, but `typing.Dict()` fails. However,
// they *can* all be used as the second argument to `isinstance` and `issubclass`.
// We model that specific aspect of their behaviour here.
//
// This is implemented as a special case in call-binding machinery because overriding
// typeshed's signatures for `isinstance()` and `issubclass()` would be complex and
// error-prone, due to the fact that they are annotated with recursive type aliases.
if parameter.index == 1
&& *argument_index == Some(1)
&& matches!(
callable_ty
.as_function_literal()
.and_then(|function| function.known(context.db())),
Some(KnownFunction::IsInstance | KnownFunction::IsSubclass)
)
&& provided_ty
.as_special_form()
.is_some_and(SpecialFormType::is_valid_isinstance_target)
{
return;
}
// TODO: Ideally we would not emit diagnostics for `TypedDict` literal arguments
// here (see `diagnostic::is_invalid_typed_dict_literal`). However, we may have
// silenced diagnostics during overload evaluation, and rely on the assignability

View File

@@ -1764,7 +1764,6 @@ impl KnownFunction {
Type::KnownInstance(KnownInstanceType::UnionType(_)) => {
fn find_invalid_elements<'db>(
db: &'db dyn Db,
function: KnownFunction,
ty: Type<'db>,
invalid_elements: &mut Vec<Type<'db>>,
) {
@@ -1772,19 +1771,9 @@ impl KnownFunction {
Type::ClassLiteral(_) => {}
Type::NominalInstance(instance)
if instance.has_known_class(db, KnownClass::NoneType) => {}
Type::SpecialForm(special_form)
if special_form.is_valid_isinstance_target() => {}
// `Any` can be used in `issubclass()` calls but not `isinstance()` calls
Type::SpecialForm(SpecialFormType::Any)
if function == KnownFunction::IsSubclass => {}
Type::KnownInstance(KnownInstanceType::UnionType(union)) => {
for element in union.elements(db) {
find_invalid_elements(
db,
function,
*element,
invalid_elements,
);
find_invalid_elements(db, *element, invalid_elements);
}
}
_ => invalid_elements.push(ty),
@@ -1792,7 +1781,7 @@ impl KnownFunction {
}
let mut invalid_elements = vec![];
find_invalid_elements(db, self, *second_argument, &mut invalid_elements);
find_invalid_elements(db, *second_argument, &mut invalid_elements);
let Some((first_invalid_element, other_invalid_elements)) =
invalid_elements.split_first()

View File

@@ -8908,12 +8908,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
emitted_division_by_zero_diagnostic = self.check_division_by_zero(node, op, left_ty);
}
let pep_604_unions_allowed = || {
Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310
|| self.file().is_stub(self.db())
|| self.scope().scope(self.db()).in_type_checking_block()
};
match (left_ty, right_ty, op) {
(Type::Union(lhs_union), rhs, _) => lhs_union.try_map(self.db(), |lhs_element| {
self.infer_binary_expression_type(
@@ -9175,7 +9169,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
| KnownInstanceType::Annotated(_),
),
ast::Operator::BitOr,
) if pep_604_unions_allowed() => {
) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => {
if left_ty.is_equivalent_to(self.db(), right_ty) {
Some(left_ty)
} else {
@@ -9201,7 +9195,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
| Type::KnownInstance(..)
| Type::SpecialForm(..),
ast::Operator::BitOr,
) if pep_604_unions_allowed()
) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310
&& instance.has_known_class(self.db(), KnownClass::NoneType) =>
{
Some(Type::KnownInstance(KnownInstanceType::UnionType(
@@ -9225,15 +9219,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
_,
Type::ClassLiteral(..) | Type::GenericAlias(..) | Type::SubclassOf(..),
ast::Operator::BitOr,
) if pep_604_unions_allowed() => Type::try_call_bin_op_with_policy(
self.db(),
left_ty,
ast::Operator::BitOr,
right_ty,
MemberLookupPolicy::META_CLASS_NO_TYPE_FALLBACK,
)
.ok()
.map(|binding| binding.return_type(self.db())),
) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => {
Type::try_call_bin_op_with_policy(
self.db(),
left_ty,
ast::Operator::BitOr,
right_ty,
MemberLookupPolicy::META_CLASS_NO_TYPE_FALLBACK,
)
.ok()
.map(|binding| binding.return_type(self.db()))
}
// We've handled all of the special cases that we support for literals, so we need to
// fall back on looking for dunder methods on one of the operand types.

View File

@@ -328,58 +328,6 @@ impl SpecialFormType {
}
}
/// Return `true` if this special form is valid as the second argument
/// to `issubclass()` and `isinstance()` calls.
pub(super) const fn is_valid_isinstance_target(self) -> bool {
match self {
Self::Callable
| Self::ChainMap
| Self::Counter
| Self::DefaultDict
| Self::Deque
| Self::FrozenSet
| Self::Dict
| Self::List
| Self::OrderedDict
| Self::Set
| Self::Tuple
| Self::Type
| Self::Protocol
| Self::Generic => true,
Self::AlwaysFalsy
| Self::AlwaysTruthy
| Self::Annotated
| Self::Bottom
| Self::CallableTypeOf
| Self::ClassVar
| Self::Concatenate
| Self::Final
| Self::Intersection
| Self::Literal
| Self::LiteralString
| Self::Never
| Self::NoReturn
| Self::Not
| Self::ReadOnly
| Self::Required
| Self::TypeAlias
| Self::TypeGuard
| Self::NamedTuple
| Self::NotRequired
| Self::Optional
| Self::Top
| Self::TypeIs
| Self::TypedDict
| Self::TypingSelf
| Self::Union
| Self::Unknown
| Self::TypeOf
| Self::Any // can be used in `issubclass()` but not `isinstance()`.
| Self::Unpack => false,
}
}
/// Return the repr of the symbol at runtime
pub(super) const fn repr(self) -> &'static str {
match self {

View File

@@ -618,9 +618,8 @@ Options:
notebooks, use `--extension ipy:ipynb`
--statistics
Show counts for every rule with at least one violation
--add-noqa[=<REASON>]
Enable automatic additions of `noqa` directives to failing lines.
Optionally provide a reason to append after the codes
--add-noqa
Enable automatic additions of `noqa` directives to failing lines
--show-files
See the files Ruff will be run against with the current settings
--show-settings