Compare commits
7 Commits
gankra/scr...david/sqla

| Author | SHA1 | Date |
|---|---|---|
| | 352628e986 | |
| | 4e67a219bb | |
| | 8ea18966cf | |
| | e548ce1ca9 | |
| | eac8a90cc4 | |
| | 2d3466eccf | |
| | 45fb3732a4 | |
@@ -1440,6 +1440,78 @@ def function():
    Ok(())
}

#[test]
fn ignore_noqa() -> Result<()> {
    let fixture = CliTest::new()?;
    fixture.write_file(
        "ruff.toml",
        r#"
[lint]
select = ["F401"]
"#,
    )?;

    fixture.write_file(
        "noqa.py",
        r#"
import os # noqa: F401

# ruff: disable[F401]
import sys
"#,
    )?;

    // without --ignore-noqa
    assert_cmd_snapshot!(fixture
        .check_command()
        .args(["--config", "ruff.toml"])
        .arg("noqa.py"),
        @r"
    success: false
    exit_code: 1
    ----- stdout -----
    noqa.py:5:8: F401 [*] `sys` imported but unused
    Found 1 error.
    [*] 1 fixable with the `--fix` option.

    ----- stderr -----
    ");

    assert_cmd_snapshot!(fixture
        .check_command()
        .args(["--config", "ruff.toml"])
        .arg("noqa.py")
        .args(["--preview"]),
        @r"
    success: true
    exit_code: 0
    ----- stdout -----
    All checks passed!

    ----- stderr -----
    ");

    // with --ignore-noqa --preview
    assert_cmd_snapshot!(fixture
        .check_command()
        .args(["--config", "ruff.toml"])
        .arg("noqa.py")
        .args(["--ignore-noqa", "--preview"]),
        @r"
    success: false
    exit_code: 1
    ----- stdout -----
    noqa.py:2:8: F401 [*] `os` imported but unused
    noqa.py:5:8: F401 [*] `sys` imported but unused
    Found 2 errors.
    [*] 2 fixable with the `--fix` option.

    ----- stderr -----
    ");

    Ok(())
}

#[test]
fn add_noqa() -> Result<()> {
    let fixture = CliTest::new()?;
@@ -1632,6 +1704,100 @@ def unused(x): # noqa: ANN001, ARG001, D103
    Ok(())
}

#[test]
fn add_noqa_existing_file_level_noqa() -> Result<()> {
    let fixture = CliTest::new()?;
    fixture.write_file(
        "ruff.toml",
        r#"
[lint]
select = ["F401"]
"#,
    )?;

    fixture.write_file(
        "noqa.py",
        r#"
# ruff: noqa F401
import os
"#,
    )?;

    assert_cmd_snapshot!(fixture
        .check_command()
        .args(["--config", "ruff.toml"])
        .arg("noqa.py")
        .arg("--preview")
        .args(["--add-noqa"])
        .arg("-")
        .pass_stdin(r#"

"#), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    ");

    let test_code =
        fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");

    insta::assert_snapshot!(test_code, @r"
    # ruff: noqa F401
    import os
    ");

    Ok(())
}

#[test]
fn add_noqa_existing_range_suppression() -> Result<()> {
    let fixture = CliTest::new()?;
    fixture.write_file(
        "ruff.toml",
        r#"
[lint]
select = ["F401"]
"#,
    )?;

    fixture.write_file(
        "noqa.py",
        r#"
# ruff: disable[F401]
import os
"#,
    )?;

    assert_cmd_snapshot!(fixture
        .check_command()
        .args(["--config", "ruff.toml"])
        .arg("noqa.py")
        .arg("--preview")
        .args(["--add-noqa"])
        .arg("-")
        .pass_stdin(r#"

"#), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    ");

    let test_code =
        fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");

    insta::assert_snapshot!(test_code, @r"
    # ruff: disable[F401]
    import os
    ");

    Ok(())
}

#[test]
fn add_noqa_multiline_comment() -> Result<()> {
    let fixture = CliTest::new()?;
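Both `--add-noqa` tests above assert that the target file is written back unchanged: the F401 violation is already silenced, once by a file-level `# ruff: noqa` directive and once by a `# ruff: disable[F401]` range suppression, so no new line-level comment is needed. As a rough sketch of the behavior being guarded (file name and rule code are illustrative only), `--add-noqa` normally appends a line-level suppression to each offending line, as in the `x = 1 # noqa: F841` expectations later in this diff:

```python
# Before `ruff check --add-noqa` (with F401 selected): the import is unused.
import os

# After: a line-level suppression is appended to the offending line.
import os  # noqa: F401

# When an existing file-level or range suppression already covers the
# violation, the file is left exactly as it was.
```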
@@ -199,6 +199,9 @@ def bytes_okay(value=bytes(1)):
def int_okay(value=int("12")):
    pass

# Allow immutable slice()
def slice_okay(value=slice(1,2)):
    pass

# Allow immutable complex() value
def complex_okay(value=complex(1,2)):

@@ -218,3 +218,26 @@ def should_not_fail(payload, Args):
    Args:
        The other arguments.
    """


# Test cases for Unpack[TypedDict] kwargs
from typing import TypedDict
from typing_extensions import Unpack

class User(TypedDict):
    id: int
    name: str

def function_with_unpack_args_should_not_fail(query: str, **kwargs: Unpack[User]):
    """Function with Unpack kwargs.

    Args:
        query: some arg
    """

def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
    """Function with Unpack kwargs but missing query arg documentation.

    Args:
        **kwargs: keyword arguments
    """
crates/ruff_linter/resources/test/fixtures/ruff/suppressions.py (new file, 56 lines, vendored)
@@ -0,0 +1,56 @@
def f():
    # These should both be ignored by the range suppression.
    # ruff: disable[E741, F841]
    I = 1
    # ruff: enable[E741, F841]


def f():
    # These should both be ignored by the implicit range suppression.
    # Should also generate an "unmatched suppression" warning.
    # ruff:disable[E741,F841]
    I = 1


def f():
    # Neither warning is ignored, and an "unmatched suppression"
    # should be generated.
    I = 1
    # ruff: enable[E741, F841]


def f():
    # One should be ignored by the range suppression, and
    # the other logged to the user.
    # ruff: disable[E741]
    I = 1
    # ruff: enable[E741]


def f():
    # Test interleaved range suppressions. The first and last
    # lines should each log a different warning, while the
    # middle line should be completely silenced.
    # ruff: disable[E741]
    l = 0
    # ruff: disable[F841]
    O = 1
    # ruff: enable[E741]
    I = 2
    # ruff: enable[F841]


def f():
    # Neither of these are ignored and warnings are
    # logged to user
    # ruff: disable[E501]
    I = 1
    # ruff: enable[E501]


def f():
    # These should both be ignored by the range suppression,
    # and an unusued noqa diagnostic should be logged.
    # ruff:disable[E741,F841]
    I = 1 # noqa: E741,F841
    # ruff:enable[E741,F841]
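The fixture above is the main end-to-end exercise of the new range-suppression comments. As a minimal sketch of the syntax it covers (rule codes chosen purely for illustration), a `# ruff: disable[...]` comment suppresses the listed rules until a matching `# ruff: enable[...]` comment, and per this PR the behavior only takes effect under `--preview`:

```python
def example():
    # E741 (ambiguous variable name) is suppressed inside the range.
    # ruff: disable[E741]
    l = 0
    # ruff: enable[E741]

    # Outside the range, E741 is reported as usual.
    I = 1
```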
@@ -12,17 +12,20 @@ use crate::fix::edits::delete_comment;
use crate::noqa::{
    Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping,
};
use crate::preview::is_range_suppressions_enabled;
use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;
use crate::rules::pygrep_hooks;
use crate::rules::ruff;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
use crate::settings::LinterSettings;
use crate::suppression::Suppressions;
use crate::{Edit, Fix, Locator};

use super::ast::LintContext;

/// RUF100
#[expect(clippy::too_many_arguments)]
pub(crate) fn check_noqa(
    context: &mut LintContext,
    path: &Path,

@@ -31,6 +34,7 @@ pub(crate) fn check_noqa(
    noqa_line_for: &NoqaMapping,
    analyze_directives: bool,
    settings: &LinterSettings,
    suppressions: &Suppressions,
) -> Vec<usize> {
    // Identify any codes that are globally exempted (within the current file).
    let file_noqa_directives =

@@ -40,7 +44,7 @@ pub(crate) fn check_noqa(
    let mut noqa_directives =
        NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator);

-   if file_noqa_directives.is_empty() && noqa_directives.is_empty() {
+   if file_noqa_directives.is_empty() && noqa_directives.is_empty() && suppressions.is_empty() {
        return Vec::new();
    }

@@ -60,11 +64,19 @@ pub(crate) fn check_noqa(
            continue;
        }

        // Apply file-level suppressions first
        if exemption.contains_secondary_code(code) {
            ignored_diagnostics.push(index);
            continue;
        }

        // Apply ranged suppressions next
        if is_range_suppressions_enabled(settings) && suppressions.check_diagnostic(diagnostic) {
            ignored_diagnostics.push(index);
            continue;
        }

        // Apply end-of-line noqa suppressions last
        let noqa_offsets = diagnostic
            .parent()
            .into_iter()
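The comments added to `check_noqa` make the evaluation order explicit: file-level exemptions are consulted first, then the new range suppressions (gated behind preview), and finally line-level `# noqa` directives. A hypothetical file touching all three layers (rule codes are illustrative only) would look like:

```python
# File-level exemption: F401 is ignored everywhere in this module.
# ruff: noqa: F401
import os


def f():
    # Range suppression (preview only): E741 is ignored until re-enabled.
    # ruff: disable[E741]
    l = 1
    # ruff: enable[E741]

    # Line-level suppression, checked last.
    x = 1  # noqa: F841
```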
@@ -32,6 +32,7 @@ use crate::rules::ruff::rules::test_rules::{self, TEST_RULES, TestRule};
|
||||
use crate::settings::types::UnsafeFixes;
|
||||
use crate::settings::{LinterSettings, TargetVersion, flags};
|
||||
use crate::source_kind::SourceKind;
|
||||
use crate::suppression::Suppressions;
|
||||
use crate::{Locator, directives, fs};
|
||||
|
||||
pub(crate) mod float;
|
||||
@@ -128,6 +129,7 @@ pub fn check_path(
|
||||
source_type: PySourceType,
|
||||
parsed: &Parsed<ModModule>,
|
||||
target_version: TargetVersion,
|
||||
suppressions: &Suppressions,
|
||||
) -> Vec<Diagnostic> {
|
||||
// Aggregate all diagnostics.
|
||||
let mut context = LintContext::new(path, locator.contents(), settings);
|
||||
@@ -339,6 +341,7 @@ pub fn check_path(
|
||||
&directives.noqa_line_for,
|
||||
parsed.has_valid_syntax(),
|
||||
settings,
|
||||
suppressions,
|
||||
);
|
||||
if noqa.is_enabled() {
|
||||
for index in ignored.iter().rev() {
|
||||
@@ -400,6 +403,9 @@ pub fn add_noqa_to_path(
|
||||
&indexer,
|
||||
);
|
||||
|
||||
// Parse range suppression comments
|
||||
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
|
||||
|
||||
// Generate diagnostics, ignoring any existing `noqa` directives.
|
||||
let diagnostics = check_path(
|
||||
path,
|
||||
@@ -414,6 +420,7 @@ pub fn add_noqa_to_path(
|
||||
source_type,
|
||||
&parsed,
|
||||
target_version,
|
||||
&suppressions,
|
||||
);
|
||||
|
||||
// Add any missing `# noqa` pragmas.
|
||||
@@ -427,6 +434,7 @@ pub fn add_noqa_to_path(
|
||||
&directives.noqa_line_for,
|
||||
stylist.line_ending(),
|
||||
reason,
|
||||
&suppressions,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -461,6 +469,9 @@ pub fn lint_only(
|
||||
&indexer,
|
||||
);
|
||||
|
||||
// Parse range suppression comments
|
||||
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
|
||||
|
||||
// Generate diagnostics.
|
||||
let diagnostics = check_path(
|
||||
path,
|
||||
@@ -475,6 +486,7 @@ pub fn lint_only(
|
||||
source_type,
|
||||
&parsed,
|
||||
target_version,
|
||||
&suppressions,
|
||||
);
|
||||
|
||||
LinterResult {
|
||||
@@ -566,6 +578,9 @@ pub fn lint_fix<'a>(
|
||||
&indexer,
|
||||
);
|
||||
|
||||
// Parse range suppression comments
|
||||
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
|
||||
|
||||
// Generate diagnostics.
|
||||
let diagnostics = check_path(
|
||||
path,
|
||||
@@ -580,6 +595,7 @@ pub fn lint_fix<'a>(
|
||||
source_type,
|
||||
&parsed,
|
||||
target_version,
|
||||
&suppressions,
|
||||
);
|
||||
|
||||
if iterations == 0 {
|
||||
@@ -769,6 +785,7 @@ mod tests {
|
||||
use crate::registry::Rule;
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::source_kind::SourceKind;
|
||||
use crate::suppression::Suppressions;
|
||||
use crate::test::{TestedNotebook, assert_notebook_path, test_contents, test_snippet};
|
||||
use crate::{Locator, assert_diagnostics, directives, settings};
|
||||
|
||||
@@ -944,6 +961,7 @@ mod tests {
|
||||
&locator,
|
||||
&indexer,
|
||||
);
|
||||
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
|
||||
let mut diagnostics = check_path(
|
||||
path,
|
||||
None,
|
||||
@@ -957,6 +975,7 @@ mod tests {
|
||||
source_type,
|
||||
&parsed,
|
||||
target_version,
|
||||
&suppressions,
|
||||
);
|
||||
diagnostics.sort_by(Diagnostic::ruff_start_ordering);
|
||||
diagnostics
|
||||
|
||||
@@ -20,12 +20,14 @@ use crate::Locator;
|
||||
use crate::fs::relativize_path;
|
||||
use crate::registry::Rule;
|
||||
use crate::rule_redirects::get_redirect_target;
|
||||
use crate::suppression::Suppressions;
|
||||
|
||||
/// Generates an array of edits that matches the length of `messages`.
|
||||
/// Each potential edit in the array is paired, in order, with the associated diagnostic.
|
||||
/// Each edit will add a `noqa` comment to the appropriate line in the source to hide
|
||||
/// the diagnostic. These edits may conflict with each other and should not be applied
|
||||
/// simultaneously.
|
||||
#[expect(clippy::too_many_arguments)]
|
||||
pub fn generate_noqa_edits(
|
||||
path: &Path,
|
||||
diagnostics: &[Diagnostic],
|
||||
@@ -34,11 +36,19 @@ pub fn generate_noqa_edits(
|
||||
external: &[String],
|
||||
noqa_line_for: &NoqaMapping,
|
||||
line_ending: LineEnding,
|
||||
suppressions: &Suppressions,
|
||||
) -> Vec<Option<Edit>> {
|
||||
let file_directives = FileNoqaDirectives::extract(locator, comment_ranges, external, path);
|
||||
let exemption = FileExemption::from(&file_directives);
|
||||
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
|
||||
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
|
||||
let comments = find_noqa_comments(
|
||||
diagnostics,
|
||||
locator,
|
||||
&exemption,
|
||||
&directives,
|
||||
noqa_line_for,
|
||||
suppressions,
|
||||
);
|
||||
build_noqa_edits_by_diagnostic(comments, locator, line_ending, None)
|
||||
}
|
||||
|
||||
@@ -725,6 +735,7 @@ pub(crate) fn add_noqa(
|
||||
noqa_line_for: &NoqaMapping,
|
||||
line_ending: LineEnding,
|
||||
reason: Option<&str>,
|
||||
suppressions: &Suppressions,
|
||||
) -> Result<usize> {
|
||||
let (count, output) = add_noqa_inner(
|
||||
path,
|
||||
@@ -735,6 +746,7 @@ pub(crate) fn add_noqa(
|
||||
noqa_line_for,
|
||||
line_ending,
|
||||
reason,
|
||||
suppressions,
|
||||
);
|
||||
|
||||
fs::write(path, output)?;
|
||||
@@ -751,6 +763,7 @@ fn add_noqa_inner(
|
||||
noqa_line_for: &NoqaMapping,
|
||||
line_ending: LineEnding,
|
||||
reason: Option<&str>,
|
||||
suppressions: &Suppressions,
|
||||
) -> (usize, String) {
|
||||
let mut count = 0;
|
||||
|
||||
@@ -760,7 +773,14 @@ fn add_noqa_inner(
|
||||
|
||||
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
|
||||
|
||||
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
|
||||
let comments = find_noqa_comments(
|
||||
diagnostics,
|
||||
locator,
|
||||
&exemption,
|
||||
&directives,
|
||||
noqa_line_for,
|
||||
suppressions,
|
||||
);
|
||||
|
||||
let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason);
|
||||
|
||||
@@ -859,6 +879,7 @@ fn find_noqa_comments<'a>(
|
||||
exemption: &'a FileExemption,
|
||||
directives: &'a NoqaDirectives,
|
||||
noqa_line_for: &NoqaMapping,
|
||||
suppressions: &Suppressions,
|
||||
) -> Vec<Option<NoqaComment<'a>>> {
|
||||
// List of noqa comments, ordered to match up with `messages`
|
||||
let mut comments_by_line: Vec<Option<NoqaComment<'a>>> = vec![];
|
||||
@@ -875,6 +896,12 @@ fn find_noqa_comments<'a>(
|
||||
continue;
|
||||
}
|
||||
|
||||
// Apply ranged suppressions next
|
||||
if suppressions.check_diagnostic(message) {
|
||||
comments_by_line.push(None);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Is the violation ignored by a `noqa` directive on the parent line?
|
||||
if let Some(parent) = message.parent() {
|
||||
if let Some(directive_line) =
|
||||
@@ -1253,6 +1280,7 @@ mod tests {
|
||||
use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
|
||||
use crate::rules::pyflakes::rules::UnusedVariable;
|
||||
use crate::rules::pyupgrade::rules::PrintfStringFormatting;
|
||||
use crate::suppression::Suppressions;
|
||||
use crate::{Edit, Violation};
|
||||
use crate::{Locator, generate_noqa_edits};
|
||||
|
||||
@@ -2848,6 +2876,7 @@ mod tests {
|
||||
&noqa_line_for,
|
||||
LineEnding::Lf,
|
||||
None,
|
||||
&Suppressions::default(),
|
||||
);
|
||||
assert_eq!(count, 0);
|
||||
assert_eq!(output, format!("{contents}"));
|
||||
@@ -2872,6 +2901,7 @@ mod tests {
|
||||
&noqa_line_for,
|
||||
LineEnding::Lf,
|
||||
None,
|
||||
&Suppressions::default(),
|
||||
);
|
||||
assert_eq!(count, 1);
|
||||
assert_eq!(output, "x = 1 # noqa: F841\n");
|
||||
@@ -2903,6 +2933,7 @@ mod tests {
|
||||
&noqa_line_for,
|
||||
LineEnding::Lf,
|
||||
None,
|
||||
&Suppressions::default(),
|
||||
);
|
||||
assert_eq!(count, 1);
|
||||
assert_eq!(output, "x = 1 # noqa: E741, F841\n");
|
||||
@@ -2934,6 +2965,7 @@ mod tests {
|
||||
&noqa_line_for,
|
||||
LineEnding::Lf,
|
||||
None,
|
||||
&Suppressions::default(),
|
||||
);
|
||||
assert_eq!(count, 0);
|
||||
assert_eq!(output, "x = 1 # noqa");
|
||||
@@ -2956,6 +2988,7 @@ print(
|
||||
let messages = [PrintfStringFormatting
|
||||
.into_diagnostic(TextRange::new(12.into(), 79.into()), &source_file)];
|
||||
let comment_ranges = CommentRanges::default();
|
||||
let suppressions = Suppressions::default();
|
||||
let edits = generate_noqa_edits(
|
||||
path,
|
||||
&messages,
|
||||
@@ -2964,6 +2997,7 @@ print(
|
||||
&[],
|
||||
&noqa_line_for,
|
||||
LineEnding::Lf,
|
||||
&suppressions,
|
||||
);
|
||||
assert_eq!(
|
||||
edits,
|
||||
@@ -2987,6 +3021,7 @@ bar =
|
||||
[UselessSemicolon.into_diagnostic(TextRange::new(4.into(), 5.into()), &source_file)];
|
||||
let noqa_line_for = NoqaMapping::default();
|
||||
let comment_ranges = CommentRanges::default();
|
||||
let suppressions = Suppressions::default();
|
||||
let edits = generate_noqa_edits(
|
||||
path,
|
||||
&messages,
|
||||
@@ -2995,6 +3030,7 @@ bar =
|
||||
&[],
|
||||
&noqa_line_for,
|
||||
LineEnding::Lf,
|
||||
&suppressions,
|
||||
);
|
||||
assert_eq!(
|
||||
edits,
|
||||
|
||||
@@ -286,3 +286,8 @@ pub(crate) const fn is_s310_resolve_string_literal_bindings_enabled(
) -> bool {
    settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/21623
pub(crate) const fn is_range_suppressions_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}
@@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:239:20
|
||||
--> B006_B008.py:242:20
|
||||
|
|
||||
237 | # B006 and B008
|
||||
238 | # We should handle arbitrary nesting of these B008.
|
||||
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
240 | # B006 and B008
|
||||
241 | # We should handle arbitrary nesting of these B008.
|
||||
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
240 | pass
|
||||
243 | pass
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
236 |
|
||||
237 | # B006 and B008
|
||||
238 | # We should handle arbitrary nesting of these B008.
|
||||
239 |
|
||||
240 | # B006 and B008
|
||||
241 | # We should handle arbitrary nesting of these B008.
|
||||
- def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
239 + def nested_combo(a=None):
|
||||
240 | pass
|
||||
241 |
|
||||
242 |
|
||||
242 + def nested_combo(a=None):
|
||||
243 | pass
|
||||
244 |
|
||||
245 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:276:27
|
||||
--> B006_B008.py:279:27
|
||||
|
|
||||
275 | def mutable_annotations(
|
||||
276 | a: list[int] | None = [],
|
||||
278 | def mutable_annotations(
|
||||
279 | a: list[int] | None = [],
|
||||
| ^^
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
273 |
|
||||
274 |
|
||||
275 | def mutable_annotations(
|
||||
276 |
|
||||
277 |
|
||||
278 | def mutable_annotations(
|
||||
- a: list[int] | None = [],
|
||||
276 + a: list[int] | None = None,
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 + a: list[int] | None = None,
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:277:35
|
||||
--> B006_B008.py:280:35
|
||||
|
|
||||
275 | def mutable_annotations(
|
||||
276 | a: list[int] | None = [],
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | def mutable_annotations(
|
||||
279 | a: list[int] | None = [],
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
| ^^
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
274 |
|
||||
275 | def mutable_annotations(
|
||||
276 | a: list[int] | None = [],
|
||||
277 |
|
||||
278 | def mutable_annotations(
|
||||
279 | a: list[int] | None = [],
|
||||
- b: Optional[Dict[int, int]] = {},
|
||||
277 + b: Optional[Dict[int, int]] = None,
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | ):
|
||||
280 + b: Optional[Dict[int, int]] = None,
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
283 | ):
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:278:62
|
||||
--> B006_B008.py:281:62
|
||||
|
|
||||
276 | a: list[int] | None = [],
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | a: list[int] | None = [],
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
| ^^^^^
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | ):
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
283 | ):
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
275 | def mutable_annotations(
|
||||
276 | a: list[int] | None = [],
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | def mutable_annotations(
|
||||
279 | a: list[int] | None = [],
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
- c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | ):
|
||||
281 | pass
|
||||
281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
283 | ):
|
||||
284 | pass
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:279:80
|
||||
--> B006_B008.py:282:80
|
||||
|
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
| ^^^^^
|
||||
280 | ):
|
||||
281 | pass
|
||||
283 | ):
|
||||
284 | pass
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
276 | a: list[int] | None = [],
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | a: list[int] | None = [],
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
- d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
|
||||
280 | ):
|
||||
281 | pass
|
||||
282 |
|
||||
282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
|
||||
283 | ):
|
||||
284 | pass
|
||||
285 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:284:52
|
||||
--> B006_B008.py:287:52
|
||||
|
|
||||
284 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
287 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
285 | """Docstring"""
|
||||
288 | """Docstring"""
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
281 | pass
|
||||
282 |
|
||||
283 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
284 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
285 | """Docstring"""
|
||||
284 | pass
|
||||
285 |
|
||||
286 |
|
||||
287 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
287 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
288 | """Docstring"""
|
||||
289 |
|
||||
290 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:288:52
|
||||
--> B006_B008.py:291:52
|
||||
|
|
||||
288 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
291 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
289 | """Docstring"""
|
||||
290 | ...
|
||||
292 | """Docstring"""
|
||||
293 | ...
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
285 | """Docstring"""
|
||||
286 |
|
||||
287 |
|
||||
288 | """Docstring"""
|
||||
289 |
|
||||
290 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
288 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
289 | """Docstring"""
|
||||
290 | ...
|
||||
291 |
|
||||
291 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
292 | """Docstring"""
|
||||
293 | ...
|
||||
294 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:293:52
|
||||
--> B006_B008.py:296:52
|
||||
|
|
||||
293 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
296 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
294 | """Docstring"""; ...
|
||||
297 | """Docstring"""; ...
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
290 | ...
|
||||
291 |
|
||||
292 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
293 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
294 | """Docstring"""; ...
|
||||
293 | ...
|
||||
294 |
|
||||
295 |
|
||||
296 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
296 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
297 | """Docstring"""; ...
|
||||
298 |
|
||||
299 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:297:52
|
||||
--> B006_B008.py:300:52
|
||||
|
|
||||
297 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
300 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
298 | """Docstring"""; \
|
||||
299 | ...
|
||||
301 | """Docstring"""; \
|
||||
302 | ...
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
294 | """Docstring"""; ...
|
||||
295 |
|
||||
296 |
|
||||
297 | """Docstring"""; ...
|
||||
298 |
|
||||
299 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
297 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
298 | """Docstring"""; \
|
||||
299 | ...
|
||||
300 |
|
||||
300 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
301 | """Docstring"""; \
|
||||
302 | ...
|
||||
303 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:302:52
|
||||
--> B006_B008.py:305:52
|
||||
|
|
||||
302 | def single_line_func_wrong(value: dict[str, str] = {
|
||||
305 | def single_line_func_wrong(value: dict[str, str] = {
|
||||
| ____________________________________________________^
|
||||
303 | | # This is a comment
|
||||
304 | | }):
|
||||
306 | | # This is a comment
|
||||
307 | | }):
|
||||
| |_^
|
||||
305 | """Docstring"""
|
||||
308 | """Docstring"""
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
299 | ...
|
||||
300 |
|
||||
301 |
|
||||
302 | ...
|
||||
303 |
|
||||
304 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {
|
||||
- # This is a comment
|
||||
- }):
|
||||
302 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
303 | """Docstring"""
|
||||
304 |
|
||||
305 |
|
||||
305 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
306 | """Docstring"""
|
||||
307 |
|
||||
308 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:308:52
|
||||
--> B006_B008.py:311:52
|
||||
|
|
||||
308 | def single_line_func_wrong(value: dict[str, str] = {}) \
|
||||
311 | def single_line_func_wrong(value: dict[str, str] = {}) \
|
||||
| ^^
|
||||
309 | : \
|
||||
310 | """Docstring"""
|
||||
312 | : \
|
||||
313 | """Docstring"""
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:313:52
|
||||
--> B006_B008.py:316:52
|
||||
|
|
||||
313 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
316 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
314 | """Docstring without newline"""
|
||||
317 | """Docstring without newline"""
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
310 | """Docstring"""
|
||||
311 |
|
||||
312 |
|
||||
313 | """Docstring"""
|
||||
314 |
|
||||
315 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
313 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
314 | """Docstring without newline"""
|
||||
316 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
317 | """Docstring without newline"""
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
@@ -53,39 +53,39 @@ B008 Do not perform function call in argument defaults; instead, perform the cal
|
||||
|
|
||||
|
||||
B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
|
||||
--> B006_B008.py:239:31
|
||||
--> B006_B008.py:242:31
|
||||
|
|
||||
237 | # B006 and B008
|
||||
238 | # We should handle arbitrary nesting of these B008.
|
||||
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
240 | # B006 and B008
|
||||
241 | # We should handle arbitrary nesting of these B008.
|
||||
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
| ^^^^^^^^^^^^^^^^^
|
||||
240 | pass
|
||||
243 | pass
|
||||
|
|
||||
|
||||
B008 Do not perform function call `map` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
|
||||
--> B006_B008.py:245:22
|
||||
--> B006_B008.py:248:22
|
||||
|
|
||||
243 | # Don't flag nested B006 since we can't guarantee that
|
||||
244 | # it isn't made mutable by the outer operation.
|
||||
245 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
|
||||
246 | # Don't flag nested B006 since we can't guarantee that
|
||||
247 | # it isn't made mutable by the outer operation.
|
||||
248 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
246 | pass
|
||||
249 | pass
|
||||
|
|
||||
|
||||
B008 Do not perform function call `random.randint` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
|
||||
--> B006_B008.py:250:19
|
||||
--> B006_B008.py:253:19
|
||||
|
|
||||
249 | # B008-ception.
|
||||
250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
|
||||
252 | # B008-ception.
|
||||
253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
251 | pass
|
||||
254 | pass
|
||||
|
|
||||
|
||||
B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
|
||||
--> B006_B008.py:250:37
|
||||
--> B006_B008.py:253:37
|
||||
|
|
||||
249 | # B008-ception.
|
||||
250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
|
||||
252 | # B008-ception.
|
||||
253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
|
||||
| ^^^^^^^^^^^^^^^^^
|
||||
251 | pass
|
||||
254 | pass
|
||||
|
|
||||
|
||||
@@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:239:20
|
||||
--> B006_B008.py:242:20
|
||||
|
|
||||
237 | # B006 and B008
|
||||
238 | # We should handle arbitrary nesting of these B008.
|
||||
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
240 | # B006 and B008
|
||||
241 | # We should handle arbitrary nesting of these B008.
|
||||
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
240 | pass
|
||||
243 | pass
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
236 |
|
||||
237 | # B006 and B008
|
||||
238 | # We should handle arbitrary nesting of these B008.
|
||||
239 |
|
||||
240 | # B006 and B008
|
||||
241 | # We should handle arbitrary nesting of these B008.
|
||||
- def nested_combo(a=[float(3), dt.datetime.now()]):
|
||||
239 + def nested_combo(a=None):
|
||||
240 | pass
|
||||
241 |
|
||||
242 |
|
||||
242 + def nested_combo(a=None):
|
||||
243 | pass
|
||||
244 |
|
||||
245 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:276:27
|
||||
--> B006_B008.py:279:27
|
||||
|
|
||||
275 | def mutable_annotations(
|
||||
276 | a: list[int] | None = [],
|
||||
278 | def mutable_annotations(
|
||||
279 | a: list[int] | None = [],
|
||||
| ^^
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
273 |
|
||||
274 |
|
||||
275 | def mutable_annotations(
|
||||
276 |
|
||||
277 |
|
||||
278 | def mutable_annotations(
|
||||
- a: list[int] | None = [],
|
||||
276 + a: list[int] | None = None,
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 + a: list[int] | None = None,
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:277:35
|
||||
--> B006_B008.py:280:35
|
||||
|
|
||||
275 | def mutable_annotations(
|
||||
276 | a: list[int] | None = [],
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | def mutable_annotations(
|
||||
279 | a: list[int] | None = [],
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
| ^^
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
274 |
|
||||
275 | def mutable_annotations(
|
||||
276 | a: list[int] | None = [],
|
||||
277 |
|
||||
278 | def mutable_annotations(
|
||||
279 | a: list[int] | None = [],
|
||||
- b: Optional[Dict[int, int]] = {},
|
||||
277 + b: Optional[Dict[int, int]] = None,
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | ):
|
||||
280 + b: Optional[Dict[int, int]] = None,
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
283 | ):
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:278:62
|
||||
--> B006_B008.py:281:62
|
||||
|
|
||||
276 | a: list[int] | None = [],
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | a: list[int] | None = [],
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
| ^^^^^
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | ):
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
283 | ):
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
275 | def mutable_annotations(
|
||||
276 | a: list[int] | None = [],
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | def mutable_annotations(
|
||||
279 | a: list[int] | None = [],
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
- c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | ):
|
||||
281 | pass
|
||||
281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
283 | ):
|
||||
284 | pass
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:279:80
|
||||
--> B006_B008.py:282:80
|
||||
|
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
| ^^^^^
|
||||
280 | ):
|
||||
281 | pass
|
||||
283 | ):
|
||||
284 | pass
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
276 | a: list[int] | None = [],
|
||||
277 | b: Optional[Dict[int, int]] = {},
|
||||
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 | a: list[int] | None = [],
|
||||
280 | b: Optional[Dict[int, int]] = {},
|
||||
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
- d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
||||
279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
|
||||
280 | ):
|
||||
281 | pass
|
||||
282 |
|
||||
282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
|
||||
283 | ):
|
||||
284 | pass
|
||||
285 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:284:52
|
||||
--> B006_B008.py:287:52
|
||||
|
|
||||
284 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
287 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
285 | """Docstring"""
|
||||
288 | """Docstring"""
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
281 | pass
|
||||
282 |
|
||||
283 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
284 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
285 | """Docstring"""
|
||||
284 | pass
|
||||
285 |
|
||||
286 |
|
||||
287 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
287 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
288 | """Docstring"""
|
||||
289 |
|
||||
290 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:288:52
|
||||
--> B006_B008.py:291:52
|
||||
|
|
||||
288 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
291 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
289 | """Docstring"""
|
||||
290 | ...
|
||||
292 | """Docstring"""
|
||||
293 | ...
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
285 | """Docstring"""
|
||||
286 |
|
||||
287 |
|
||||
288 | """Docstring"""
|
||||
289 |
|
||||
290 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
288 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
289 | """Docstring"""
|
||||
290 | ...
|
||||
291 |
|
||||
291 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
292 | """Docstring"""
|
||||
293 | ...
|
||||
294 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:293:52
|
||||
--> B006_B008.py:296:52
|
||||
|
|
||||
293 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
296 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
294 | """Docstring"""; ...
|
||||
297 | """Docstring"""; ...
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
290 | ...
|
||||
291 |
|
||||
292 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
293 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
294 | """Docstring"""; ...
|
||||
293 | ...
|
||||
294 |
|
||||
295 |
|
||||
296 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
296 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
297 | """Docstring"""; ...
|
||||
298 |
|
||||
299 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:297:52
|
||||
--> B006_B008.py:300:52
|
||||
|
|
||||
297 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
300 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
298 | """Docstring"""; \
|
||||
299 | ...
|
||||
301 | """Docstring"""; \
|
||||
302 | ...
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
294 | """Docstring"""; ...
|
||||
295 |
|
||||
296 |
|
||||
297 | """Docstring"""; ...
|
||||
298 |
|
||||
299 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
297 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
298 | """Docstring"""; \
|
||||
299 | ...
|
||||
300 |
|
||||
300 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
301 | """Docstring"""; \
|
||||
302 | ...
|
||||
303 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:302:52
|
||||
--> B006_B008.py:305:52
|
||||
|
|
||||
302 | def single_line_func_wrong(value: dict[str, str] = {
|
||||
305 | def single_line_func_wrong(value: dict[str, str] = {
|
||||
| ____________________________________________________^
|
||||
303 | | # This is a comment
|
||||
304 | | }):
|
||||
306 | | # This is a comment
|
||||
307 | | }):
|
||||
| |_^
|
||||
305 | """Docstring"""
|
||||
308 | """Docstring"""
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
299 | ...
|
||||
300 |
|
||||
301 |
|
||||
302 | ...
|
||||
303 |
|
||||
304 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {
|
||||
- # This is a comment
|
||||
- }):
|
||||
302 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
303 | """Docstring"""
|
||||
304 |
|
||||
305 |
|
||||
305 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
306 | """Docstring"""
|
||||
307 |
|
||||
308 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
B006 Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:308:52
|
||||
--> B006_B008.py:311:52
|
||||
|
|
||||
308 | def single_line_func_wrong(value: dict[str, str] = {}) \
|
||||
311 | def single_line_func_wrong(value: dict[str, str] = {}) \
|
||||
| ^^
|
||||
309 | : \
|
||||
310 | """Docstring"""
|
||||
312 | : \
|
||||
313 | """Docstring"""
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
|
||||
B006 [*] Do not use mutable data structures for argument defaults
|
||||
--> B006_B008.py:313:52
|
||||
--> B006_B008.py:316:52
|
||||
|
|
||||
313 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
316 | def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
| ^^
|
||||
314 | """Docstring without newline"""
|
||||
317 | """Docstring without newline"""
|
||||
|
|
||||
help: Replace with `None`; initialize within function
|
||||
310 | """Docstring"""
|
||||
311 |
|
||||
312 |
|
||||
313 | """Docstring"""
|
||||
314 |
|
||||
315 |
|
||||
- def single_line_func_wrong(value: dict[str, str] = {}):
|
||||
313 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
314 | """Docstring without newline"""
|
||||
316 + def single_line_func_wrong(value: dict[str, str] = None):
|
||||
317 | """Docstring without newline"""
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
@@ -4,7 +4,9 @@ use rustc_hash::FxHashSet;
use std::sync::LazyLock;

use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::Parameter;
use ruff_python_ast::docstrings::{clean_space, leading_space};
use ruff_python_ast::helpers::map_subscript;
use ruff_python_ast::identifier::Identifier;
use ruff_python_semantic::analyze::visibility::is_staticmethod;
use ruff_python_trivia::textwrap::dedent;

@@ -1184,6 +1186,9 @@ impl AlwaysFixableViolation for MissingSectionNameColon {
/// This rule is enabled when using the `google` convention, and disabled when
/// using the `pep257` and `numpy` conventions.
///
/// Parameters annotated with `typing.Unpack` are exempt from this rule.
/// This follows the Python typing specification for unpacking keyword arguments.
///
/// ## Example
/// ```python
/// def calculate_speed(distance: float, time: float) -> float:

@@ -1233,6 +1238,7 @@ impl AlwaysFixableViolation for MissingSectionNameColon {
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
/// - [Google Python Style Guide - Docstrings](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings)
/// - [Python - Unpack for keyword arguments](https://typing.python.org/en/latest/spec/callables.html#unpack-kwargs)
#[derive(ViolationMetadata)]
#[violation_metadata(stable_since = "v0.0.73")]
pub(crate) struct UndocumentedParam {

@@ -1808,7 +1814,9 @@ fn missing_args(checker: &Checker, docstring: &Docstring, docstrings_args: &FxHa
                missing_arg_names.insert(starred_arg_name);
            }
        }
-       if let Some(arg) = function.parameters.kwarg.as_ref() {
+       if let Some(arg) = function.parameters.kwarg.as_ref()
+           && !has_unpack_annotation(checker, arg)
+       {
            let arg_name = arg.name.as_str();
            let starred_arg_name = format!("**{arg_name}");
            if !arg_name.starts_with('_')

@@ -1834,6 +1842,15 @@ fn missing_args(checker: &Checker, docstring: &Docstring, docstrings_args: &FxHa
    }
}

/// Returns `true` if the parameter is annotated with `typing.Unpack`
fn has_unpack_annotation(checker: &Checker, parameter: &Parameter) -> bool {
    parameter.annotation.as_ref().is_some_and(|annotation| {
        checker
            .semantic()
            .match_typing_expr(map_subscript(annotation), "Unpack")
    })
}

// See: `GOOGLE_ARGS_REGEX` in `pydocstyle/checker.py`.
static GOOGLE_ARGS_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:(\r\n|\n)?\s*.+").unwrap());
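With the new `has_unpack_annotation` helper, D417 no longer requires a docstring entry for a `**kwargs` parameter annotated with `typing.Unpack`, while every other undocumented parameter is still flagged. A minimal illustration (the `Options`/`fetch` names are hypothetical, mirroring the `User` fixture earlier in this diff):

```python
from typing import TypedDict

from typing_extensions import Unpack


class Options(TypedDict):
    retries: int
    timeout: float


def fetch(url: str, **kwargs: Unpack[Options]) -> None:
    """Fetch a resource.

    Args:
        url: The address to fetch.
    """
    # `**kwargs` is exempt from D417 because of the Unpack annotation;
    # `url` would still be flagged if it were missing from Args.
```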
@@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
|
||||
200 | """
|
||||
201 | Send a message.
|
||||
|
|
||||
|
||||
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
|
||||
--> D417.py:238:5
|
||||
|
|
||||
236 | """
|
||||
237 |
|
||||
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
239 | """Function with Unpack kwargs but missing query arg documentation.
|
||||
|
|
||||
|
||||
@@ -83,3 +83,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
|
||||
200 | """
|
||||
201 | Send a message.
|
||||
|
|
||||
|
||||
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
|
||||
--> D417.py:238:5
|
||||
|
|
||||
236 | """
|
||||
237 |
|
||||
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
239 | """Function with Unpack kwargs but missing query arg documentation.
|
||||
|
|
||||
|
||||
@@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
|
||||
200 | """
|
||||
201 | Send a message.
|
||||
|
|
||||
|
||||
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
|
||||
--> D417.py:238:5
|
||||
|
|
||||
236 | """
|
||||
237 |
|
||||
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
239 | """Function with Unpack kwargs but missing query arg documentation.
|
||||
|
|
||||
|
||||
@@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
|
||||
200 | """
|
||||
201 | Send a message.
|
||||
|
|
||||
|
||||
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
|
||||
--> D417.py:238:5
|
||||
|
|
||||
236 | """
|
||||
237 |
|
||||
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
239 | """Function with Unpack kwargs but missing query arg documentation.
|
||||
|
|
||||
|
||||
@@ -28,6 +28,7 @@ mod tests {
    use crate::settings::types::PreviewMode;
    use crate::settings::{LinterSettings, flags};
    use crate::source_kind::SourceKind;
    use crate::suppression::Suppressions;
    use crate::test::{test_contents, test_path, test_snippet};
    use crate::{Locator, assert_diagnostics, assert_diagnostics_diff, directives};

@@ -955,6 +956,8 @@ mod tests {
            &locator,
            &indexer,
        );
        let suppressions =
            Suppressions::from_tokens(&settings, locator.contents(), parsed.tokens());
        let mut messages = check_path(
            Path::new("<filename>"),
            None,

@@ -968,6 +971,7 @@ mod tests {
            source_type,
            &parsed,
            target_version,
            &suppressions,
        );
        messages.sort_by(Diagnostic::ruff_start_ordering);
        let actual = messages

@@ -305,6 +305,25 @@ mod tests {
        Ok(())
    }

    #[test]
    fn range_suppressions() -> Result<()> {
        assert_diagnostics_diff!(
            Path::new("ruff/suppressions.py"),
            &settings::LinterSettings::for_rules(vec![
                Rule::UnusedVariable,
                Rule::AmbiguousVariableName,
                Rule::UnusedNOQA,
            ]),
            &settings::LinterSettings::for_rules(vec![
                Rule::UnusedVariable,
                Rule::AmbiguousVariableName,
                Rule::UnusedNOQA,
            ])
            .with_preview_mode(),
        );
        Ok(())
    }

    #[test]
    fn ruf100_0() -> Result<()> {
        let diagnostics = test_path(
@@ -0,0 +1,168 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/ruff/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 9
|
||||
Added: 1
|
||||
|
||||
--- Removed ---
|
||||
E741 Ambiguous variable name: `I`
|
||||
--> suppressions.py:4:5
|
||||
|
|
||||
2 | # These should both be ignored by the range suppression.
|
||||
3 | # ruff: disable[E741, F841]
|
||||
4 | I = 1
|
||||
| ^
|
||||
5 | # ruff: enable[E741, F841]
|
||||
|
|
||||
|
||||
|
||||
F841 [*] Local variable `I` is assigned to but never used
|
||||
--> suppressions.py:4:5
|
||||
|
|
||||
2 | # These should both be ignored by the range suppression.
|
||||
3 | # ruff: disable[E741, F841]
|
||||
4 | I = 1
|
||||
| ^
|
||||
5 | # ruff: enable[E741, F841]
|
||||
|
|
||||
help: Remove assignment to unused variable `I`
|
||||
1 | def f():
|
||||
2 | # These should both be ignored by the range suppression.
|
||||
3 | # ruff: disable[E741, F841]
|
||||
- I = 1
|
||||
4 + pass
|
||||
5 | # ruff: enable[E741, F841]
|
||||
6 |
|
||||
7 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
|
||||
E741 Ambiguous variable name: `I`
|
||||
--> suppressions.py:12:5
|
||||
|
|
||||
10 | # Should also generate an "unmatched suppression" warning.
|
||||
11 | # ruff:disable[E741,F841]
|
||||
12 | I = 1
|
||||
| ^
|
||||
|
|
||||
|
||||
|
||||
F841 [*] Local variable `I` is assigned to but never used
|
||||
--> suppressions.py:12:5
|
||||
|
|
||||
10 | # Should also generate an "unmatched suppression" warning.
|
||||
11 | # ruff:disable[E741,F841]
|
||||
12 | I = 1
|
||||
| ^
|
||||
|
|
||||
help: Remove assignment to unused variable `I`
|
||||
9 | # These should both be ignored by the implicit range suppression.
|
||||
10 | # Should also generate an "unmatched suppression" warning.
|
||||
11 | # ruff:disable[E741,F841]
|
||||
- I = 1
|
||||
12 + pass
|
||||
13 |
|
||||
14 |
|
||||
15 | def f():
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
|
||||
E741 Ambiguous variable name: `I`
|
||||
--> suppressions.py:26:5
|
||||
|
|
||||
24 | # the other logged to the user.
|
||||
25 | # ruff: disable[E741]
|
||||
26 | I = 1
|
||||
| ^
|
||||
27 | # ruff: enable[E741]
|
||||
|
|
||||
|
||||
|
||||
E741 Ambiguous variable name: `l`
|
||||
--> suppressions.py:35:5
|
||||
|
|
||||
33 | # middle line should be completely silenced.
|
||||
34 | # ruff: disable[E741]
|
||||
35 | l = 0
|
||||
| ^
|
||||
36 | # ruff: disable[F841]
|
||||
37 | O = 1
|
||||
|
|
||||
|
||||
|
||||
E741 Ambiguous variable name: `O`
|
||||
--> suppressions.py:37:5
|
||||
|
|
||||
35 | l = 0
|
||||
36 | # ruff: disable[F841]
|
||||
37 | O = 1
|
||||
| ^
|
||||
38 | # ruff: enable[E741]
|
||||
39 | I = 2
|
||||
|
|
||||
|
||||
|
||||
F841 [*] Local variable `O` is assigned to but never used
|
||||
--> suppressions.py:37:5
|
||||
|
|
||||
35 | l = 0
|
||||
36 | # ruff: disable[F841]
|
||||
37 | O = 1
|
||||
| ^
|
||||
38 | # ruff: enable[E741]
|
||||
39 | I = 2
|
||||
|
|
||||
help: Remove assignment to unused variable `O`
|
||||
34 | # ruff: disable[E741]
|
||||
35 | l = 0
|
||||
36 | # ruff: disable[F841]
|
||||
- O = 1
|
||||
37 | # ruff: enable[E741]
|
||||
38 | I = 2
|
||||
39 | # ruff: enable[F841]
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
|
||||
F841 [*] Local variable `I` is assigned to but never used
|
||||
--> suppressions.py:39:5
|
||||
|
|
||||
37 | O = 1
|
||||
38 | # ruff: enable[E741]
|
||||
39 | I = 2
|
||||
| ^
|
||||
40 | # ruff: enable[F841]
|
||||
|
|
||||
help: Remove assignment to unused variable `I`
|
||||
36 | # ruff: disable[F841]
|
||||
37 | O = 1
|
||||
38 | # ruff: enable[E741]
|
||||
- I = 2
|
||||
39 | # ruff: enable[F841]
|
||||
40 |
|
||||
41 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
|
||||
|
||||
--- Added ---
|
||||
RUF100 [*] Unused `noqa` directive (unused: `E741`, `F841`)
|
||||
--> suppressions.py:55:12
|
||||
|
|
||||
53 | # and an unused noqa diagnostic should be logged.
|
||||
54 | # ruff:disable[E741,F841]
|
||||
55 | I = 1 # noqa: E741,F841
|
||||
| ^^^^^^^^^^^^^^^^^
|
||||
56 | # ruff:enable[E741,F841]
|
||||
|
|
||||
help: Remove unused `noqa` directive
|
||||
52 | # These should both be ignored by the range suppression,
|
||||
53 | # and an unused noqa diagnostic should be logged.
|
||||
54 | # ruff:disable[E741,F841]
|
||||
- I = 1 # noqa: E741,F841
|
||||
55 + I = 1
|
||||
56 | # ruff:enable[E741,F841]
|
||||
@@ -465,6 +465,12 @@ impl LinterSettings {
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_preview_mode(mut self) -> Self {
|
||||
self.preview = PreviewMode::Enabled;
|
||||
self
|
||||
}
|
||||
|
||||
/// Resolve the [`TargetVersion`] to use for linting.
|
||||
///
|
||||
/// This method respects the per-file version overrides in
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use compact_str::CompactString;
|
||||
use core::fmt;
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_python_ast::token::{TokenKind, Tokens};
|
||||
use ruff_python_ast::whitespace::indentation;
|
||||
use std::{error::Error, fmt::Formatter};
|
||||
@@ -9,6 +10,9 @@ use ruff_python_trivia::Cursor;
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize, TextSlice};
|
||||
use smallvec::{SmallVec, smallvec};
|
||||
|
||||
use crate::preview::is_range_suppressions_enabled;
|
||||
use crate::settings::LinterSettings;
|
||||
|
||||
#[allow(unused)]
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
enum SuppressionAction {
|
||||
@@ -98,8 +102,8 @@ pub(crate) struct InvalidSuppression {
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Suppressions {
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Suppressions {
|
||||
/// Valid suppression ranges with associated comments
|
||||
valid: Vec<Suppression>,
|
||||
|
||||
@@ -112,9 +116,41 @@ pub(crate) struct Suppressions {

#[allow(unused)]
impl Suppressions {
    pub(crate) fn from_tokens(source: &str, tokens: &Tokens) -> Suppressions {
        let builder = SuppressionsBuilder::new(source);
        builder.load_from_tokens(tokens)
    pub fn from_tokens(settings: &LinterSettings, source: &str, tokens: &Tokens) -> Suppressions {
        if is_range_suppressions_enabled(settings) {
            let builder = SuppressionsBuilder::new(source);
            builder.load_from_tokens(tokens)
        } else {
            Suppressions::default()
        }
    }

    pub(crate) fn is_empty(&self) -> bool {
        self.valid.is_empty()
    }

    /// Check if a diagnostic is suppressed by any known range suppressions
    pub(crate) fn check_diagnostic(&self, diagnostic: &Diagnostic) -> bool {
        if self.valid.is_empty() {
            return false;
        }

        let Some(code) = diagnostic.secondary_code() else {
            return false;
        };
        let Some(span) = diagnostic.primary_span() else {
            return false;
        };
        let Some(range) = span.range() else {
            return false;
        };

        for suppression in &self.valid {
            if *code == suppression.code.as_str() && suppression.range.contains_range(range) {
                return true;
            }
        }
        false
    }
}
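The snapshot earlier in this compare shows what these range suppressions look like from the user's side. A minimal sketch of the preview-only comment syntax, based on that fixture (behavior may still change while the feature is in preview):

```py
def f():
    # Both diagnostics on the assignment below are silenced by the range
    # suppression (E741: ambiguous name, F841: unused variable).
    # ruff: disable[E741, F841]
    I = 1
    # ruff: enable[E741, F841]

    # A `disable` with no matching `enable` suppresses through the end of
    # the file and is additionally reported as an unmatched suppression.
    # ruff: disable[E741]
    l = 0
```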
|
||||
|
||||
@@ -457,9 +493,12 @@ mod tests {
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use similar::DiffableStr;
|
||||
|
||||
use crate::suppression::{
|
||||
InvalidSuppression, ParseError, Suppression, SuppressionAction, SuppressionComment,
|
||||
SuppressionParser, Suppressions,
|
||||
use crate::{
|
||||
settings::LinterSettings,
|
||||
suppression::{
|
||||
InvalidSuppression, ParseError, Suppression, SuppressionAction, SuppressionComment,
|
||||
SuppressionParser, Suppressions,
|
||||
},
|
||||
};
|
||||
|
||||
#[test]
|
||||
@@ -1376,7 +1415,11 @@ def bar():
|
||||
/// Parse all suppressions and errors in a module for testing
|
||||
fn debug(source: &'_ str) -> DebugSuppressions<'_> {
|
||||
let parsed = parse(source, ParseOptions::from(Mode::Module)).unwrap();
|
||||
let suppressions = Suppressions::from_tokens(source, parsed.tokens());
|
||||
let suppressions = Suppressions::from_tokens(
|
||||
&LinterSettings::default().with_preview_mode(),
|
||||
source,
|
||||
parsed.tokens(),
|
||||
);
|
||||
DebugSuppressions {
|
||||
source,
|
||||
suppressions,
|
||||
|
||||
@@ -32,6 +32,7 @@ use crate::packaging::detect_package_root;
|
||||
use crate::settings::types::UnsafeFixes;
|
||||
use crate::settings::{LinterSettings, flags};
|
||||
use crate::source_kind::SourceKind;
|
||||
use crate::suppression::Suppressions;
|
||||
use crate::{Applicability, FixAvailability};
|
||||
use crate::{Locator, directives};
|
||||
|
||||
@@ -234,6 +235,7 @@ pub(crate) fn test_contents<'a>(
|
||||
&locator,
|
||||
&indexer,
|
||||
);
|
||||
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
|
||||
let messages = check_path(
|
||||
path,
|
||||
path.parent()
|
||||
@@ -249,6 +251,7 @@ pub(crate) fn test_contents<'a>(
|
||||
source_type,
|
||||
&parsed,
|
||||
target_version,
|
||||
&suppressions,
|
||||
);
|
||||
|
||||
let source_has_errors = parsed.has_invalid_syntax();
|
||||
@@ -299,6 +302,8 @@ pub(crate) fn test_contents<'a>(
|
||||
&indexer,
|
||||
);
|
||||
|
||||
let suppressions =
|
||||
Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
|
||||
let fixed_messages = check_path(
|
||||
path,
|
||||
None,
|
||||
@@ -312,6 +317,7 @@ pub(crate) fn test_contents<'a>(
|
||||
source_type,
|
||||
&parsed,
|
||||
target_version,
|
||||
&suppressions,
|
||||
);
|
||||
|
||||
if parsed.has_invalid_syntax() && !source_has_errors {
|
||||
|
||||
@@ -326,7 +326,15 @@ pub fn is_immutable_return_type(qualified_name: &[&str]) -> bool {
|
||||
| ["re", "compile"]
|
||||
| [
|
||||
"",
|
||||
"bool" | "bytes" | "complex" | "float" | "frozenset" | "int" | "str" | "tuple"
|
||||
"bool"
|
||||
| "bytes"
|
||||
| "complex"
|
||||
| "float"
|
||||
| "frozenset"
|
||||
| "int"
|
||||
| "str"
|
||||
| "tuple"
|
||||
| "slice"
|
||||
]
|
||||
)
|
||||
}
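Adding `slice` to this list relies on slice objects being immutable, which holds in CPython: their attributes are read-only, so a returned `slice(...)` cannot be mutated by the caller. A quick illustration:

```py
s = slice(1, 10, 2)
print(s.start, s.stop, s.step)  # 1 10 2

try:
    s.start = 5
except AttributeError as err:
    print(err)  # slice attributes are read-only
```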
|
||||
|
||||
@@ -20,6 +20,7 @@ use ruff_linter::{
|
||||
packaging::detect_package_root,
|
||||
settings::flags,
|
||||
source_kind::SourceKind,
|
||||
suppression::Suppressions,
|
||||
};
|
||||
use ruff_notebook::Notebook;
|
||||
use ruff_python_codegen::Stylist;
|
||||
@@ -118,6 +119,10 @@ pub(crate) fn check(
|
||||
// Extract the `# noqa` and `# isort: skip` directives from the source.
|
||||
let directives = extract_directives(parsed.tokens(), Flags::all(), &locator, &indexer);
|
||||
|
||||
// Parse range suppression comments
|
||||
let suppressions =
|
||||
Suppressions::from_tokens(&settings.linter, locator.contents(), parsed.tokens());
|
||||
|
||||
// Generate checks.
|
||||
let diagnostics = check_path(
|
||||
&document_path,
|
||||
@@ -132,6 +137,7 @@ pub(crate) fn check(
|
||||
source_type,
|
||||
&parsed,
|
||||
target_version,
|
||||
&suppressions,
|
||||
);
|
||||
|
||||
let noqa_edits = generate_noqa_edits(
|
||||
@@ -142,6 +148,7 @@ pub(crate) fn check(
|
||||
&settings.linter.external,
|
||||
&directives.noqa_line_for,
|
||||
stylist.line_ending(),
|
||||
&suppressions,
|
||||
);
|
||||
|
||||
let mut diagnostics_map = DiagnosticsMap::default();
|
||||
|
||||
@@ -2,6 +2,7 @@ use std::path::Path;
|
||||
|
||||
use js_sys::Error;
|
||||
use ruff_linter::settings::types::PythonVersion;
|
||||
use ruff_linter::suppression::Suppressions;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
@@ -212,6 +213,9 @@ impl Workspace {
|
||||
&indexer,
|
||||
);
|
||||
|
||||
let suppressions =
|
||||
Suppressions::from_tokens(&self.settings.linter, locator.contents(), parsed.tokens());
|
||||
|
||||
// Generate checks.
|
||||
let diagnostics = check_path(
|
||||
Path::new("<filename>"),
|
||||
@@ -226,6 +230,7 @@ impl Workspace {
|
||||
source_type,
|
||||
&parsed,
|
||||
target_version,
|
||||
&suppressions,
|
||||
);
|
||||
|
||||
let source_code = locator.to_source_code();
|
||||
|
||||
@@ -5,7 +5,8 @@ use ruff_diagnostics::Edit;
|
||||
use ruff_text_size::TextRange;
|
||||
use ty_project::Db;
|
||||
use ty_python_semantic::create_suppression_fix;
|
||||
use ty_python_semantic::types::UNRESOLVED_REFERENCE;
|
||||
use ty_python_semantic::lint::LintId;
|
||||
use ty_python_semantic::types::{UNDEFINED_REVEAL, UNRESOLVED_REFERENCE};
|
||||
|
||||
/// A `QuickFix` Code Action
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -28,12 +29,17 @@ pub fn code_actions(
|
||||
|
||||
let mut actions = Vec::new();
|
||||
|
||||
if lint_id.name() == UNRESOLVED_REFERENCE.name()
|
||||
// Suggest imports for unresolved references (often ideal)
|
||||
// TODO: suggest qualifying with an already imported symbol
|
||||
let is_unresolved_reference =
|
||||
lint_id == LintId::of(&UNRESOLVED_REFERENCE) || lint_id == LintId::of(&UNDEFINED_REVEAL);
|
||||
if is_unresolved_reference
|
||||
&& let Some(import_quick_fix) = create_import_symbol_quick_fix(db, file, diagnostic_range)
|
||||
{
|
||||
actions.extend(import_quick_fix);
|
||||
}
|
||||
|
||||
// Suggest just suppressing the lint (always a valid option, but never ideal)
|
||||
actions.push(QuickFix {
|
||||
title: format!("Ignore '{}' for this line", lint_id.name()),
|
||||
edits: create_suppression_fix(db, file, lint_id, diagnostic_range).into_edits(),
|
||||
|
||||
@@ -9,6 +9,7 @@ use ruff_python_ast::token::{Token, TokenAt, TokenKind, Tokens};
|
||||
use ruff_python_ast::{self as ast, AnyNodeRef};
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
use rustc_hash::FxHashSet;
|
||||
use ty_python_semantic::types::UnionType;
|
||||
use ty_python_semantic::{
|
||||
Completion as SemanticCompletion, KnownModule, ModuleName, NameKind, SemanticModel,
|
||||
@@ -20,7 +21,7 @@ use crate::find_node::covering_node;
|
||||
use crate::goto::Definitions;
|
||||
use crate::importer::{ImportRequest, Importer};
|
||||
use crate::symbols::QueryPattern;
|
||||
use crate::{Db, all_symbols};
|
||||
use crate::{Db, all_symbols, signature_help};
|
||||
|
||||
/// A collection of completions built up from various sources.
|
||||
#[derive(Clone)]
|
||||
@@ -436,6 +437,10 @@ pub fn completion<'db>(
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(arg_completions) = detect_function_arg_completions(db, file, &parsed, offset) {
|
||||
completions.extend(arg_completions);
|
||||
}
|
||||
}
|
||||
|
||||
if is_raising_exception(tokens) {
|
||||
@@ -451,10 +456,89 @@ pub fn completion<'db>(
|
||||
!ty.is_notimplemented(db)
|
||||
});
|
||||
}
|
||||
|
||||
completions.into_completions()
|
||||
}
|
||||
|
||||
/// Detect and construct completions for unset function arguments.
|
||||
///
|
||||
/// Suggestions are only provided if the cursor is currently inside a
/// function call, and only for parameters that have not already been
/// set and are not positional-only.
|
||||
fn detect_function_arg_completions<'db>(
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
parsed: &ParsedModuleRef,
|
||||
offset: TextSize,
|
||||
) -> Option<Vec<Completion<'db>>> {
|
||||
let sig_help = signature_help(db, file, offset)?;
|
||||
let set_function_args = detect_set_function_args(parsed, offset);
|
||||
|
||||
let completions = sig_help
|
||||
.signatures
|
||||
.iter()
|
||||
.flat_map(|sig| &sig.parameters)
|
||||
.filter(|p| !p.is_positional_only && !set_function_args.contains(&p.name.as_str()))
|
||||
.map(|p| {
|
||||
let name = Name::new(&p.name);
|
||||
let documentation = p
|
||||
.documentation
|
||||
.as_ref()
|
||||
.map(|d| Docstring::new(d.to_owned()));
|
||||
let insert = Some(format!("{name}=").into_boxed_str());
|
||||
Completion {
|
||||
name,
|
||||
qualified: None,
|
||||
insert,
|
||||
ty: p.ty,
|
||||
kind: Some(CompletionKind::Variable),
|
||||
module_name: None,
|
||||
import: None,
|
||||
builtin: false,
|
||||
is_type_check_only: false,
|
||||
is_definitively_raisable: false,
|
||||
documentation,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
Some(completions)
|
||||
}
|
||||
|
||||
/// Returns function arguments that have already been set.
|
||||
///
|
||||
/// If `offset` is inside an arguments node, this returns
|
||||
/// the set of argument names that are already set.
|
||||
///
|
||||
/// For example, given:
|
||||
///
|
||||
/// ```python
|
||||
/// def abc(foo, bar, baz): ...
|
||||
/// abc(foo=1, bar=2, b<CURSOR>)
|
||||
/// ```
|
||||
///
|
||||
/// the resulting value is `["foo", "bar"]`
|
||||
///
|
||||
/// This is useful for excluding autocomplete suggestions
/// for arguments that have already been set to some value.
///
/// If the parent node is not an arguments node, the return value
/// is an empty set.
|
||||
fn detect_set_function_args(parsed: &ParsedModuleRef, offset: TextSize) -> FxHashSet<&str> {
|
||||
let range = TextRange::empty(offset);
|
||||
covering_node(parsed.syntax().into(), range)
|
||||
.parent()
|
||||
.and_then(|node| match node {
|
||||
ast::AnyNodeRef::Arguments(args) => Some(args),
|
||||
_ => None,
|
||||
})
|
||||
.map(|args| {
|
||||
args.keywords
|
||||
.iter()
|
||||
.filter_map(|kw| kw.arg.as_ref().map(|ident| ident.id.as_str()))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
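A rough Python analogue of what `detect_set_function_args` computes, using the stdlib `ast` module purely for illustration (ty walks its own Rust syntax tree, not Python's):

```py
import ast

source = "abc(foo=1, bar=2)"
call = ast.parse(source, mode="eval").body
assert isinstance(call, ast.Call)

# Keyword arguments already spelled out in the call; these names are the
# ones the completion logic excludes from keyword-argument suggestions.
already_set = {kw.arg for kw in call.keywords if kw.arg is not None}
print(sorted(already_set))  # ['bar', 'foo']
```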
|
||||
|
||||
pub(crate) struct ImportEdit {
|
||||
pub label: String,
|
||||
pub edit: Edit,
|
||||
@@ -2386,10 +2470,11 @@ def frob(): ...
|
||||
",
|
||||
);
|
||||
|
||||
// FIXME: Should include `foo`.
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@"<No completions found after filtering out completions>",
|
||||
@r"
|
||||
foo
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
@@ -2401,10 +2486,11 @@ def frob(): ...
|
||||
",
|
||||
);
|
||||
|
||||
// FIXME: Should include `foo`.
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@"<No completions found after filtering out completions>",
|
||||
@r"
|
||||
foo
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
@@ -3039,7 +3125,6 @@ quux.<CURSOR>
|
||||
");
|
||||
}
|
||||
|
||||
// We don't yet take function parameters into account.
|
||||
#[test]
|
||||
fn call_prefix1() {
|
||||
let builder = completion_test_builder(
|
||||
@@ -3052,7 +3137,157 @@ bar(o<CURSOR>
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo");
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@r"
|
||||
foo
|
||||
okay
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn call_keyword_only_argument() {
|
||||
let builder = completion_test_builder(
|
||||
"\
|
||||
def bar(*, okay): ...
|
||||
|
||||
foo = 1
|
||||
|
||||
bar(o<CURSOR>
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@r"
|
||||
foo
|
||||
okay
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn call_multiple_keyword_arguments() {
|
||||
let builder = completion_test_builder(
|
||||
"\
|
||||
def foo(bar, baz, barbaz): ...
|
||||
|
||||
foo(b<CURSOR>
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@r"
|
||||
bar
|
||||
barbaz
|
||||
baz
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn call_multiple_keyword_arguments_some_set() {
|
||||
let builder = completion_test_builder(
|
||||
"\
|
||||
def foo(bar, baz): ...
|
||||
|
||||
foo(bar=1, b<CURSOR>
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@r"
|
||||
baz
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn call_arguments_multi_def() {
|
||||
let builder = completion_test_builder(
|
||||
"\
|
||||
def abc(okay, x): ...
|
||||
def bar(not_okay, y): ...
|
||||
def baz(foobarbaz, z): ...
|
||||
|
||||
abc(o<CURSOR>
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@r"
|
||||
okay
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn call_arguments_cursor_middle() {
|
||||
let builder = completion_test_builder(
|
||||
"\
|
||||
def abc(okay, foo, bar, baz): ...
|
||||
|
||||
abc(okay=1, ba<CURSOR> baz=5
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@r"
|
||||
bar
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn call_positional_only_argument() {
|
||||
// If the parameter is positional-only we don't
// want to suggest it, since specifying it by name
// is not valid.
|
||||
let builder = completion_test_builder(
|
||||
"\
|
||||
def bar(okay, /): ...
|
||||
|
||||
foo = 1
|
||||
|
||||
bar(o<CURSOR>
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@"foo"
|
||||
);
|
||||
}
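The filtering this test exercises mirrors Python's runtime rules: a positional-only parameter cannot be passed by keyword, so offering `okay=` as a completion would only lead to a `TypeError`. For example:

```py
def bar(okay, /): ...

bar(1)  # fine: passed positionally

try:
    bar(okay=1)
except TypeError as err:
    # "bar() got some positional-only arguments passed as keyword arguments: 'okay'"
    print(err)
```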
|
||||
|
||||
#[test]
|
||||
fn call_positional_only_keyword_only_argument_mix() {
|
||||
// If the parameter is positional-only we don't
// want to suggest it, since specifying it by name
// is not valid.
|
||||
let builder = completion_test_builder(
|
||||
"\
|
||||
def bar(not_okay, no, /, okay, *, okay_abc, okay_okay): ...
|
||||
|
||||
foo = 1
|
||||
|
||||
bar(o<CURSOR>
|
||||
",
|
||||
);
|
||||
|
||||
assert_snapshot!(
|
||||
builder.skip_keywords().skip_builtins().build().snapshot(),
|
||||
@r"
|
||||
foo
|
||||
okay
|
||||
okay_abc
|
||||
okay_okay
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -3070,6 +3305,7 @@ bar(<CURSOR>
|
||||
assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r"
|
||||
bar
|
||||
foo
|
||||
okay
|
||||
");
|
||||
}
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ use ty_python_semantic::semantic_index::definition::Definition;
|
||||
use ty_python_semantic::types::ide_support::{
|
||||
CallSignatureDetails, call_signature_details, find_active_signature_from_details,
|
||||
};
|
||||
use ty_python_semantic::types::{ParameterKind, Type};
|
||||
|
||||
// TODO: We may want to add special-case handling for calls to constructors
|
||||
// so the class docstring is used in place of (or in addition to) any docstring
|
||||
@@ -27,25 +28,29 @@ use ty_python_semantic::types::ide_support::{
|
||||
|
||||
/// Information about a function parameter
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct ParameterDetails {
|
||||
pub struct ParameterDetails<'db> {
|
||||
/// The parameter name (e.g., "param1")
|
||||
pub name: String,
|
||||
/// The parameter label in the signature (e.g., "param1: str")
|
||||
pub label: String,
|
||||
/// The annotated type of the parameter, if any
|
||||
pub ty: Option<Type<'db>>,
|
||||
/// Documentation specific to the parameter, typically extracted from the
|
||||
/// function's docstring
|
||||
pub documentation: Option<String>,
|
||||
/// True if the parameter is positional-only.
|
||||
pub is_positional_only: bool,
|
||||
}
|
||||
|
||||
/// Information about a function signature
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct SignatureDetails {
|
||||
pub struct SignatureDetails<'db> {
|
||||
/// Text representation of the full signature (including input parameters and return type).
|
||||
pub label: String,
|
||||
/// Documentation for the signature, typically from the function's docstring.
|
||||
pub documentation: Option<Docstring>,
|
||||
/// Information about each of the parameters in left-to-right order.
|
||||
pub parameters: Vec<ParameterDetails>,
|
||||
pub parameters: Vec<ParameterDetails<'db>>,
|
||||
/// Index of the parameter that corresponds to the argument where the
|
||||
/// user's cursor is currently positioned.
|
||||
pub active_parameter: Option<usize>,
|
||||
@@ -53,18 +58,18 @@ pub struct SignatureDetails {
|
||||
|
||||
/// Signature help information for function calls
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct SignatureHelpInfo {
|
||||
pub struct SignatureHelpInfo<'db> {
|
||||
/// Information about each of the signatures for the function call. We
|
||||
/// need to handle multiple because of unions, overloads, and composite
|
||||
/// calls like constructors (which invoke both __new__ and __init__).
|
||||
pub signatures: Vec<SignatureDetails>,
|
||||
pub signatures: Vec<SignatureDetails<'db>>,
|
||||
/// Index of the "active signature" which is the first signature where
|
||||
/// all arguments that are currently present in the code map to parameters.
|
||||
pub active_signature: Option<usize>,
|
||||
}
|
||||
|
||||
/// Signature help information for function calls at the given position
|
||||
pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<SignatureHelpInfo> {
|
||||
pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<SignatureHelpInfo<'_>> {
|
||||
let parsed = parsed_module(db, file).load(db);
|
||||
|
||||
// Get the call expression at the given position.
|
||||
@@ -166,11 +171,11 @@ fn get_argument_index(call_expr: &ast::ExprCall, offset: TextSize) -> usize {
|
||||
}
|
||||
|
||||
/// Create signature details from `CallSignatureDetails`.
|
||||
fn create_signature_details_from_call_signature_details(
|
||||
fn create_signature_details_from_call_signature_details<'db>(
|
||||
db: &dyn crate::Db,
|
||||
details: &CallSignatureDetails,
|
||||
details: &CallSignatureDetails<'db>,
|
||||
current_arg_index: usize,
|
||||
) -> SignatureDetails {
|
||||
) -> SignatureDetails<'db> {
|
||||
let signature_label = details.label.clone();
|
||||
|
||||
let documentation = get_callable_documentation(db, details.definition);
|
||||
@@ -200,6 +205,8 @@ fn create_signature_details_from_call_signature_details(
|
||||
&signature_label,
|
||||
documentation.as_ref(),
|
||||
&details.parameter_names,
|
||||
&details.parameter_kinds,
|
||||
&details.parameter_types,
|
||||
);
|
||||
SignatureDetails {
|
||||
label: signature_label,
|
||||
@@ -218,12 +225,14 @@ fn get_callable_documentation(
|
||||
}
|
||||
|
||||
/// Create `ParameterDetails` objects from parameter label offsets.
|
||||
fn create_parameters_from_offsets(
|
||||
fn create_parameters_from_offsets<'db>(
|
||||
parameter_offsets: &[TextRange],
|
||||
signature_label: &str,
|
||||
docstring: Option<&Docstring>,
|
||||
parameter_names: &[String],
|
||||
) -> Vec<ParameterDetails> {
|
||||
parameter_kinds: &[ParameterKind],
|
||||
parameter_types: &[Option<Type<'db>>],
|
||||
) -> Vec<ParameterDetails<'db>> {
|
||||
// Extract parameter documentation from the function's docstring if available.
|
||||
let param_docs = if let Some(docstring) = docstring {
|
||||
docstring.parameter_documentation()
|
||||
@@ -245,11 +254,18 @@ fn create_parameters_from_offsets(
|
||||
|
||||
// Get the parameter name for documentation lookup.
|
||||
let param_name = parameter_names.get(i).map(String::as_str).unwrap_or("");
|
||||
let is_positional_only = matches!(
|
||||
parameter_kinds.get(i),
|
||||
Some(ParameterKind::PositionalOnly { .. })
|
||||
);
|
||||
let ty = parameter_types.get(i).copied().flatten();
|
||||
|
||||
ParameterDetails {
|
||||
name: param_name.to_string(),
|
||||
label,
|
||||
ty,
|
||||
documentation: param_docs.get(param_name).cloned(),
|
||||
is_positional_only,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
@@ -1173,7 +1189,7 @@ def ab(a: int, *, c: int):
|
||||
}
|
||||
|
||||
impl CursorTest {
|
||||
fn signature_help(&self) -> Option<SignatureHelpInfo> {
|
||||
fn signature_help(&self) -> Option<SignatureHelpInfo<'_>> {
|
||||
crate::signature_help::signature_help(&self.db, self.cursor.file, self.cursor.offset)
|
||||
}
|
||||
|
||||
|
||||
@@ -106,45 +106,36 @@ reveal_type(admin_users) # revealed: Sequence[User]
|
||||
We can also specify particular columns to select:
|
||||
|
||||
```py
|
||||
reveal_type(User.id) # revealed: InstrumentedAttribute[int]
|
||||
stmt = select(User.id, User.name)
|
||||
# TODO: should be `Select[tuple[int, str]]`
|
||||
reveal_type(stmt) # revealed: Select[tuple[Unknown, Unknown]]
|
||||
reveal_type(stmt) # revealed: Select[tuple[int, str]]
|
||||
|
||||
ids_and_names = session.execute(stmt).all()
|
||||
# TODO: should be `Sequence[Row[tuple[int, str]]]`
|
||||
reveal_type(ids_and_names) # revealed: Sequence[Row[tuple[Unknown, Unknown]]]
|
||||
reveal_type(ids_and_names) # revealed: Sequence[Row[tuple[int, str]]]
|
||||
|
||||
for row in session.execute(stmt):
|
||||
# TODO: should be `Row[tuple[int, str]]`
|
||||
reveal_type(row) # revealed: Row[tuple[Unknown, Unknown]]
|
||||
reveal_type(row) # revealed: Row[tuple[int, str]]
|
||||
|
||||
for user_id, name in session.execute(stmt).tuples():
|
||||
# TODO: should be `int`
|
||||
reveal_type(user_id) # revealed: Unknown
|
||||
# TODO: should be `str`
|
||||
reveal_type(name) # revealed: Unknown
|
||||
reveal_type(user_id) # revealed: int
|
||||
reveal_type(name) # revealed: str
|
||||
|
||||
result = session.execute(stmt)
|
||||
row = result.one_or_none()
|
||||
assert row is not None
|
||||
(user_id, name) = row._tuple()
|
||||
# TODO: should be `int`
|
||||
reveal_type(user_id) # revealed: Unknown
|
||||
# TODO: should be `str`
|
||||
reveal_type(name) # revealed: Unknown
|
||||
reveal_type(user_id) # revealed: int
|
||||
reveal_type(name) # revealed: str
|
||||
|
||||
stmt = select(User.id).where(User.name == "Alice")
|
||||
|
||||
# TODO: should be `Select[tuple[int]]`
|
||||
reveal_type(stmt) # revealed: Select[tuple[Unknown]]
|
||||
reveal_type(stmt) # revealed: Select[tuple[int]]
|
||||
|
||||
alice_id = session.scalars(stmt).first()
|
||||
# TODO: should be `int | None`
|
||||
reveal_type(alice_id) # revealed: Unknown | None
|
||||
reveal_type(alice_id) # revealed: int | None
|
||||
|
||||
alice_id = session.scalar(stmt)
|
||||
# TODO: should be `int | None`
|
||||
reveal_type(alice_id) # revealed: Unknown | None
|
||||
reveal_type(alice_id) # revealed: int | None
|
||||
```
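The `User` model these snippets rely on is defined earlier in the markdown test and is not part of this hunk. A model along the following lines (an assumption, written in SQLAlchemy 2.0 declarative style) is what gives `User.id` and `User.name` the `InstrumentedAttribute[int]` and `InstrumentedAttribute[str]` types the inference above depends on:

```py
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase): ...


class User(Base):
    __tablename__ = "user_account"

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]
```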
|
||||
|
||||
Using the legacy `query` API also works:
|
||||
@@ -203,8 +194,6 @@ async def test_async(session: AsyncSession):
|
||||
stmt = select(User.id, User.name)
|
||||
result = await session.execute(stmt)
|
||||
for user_id, name in result.tuples():
|
||||
# TODO: should be `int`
|
||||
reveal_type(user_id) # revealed: Unknown
|
||||
# TODO: should be `str`
|
||||
reveal_type(name) # revealed: Unknown
|
||||
reveal_type(user_id) # revealed: int
|
||||
reveal_type(name) # revealed: str
|
||||
```
|
||||
|
||||
@@ -335,6 +335,12 @@ pub enum KnownModule {
|
||||
#[cfg(test)]
|
||||
Uuid,
|
||||
Warnings,
|
||||
#[strum(serialize = "sqlalchemy.sql.selectable")]
|
||||
SqlalchemySqlSelectable,
|
||||
#[strum(serialize = "sqlalchemy.sql._selectable_constructors")]
|
||||
SqlalchemySqlSelectableConstructors,
|
||||
#[strum(serialize = "sqlalchemy.orm.attributes")]
|
||||
SqlalchemyOrmAttributes,
|
||||
}
|
||||
|
||||
impl KnownModule {
|
||||
@@ -363,6 +369,9 @@ impl KnownModule {
|
||||
#[cfg(test)]
|
||||
Self::Uuid => "uuid",
|
||||
Self::Templatelib => "string.templatelib",
|
||||
Self::SqlalchemySqlSelectable => "sqlalchemy.sql.selectable",
|
||||
Self::SqlalchemySqlSelectableConstructors => "sqlalchemy.sql._selectable_constructors",
|
||||
Self::SqlalchemyOrmAttributes => "sqlalchemy.orm.attributes",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -378,7 +387,20 @@ impl KnownModule {
|
||||
if search_path.is_standard_library() {
|
||||
Self::from_str(name.as_str()).ok()
|
||||
} else {
|
||||
None
|
||||
// For non-stdlib search paths, check for known third-party modules
|
||||
Self::try_from_third_party_name(name)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a known module for third-party packages, if applicable.
|
||||
fn try_from_third_party_name(name: &ModuleName) -> Option<Self> {
|
||||
match name.as_str() {
|
||||
"sqlalchemy.sql.selectable" => Some(Self::SqlalchemySqlSelectable),
|
||||
"sqlalchemy.sql._selectable_constructors" => {
|
||||
Some(Self::SqlalchemySqlSelectableConstructors)
|
||||
}
|
||||
"sqlalchemy.orm.attributes" => Some(Self::SqlalchemyOrmAttributes),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -419,6 +441,11 @@ mod tests {
|
||||
let stdlib_search_path = SearchPath::vendored_stdlib();
|
||||
|
||||
for module in KnownModule::iter() {
|
||||
// Third-party modules aren't available in the vendored stdlib
|
||||
if module.is_third_party() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let module_name = module.name();
|
||||
|
||||
assert_eq!(
|
||||
|
||||
@@ -30,6 +30,7 @@ pub(crate) use self::infer::{
|
||||
TypeContext, infer_deferred_types, infer_definition_types, infer_expression_type,
|
||||
infer_expression_types, infer_scope_types, static_expression_truthiness,
|
||||
};
|
||||
pub use self::signatures::ParameterKind;
|
||||
pub(crate) use self::signatures::{CallableSignature, Signature};
|
||||
pub(crate) use self::subclass_of::{SubclassOfInner, SubclassOfType};
|
||||
pub use crate::diagnostic::add_inferred_python_version_hint_to_diagnostic;
|
||||
|
||||
@@ -4207,6 +4207,9 @@ pub enum KnownClass {
|
||||
ConstraintSet,
|
||||
GenericContext,
|
||||
Specialization,
|
||||
// sqlalchemy
|
||||
SqlalchemySelect,
|
||||
SqlalchemyInstrumentedAttribute,
|
||||
}
|
||||
|
||||
impl KnownClass {
|
||||
@@ -4315,7 +4318,9 @@ impl KnownClass {
|
||||
| Self::GenericContext
|
||||
| Self::Specialization
|
||||
| Self::ProtocolMeta
|
||||
| Self::TypedDictFallback => Some(Truthiness::Ambiguous),
|
||||
| Self::TypedDictFallback
|
||||
| Self::SqlalchemySelect
|
||||
| Self::SqlalchemyInstrumentedAttribute => Some(Truthiness::Ambiguous),
|
||||
|
||||
Self::Tuple => None,
|
||||
}
|
||||
@@ -4405,7 +4410,9 @@ impl KnownClass {
|
||||
| KnownClass::BuiltinFunctionType
|
||||
| KnownClass::ProtocolMeta
|
||||
| KnownClass::Template
|
||||
| KnownClass::Path => false,
|
||||
| KnownClass::Path
|
||||
| KnownClass::SqlalchemySelect
|
||||
| KnownClass::SqlalchemyInstrumentedAttribute => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4492,7 +4499,9 @@ impl KnownClass {
|
||||
| KnownClass::BuiltinFunctionType
|
||||
| KnownClass::ProtocolMeta
|
||||
| KnownClass::Template
|
||||
| KnownClass::Path => false,
|
||||
| KnownClass::Path
|
||||
| KnownClass::SqlalchemySelect
|
||||
| KnownClass::SqlalchemyInstrumentedAttribute => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4578,7 +4587,9 @@ impl KnownClass {
|
||||
| KnownClass::BuiltinFunctionType
|
||||
| KnownClass::ProtocolMeta
|
||||
| KnownClass::Template
|
||||
| KnownClass::Path => false,
|
||||
| KnownClass::Path
|
||||
| KnownClass::SqlalchemySelect
|
||||
| KnownClass::SqlalchemyInstrumentedAttribute => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4677,7 +4688,9 @@ impl KnownClass {
|
||||
| Self::ProtocolMeta
|
||||
| Self::Template
|
||||
| Self::Path
|
||||
| Self::Mapping => false,
|
||||
| Self::Mapping
|
||||
| Self::SqlalchemySelect
|
||||
| Self::SqlalchemyInstrumentedAttribute => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4766,7 +4779,9 @@ impl KnownClass {
|
||||
| KnownClass::ConstraintSet
|
||||
| KnownClass::GenericContext
|
||||
| KnownClass::Specialization
|
||||
| KnownClass::InitVar => false,
|
||||
| KnownClass::InitVar
|
||||
| KnownClass::SqlalchemySelect
|
||||
| KnownClass::SqlalchemyInstrumentedAttribute => false,
|
||||
KnownClass::NamedTupleFallback | KnownClass::TypedDictFallback => true,
|
||||
}
|
||||
}
|
||||
@@ -4882,6 +4897,8 @@ impl KnownClass {
|
||||
Self::Template => "Template",
|
||||
Self::Path => "Path",
|
||||
Self::ProtocolMeta => "_ProtocolMeta",
|
||||
Self::SqlalchemySelect => "Select",
|
||||
Self::SqlalchemyInstrumentedAttribute => "InstrumentedAttribute",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5203,6 +5220,8 @@ impl KnownClass {
|
||||
| Self::Specialization => KnownModule::TyExtensions,
|
||||
Self::Template => KnownModule::Templatelib,
|
||||
Self::Path => KnownModule::Pathlib,
|
||||
Self::SqlalchemySelect => KnownModule::SqlalchemySqlSelectable,
|
||||
Self::SqlalchemyInstrumentedAttribute => KnownModule::SqlalchemyOrmAttributes,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5291,7 +5310,9 @@ impl KnownClass {
|
||||
| Self::BuiltinFunctionType
|
||||
| Self::ProtocolMeta
|
||||
| Self::Template
|
||||
| Self::Path => Some(false),
|
||||
| Self::Path
|
||||
| Self::SqlalchemySelect
|
||||
| Self::SqlalchemyInstrumentedAttribute => Some(false),
|
||||
|
||||
Self::Tuple => None,
|
||||
}
|
||||
@@ -5383,7 +5404,9 @@ impl KnownClass {
|
||||
| Self::BuiltinFunctionType
|
||||
| Self::ProtocolMeta
|
||||
| Self::Template
|
||||
| Self::Path => false,
|
||||
| Self::Path
|
||||
| Self::SqlalchemySelect
|
||||
| Self::SqlalchemyInstrumentedAttribute => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5489,6 +5512,8 @@ impl KnownClass {
|
||||
"Template" => &[Self::Template],
|
||||
"Path" => &[Self::Path],
|
||||
"_ProtocolMeta" => &[Self::ProtocolMeta],
|
||||
"Select" => &[Self::SqlalchemySelect],
|
||||
"InstrumentedAttribute" => &[Self::SqlalchemyInstrumentedAttribute],
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
@@ -5569,7 +5594,9 @@ impl KnownClass {
|
||||
| Self::Awaitable
|
||||
| Self::Generator
|
||||
| Self::Template
|
||||
| Self::Path => module == self.canonical_module(db),
|
||||
| Self::Path
|
||||
| Self::SqlalchemySelect
|
||||
| Self::SqlalchemyInstrumentedAttribute => module == self.canonical_module(db),
|
||||
Self::NoneType => matches!(module, KnownModule::Typeshed | KnownModule::Types),
|
||||
Self::SpecialForm
|
||||
| Self::TypeAliasType
|
||||
@@ -5924,6 +5951,10 @@ mod tests {
|
||||
source: PythonVersionSource::default(),
|
||||
});
|
||||
for class in KnownClass::iter() {
|
||||
if class.canonical_module(&db).is_third_party() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let class_name = class.name(&db);
|
||||
let class_module =
|
||||
resolve_module_confident(&db, &class.canonical_module(&db).name()).unwrap();
|
||||
@@ -5952,6 +5983,10 @@ mod tests {
|
||||
});
|
||||
|
||||
for class in KnownClass::iter() {
|
||||
if class.canonical_module(&db).is_third_party() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check the class can be looked up successfully
|
||||
class.try_to_class_literal_without_logging(&db).unwrap();
|
||||
|
||||
@@ -5977,6 +6012,7 @@ mod tests {
|
||||
// This makes the test far faster as it minimizes the number of times
|
||||
// we need to change the Python version in the loop.
|
||||
let mut classes: Vec<(KnownClass, PythonVersion)> = KnownClass::iter()
|
||||
.filter(|class| !class.canonical_module(&db).is_third_party())
|
||||
.map(|class| {
|
||||
let version_added = match class {
|
||||
KnownClass::Template => PythonVersion::PY314,
|
||||
|
||||
@@ -1353,6 +1353,10 @@ pub enum KnownFunction {
|
||||
RevealProtocolInterface,
|
||||
/// `ty_extensions.reveal_mro`
|
||||
RevealMro,
|
||||
|
||||
/// `sqlalchemy.select`
|
||||
#[strum(serialize = "select")]
|
||||
SqlalchemySelect,
|
||||
}
|
||||
|
||||
impl KnownFunction {
|
||||
@@ -1425,6 +1429,9 @@ impl KnownFunction {
|
||||
|
||||
Self::TypeCheckOnly => matches!(module, KnownModule::Typing),
|
||||
Self::NamedTuple => matches!(module, KnownModule::Collections),
|
||||
Self::SqlalchemySelect => {
|
||||
matches!(module, KnownModule::SqlalchemySqlSelectableConstructors)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1896,6 +1903,56 @@ impl KnownFunction {
|
||||
|
||||
overload.set_return_type(Type::module_literal(db, file, module));
|
||||
}
|
||||
|
||||
KnownFunction::SqlalchemySelect => {
|
||||
// Try to extract types from InstrumentedAttribute[T] arguments.
|
||||
// If all arguments are InstrumentedAttribute instances, we construct
|
||||
// Select[tuple[T_1, T_2, ...]] where T_i are the inner types.
|
||||
//
|
||||
// We check the class via `class_literal.known(db)` rather than using
|
||||
// `known_specialization` because the class may be re-exported and not
|
||||
// directly importable from its canonical module.
|
||||
let inner_types: Option<Vec<_>> = parameter_types
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(|param_type| {
|
||||
let Type::NominalInstance(instance) = param_type else {
|
||||
return None;
|
||||
};
|
||||
let class = instance.class(db);
|
||||
let (class_literal, specialization) = class.class_literal(db);
|
||||
if class_literal.known(db)
|
||||
!= Some(KnownClass::SqlalchemyInstrumentedAttribute)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
specialization?.types(db).first().copied()
|
||||
})
|
||||
.collect();
|
||||
|
||||
let Some(inner_types) = inner_types else {
|
||||
// Fall back to whatever we infer from the function signature
|
||||
return;
|
||||
};
|
||||
|
||||
if inner_types.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Construct Select[tuple[T1, T2, ...]]
|
||||
// We get the return type's class from the overload rather than looking
|
||||
// it up via try_to_class_literal, since the class may be re-exported.
|
||||
let Type::NominalInstance(return_instance) = overload.return_type() else {
|
||||
return;
|
||||
};
|
||||
let select_class = return_instance.class(db).class_literal(db).0;
|
||||
let tuple_type = Type::heterogeneous_tuple(db, inner_types);
|
||||
let class_type = select_class.apply_specialization(db, |generic_context| {
|
||||
generic_context.specialize(db, vec![tuple_type].into())
|
||||
});
|
||||
overload.set_return_type(Type::instance(db, class_type));
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -1964,6 +2021,8 @@ pub(crate) mod tests {
|
||||
|
||||
KnownFunction::ImportModule => KnownModule::ImportLib,
|
||||
KnownFunction::NamedTuple => KnownModule::Collections,
|
||||
|
||||
KnownFunction::SqlalchemySelect => continue,
|
||||
};
|
||||
|
||||
let function_definition = known_module_symbol(&db, module, function_name)
|
||||
|
||||
@@ -6,7 +6,7 @@ use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::definition::DefinitionKind;
|
||||
use crate::semantic_index::{attribute_scopes, global_scope, semantic_index, use_def_map};
|
||||
use crate::types::call::{CallArguments, MatchedArgument};
|
||||
use crate::types::signatures::Signature;
|
||||
use crate::types::signatures::{ParameterKind, Signature};
|
||||
use crate::types::{CallDunderError, UnionType};
|
||||
use crate::types::{CallableTypes, ClassBase, KnownClass, Type, TypeContext};
|
||||
use crate::{Db, DisplaySettings, HasType, SemanticModel};
|
||||
@@ -459,6 +459,12 @@ pub struct CallSignatureDetails<'db> {
|
||||
/// This provides easy access to parameter names for documentation lookup.
|
||||
pub parameter_names: Vec<String>,
|
||||
|
||||
/// Parameter kinds, useful to determine correct autocomplete suggestions.
|
||||
pub parameter_kinds: Vec<ParameterKind<'db>>,
|
||||
|
||||
/// Parameter types, useful to determine correct autocomplete suggestions.
|
||||
pub parameter_types: Vec<Option<Type<'db>>>,
|
||||
|
||||
/// The definition where this callable was originally defined (useful for
|
||||
/// extracting docstrings).
|
||||
pub definition: Option<Definition<'db>>,
|
||||
@@ -517,6 +523,12 @@ pub fn call_signature_details<'db>(
|
||||
let display_details = signature.display(model.db()).to_string_parts();
|
||||
let parameter_label_offsets = display_details.parameter_ranges;
|
||||
let parameter_names = display_details.parameter_names;
|
||||
let (parameter_kinds, parameter_types): (Vec<ParameterKind>, Vec<Option<Type>>) =
|
||||
signature
|
||||
.parameters()
|
||||
.iter()
|
||||
.map(|param| (param.kind().clone(), param.annotated_type()))
|
||||
.unzip();
|
||||
|
||||
CallSignatureDetails {
|
||||
definition: signature.definition(),
|
||||
@@ -524,6 +536,8 @@ pub fn call_signature_details<'db>(
|
||||
label: display_details.label,
|
||||
parameter_label_offsets,
|
||||
parameter_names,
|
||||
parameter_kinds,
|
||||
parameter_types,
|
||||
argument_to_parameter_mapping,
|
||||
}
|
||||
})
|
||||
|
||||
@@ -2292,7 +2292,7 @@ impl<'db> Parameter<'db> {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update, get_size2::GetSize)]
|
||||
pub(crate) enum ParameterKind<'db> {
|
||||
pub enum ParameterKind<'db> {
|
||||
/// Positional-only parameter, e.g. `def f(x, /): ...`
|
||||
PositionalOnly {
|
||||
/// Parameter name.
|
||||
|
||||
@@ -132,11 +132,44 @@ x: Literal[1] = 1
|
||||
";
|
||||
|
||||
let ty_toml = SystemPath::new("ty.toml");
|
||||
let ty_toml_content = "\
|
||||
[rules]
|
||||
unused-ignore-comment = \"warn\"
|
||||
let ty_toml_content = "";
|
||||
|
||||
let mut server = TestServerBuilder::new()?
|
||||
.with_workspace(workspace_root, None)?
|
||||
.with_file(ty_toml, ty_toml_content)?
|
||||
.with_file(foo, foo_content)?
|
||||
.enable_pull_diagnostics(true)
|
||||
.build()
|
||||
.wait_until_workspaces_are_initialized();
|
||||
|
||||
server.open_text_document(foo, foo_content, 1);
|
||||
|
||||
// Wait for diagnostics to be computed.
|
||||
let diagnostics = server.document_diagnostic_request(foo, None);
|
||||
let range = full_range(foo_content);
|
||||
let code_action_params = code_actions_at(&server, diagnostics, foo, range);
|
||||
|
||||
// Get code actions
|
||||
let code_action_id = server.send_request::<CodeActionRequest>(code_action_params);
|
||||
let code_actions = server.await_response::<CodeActionRequest>(&code_action_id);
|
||||
|
||||
insta::assert_json_snapshot!(code_actions);
|
||||
|
||||
Ok(())
|
||||
}
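Per the comment below, `Literal` resolves from two modules, which is why this test expects two import quick fixes. Presumably (an assumption; the snapshot naming the second module is not shown in this hunk) the two candidates are:

```py
from typing import Literal
# ...or the backport:
# from typing_extensions import Literal

x: Literal[1] = 1
```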
|
||||
|
||||
// `Literal` is available from two places so we should suggest two possible imports
|
||||
#[test]
|
||||
fn code_action_undefined_reveal_type() -> Result<()> {
|
||||
let workspace_root = SystemPath::new("src");
|
||||
let foo = SystemPath::new("src/foo.py");
|
||||
let foo_content = "\
|
||||
reveal_type(1)
|
||||
";
|
||||
|
||||
let ty_toml = SystemPath::new("ty.toml");
|
||||
let ty_toml_content = "";
|
||||
|
||||
let mut server = TestServerBuilder::new()?
|
||||
.with_workspace(workspace_root, None)?
|
||||
.with_file(ty_toml, ty_toml_content)?
|
||||
|
||||
@@ -0,0 +1,98 @@
|
||||
---
|
||||
source: crates/ty_server/tests/e2e/code_actions.rs
|
||||
expression: code_actions
|
||||
---
|
||||
[
|
||||
{
|
||||
"title": "import typing.reveal_type",
|
||||
"kind": "quickfix",
|
||||
"diagnostics": [
|
||||
{
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 0,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 0,
|
||||
"character": 11
|
||||
}
|
||||
},
|
||||
"severity": 2,
|
||||
"code": "undefined-reveal",
|
||||
"codeDescription": {
|
||||
"href": "https://ty.dev/rules#undefined-reveal"
|
||||
},
|
||||
"source": "ty",
|
||||
"message": "`reveal_type` used without importing it",
|
||||
"relatedInformation": []
|
||||
}
|
||||
],
|
||||
"edit": {
|
||||
"changes": {
|
||||
"file://<temp_dir>/src/foo.py": [
|
||||
{
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 0,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 0,
|
||||
"character": 0
|
||||
}
|
||||
},
|
||||
"newText": "from typing import reveal_type\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"isPreferred": true
|
||||
},
|
||||
{
|
||||
"title": "Ignore 'undefined-reveal' for this line",
|
||||
"kind": "quickfix",
|
||||
"diagnostics": [
|
||||
{
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 0,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 0,
|
||||
"character": 11
|
||||
}
|
||||
},
|
||||
"severity": 2,
|
||||
"code": "undefined-reveal",
|
||||
"codeDescription": {
|
||||
"href": "https://ty.dev/rules#undefined-reveal"
|
||||
},
|
||||
"source": "ty",
|
||||
"message": "`reveal_type` used without importing it",
|
||||
"relatedInformation": []
|
||||
}
|
||||
],
|
||||
"edit": {
|
||||
"changes": {
|
||||
"file://<temp_dir>/src/foo.py": [
|
||||
{
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 0,
|
||||
"character": 14
|
||||
},
|
||||
"end": {
|
||||
"line": 0,
|
||||
"character": 14
|
||||
}
|
||||
},
|
||||
"newText": " # ty:ignore[undefined-reveal]"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"isPreferred": false
|
||||
}
|
||||
]
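Reading the snapshot back as source text: the two quick fixes in this JSON produce the following edits to `foo.py` (taken directly from the `newText` values above):

```py
# After "import typing.reveal_type":
from typing import reveal_type
reveal_type(1)

# After "Ignore 'undefined-reveal' for this line" (applied instead of the import):
# reveal_type(1)  # ty:ignore[undefined-reveal]
```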
|
||||