Compare commits


1 Commit

Author: David Peter
SHA1: e4833614c2
Message: [ty] Default specialize generic type aliases
Date: 2025-12-02 20:12:13 +01:00
370 changed files with 11256 additions and 26881 deletions

View File

@@ -75,6 +75,14 @@
matchManagers: ["cargo"],
enabled: false,
},
{
// `mkdocs-material` requires a manual update to keep the version in sync
// with `mkdocs-material-insider`.
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
matchManagers: ["pip_requirements"],
matchPackageNames: ["mkdocs-material"],
enabled: false,
},
{
groupName: "pre-commit dependencies",
matchManagers: ["pre-commit"],

View File

@@ -24,8 +24,6 @@ env:
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.14"
NEXTEST_PROFILE: ci
# Enable mdtests that require external dependencies
MDTEST_EXTERNAL: "1"
jobs:
determine_changes:
@@ -781,6 +779,8 @@ jobs:
name: "mkdocs"
runs-on: ubuntu-latest
timeout-minutes: 10
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
@@ -788,6 +788,11 @@ jobs:
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
@@ -795,7 +800,11 @@ jobs:
with:
python-version: 3.13
activate-environment: true
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: uv pip install -r docs/requirements.txt
- name: "Update README File"
run: python scripts/transform_readme.py --target mkdocs
@@ -803,8 +812,12 @@ jobs:
run: python scripts/generate_mkdocs.py
- name: "Check docs formatting"
run: python scripts/check_docs_formatted.py
- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
run: mkdocs build --strict -f mkdocs.yml
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
check-formatter-instability-and-black-similarity:
name: "formatter instabilities and black similarity"

View File

@@ -20,6 +20,8 @@ on:
jobs:
mkdocs:
runs-on: ubuntu-latest
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
@@ -57,12 +59,23 @@ jobs:
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
echo "timestamp=$timestamp" >> "$GITHUB_ENV"
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: pip install -r docs/requirements.txt
- name: "Copy README File"
@@ -70,8 +83,13 @@ jobs:
python scripts/transform_readme.py --target mkdocs
python scripts/generate_mkdocs.py
- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
run: mkdocs build --strict -f mkdocs.yml
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
- name: "Clone docs repo"
run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs

View File

@@ -18,8 +18,7 @@ jobs:
environment:
name: release
permissions:
# For PyPI's trusted publishing.
id-token: write
id-token: write # For PyPI's trusted publishing + PEP 740 attestations
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
@@ -28,5 +27,8 @@ jobs:
pattern: wheels-*
path: wheels
merge-multiple: true
- uses: astral-sh/attest-action@2c727738cea36d6c97dd85eb133ea0e0e8fe754b # v0.0.4
with:
paths: wheels/*
- name: Publish to PyPi
run: uv publish -v wheels/*

View File

@@ -1,34 +1,5 @@
# Changelog
## 0.14.8
Released on 2025-12-04.
### Preview features
- \[`flake8-bugbear`\] Catch `yield` expressions within other statements (`B901`) ([#21200](https://github.com/astral-sh/ruff/pull/21200))
- \[`flake8-use-pathlib`\] Mark fixes unsafe for return type changes (`PTH104`, `PTH105`, `PTH109`, `PTH115`) ([#21440](https://github.com/astral-sh/ruff/pull/21440))
### Bug fixes
- Fix syntax error false positives for `await` outside functions ([#21763](https://github.com/astral-sh/ruff/pull/21763))
- \[`flake8-simplify`\] Fix truthiness assumption for non-iterable arguments in tuple/list/set calls (`SIM222`, `SIM223`) ([#21479](https://github.com/astral-sh/ruff/pull/21479))
### Documentation
- Suggest using `--output-file` option in GitLab integration ([#21706](https://github.com/astral-sh/ruff/pull/21706))
### Other changes
- [syntax-error] Default type parameter followed by non-default type parameter ([#21657](https://github.com/astral-sh/ruff/pull/21657))
### Contributors
- [@kieran-ryan](https://github.com/kieran-ryan)
- [@11happy](https://github.com/11happy)
- [@danparizher](https://github.com/danparizher)
- [@ntBre](https://github.com/ntBre)
## 0.14.7
Released on 2025-11-28.

View File

@@ -331,6 +331,13 @@ you addressed them.
## MkDocs
> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) to see which features are exclusive to the Insiders version.
To preview any changes to the documentation locally:
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
@@ -344,7 +351,11 @@ To preview any changes to the documentation locally:
1. Run the development server with:
```shell
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.yml
# For contributors.
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
```
The documentation should then be available locally at

Cargo.lock (generated)
View File

@@ -2859,7 +2859,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.14.8"
version = "0.14.7"
dependencies = [
"anyhow",
"argfile",
@@ -3117,14 +3117,13 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.14.8"
version = "0.14.7"
dependencies = [
"aho-corasick",
"anyhow",
"bitflags 2.10.0",
"clap",
"colored 3.0.0",
"compact_str",
"fern",
"glob",
"globset",
@@ -3473,7 +3472,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.14.8"
version = "0.14.7"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -4557,7 +4556,6 @@ dependencies = [
"anyhow",
"camino",
"colored 3.0.0",
"dunce",
"insta",
"memchr",
"path-slash",

View File

@@ -272,12 +272,6 @@ large_stack_arrays = "allow"
lto = "fat"
codegen-units = 16
# Profile to build a minimally sized binary for ruff/ty
[profile.minimal-size]
inherits = "release"
opt-level = "z"
codegen-units = 1
# Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively
# decrease codegen-units in some cases.

View File

@@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.14.8/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.8/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.14.7/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.7/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.8
rev: v0.14.7
hooks:
# Run the linter.
- id: ruff-check

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.14.8"
version = "0.14.7"
publish = true
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -1440,78 +1440,6 @@ def function():
Ok(())
}
#[test]
fn ignore_noqa() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
fixture.write_file(
"noqa.py",
r#"
import os # noqa: F401
# ruff: disable[F401]
import sys
"#,
)?;
// without --ignore-noqa
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py"),
@r"
success: false
exit_code: 1
----- stdout -----
noqa.py:5:8: F401 [*] `sys` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
");
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.args(["--preview"]),
@r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
");
// with --ignore-noqa --preview
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.args(["--ignore-noqa", "--preview"]),
@r"
success: false
exit_code: 1
----- stdout -----
noqa.py:2:8: F401 [*] `os` imported but unused
noqa.py:5:8: F401 [*] `sys` imported but unused
Found 2 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
");
Ok(())
}
#[test]
fn add_noqa() -> Result<()> {
let fixture = CliTest::new()?;
@@ -1704,100 +1632,6 @@ def unused(x): # noqa: ANN001, ARG001, D103
Ok(())
}
#[test]
fn add_noqa_existing_file_level_noqa() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
fixture.write_file(
"noqa.py",
r#"
# ruff: noqa F401
import os
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.arg("--preview")
.args(["--add-noqa"])
.arg("-")
.pass_stdin(r#"
"#), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
# ruff: noqa F401
import os
");
Ok(())
}
#[test]
fn add_noqa_existing_range_suppression() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
fixture.write_file(
"noqa.py",
r#"
# ruff: disable[F401]
import os
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.arg("--preview")
.args(["--add-noqa"])
.arg("-")
.pass_stdin(r#"
"#), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
# ruff: disable[F401]
import os
");
Ok(())
}
#[test]
fn add_noqa_multiline_comment() -> Result<()> {
let fixture = CliTest::new()?;

View File

@@ -6,8 +6,7 @@ use criterion::{
use ruff_benchmark::{
LARGE_DATASET, NUMPY_CTYPESLIB, NUMPY_GLOBALS, PYDANTIC_TYPES, TestCase, UNICODE_PYPINYIN,
};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::{Mode, lexer};
use ruff_python_parser::{Mode, TokenKind, lexer};
#[cfg(target_os = "windows")]
#[global_allocator]

View File

@@ -166,8 +166,28 @@ impl Diagnostic {
/// Returns the primary message for this diagnostic.
///
/// A diagnostic always has a message, but it may be empty.
///
/// NOTE: At present, this routine will return the first primary
/// annotation's message as the primary message when the main diagnostic
/// message is empty. This is meant to facilitate an incremental migration
/// in ty over to the new diagnostic data model. (The old data model
/// didn't distinguish between messages on the entire diagnostic and
/// messages attached to a particular span.)
pub fn primary_message(&self) -> &str {
self.inner.message.as_str()
if !self.inner.message.as_str().is_empty() {
return self.inner.message.as_str();
}
// FIXME: As a special case, while we're migrating ty
// to the new diagnostic data model, we'll look for a primary
// message from the primary annotation. This is because most
// ty diagnostics are created with an empty diagnostic
// message and instead attach the message to the annotation.
// Fixing this will require touching basically every diagnostic
// in ty, so we do it this way for now to match the old
// semantics. ---AG
self.primary_annotation()
.and_then(|ann| ann.get_message())
.unwrap_or_default()
}
/// Introspects this diagnostic and returns what kind of "primary" message
@@ -179,6 +199,18 @@ impl Diagnostic {
/// contains *essential* information or context for understanding the
/// diagnostic.
///
/// The reason why we don't just always return both the main diagnostic
/// message and the primary annotation message is because this was written
/// in the midst of an incremental migration of ty over to the new
/// diagnostic data model. At time of writing, diagnostics were still
/// constructed in the old model where the main diagnostic message and the
/// primary annotation message were not distinguished from each other. So
/// for now, we carefully return what kind of messages this diagnostic
/// contains. In effect, if this diagnostic has a non-empty main message
/// *and* a non-empty primary annotation message, then the diagnostic is
/// 100% using the new diagnostic data model and we can format things
/// appropriately.
///
/// The type returned implements the `std::fmt::Display` trait. In most
/// cases, just converting it to a string (or printing it) will do what
/// you want.
@@ -192,10 +224,11 @@ impl Diagnostic {
.primary_annotation()
.and_then(|ann| ann.get_message())
.unwrap_or_default();
if annotation.is_empty() {
ConciseMessage::MainDiagnostic(main)
} else {
ConciseMessage::Both { main, annotation }
match (main.is_empty(), annotation.is_empty()) {
(false, true) => ConciseMessage::MainDiagnostic(main),
(true, false) => ConciseMessage::PrimaryAnnotation(annotation),
(false, false) => ConciseMessage::Both { main, annotation },
(true, true) => ConciseMessage::Empty,
}
}
@@ -660,6 +693,18 @@ impl SubDiagnostic {
/// contains *essential* information or context for understanding the
/// diagnostic.
///
/// The reason why we don't just always return both the main diagnostic
/// message and the primary annotation message is because this was written
/// in the midst of an incremental migration of ty over to the new
/// diagnostic data model. At time of writing, diagnostics were still
/// constructed in the old model where the main diagnostic message and the
/// primary annotation message were not distinguished from each other. So
/// for now, we carefully return what kind of messages this diagnostic
/// contains. In effect, if this diagnostic has a non-empty main message
/// *and* a non-empty primary annotation message, then the diagnostic is
/// 100% using the new diagnostic data model and we can format things
/// appropriately.
///
/// The type returned implements the `std::fmt::Display` trait. In most
/// cases, just converting it to a string (or printing it) will do what
/// you want.
@@ -669,10 +714,11 @@ impl SubDiagnostic {
.primary_annotation()
.and_then(|ann| ann.get_message())
.unwrap_or_default();
if annotation.is_empty() {
ConciseMessage::MainDiagnostic(main)
} else {
ConciseMessage::Both { main, annotation }
match (main.is_empty(), annotation.is_empty()) {
(false, true) => ConciseMessage::MainDiagnostic(main),
(true, false) => ConciseMessage::PrimaryAnnotation(annotation),
(false, false) => ConciseMessage::Both { main, annotation },
(true, true) => ConciseMessage::Empty,
}
}
}
@@ -842,10 +888,6 @@ impl Annotation {
pub fn hide_snippet(&mut self, yes: bool) {
self.hide_snippet = yes;
}
pub fn is_primary(&self) -> bool {
self.is_primary
}
}
/// Tags that can be associated with an annotation.
@@ -1466,10 +1508,28 @@ pub enum DiagnosticFormat {
pub enum ConciseMessage<'a> {
/// A diagnostic contains a non-empty main message and an empty
/// primary annotation message.
///
/// This strongly suggests that the diagnostic is using the
/// "new" data model.
MainDiagnostic(&'a str),
/// A diagnostic contains an empty main message and a non-empty
/// primary annotation message.
///
/// This strongly suggests that the diagnostic is using the
/// "old" data model.
PrimaryAnnotation(&'a str),
/// A diagnostic contains a non-empty main message and a non-empty
/// primary annotation message.
///
/// This strongly suggests that the diagnostic is using the
/// "new" data model.
Both { main: &'a str, annotation: &'a str },
/// A diagnostic contains an empty main message and an empty
/// primary annotation message.
///
/// This indicates that the diagnostic is probably using the old
/// model.
Empty,
/// A custom concise message has been provided.
Custom(&'a str),
}
@@ -1480,9 +1540,13 @@ impl std::fmt::Display for ConciseMessage<'_> {
ConciseMessage::MainDiagnostic(main) => {
write!(f, "{main}")
}
ConciseMessage::PrimaryAnnotation(annotation) => {
write!(f, "{annotation}")
}
ConciseMessage::Both { main, annotation } => {
write!(f, "{main}: {annotation}")
}
ConciseMessage::Empty => Ok(()),
ConciseMessage::Custom(message) => {
write!(f, "{message}")
}
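
The change above makes the concise message depend on which of the two messages is actually present: the main diagnostic message, the primary annotation message, both, or neither. A minimal, self-contained sketch of that decision table and its `Display` behavior (the `Concise` enum and `concise` function below are hypothetical stand-ins, not the `ruff_db` API):

```rust
use std::fmt;

enum Concise<'a> {
    MainDiagnostic(&'a str),
    PrimaryAnnotation(&'a str),
    Both { main: &'a str, annotation: &'a str },
    Empty,
}

impl fmt::Display for Concise<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Concise::MainDiagnostic(main) => write!(f, "{main}"),
            Concise::PrimaryAnnotation(annotation) => write!(f, "{annotation}"),
            Concise::Both { main, annotation } => write!(f, "{main}: {annotation}"),
            Concise::Empty => Ok(()),
        }
    }
}

/// Mirrors the `match (main.is_empty(), annotation.is_empty())` in the diff above.
fn concise<'a>(main: &'a str, annotation: &'a str) -> Concise<'a> {
    match (main.is_empty(), annotation.is_empty()) {
        (false, true) => Concise::MainDiagnostic(main),
        (true, false) => Concise::PrimaryAnnotation(annotation),
        (false, false) => Concise::Both { main, annotation },
        (true, true) => Concise::Empty,
    }
}

fn main() {
    // Old-model diagnostics leave the main message empty and attach it to the
    // primary annotation instead.
    assert_eq!(
        concise("", "`sys` imported but unused").to_string(),
        "`sys` imported but unused"
    );
    // New-model diagnostics set both, which renders as "main: annotation".
    assert_eq!(
        concise("unused import", "`sys` imported but unused").to_string(),
        "unused import: `sys` imported but unused"
    );
}
```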

View File

@@ -21,11 +21,7 @@ use crate::source::source_text;
/// reflected in the changed AST offsets.
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
/// for determining if a query result is unchanged.
///
/// The LRU capacity of 200 was picked without any empirical evidence that it's optimal,
/// instead it's a wild guess that it should be unlikely that incremental changes involve
/// more than 200 modules. Parsed ASTs within the same revision are never evicted by Salsa.
#[salsa::tracked(returns(ref), no_eq, heap_size=ruff_memory_usage::heap_size, lru=200)]
#[salsa::tracked(returns(ref), no_eq, heap_size=ruff_memory_usage::heap_size)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
let _span = tracing::trace_span!("parsed_module", ?file).entered();
@@ -96,9 +92,14 @@ impl ParsedModule {
self.inner.store(None);
}
/// Returns the file to which this module belongs.
pub fn file(&self) -> File {
self.file
/// Returns the pointer address of this [`ParsedModule`].
///
/// The pointer uniquely identifies the module within the current Salsa revision,
/// regardless of whether particular [`ParsedModuleRef`] instances are garbage collected.
pub fn addr(&self) -> usize {
// Note that the outer `Arc` in `inner` is stable across garbage collection, while the inner
// `Arc` within the `ArcSwap` may change.
Arc::as_ptr(&self.inner).addr()
}
}
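
The new `addr` method leans on the fact that the outer `Arc` allocation is stable for the lifetime of the `ParsedModule`, even while the inner, swappable value is cleared and re-created. A std-only sketch of that idea, using a hypothetical `Module` type with `RwLock<Option<Arc<...>>>` standing in for the real `ArcSwap`-based internals:

```rust
use std::sync::{Arc, RwLock};

/// Stand-in for `ParsedModule`: the outer `Arc` never changes, while the inner
/// allocation may be cleared (garbage collected) and later re-created.
struct Module {
    inner: Arc<RwLock<Option<Arc<String>>>>,
}

impl Module {
    fn new(ast: &str) -> Self {
        Self {
            inner: Arc::new(RwLock::new(Some(Arc::new(ast.to_string())))),
        }
    }

    /// Drop the cached value, as `ParsedModule::clear` does via `store(None)`.
    fn clear(&self) {
        *self.inner.write().unwrap() = None;
    }

    /// Re-create the cached value, e.g. after a re-parse.
    fn reparse(&self, ast: &str) {
        *self.inner.write().unwrap() = Some(Arc::new(ast.to_string()));
    }

    /// The address of the *outer* `Arc` identifies the module regardless of
    /// what currently lives inside it.
    fn addr(&self) -> usize {
        Arc::as_ptr(&self.inner) as usize
    }
}

fn main() {
    let module = Module::new("import os");
    let before = module.addr();
    module.clear();
    module.reparse("import sys");
    // The identity survives eviction and re-parsing of the inner value.
    assert_eq!(before, module.addr());
}
```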

View File

@@ -667,13 +667,6 @@ impl Deref for SystemPathBuf {
}
}
impl AsRef<Path> for SystemPathBuf {
#[inline]
fn as_ref(&self) -> &Path {
self.0.as_std_path()
}
}
impl<P: AsRef<SystemPath>> FromIterator<P> for SystemPathBuf {
fn from_iter<I: IntoIterator<Item = P>>(iter: I) -> Self {
let mut buf = SystemPathBuf::new();

View File

@@ -49,7 +49,7 @@ impl ModuleImports {
// Resolve the imports.
let mut resolved_imports = ModuleImports::default();
for import in imports {
for resolved in Resolver::new(db, path).resolve(import) {
for resolved in Resolver::new(db).resolve(import) {
if let Some(path) = resolved.as_system_path() {
resolved_imports.insert(path.to_path_buf());
}

View File

@@ -1,9 +1,5 @@
use ruff_db::files::{File, FilePath, system_path_to_file};
use ruff_db::system::SystemPath;
use ty_python_semantic::{
ModuleName, resolve_module, resolve_module_confident, resolve_real_module,
resolve_real_module_confident,
};
use ruff_db::files::FilePath;
use ty_python_semantic::{ModuleName, resolve_module, resolve_real_module};
use crate::ModuleDb;
use crate::collector::CollectedImport;
@@ -11,15 +7,12 @@ use crate::collector::CollectedImport;
/// Collect all imports for a given Python file.
pub(crate) struct Resolver<'a> {
db: &'a ModuleDb,
file: Option<File>,
}
impl<'a> Resolver<'a> {
/// Initialize a [`Resolver`] with a given [`ModuleDb`].
pub(crate) fn new(db: &'a ModuleDb, path: &SystemPath) -> Self {
// If we know the importing file we can potentially resolve more imports
let file = system_path_to_file(db, path).ok();
Self { db, file }
pub(crate) fn new(db: &'a ModuleDb) -> Self {
Self { db }
}
/// Resolve the [`CollectedImport`] into a [`FilePath`].
@@ -77,21 +70,13 @@ impl<'a> Resolver<'a> {
/// Resolves a module name to a module.
pub(crate) fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
let module = if let Some(file) = self.file {
resolve_module(self.db, file, module_name)?
} else {
resolve_module_confident(self.db, module_name)?
};
let module = resolve_module(self.db, module_name)?;
Some(module.file(self.db)?.path(self.db))
}
/// Resolves a module name to a module (stubs not allowed).
fn resolve_real_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
let module = if let Some(file) = self.file {
resolve_real_module(self.db, file, module_name)?
} else {
resolve_real_module_confident(self.db, module_name)?
};
let module = resolve_real_module(self.db, module_name)?;
Some(module.file(self.db)?.path(self.db))
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.14.8"
version = "0.14.7"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -35,7 +35,6 @@ anyhow = { workspace = true }
bitflags = { workspace = true }
clap = { workspace = true, features = ["derive", "string"], optional = true }
colored = { workspace = true }
compact_str = { workspace = true }
fern = { workspace = true }
glob = { workspace = true }
globset = { workspace = true }

View File

@@ -28,11 +28,9 @@ yaml.load("{}", SafeLoader)
yaml.load("{}", yaml.SafeLoader)
yaml.load("{}", CSafeLoader)
yaml.load("{}", yaml.CSafeLoader)
yaml.load("{}", yaml.cyaml.CSafeLoader)
yaml.load("{}", NewSafeLoader)
yaml.load("{}", Loader=SafeLoader)
yaml.load("{}", Loader=yaml.SafeLoader)
yaml.load("{}", Loader=CSafeLoader)
yaml.load("{}", Loader=yaml.CSafeLoader)
yaml.load("{}", Loader=yaml.cyaml.CSafeLoader)
yaml.load("{}", Loader=NewSafeLoader)

View File

@@ -199,9 +199,6 @@ def bytes_okay(value=bytes(1)):
def int_okay(value=int("12")):
pass
# Allow immutable slice()
def slice_okay(value=slice(1,2)):
pass
# Allow immutable complex() value
def complex_okay(value=complex(1,2)):

View File

@@ -52,16 +52,16 @@ def not_broken5():
yield inner()
def broken3():
def not_broken6():
return (yield from [])
def broken4():
def not_broken7():
x = yield from []
return x
def broken5():
def not_broken8():
x = None
def inner(ex):
@@ -76,13 +76,3 @@ class NotBroken9(object):
def __await__(self):
yield from function()
return 42
async def broken6():
yield 1
return foo()
async def broken7():
yield 1
return [1, 2, 3]

View File

@@ -218,26 +218,3 @@ def should_not_fail(payload, Args):
Args:
The other arguments.
"""
# Test cases for Unpack[TypedDict] kwargs
from typing import TypedDict
from typing_extensions import Unpack
class User(TypedDict):
id: int
name: str
def function_with_unpack_args_should_not_fail(query: str, **kwargs: Unpack[User]):
"""Function with Unpack kwargs.
Args:
query: some arg
"""
def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
"""Function with Unpack kwargs but missing query arg documentation.
Args:
**kwargs: keyword arguments
"""

View File

@@ -17,24 +17,3 @@ def _():
# Valid yield scope
yield 3
# await is valid in any generator, sync or async
(await cor async for cor in f()) # ok
(await cor for cor in f()) # ok
# but not in comprehensions
[await cor async for cor in f()] # F704
{await cor async for cor in f()} # F704
{await cor: 1 async for cor in f()} # F704
[await cor for cor in f()] # F704
{await cor for cor in f()} # F704
{await cor: 1 for cor in f()} # F704
# or in the iterator of an async generator, which is evaluated in the parent
# scope
(cor async for cor in await f()) # F704
(await cor async for cor in [await c for c in f()]) # F704
# this is also okay because the comprehension is within the generator scope
([await c for c in cor] async for cor in f()) # ok

View File

@@ -1,56 +0,0 @@
def f():
# These should both be ignored by the range suppression.
# ruff: disable[E741, F841]
I = 1
# ruff: enable[E741, F841]
def f():
# These should both be ignored by the implicit range suppression.
# Should also generate an "unmatched suppression" warning.
# ruff:disable[E741,F841]
I = 1
def f():
# Neither warning is ignored, and an "unmatched suppression"
# should be generated.
I = 1
# ruff: enable[E741, F841]
def f():
# One should be ignored by the range suppression, and
# the other logged to the user.
# ruff: disable[E741]
I = 1
# ruff: enable[E741]
def f():
# Test interleaved range suppressions. The first and last
# lines should each log a different warning, while the
# middle line should be completely silenced.
# ruff: disable[E741]
l = 0
# ruff: disable[F841]
O = 1
# ruff: enable[E741]
I = 2
# ruff: enable[F841]
def f():
# Neither of these are ignored and warnings are
# logged to user
# ruff: disable[E501]
I = 1
# ruff: enable[E501]
def f():
# These should both be ignored by the range suppression,
# and an unused noqa diagnostic should be logged.
# ruff:disable[E741,F841]
I = 1 # noqa: E741,F841
# ruff:enable[E741,F841]

View File

@@ -3,5 +3,3 @@ def func():
# Top-level await
await 1
([await c for c in cor] async for cor in func()) # ok

View File

@@ -1,24 +0,0 @@
async def gen():
yield 1
return 42
def gen(): # B901 but not a syntax error - not an async generator
yield 1
return 42
async def gen(): # ok - no value in return
yield 1
return
async def gen():
yield 1
return foo()
async def gen():
yield 1
return [1, 2, 3]
async def gen():
if True:
yield 1
return 10

View File

@@ -35,7 +35,6 @@ use ruff_python_ast::helpers::{collect_import_from_member, is_docstring_stmt, to
use ruff_python_ast::identifier::Identifier;
use ruff_python_ast::name::QualifiedName;
use ruff_python_ast::str::Quote;
use ruff_python_ast::token::Tokens;
use ruff_python_ast::visitor::{Visitor, walk_except_handler, walk_pattern};
use ruff_python_ast::{
self as ast, AnyParameterRef, ArgOrKeyword, Comprehension, ElifElseClause, ExceptHandler, Expr,
@@ -49,7 +48,7 @@ use ruff_python_parser::semantic_errors::{
SemanticSyntaxChecker, SemanticSyntaxContext, SemanticSyntaxError, SemanticSyntaxErrorKind,
};
use ruff_python_parser::typing::{AnnotationKind, ParsedAnnotation, parse_type_annotation};
use ruff_python_parser::{ParseError, Parsed};
use ruff_python_parser::{ParseError, Parsed, Tokens};
use ruff_python_semantic::all::{DunderAllDefinition, DunderAllFlags};
use ruff_python_semantic::analyze::{imports, typing};
use ruff_python_semantic::{
@@ -69,7 +68,6 @@ use crate::noqa::NoqaMapping;
use crate::package::PackageRoot;
use crate::preview::is_undefined_export_in_dunder_init_enabled;
use crate::registry::Rule;
use crate::rules::flake8_bugbear::rules::ReturnInGenerator;
use crate::rules::pyflakes::rules::{
LateFutureImport, MultipleStarredExpressions, ReturnOutsideFunction,
UndefinedLocalWithNestedImportStarUsage, YieldOutsideFunction,
@@ -730,12 +728,6 @@ impl SemanticSyntaxContext for Checker<'_> {
self.report_diagnostic(NonlocalWithoutBinding { name }, error.range);
}
}
SemanticSyntaxErrorKind::ReturnInGenerator => {
// B901
if self.is_rule_enabled(Rule::ReturnInGenerator) {
self.report_diagnostic(ReturnInGenerator, error.range);
}
}
SemanticSyntaxErrorKind::ReboundComprehensionVariable
| SemanticSyntaxErrorKind::DuplicateTypeParameter
| SemanticSyntaxErrorKind::MultipleCaseAssignment(_)
@@ -754,7 +746,6 @@ impl SemanticSyntaxContext for Checker<'_> {
| SemanticSyntaxErrorKind::LoadBeforeNonlocalDeclaration { .. }
| SemanticSyntaxErrorKind::NonlocalAndGlobal(_)
| SemanticSyntaxErrorKind::AnnotatedGlobal(_)
| SemanticSyntaxErrorKind::TypeParameterDefaultOrder(_)
| SemanticSyntaxErrorKind::AnnotatedNonlocal(_) => {
self.semantic_errors.borrow_mut().push(error);
}
@@ -788,10 +779,6 @@ impl SemanticSyntaxContext for Checker<'_> {
match scope.kind {
ScopeKind::Class(_) => return false,
ScopeKind::Function(_) | ScopeKind::Lambda(_) => return true,
ScopeKind::Generator {
kind: GeneratorKind::Generator,
..
} => return true,
ScopeKind::Generator { .. }
| ScopeKind::Module
| ScopeKind::Type
@@ -841,19 +828,14 @@ impl SemanticSyntaxContext for Checker<'_> {
self.source_type.is_ipynb()
}
fn in_generator_context(&self) -> bool {
for scope in self.semantic.current_scopes() {
if matches!(
scope.kind,
ScopeKind::Generator {
kind: GeneratorKind::Generator,
..
}
) {
return true;
fn in_generator_scope(&self) -> bool {
matches!(
&self.semantic.current_scope().kind,
ScopeKind::Generator {
kind: GeneratorKind::Generator,
..
}
}
false
)
}
fn in_loop_context(&self) -> bool {

View File

@@ -1,6 +1,6 @@
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::{TokenKind, Tokens};
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextRange};

View File

@@ -12,20 +12,17 @@ use crate::fix::edits::delete_comment;
use crate::noqa::{
Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping,
};
use crate::preview::is_range_suppressions_enabled;
use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;
use crate::rules::pygrep_hooks;
use crate::rules::ruff;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
use crate::settings::LinterSettings;
use crate::suppression::Suppressions;
use crate::{Edit, Fix, Locator};
use super::ast::LintContext;
/// RUF100
#[expect(clippy::too_many_arguments)]
pub(crate) fn check_noqa(
context: &mut LintContext,
path: &Path,
@@ -34,7 +31,6 @@ pub(crate) fn check_noqa(
noqa_line_for: &NoqaMapping,
analyze_directives: bool,
settings: &LinterSettings,
suppressions: &Suppressions,
) -> Vec<usize> {
// Identify any codes that are globally exempted (within the current file).
let file_noqa_directives =
@@ -44,7 +40,7 @@ pub(crate) fn check_noqa(
let mut noqa_directives =
NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator);
if file_noqa_directives.is_empty() && noqa_directives.is_empty() && suppressions.is_empty() {
if file_noqa_directives.is_empty() && noqa_directives.is_empty() {
return Vec::new();
}
@@ -64,19 +60,11 @@ pub(crate) fn check_noqa(
continue;
}
// Apply file-level suppressions first
if exemption.contains_secondary_code(code) {
ignored_diagnostics.push(index);
continue;
}
// Apply ranged suppressions next
if is_range_suppressions_enabled(settings) && suppressions.check_diagnostic(diagnostic) {
ignored_diagnostics.push(index);
continue;
}
// Apply end-of-line noqa suppressions last
let noqa_offsets = diagnostic
.parent()
.into_iter()

View File

@@ -4,9 +4,9 @@ use std::path::Path;
use ruff_notebook::CellOffsets;
use ruff_python_ast::PySourceType;
use ruff_python_ast::token::Tokens;
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::Tokens;
use crate::Locator;
use crate::directives::TodoComment;

View File

@@ -5,8 +5,8 @@ use std::str::FromStr;
use bitflags::bitflags;
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_index::Indexer;
use ruff_python_parser::{TokenKind, Tokens};
use ruff_python_trivia::CommentRanges;
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

View File

@@ -5,8 +5,8 @@ use std::iter::FusedIterator;
use std::slice::Iter;
use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt};
use ruff_python_ast::token::{Token, TokenKind, Tokens};
use ruff_python_ast::{self as ast, Stmt, Suite};
use ruff_python_parser::{Token, TokenKind, Tokens};
use ruff_source_file::UniversalNewlineIterator;
use ruff_text_size::{Ranged, TextSize};

View File

@@ -9,11 +9,10 @@ use anyhow::Result;
use libcst_native as cst;
use ruff_diagnostics::Edit;
use ruff_python_ast::token::Tokens;
use ruff_python_ast::{self as ast, Expr, ModModule, Stmt};
use ruff_python_codegen::Stylist;
use ruff_python_importer::Insertion;
use ruff_python_parser::Parsed;
use ruff_python_parser::{Parsed, Tokens};
use ruff_python_semantic::{
ImportedName, MemberNameImport, ModuleNameImport, NameImport, SemanticModel,
};

View File

@@ -46,7 +46,6 @@ pub mod rule_selector;
pub mod rules;
pub mod settings;
pub mod source_kind;
pub mod suppression;
mod text_helpers;
pub mod upstream_categories;
mod violation;

View File

@@ -32,7 +32,6 @@ use crate::rules::ruff::rules::test_rules::{self, TEST_RULES, TestRule};
use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, TargetVersion, flags};
use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::{Locator, directives, fs};
pub(crate) mod float;
@@ -129,7 +128,6 @@ pub fn check_path(
source_type: PySourceType,
parsed: &Parsed<ModModule>,
target_version: TargetVersion,
suppressions: &Suppressions,
) -> Vec<Diagnostic> {
// Aggregate all diagnostics.
let mut context = LintContext::new(path, locator.contents(), settings);
@@ -341,7 +339,6 @@ pub fn check_path(
&directives.noqa_line_for,
parsed.has_valid_syntax(),
settings,
suppressions,
);
if noqa.is_enabled() {
for index in ignored.iter().rev() {
@@ -403,9 +400,6 @@ pub fn add_noqa_to_path(
&indexer,
);
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics, ignoring any existing `noqa` directives.
let diagnostics = check_path(
path,
@@ -420,7 +414,6 @@ pub fn add_noqa_to_path(
source_type,
&parsed,
target_version,
&suppressions,
);
// Add any missing `# noqa` pragmas.
@@ -434,7 +427,6 @@ pub fn add_noqa_to_path(
&directives.noqa_line_for,
stylist.line_ending(),
reason,
&suppressions,
)
}
@@ -469,9 +461,6 @@ pub fn lint_only(
&indexer,
);
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics.
let diagnostics = check_path(
path,
@@ -486,7 +475,6 @@ pub fn lint_only(
source_type,
&parsed,
target_version,
&suppressions,
);
LinterResult {
@@ -578,9 +566,6 @@ pub fn lint_fix<'a>(
&indexer,
);
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics.
let diagnostics = check_path(
path,
@@ -595,7 +580,6 @@ pub fn lint_fix<'a>(
source_type,
&parsed,
target_version,
&suppressions,
);
if iterations == 0 {
@@ -785,7 +769,6 @@ mod tests {
use crate::registry::Rule;
use crate::settings::LinterSettings;
use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::test::{TestedNotebook, assert_notebook_path, test_contents, test_snippet};
use crate::{Locator, assert_diagnostics, directives, settings};
@@ -961,7 +944,6 @@ mod tests {
&locator,
&indexer,
);
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
let mut diagnostics = check_path(
path,
None,
@@ -975,7 +957,6 @@ mod tests {
source_type,
&parsed,
target_version,
&suppressions,
);
diagnostics.sort_by(Diagnostic::ruff_start_ordering);
diagnostics
@@ -1062,7 +1043,6 @@ mod tests {
Rule::YieldFromInAsyncFunction,
Path::new("yield_from_in_async_function.py")
)]
#[test_case(Rule::ReturnInGenerator, Path::new("return_in_generator.py"))]
fn test_syntax_errors(rule: Rule, path: &Path) -> Result<()> {
let snapshot = path.to_string_lossy().to_string();
let path = Path::new("resources/test/fixtures/syntax_errors").join(path);

View File

@@ -20,14 +20,12 @@ use crate::Locator;
use crate::fs::relativize_path;
use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;
use crate::suppression::Suppressions;
/// Generates an array of edits that matches the length of `messages`.
/// Each potential edit in the array is paired, in order, with the associated diagnostic.
/// Each edit will add a `noqa` comment to the appropriate line in the source to hide
/// the diagnostic. These edits may conflict with each other and should not be applied
/// simultaneously.
#[expect(clippy::too_many_arguments)]
pub fn generate_noqa_edits(
path: &Path,
diagnostics: &[Diagnostic],
@@ -36,19 +34,11 @@ pub fn generate_noqa_edits(
external: &[String],
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
suppressions: &Suppressions,
) -> Vec<Option<Edit>> {
let file_directives = FileNoqaDirectives::extract(locator, comment_ranges, external, path);
let exemption = FileExemption::from(&file_directives);
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
let comments = find_noqa_comments(
diagnostics,
locator,
&exemption,
&directives,
noqa_line_for,
suppressions,
);
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
build_noqa_edits_by_diagnostic(comments, locator, line_ending, None)
}
@@ -735,7 +725,6 @@ pub(crate) fn add_noqa(
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
reason: Option<&str>,
suppressions: &Suppressions,
) -> Result<usize> {
let (count, output) = add_noqa_inner(
path,
@@ -746,7 +735,6 @@ pub(crate) fn add_noqa(
noqa_line_for,
line_ending,
reason,
suppressions,
);
fs::write(path, output)?;
@@ -763,7 +751,6 @@ fn add_noqa_inner(
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
reason: Option<&str>,
suppressions: &Suppressions,
) -> (usize, String) {
let mut count = 0;
@@ -773,14 +760,7 @@ fn add_noqa_inner(
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
let comments = find_noqa_comments(
diagnostics,
locator,
&exemption,
&directives,
noqa_line_for,
suppressions,
);
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason);
@@ -879,7 +859,6 @@ fn find_noqa_comments<'a>(
exemption: &'a FileExemption,
directives: &'a NoqaDirectives,
noqa_line_for: &NoqaMapping,
suppressions: &Suppressions,
) -> Vec<Option<NoqaComment<'a>>> {
// List of noqa comments, ordered to match up with `messages`
let mut comments_by_line: Vec<Option<NoqaComment<'a>>> = vec![];
@@ -896,12 +875,6 @@ fn find_noqa_comments<'a>(
continue;
}
// Apply ranged suppressions next
if suppressions.check_diagnostic(message) {
comments_by_line.push(None);
continue;
}
// Is the violation ignored by a `noqa` directive on the parent line?
if let Some(parent) = message.parent() {
if let Some(directive_line) =
@@ -1280,7 +1253,6 @@ mod tests {
use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
use crate::rules::pyflakes::rules::UnusedVariable;
use crate::rules::pyupgrade::rules::PrintfStringFormatting;
use crate::suppression::Suppressions;
use crate::{Edit, Violation};
use crate::{Locator, generate_noqa_edits};
@@ -2876,7 +2848,6 @@ mod tests {
&noqa_line_for,
LineEnding::Lf,
None,
&Suppressions::default(),
);
assert_eq!(count, 0);
assert_eq!(output, format!("{contents}"));
@@ -2901,7 +2872,6 @@ mod tests {
&noqa_line_for,
LineEnding::Lf,
None,
&Suppressions::default(),
);
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: F841\n");
@@ -2933,7 +2903,6 @@ mod tests {
&noqa_line_for,
LineEnding::Lf,
None,
&Suppressions::default(),
);
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: E741, F841\n");
@@ -2965,7 +2934,6 @@ mod tests {
&noqa_line_for,
LineEnding::Lf,
None,
&Suppressions::default(),
);
assert_eq!(count, 0);
assert_eq!(output, "x = 1 # noqa");
@@ -2988,7 +2956,6 @@ print(
let messages = [PrintfStringFormatting
.into_diagnostic(TextRange::new(12.into(), 79.into()), &source_file)];
let comment_ranges = CommentRanges::default();
let suppressions = Suppressions::default();
let edits = generate_noqa_edits(
path,
&messages,
@@ -2997,7 +2964,6 @@ print(
&[],
&noqa_line_for,
LineEnding::Lf,
&suppressions,
);
assert_eq!(
edits,
@@ -3021,7 +2987,6 @@ bar =
[UselessSemicolon.into_diagnostic(TextRange::new(4.into(), 5.into()), &source_file)];
let noqa_line_for = NoqaMapping::default();
let comment_ranges = CommentRanges::default();
let suppressions = Suppressions::default();
let edits = generate_noqa_edits(
path,
&messages,
@@ -3030,7 +2995,6 @@ bar =
&[],
&noqa_line_for,
LineEnding::Lf,
&suppressions,
);
assert_eq!(
edits,

View File

@@ -286,8 +286,3 @@ pub(crate) const fn is_s310_resolve_string_literal_bindings_enabled(
) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/21623
pub(crate) const fn is_range_suppressions_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

View File

@@ -75,7 +75,6 @@ pub(crate) fn unsafe_yaml_load(checker: &Checker, call: &ast::ExprCall) {
qualified_name.segments(),
["yaml", "SafeLoader" | "CSafeLoader"]
| ["yaml", "loader", "SafeLoader" | "CSafeLoader"]
| ["yaml", "cyaml", "CSafeLoader"]
)
})
{

View File

@@ -1,5 +1,6 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::visitor::{Visitor, walk_expr, walk_stmt};
use ruff_python_ast::statement_visitor;
use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_ast::{self as ast, Expr, Stmt, StmtFunctionDef};
use ruff_text_size::TextRange;
@@ -95,11 +96,6 @@ pub(crate) fn return_in_generator(checker: &Checker, function_def: &StmtFunction
return;
}
// Async functions are flagged by the `ReturnInGenerator` semantic syntax error.
if function_def.is_async {
return;
}
let mut visitor = ReturnInGeneratorVisitor::default();
visitor.visit_body(&function_def.body);
@@ -116,9 +112,15 @@ struct ReturnInGeneratorVisitor {
has_yield: bool,
}
impl Visitor<'_> for ReturnInGeneratorVisitor {
impl StatementVisitor<'_> for ReturnInGeneratorVisitor {
fn visit_stmt(&mut self, stmt: &Stmt) {
match stmt {
Stmt::Expr(ast::StmtExpr { value, .. }) => match **value {
Expr::Yield(_) | Expr::YieldFrom(_) => {
self.has_yield = true;
}
_ => {}
},
Stmt::FunctionDef(_) => {
// Do not recurse into nested functions; they're evaluated separately.
}
@@ -128,19 +130,8 @@ impl Visitor<'_> for ReturnInGeneratorVisitor {
node_index: _,
}) => {
self.return_ = Some(*range);
walk_stmt(self, stmt);
}
_ => walk_stmt(self, stmt),
}
}
fn visit_expr(&mut self, expr: &Expr) {
match expr {
Expr::Lambda(_) => {}
Expr::Yield(_) | Expr::YieldFrom(_) => {
self.has_yield = true;
}
_ => walk_expr(self, expr),
_ => statement_visitor::walk_stmt(self, stmt),
}
}
}
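
The rewritten visitor walks statements only: a bare `yield`/`yield from` expression statement marks the function as a generator, `return` statements are recorded, and nested function definitions are deliberately skipped because they are checked on their own. A toy, self-contained model of that traversal (the `Stmt` and `Visitor` types below are simplified stand-ins, not the `ruff_python_ast` API):

```rust
/// Simplified stand-ins for the AST and visitor; not the `ruff_python_ast` types.
enum Stmt {
    YieldExpr,              // a bare `yield ...` / `yield from ...` statement
    Return(usize),          // a `return` statement with a (fake) source offset
    FunctionDef(Vec<Stmt>), // nested function: evaluated separately, so skipped
    Other(Vec<Stmt>),       // any other compound statement with a body
}

#[derive(Default)]
struct Visitor {
    has_yield: bool,
    return_: Option<usize>,
}

impl Visitor {
    fn visit_body(&mut self, body: &[Stmt]) {
        for stmt in body {
            match stmt {
                Stmt::YieldExpr => self.has_yield = true,
                Stmt::Return(offset) => self.return_ = Some(*offset),
                // Do not recurse into nested functions; they're checked on their own.
                Stmt::FunctionDef(_) => {}
                Stmt::Other(body) => self.visit_body(body),
            }
        }
    }
}

fn main() {
    // def f():
    //     yield 1
    //     def inner():   # ignored by the outer visitor
    //         return 2
    //     if cond:
    //         return 3
    let body = vec![
        Stmt::YieldExpr,
        Stmt::FunctionDef(vec![Stmt::Return(2)]),
        Stmt::Other(vec![Stmt::Return(3)]),
    ];
    let mut visitor = Visitor::default();
    visitor.visit_body(&body);
    assert!(visitor.has_yield);
    assert_eq!(visitor.return_, Some(3));
}
```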

View File

@@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:242:20
--> B006_B008.py:239:20
|
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
243 | pass
240 | pass
|
help: Replace with `None`; initialize within function
239 |
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
236 |
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
- def nested_combo(a=[float(3), dt.datetime.now()]):
242 + def nested_combo(a=None):
243 | pass
244 |
245 |
239 + def nested_combo(a=None):
240 | pass
241 |
242 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:279:27
--> B006_B008.py:276:27
|
278 | def mutable_annotations(
279 | a: list[int] | None = [],
275 | def mutable_annotations(
276 | a: list[int] | None = [],
| ^^
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
help: Replace with `None`; initialize within function
276 |
277 |
278 | def mutable_annotations(
273 |
274 |
275 | def mutable_annotations(
- a: list[int] | None = [],
279 + a: list[int] | None = None,
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 + a: list[int] | None = None,
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:280:35
--> B006_B008.py:277:35
|
278 | def mutable_annotations(
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
275 | def mutable_annotations(
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
| ^^
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
help: Replace with `None`; initialize within function
277 |
278 | def mutable_annotations(
279 | a: list[int] | None = [],
274 |
275 | def mutable_annotations(
276 | a: list[int] | None = [],
- b: Optional[Dict[int, int]] = {},
280 + b: Optional[Dict[int, int]] = None,
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
277 + b: Optional[Dict[int, int]] = None,
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:281:62
--> B006_B008.py:278:62
|
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
|
help: Replace with `None`; initialize within function
278 | def mutable_annotations(
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
275 | def mutable_annotations(
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
- c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
284 | pass
278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
281 | pass
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:282:80
--> B006_B008.py:279:80
|
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^
283 | ):
284 | pass
280 | ):
281 | pass
|
help: Replace with `None`; initialize within function
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
- d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
283 | ):
284 | pass
285 |
279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
280 | ):
281 | pass
282 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:287:52
--> B006_B008.py:284:52
|
287 | def single_line_func_wrong(value: dict[str, str] = {}):
284 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
288 | """Docstring"""
285 | """Docstring"""
|
help: Replace with `None`; initialize within function
284 | pass
285 |
281 | pass
282 |
283 |
- def single_line_func_wrong(value: dict[str, str] = {}):
284 + def single_line_func_wrong(value: dict[str, str] = None):
285 | """Docstring"""
286 |
- def single_line_func_wrong(value: dict[str, str] = {}):
287 + def single_line_func_wrong(value: dict[str, str] = None):
288 | """Docstring"""
289 |
290 |
287 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:291:52
--> B006_B008.py:288:52
|
291 | def single_line_func_wrong(value: dict[str, str] = {}):
288 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
292 | """Docstring"""
293 | ...
289 | """Docstring"""
290 | ...
|
help: Replace with `None`; initialize within function
288 | """Docstring"""
289 |
290 |
285 | """Docstring"""
286 |
287 |
- def single_line_func_wrong(value: dict[str, str] = {}):
291 + def single_line_func_wrong(value: dict[str, str] = None):
292 | """Docstring"""
293 | ...
294 |
288 + def single_line_func_wrong(value: dict[str, str] = None):
289 | """Docstring"""
290 | ...
291 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:296:52
--> B006_B008.py:293:52
|
296 | def single_line_func_wrong(value: dict[str, str] = {}):
293 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
297 | """Docstring"""; ...
294 | """Docstring"""; ...
|
help: Replace with `None`; initialize within function
293 | ...
294 |
290 | ...
291 |
292 |
- def single_line_func_wrong(value: dict[str, str] = {}):
293 + def single_line_func_wrong(value: dict[str, str] = None):
294 | """Docstring"""; ...
295 |
- def single_line_func_wrong(value: dict[str, str] = {}):
296 + def single_line_func_wrong(value: dict[str, str] = None):
297 | """Docstring"""; ...
298 |
299 |
296 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:300:52
--> B006_B008.py:297:52
|
300 | def single_line_func_wrong(value: dict[str, str] = {}):
297 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
301 | """Docstring"""; \
302 | ...
298 | """Docstring"""; \
299 | ...
|
help: Replace with `None`; initialize within function
297 | """Docstring"""; ...
298 |
299 |
294 | """Docstring"""; ...
295 |
296 |
- def single_line_func_wrong(value: dict[str, str] = {}):
300 + def single_line_func_wrong(value: dict[str, str] = None):
301 | """Docstring"""; \
302 | ...
303 |
297 + def single_line_func_wrong(value: dict[str, str] = None):
298 | """Docstring"""; \
299 | ...
300 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:305:52
--> B006_B008.py:302:52
|
305 | def single_line_func_wrong(value: dict[str, str] = {
302 | def single_line_func_wrong(value: dict[str, str] = {
| ____________________________________________________^
306 | | # This is a comment
307 | | }):
303 | | # This is a comment
304 | | }):
| |_^
308 | """Docstring"""
305 | """Docstring"""
|
help: Replace with `None`; initialize within function
302 | ...
303 |
304 |
299 | ...
300 |
301 |
- def single_line_func_wrong(value: dict[str, str] = {
- # This is a comment
- }):
305 + def single_line_func_wrong(value: dict[str, str] = None):
306 | """Docstring"""
307 |
308 |
302 + def single_line_func_wrong(value: dict[str, str] = None):
303 | """Docstring"""
304 |
305 |
note: This is an unsafe fix and may change runtime behavior
B006 Do not use mutable data structures for argument defaults
--> B006_B008.py:311:52
--> B006_B008.py:308:52
|
311 | def single_line_func_wrong(value: dict[str, str] = {}) \
308 | def single_line_func_wrong(value: dict[str, str] = {}) \
| ^^
312 | : \
313 | """Docstring"""
309 | : \
310 | """Docstring"""
|
help: Replace with `None`; initialize within function
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:316:52
--> B006_B008.py:313:52
|
316 | def single_line_func_wrong(value: dict[str, str] = {}):
313 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
317 | """Docstring without newline"""
314 | """Docstring without newline"""
|
help: Replace with `None`; initialize within function
313 | """Docstring"""
314 |
315 |
310 | """Docstring"""
311 |
312 |
- def single_line_func_wrong(value: dict[str, str] = {}):
316 + def single_line_func_wrong(value: dict[str, str] = None):
317 | """Docstring without newline"""
313 + def single_line_func_wrong(value: dict[str, str] = None):
314 | """Docstring without newline"""
note: This is an unsafe fix and may change runtime behavior

View File

@@ -53,39 +53,39 @@ B008 Do not perform function call in argument defaults; instead, perform the cal
|
B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:242:31
--> B006_B008.py:239:31
|
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^
243 | pass
240 | pass
|
B008 Do not perform function call `map` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:248:22
--> B006_B008.py:245:22
|
246 | # Don't flag nested B006 since we can't guarantee that
247 | # it isn't made mutable by the outer operation.
248 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
243 | # Don't flag nested B006 since we can't guarantee that
244 | # it isn't made mutable by the outer operation.
245 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
249 | pass
246 | pass
|
B008 Do not perform function call `random.randint` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:253:19
--> B006_B008.py:250:19
|
252 | # B008-ception.
253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
249 | # B008-ception.
250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
254 | pass
251 | pass
|
B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:253:37
--> B006_B008.py:250:37
|
252 | # B008-ception.
253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
249 | # B008-ception.
250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
| ^^^^^^^^^^^^^^^^^
254 | pass
251 | pass
|

View File

@@ -21,46 +21,3 @@ B901 Using `yield` and `return {value}` in a generator function can lead to conf
37 |
38 | yield from not_broken()
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> B901.py:56:5
|
55 | def broken3():
56 | return (yield from [])
| ^^^^^^^^^^^^^^^^^^^^^^
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> B901.py:61:5
|
59 | def broken4():
60 | x = yield from []
61 | return x
| ^^^^^^^^
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> B901.py:72:5
|
71 | inner((yield from []))
72 | return x
| ^^^^^^^^
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> B901.py:83:5
|
81 | async def broken6():
82 | yield 1
83 | return foo()
| ^^^^^^^^^^^^
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> B901.py:88:5
|
86 | async def broken7():
87 | yield 1
88 | return [1, 2, 3]
| ^^^^^^^^^^^^^^^^
|
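The B901 findings above concern generator functions that mix `yield` with `return <value>`; a minimal illustrative sketch of the pattern and a clearer alternative:

```python
# Flagged by B901: the returned list is only reachable via
# StopIteration.value, which is easy to miss when iterating normally.
def broken():
    yield 1
    return [1, 2, 3]


# Clearer alternative: yield the remaining items instead of returning them.
def fixed():
    yield 1
    yield from [1, 2, 3]
```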

View File

@@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:242:20
--> B006_B008.py:239:20
|
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
243 | pass
240 | pass
|
help: Replace with `None`; initialize within function
239 |
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
236 |
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
- def nested_combo(a=[float(3), dt.datetime.now()]):
242 + def nested_combo(a=None):
243 | pass
244 |
245 |
239 + def nested_combo(a=None):
240 | pass
241 |
242 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:279:27
--> B006_B008.py:276:27
|
278 | def mutable_annotations(
279 | a: list[int] | None = [],
275 | def mutable_annotations(
276 | a: list[int] | None = [],
| ^^
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
help: Replace with `None`; initialize within function
276 |
277 |
278 | def mutable_annotations(
273 |
274 |
275 | def mutable_annotations(
- a: list[int] | None = [],
279 + a: list[int] | None = None,
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 + a: list[int] | None = None,
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:280:35
--> B006_B008.py:277:35
|
278 | def mutable_annotations(
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
275 | def mutable_annotations(
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
| ^^
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
help: Replace with `None`; initialize within function
277 |
278 | def mutable_annotations(
279 | a: list[int] | None = [],
274 |
275 | def mutable_annotations(
276 | a: list[int] | None = [],
- b: Optional[Dict[int, int]] = {},
280 + b: Optional[Dict[int, int]] = None,
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
277 + b: Optional[Dict[int, int]] = None,
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:281:62
--> B006_B008.py:278:62
|
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
|
help: Replace with `None`; initialize within function
278 | def mutable_annotations(
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
275 | def mutable_annotations(
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
- c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
284 | pass
278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
281 | pass
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:282:80
--> B006_B008.py:279:80
|
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^
283 | ):
284 | pass
280 | ):
281 | pass
|
help: Replace with `None`; initialize within function
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
- d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
283 | ):
284 | pass
285 |
279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
280 | ):
281 | pass
282 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:287:52
--> B006_B008.py:284:52
|
287 | def single_line_func_wrong(value: dict[str, str] = {}):
284 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
288 | """Docstring"""
285 | """Docstring"""
|
help: Replace with `None`; initialize within function
284 | pass
285 |
281 | pass
282 |
283 |
- def single_line_func_wrong(value: dict[str, str] = {}):
284 + def single_line_func_wrong(value: dict[str, str] = None):
285 | """Docstring"""
286 |
- def single_line_func_wrong(value: dict[str, str] = {}):
287 + def single_line_func_wrong(value: dict[str, str] = None):
288 | """Docstring"""
289 |
290 |
287 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:291:52
--> B006_B008.py:288:52
|
291 | def single_line_func_wrong(value: dict[str, str] = {}):
288 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
292 | """Docstring"""
293 | ...
289 | """Docstring"""
290 | ...
|
help: Replace with `None`; initialize within function
288 | """Docstring"""
289 |
290 |
285 | """Docstring"""
286 |
287 |
- def single_line_func_wrong(value: dict[str, str] = {}):
291 + def single_line_func_wrong(value: dict[str, str] = None):
292 | """Docstring"""
293 | ...
294 |
288 + def single_line_func_wrong(value: dict[str, str] = None):
289 | """Docstring"""
290 | ...
291 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:296:52
--> B006_B008.py:293:52
|
296 | def single_line_func_wrong(value: dict[str, str] = {}):
293 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
297 | """Docstring"""; ...
294 | """Docstring"""; ...
|
help: Replace with `None`; initialize within function
293 | ...
294 |
290 | ...
291 |
292 |
- def single_line_func_wrong(value: dict[str, str] = {}):
293 + def single_line_func_wrong(value: dict[str, str] = None):
294 | """Docstring"""; ...
295 |
- def single_line_func_wrong(value: dict[str, str] = {}):
296 + def single_line_func_wrong(value: dict[str, str] = None):
297 | """Docstring"""; ...
298 |
299 |
296 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:300:52
--> B006_B008.py:297:52
|
300 | def single_line_func_wrong(value: dict[str, str] = {}):
297 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
301 | """Docstring"""; \
302 | ...
298 | """Docstring"""; \
299 | ...
|
help: Replace with `None`; initialize within function
297 | """Docstring"""; ...
298 |
299 |
294 | """Docstring"""; ...
295 |
296 |
- def single_line_func_wrong(value: dict[str, str] = {}):
300 + def single_line_func_wrong(value: dict[str, str] = None):
301 | """Docstring"""; \
302 | ...
303 |
297 + def single_line_func_wrong(value: dict[str, str] = None):
298 | """Docstring"""; \
299 | ...
300 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:305:52
--> B006_B008.py:302:52
|
305 | def single_line_func_wrong(value: dict[str, str] = {
302 | def single_line_func_wrong(value: dict[str, str] = {
| ____________________________________________________^
306 | | # This is a comment
307 | | }):
303 | | # This is a comment
304 | | }):
| |_^
308 | """Docstring"""
305 | """Docstring"""
|
help: Replace with `None`; initialize within function
302 | ...
303 |
304 |
299 | ...
300 |
301 |
- def single_line_func_wrong(value: dict[str, str] = {
- # This is a comment
- }):
305 + def single_line_func_wrong(value: dict[str, str] = None):
306 | """Docstring"""
307 |
308 |
302 + def single_line_func_wrong(value: dict[str, str] = None):
303 | """Docstring"""
304 |
305 |
note: This is an unsafe fix and may change runtime behavior
B006 Do not use mutable data structures for argument defaults
--> B006_B008.py:311:52
--> B006_B008.py:308:52
|
311 | def single_line_func_wrong(value: dict[str, str] = {}) \
308 | def single_line_func_wrong(value: dict[str, str] = {}) \
| ^^
312 | : \
313 | """Docstring"""
309 | : \
310 | """Docstring"""
|
help: Replace with `None`; initialize within function
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:316:52
--> B006_B008.py:313:52
|
316 | def single_line_func_wrong(value: dict[str, str] = {}):
313 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
317 | """Docstring without newline"""
314 | """Docstring without newline"""
|
help: Replace with `None`; initialize within function
313 | """Docstring"""
314 |
315 |
310 | """Docstring"""
311 |
312 |
- def single_line_func_wrong(value: dict[str, str] = {}):
316 + def single_line_func_wrong(value: dict[str, str] = None):
317 | """Docstring without newline"""
313 + def single_line_func_wrong(value: dict[str, str] = None):
314 | """Docstring without newline"""
note: This is an unsafe fix and may change runtime behavior

View File

@@ -1,6 +1,6 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_index::Indexer;
use ruff_python_parser::{TokenKind, Tokens};
use ruff_text_size::{Ranged, TextRange};
use crate::Locator;

View File

@@ -3,7 +3,7 @@ use ruff_python_ast as ast;
use ruff_python_ast::ExprGenerator;
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::Checker;

View File

@@ -3,7 +3,7 @@ use ruff_python_ast as ast;
use ruff_python_ast::ExprGenerator;
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::Checker;

View File

@@ -1,7 +1,7 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast as ast;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::Checker;

View File

@@ -3,8 +3,8 @@ use std::borrow::Cow;
use itertools::Itertools;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::StringFlags;
use ruff_python_ast::token::{Token, TokenKind, Tokens};
use ruff_python_index::Indexer;
use ruff_python_parser::{Token, TokenKind, Tokens};
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextLen, TextRange};

View File

@@ -1,6 +1,6 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_ast::{self as ast, Expr};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_text_size::{Ranged, TextLen, TextSize};
use crate::checkers::ast::Checker;

View File

@@ -4,10 +4,10 @@ use ruff_diagnostics::Applicability;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::{is_const_false, is_const_true};
use ruff_python_ast::stmt_if::elif_else_range;
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::visitor::Visitor;
use ruff_python_ast::whitespace::indentation;
use ruff_python_ast::{self as ast, Decorator, ElifElseClause, Expr, Stmt};
use ruff_python_parser::TokenKind;
use ruff_python_semantic::SemanticModel;
use ruff_python_semantic::analyze::visibility::is_property;
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer, is_python_whitespace};

View File

@@ -1,5 +1,5 @@
use ruff_python_ast::token::Tokens;
use ruff_python_ast::{self as ast, Stmt};
use ruff_python_parser::Tokens;
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextRange};

View File

@@ -1,5 +1,5 @@
use ruff_python_ast::Stmt;
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_python_trivia::PythonWhitespace;
use ruff_source_file::UniversalNewlines;
use ruff_text_size::Ranged;

View File

@@ -11,8 +11,8 @@ use comments::Comment;
use normalize::normalize_imports;
use order::order_imports;
use ruff_python_ast::PySourceType;
use ruff_python_ast::token::Tokens;
use ruff_python_codegen::Stylist;
use ruff_python_parser::Tokens;
use settings::Settings;
use types::EitherImport::{Import, ImportFrom};
use types::{AliasData, ImportBlock, TrailingComma};

View File

@@ -1,11 +1,11 @@
use itertools::{EitherOrBoth, Itertools};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::Tokens;
use ruff_python_ast::whitespace::trailing_lines_end;
use ruff_python_ast::{PySourceType, PythonVersion, Stmt};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::Tokens;
use ruff_python_trivia::{PythonWhitespace, leading_indentation, textwrap::indent};
use ruff_source_file::{LineRanges, UniversalNewlines};
use ruff_text_size::{Ranged, TextRange};

View File

@@ -1,4 +1,4 @@
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
/// Returns `true` if the name should be considered "ambiguous".
pub(super) fn is_ambiguous_name(name: &str) -> bool {

View File

@@ -8,10 +8,10 @@ use itertools::Itertools;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_notebook::CellOffsets;
use ruff_python_ast::PySourceType;
use ruff_python_ast::token::TokenIterWithContext;
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::token::Tokens;
use ruff_python_codegen::Stylist;
use ruff_python_parser::TokenIterWithContext;
use ruff_python_parser::TokenKind;
use ruff_python_parser::Tokens;
use ruff_python_trivia::PythonWhitespace;
use ruff_source_file::{LineRanges, UniversalNewlines};
use ruff_text_size::TextRange;

View File

@@ -1,8 +1,8 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_notebook::CellOffsets;
use ruff_python_ast::PySourceType;
use ruff_python_ast::token::{TokenIterWithContext, TokenKind, Tokens};
use ruff_python_index::Indexer;
use ruff_python_parser::{TokenIterWithContext, TokenKind, Tokens};
use ruff_text_size::{Ranged, TextSize};
use crate::Locator;

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange};
use crate::AlwaysFixableViolation;

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::TextRange;
use crate::Violation;

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::Ranged;
use crate::Edit;

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::Ranged;
use crate::checkers::ast::LintContext;

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::LintContext;

View File

@@ -9,7 +9,7 @@ pub(crate) use missing_whitespace::*;
pub(crate) use missing_whitespace_after_keyword::*;
pub(crate) use missing_whitespace_around_operator::*;
pub(crate) use redundant_backslash::*;
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_python_trivia::is_python_whitespace;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
pub(crate) use space_around_operator::*;

View File

@@ -1,6 +1,6 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_index::Indexer;
use ruff_python_parser::TokenKind;
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextRange, TextSize};

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::LintContext;

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::LintContext;

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_python_trivia::PythonWhitespace;
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::LintContext;

View File

@@ -3,7 +3,7 @@ use std::iter::Peekable;
use itertools::Itertools;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_notebook::CellOffsets;
use ruff_python_ast::token::{Token, TokenKind, Tokens};
use ruff_python_parser::{Token, TokenKind, Tokens};
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::{AlwaysFixableViolation, Edit, Fix, checkers::ast::LintContext};

View File

@@ -4,9 +4,7 @@ use rustc_hash::FxHashSet;
use std::sync::LazyLock;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::Parameter;
use ruff_python_ast::docstrings::{clean_space, leading_space};
use ruff_python_ast::helpers::map_subscript;
use ruff_python_ast::identifier::Identifier;
use ruff_python_semantic::analyze::visibility::is_staticmethod;
use ruff_python_trivia::textwrap::dedent;
@@ -1186,9 +1184,6 @@ impl AlwaysFixableViolation for MissingSectionNameColon {
/// This rule is enabled when using the `google` convention, and disabled when
/// using the `pep257` and `numpy` conventions.
///
/// Parameters annotated with `typing.Unpack` are exempt from this rule.
/// This follows the Python typing specification for unpacking keyword arguments.
///
/// ## Example
/// ```python
/// def calculate_speed(distance: float, time: float) -> float:
@@ -1238,7 +1233,6 @@ impl AlwaysFixableViolation for MissingSectionNameColon {
/// - [PEP 257 Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
/// - [Google Python Style Guide - Docstrings](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings)
/// - [Python - Unpack for keyword arguments](https://typing.python.org/en/latest/spec/callables.html#unpack-kwargs)
#[derive(ViolationMetadata)]
#[violation_metadata(stable_since = "v0.0.73")]
pub(crate) struct UndocumentedParam {
@@ -1814,9 +1808,7 @@ fn missing_args(checker: &Checker, docstring: &Docstring, docstrings_args: &FxHa
missing_arg_names.insert(starred_arg_name);
}
}
if let Some(arg) = function.parameters.kwarg.as_ref()
&& !has_unpack_annotation(checker, arg)
{
if let Some(arg) = function.parameters.kwarg.as_ref() {
let arg_name = arg.name.as_str();
let starred_arg_name = format!("**{arg_name}");
if !arg_name.starts_with('_')
@@ -1842,15 +1834,6 @@ fn missing_args(checker: &Checker, docstring: &Docstring, docstrings_args: &FxHa
}
}
/// Returns `true` if the parameter is annotated with `typing.Unpack`
fn has_unpack_annotation(checker: &Checker, parameter: &Parameter) -> bool {
parameter.annotation.as_ref().is_some_and(|annotation| {
checker
.semantic()
.match_typing_expr(map_subscript(annotation), "Unpack")
})
}
// See: `GOOGLE_ARGS_REGEX` in `pydocstyle/checker.py`.
static GOOGLE_ARGS_REGEX: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:(\r\n|\n)?\s*.+").unwrap());

View File

@@ -101,13 +101,3 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
200 | """
201 | Send a message.
|
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
--> D417.py:238:5
|
236 | """
237 |
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
239 | """Function with Unpack kwargs but missing query arg documentation.
|

View File

@@ -83,13 +83,3 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
200 | """
201 | Send a message.
|
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
--> D417.py:238:5
|
236 | """
237 |
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
239 | """Function with Unpack kwargs but missing query arg documentation.
|

View File

@@ -101,13 +101,3 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
200 | """
201 | Send a message.
|
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
--> D417.py:238:5
|
236 | """
237 |
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
239 | """Function with Unpack kwargs but missing query arg documentation.
|

View File

@@ -101,13 +101,3 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
200 | """
201 | Send a message.
|
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
--> D417.py:238:5
|
236 | """
237 |
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
239 | """Function with Unpack kwargs but missing query arg documentation.
|
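These D417 snapshots exercise how `undocumented-param` treats keyword parameters annotated with `typing.Unpack`. A hypothetical fixture in the same spirit (the `User` and `create_user` names are invented, and which behavior applies depends on the exemption touched in the hunks above):

```python
from typing import TypedDict, Unpack  # Python 3.11+; otherwise use typing_extensions


class User(TypedDict):
    name: str
    age: int


def create_user(query: str, **kwargs: Unpack[User]) -> None:
    """Create a user record.

    Args:
        **kwargs: Extra user fields.
    """
    # D417 flags this docstring because `query` is never documented;
    # whether `**kwargs` itself also needs an entry is what the
    # Unpack exemption in the hunks above governs.
```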

View File

@@ -28,7 +28,6 @@ mod tests {
use crate::settings::types::PreviewMode;
use crate::settings::{LinterSettings, flags};
use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::test::{test_contents, test_path, test_snippet};
use crate::{Locator, assert_diagnostics, assert_diagnostics_diff, directives};
@@ -956,8 +955,6 @@ mod tests {
&locator,
&indexer,
);
let suppressions =
Suppressions::from_tokens(&settings, locator.contents(), parsed.tokens());
let mut messages = check_path(
Path::new("<filename>"),
None,
@@ -971,7 +968,6 @@ mod tests {
source_type,
&parsed,
target_version,
&suppressions,
);
messages.sort_by(Diagnostic::ruff_start_ordering);
let actual = messages

View File

@@ -2,8 +2,8 @@ use anyhow::{Error, bail};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers;
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_ast::{CmpOp, Expr};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;

View File

@@ -3,8 +3,8 @@ use itertools::Itertools;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::contains_effect;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_ast::{self as ast, Stmt};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_python_semantic::Binding;
use ruff_text_size::{Ranged, TextRange, TextSize};

View File

@@ -37,88 +37,3 @@ F704 `await` statement outside of a function
12 |
13 | def _():
|
F704 `await` statement outside of a function
--> F704.py:27:2
|
26 | # but not in comprehensions
27 | [await cor async for cor in f()] # F704
| ^^^^^^^^^
28 | {await cor async for cor in f()} # F704
29 | {await cor: 1 async for cor in f()} # F704
|
F704 `await` statement outside of a function
--> F704.py:28:2
|
26 | # but not in comprehensions
27 | [await cor async for cor in f()] # F704
28 | {await cor async for cor in f()} # F704
| ^^^^^^^^^
29 | {await cor: 1 async for cor in f()} # F704
30 | [await cor for cor in f()] # F704
|
F704 `await` statement outside of a function
--> F704.py:29:2
|
27 | [await cor async for cor in f()] # F704
28 | {await cor async for cor in f()} # F704
29 | {await cor: 1 async for cor in f()} # F704
| ^^^^^^^^^
30 | [await cor for cor in f()] # F704
31 | {await cor for cor in f()} # F704
|
F704 `await` statement outside of a function
--> F704.py:30:2
|
28 | {await cor async for cor in f()} # F704
29 | {await cor: 1 async for cor in f()} # F704
30 | [await cor for cor in f()] # F704
| ^^^^^^^^^
31 | {await cor for cor in f()} # F704
32 | {await cor: 1 for cor in f()} # F704
|
F704 `await` statement outside of a function
--> F704.py:31:2
|
29 | {await cor: 1 async for cor in f()} # F704
30 | [await cor for cor in f()] # F704
31 | {await cor for cor in f()} # F704
| ^^^^^^^^^
32 | {await cor: 1 for cor in f()} # F704
|
F704 `await` statement outside of a function
--> F704.py:32:2
|
30 | [await cor for cor in f()] # F704
31 | {await cor for cor in f()} # F704
32 | {await cor: 1 for cor in f()} # F704
| ^^^^^^^^^
33 |
34 | # or in the iterator of an async generator, which is evaluated in the parent
|
F704 `await` statement outside of a function
--> F704.py:36:23
|
34 | # or in the iterator of an async generator, which is evaluated in the parent
35 | # scope
36 | (cor async for cor in await f()) # F704
| ^^^^^^^^^
37 | (await cor async for cor in [await c for c in f()]) # F704
|
F704 `await` statement outside of a function
--> F704.py:37:30
|
35 | # scope
36 | (cor async for cor in await f()) # F704
37 | (await cor async for cor in [await c for c in f()]) # F704
| ^^^^^^^
38 |
39 | # this is also okay because the comprehension is within the generator scope
|
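The F704 diagnostics above show `await` used inside comprehensions at module scope; a small sketch of the flagged pattern and a compliant rewrite (the `make_requests` and `gather_results` names are illustrative only):

```python
import asyncio


async def make_requests():
    """A stand-in async generator yielding awaitables (hypothetical)."""
    for i in range(3):
        yield asyncio.sleep(0, result=i)


# At module scope this comprehension is flagged by F704, because the
# `await` has no enclosing function:
#     results = [await cor async for cor in make_requests()]

# Moving it inside an async function resolves the diagnostic:
async def gather_results():
    return [await cor async for cor in make_requests()]


if __name__ == "__main__":
    print(asyncio.run(gather_results()))
```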

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::{Token, TokenKind};
use ruff_python_parser::{Token, TokenKind};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::Locator;

View File

@@ -1,5 +1,5 @@
use ruff_python_ast::StmtImportFrom;
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_text_size::{Ranged, TextRange};
use crate::Locator;

View File

@@ -1,10 +1,10 @@
use itertools::Itertools;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::Tokens;
use ruff_python_ast::whitespace::indentation;
use ruff_python_ast::{Alias, StmtImportFrom, StmtRef};
use ruff_python_codegen::Stylist;
use ruff_python_parser::Tokens;
use ruff_text_size::Ranged;
use crate::Locator;

View File

@@ -1,7 +1,7 @@
use std::slice::Iter;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::{Token, TokenKind, Tokens};
use ruff_python_parser::{Token, TokenKind, Tokens};
use ruff_text_size::{Ranged, TextRange};
use crate::Locator;

View File

@@ -6,11 +6,11 @@ use rustc_hash::{FxHashMap, FxHashSet};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::any_over_expr;
use ruff_python_ast::str::{leading_quote, trailing_quote};
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::{self as ast, Expr, Keyword, StringFlags};
use ruff_python_literal::format::{
FieldName, FieldNamePart, FieldType, FormatPart, FormatString, FromTemplate,
};
use ruff_python_parser::TokenKind;
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextRange};

View File

@@ -3,12 +3,12 @@ use std::fmt::Write;
use std::str::FromStr;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::{self as ast, AnyStringFlags, Expr, StringFlags, whitespace::indentation};
use ruff_python_codegen::Stylist;
use ruff_python_literal::cformat::{
CConversionFlags, CFormatPart, CFormatPrecision, CFormatQuantity, CFormatString,
};
use ruff_python_parser::TokenKind;
use ruff_python_stdlib::identifiers::is_identifier;
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextRange};

View File

@@ -1,6 +1,6 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::Stmt;
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::TokenKind;
use ruff_python_semantic::SemanticModel;
use ruff_source_file::LineRanges;
use ruff_text_size::{TextLen, TextRange, TextSize};

View File

@@ -1,7 +1,7 @@
use anyhow::Result;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_ast::{self as ast, Expr};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_python_stdlib::open_mode::OpenMode;
use ruff_text_size::{Ranged, TextSize};

View File

@@ -1,8 +1,8 @@
use std::fmt::Write as _;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_ast::{self as ast, Arguments, Expr, Keyword};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_text_size::{Ranged, TextRange};
use crate::Locator;

View File

@@ -305,25 +305,6 @@ mod tests {
Ok(())
}
#[test]
fn range_suppressions() -> Result<()> {
assert_diagnostics_diff!(
Path::new("ruff/suppressions.py"),
&settings::LinterSettings::for_rules(vec![
Rule::UnusedVariable,
Rule::AmbiguousVariableName,
Rule::UnusedNOQA,
]),
&settings::LinterSettings::for_rules(vec![
Rule::UnusedVariable,
Rule::AmbiguousVariableName,
Rule::UnusedNOQA,
])
.with_preview_mode(),
);
Ok(())
}
#[test]
fn ruf100_0() -> Result<()> {
let diagnostics = test_path(

View File

@@ -4,8 +4,8 @@ use anyhow::Result;
use libcst_native::{LeftParen, ParenthesizedNode, RightParen};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::{self as ast, Expr, OperatorPrecedence};
use ruff_python_parser::TokenKind;
use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;

View File

@@ -2,9 +2,9 @@ use std::cmp::Ordering;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::comment_indentation_after;
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_ast::whitespace::indentation;
use ruff_python_ast::{Stmt, StmtExpr, StmtFor, StmtIf, StmtTry, StmtWhile};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

View File

@@ -9,8 +9,8 @@ use std::cmp::Ordering;
use itertools::Itertools;
use ruff_python_ast as ast;
use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_codegen::Stylist;
use ruff_python_parser::{TokenKind, Tokens};
use ruff_python_stdlib::str::is_cased_uppercase;
use ruff_python_trivia::{SimpleTokenKind, first_non_trivia_token, leading_indentation};
use ruff_source_file::LineRanges;

View File

@@ -1,7 +1,7 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::PythonVersion;
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::{Expr, ExprCall, parenthesize::parenthesized_range};
use ruff_python_parser::TokenKind;
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;

View File

@@ -1,168 +0,0 @@
---
source: crates/ruff_linter/src/rules/ruff/mod.rs
---
--- Linter settings ---
-linter.preview = disabled
+linter.preview = enabled
--- Summary ---
Removed: 9
Added: 1
--- Removed ---
E741 Ambiguous variable name: `I`
--> suppressions.py:4:5
|
2 | # These should both be ignored by the range suppression.
3 | # ruff: disable[E741, F841]
4 | I = 1
| ^
5 | # ruff: enable[E741, F841]
|
F841 [*] Local variable `I` is assigned to but never used
--> suppressions.py:4:5
|
2 | # These should both be ignored by the range suppression.
3 | # ruff: disable[E741, F841]
4 | I = 1
| ^
5 | # ruff: enable[E741, F841]
|
help: Remove assignment to unused variable `I`
1 | def f():
2 | # These should both be ignored by the range suppression.
3 | # ruff: disable[E741, F841]
- I = 1
4 + pass
5 | # ruff: enable[E741, F841]
6 |
7 |
note: This is an unsafe fix and may change runtime behavior
E741 Ambiguous variable name: `I`
--> suppressions.py:12:5
|
10 | # Should also generate an "unmatched suppression" warning.
11 | # ruff:disable[E741,F841]
12 | I = 1
| ^
|
F841 [*] Local variable `I` is assigned to but never used
--> suppressions.py:12:5
|
10 | # Should also generate an "unmatched suppression" warning.
11 | # ruff:disable[E741,F841]
12 | I = 1
| ^
|
help: Remove assignment to unused variable `I`
9 | # These should both be ignored by the implicit range suppression.
10 | # Should also generate an "unmatched suppression" warning.
11 | # ruff:disable[E741,F841]
- I = 1
12 + pass
13 |
14 |
15 | def f():
note: This is an unsafe fix and may change runtime behavior
E741 Ambiguous variable name: `I`
--> suppressions.py:26:5
|
24 | # the other logged to the user.
25 | # ruff: disable[E741]
26 | I = 1
| ^
27 | # ruff: enable[E741]
|
E741 Ambiguous variable name: `l`
--> suppressions.py:35:5
|
33 | # middle line should be completely silenced.
34 | # ruff: disable[E741]
35 | l = 0
| ^
36 | # ruff: disable[F841]
37 | O = 1
|
E741 Ambiguous variable name: `O`
--> suppressions.py:37:5
|
35 | l = 0
36 | # ruff: disable[F841]
37 | O = 1
| ^
38 | # ruff: enable[E741]
39 | I = 2
|
F841 [*] Local variable `O` is assigned to but never used
--> suppressions.py:37:5
|
35 | l = 0
36 | # ruff: disable[F841]
37 | O = 1
| ^
38 | # ruff: enable[E741]
39 | I = 2
|
help: Remove assignment to unused variable `O`
34 | # ruff: disable[E741]
35 | l = 0
36 | # ruff: disable[F841]
- O = 1
37 | # ruff: enable[E741]
38 | I = 2
39 | # ruff: enable[F841]
note: This is an unsafe fix and may change runtime behavior
F841 [*] Local variable `I` is assigned to but never used
--> suppressions.py:39:5
|
37 | O = 1
38 | # ruff: enable[E741]
39 | I = 2
| ^
40 | # ruff: enable[F841]
|
help: Remove assignment to unused variable `I`
36 | # ruff: disable[F841]
37 | O = 1
38 | # ruff: enable[E741]
- I = 2
39 | # ruff: enable[F841]
40 |
41 |
note: This is an unsafe fix and may change runtime behavior
--- Added ---
RUF100 [*] Unused `noqa` directive (unused: `E741`, `F841`)
--> suppressions.py:55:12
|
53 | # and an unusued noqa diagnostic should be logged.
54 | # ruff:disable[E741,F841]
55 | I = 1 # noqa: E741,F841
| ^^^^^^^^^^^^^^^^^
56 | # ruff:enable[E741,F841]
|
help: Remove unused `noqa` directive
52 | # These should both be ignored by the range suppression,
53 | # and an unusued noqa diagnostic should be logged.
54 | # ruff:disable[E741,F841]
- I = 1 # noqa: E741,F841
55 + I = 1
56 | # ruff:enable[E741,F841]
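The removed snapshot above exercises ruff's range-suppression comments; as shown in the fixture itself, the syntax pairs a `# ruff: disable[...]` line with a matching `# ruff: enable[...]` line so that every listed rule is silenced for the lines in between:

```python
def f():
    # Both findings on the next line are silenced by the range suppression.
    # ruff: disable[E741, F841]
    I = 1
    # ruff: enable[E741, F841]
```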

View File

@@ -465,12 +465,6 @@ impl LinterSettings {
self
}
#[must_use]
pub fn with_preview_mode(mut self) -> Self {
self.preview = PreviewMode::Enabled;
self
}
/// Resolve the [`TargetVersion`] to use for linting.
///
/// This method respects the per-file version overrides in

View File

@@ -17,6 +17,4 @@ PLE1142 `await` should be used within an async function
4 | # Top-level await
5 | await 1
| ^^^^^^^
6 |
7 | ([await c for c in cor] async for cor in func()) # ok
|

View File

@@ -1,55 +0,0 @@
---
source: crates/ruff_linter/src/linter.rs
---
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> resources/test/fixtures/syntax_errors/return_in_generator.py:3:5
|
1 | async def gen():
2 | yield 1
3 | return 42
| ^^^^^^^^^
4 |
5 | def gen(): # B901 but not a syntax error - not an async generator
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> resources/test/fixtures/syntax_errors/return_in_generator.py:7:5
|
5 | def gen(): # B901 but not a syntax error - not an async generator
6 | yield 1
7 | return 42
| ^^^^^^^^^
8 |
9 | async def gen(): # ok - no value in return
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> resources/test/fixtures/syntax_errors/return_in_generator.py:15:5
|
13 | async def gen():
14 | yield 1
15 | return foo()
| ^^^^^^^^^^^^
16 |
17 | async def gen():
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> resources/test/fixtures/syntax_errors/return_in_generator.py:19:5
|
17 | async def gen():
18 | yield 1
19 | return [1, 2, 3]
| ^^^^^^^^^^^^^^^^
20 |
21 | async def gen():
|
B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
--> resources/test/fixtures/syntax_errors/return_in_generator.py:24:5
|
22 | if True:
23 | yield 1
24 | return 10
| ^^^^^^^^^
|

File diff suppressed because it is too large

View File

@@ -32,7 +32,6 @@ use crate::packaging::detect_package_root;
use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, flags};
use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::{Applicability, FixAvailability};
use crate::{Locator, directives};
@@ -235,7 +234,6 @@ pub(crate) fn test_contents<'a>(
&locator,
&indexer,
);
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
let messages = check_path(
path,
path.parent()
@@ -251,7 +249,6 @@ pub(crate) fn test_contents<'a>(
source_type,
&parsed,
target_version,
&suppressions,
);
let source_has_errors = parsed.has_invalid_syntax();
@@ -302,8 +299,6 @@ pub(crate) fn test_contents<'a>(
&indexer,
);
let suppressions =
Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
let fixed_messages = check_path(
path,
None,
@@ -317,7 +312,6 @@ pub(crate) fn test_contents<'a>(
source_type,
&parsed,
target_version,
&suppressions,
);
if parsed.has_invalid_syntax() && !source_has_errors {

View File

@@ -1,46 +1,17 @@
use std::cell::RefCell;
use std::sync::{LazyLock, Mutex};
use get_size2::{GetSize, StandardTracker};
use ordermap::{OrderMap, OrderSet};
thread_local! {
pub static TRACKER: RefCell<Option<StandardTracker>>= const { RefCell::new(None) };
}
struct TrackerGuard(Option<StandardTracker>);
impl Drop for TrackerGuard {
fn drop(&mut self) {
TRACKER.set(self.0.take());
}
}
pub fn attach_tracker<R>(tracker: StandardTracker, f: impl FnOnce() -> R) -> R {
let prev = TRACKER.replace(Some(tracker));
let _guard = TrackerGuard(prev);
f()
}
fn with_tracker<F, R>(f: F) -> R
where
F: FnOnce(Option<&mut StandardTracker>) -> R,
{
TRACKER.with(|tracker| {
let mut tracker = tracker.borrow_mut();
f(tracker.as_mut())
})
}
/// Returns the memory usage of the provided object, using a global tracker to avoid
/// double-counting shared objects.
pub fn heap_size<T: GetSize>(value: &T) -> usize {
with_tracker(|tracker| {
if let Some(tracker) = tracker {
value.get_heap_size_with_tracker(tracker).0
} else {
value.get_heap_size()
}
})
static TRACKER: LazyLock<Mutex<StandardTracker>> =
LazyLock::new(|| Mutex::new(StandardTracker::new()));
value
.get_heap_size_with_tracker(&mut *TRACKER.lock().unwrap())
.0
}
/// An implementation of [`GetSize::get_heap_size`] for [`OrderSet`].

View File

@@ -29,7 +29,6 @@ pub mod statement_visitor;
pub mod stmt_if;
pub mod str;
pub mod str_prefix;
pub mod token;
pub mod traversal;
pub mod types;
pub mod visitor;

Some files were not shown because too many files have changed in this diff