Compare commits

..

1 Commits

Author SHA1 Message Date
David Peter
048182635a [ty] Use full project names in good.txt 2025-09-24 09:09:47 +02:00
306 changed files with 4731 additions and 10030 deletions

View File

@@ -452,7 +452,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@38e8f5e4c386b611d51e8aa997b9a06a3c8eb67a # v1.15.6
uses: cargo-bins/cargo-binstall@20aa316bab4942180bbbabe93237858e8d77f1ed # v1.15.5
- name: "Install cargo-fuzz"
# Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
@@ -703,7 +703,7 @@ jobs:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@38e8f5e4c386b611d51e8aa997b9a06a3c8eb67a # v1.15.6
- uses: cargo-bins/cargo-binstall@20aa316bab4942180bbbabe93237858e8d77f1ed # v1.15.5
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
@@ -932,7 +932,7 @@ jobs:
tool: cargo-codspeed
- name: "Build benchmarks"
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark formatter lexer linter parser
- name: "Run benchmarks"
uses: CodSpeedHQ/action@653fdc30e6c40ffd9739e40c8a0576f4f4523ca1 # v4.0.1
@@ -967,7 +967,7 @@ jobs:
tool: cargo-codspeed
- name: "Build benchmarks"
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench ty
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark ty
- name: "Run benchmarks"
uses: CodSpeedHQ/action@653fdc30e6c40ffd9739e40c8a0576f4f4523ca1 # v4.0.1

View File

@@ -64,7 +64,7 @@ jobs:
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@279f8a15b0e7f77213bf9096dbc2335a19ef89c5"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@fc0f612798710b0dd69bb7528bc9b361dc60bd43"
ecosystem-analyzer \
--repository ruff \
@@ -95,14 +95,6 @@ jobs:
--new-name "$REF_NAME" \
--output diff-statistics.md
ecosystem-analyzer \
generate-timing-diff \
diagnostics-old.json \
diagnostics-new.json \
--old-name "main (merge base)" \
--new-name "$REF_NAME" \
--output-html dist/timing.html
echo '## `ecosystem-analyzer` results' > comment.md
echo >> comment.md
cat diff-statistics.md >> comment.md
@@ -126,7 +118,7 @@ jobs:
DEPLOYMENT_URL: ${{ steps.deploy.outputs.pages-deployment-alias-url }}
run: |
echo >> comment.md
echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)** ([timing results]($DEPLOYMENT_URL/timing))" >> comment.md
echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)**" >> comment.md
- name: Upload comment
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -145,9 +137,3 @@ jobs:
with:
name: diff.html
path: dist/diff.html
- name: Upload timing diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: timing.html
path: dist/timing.html

View File

@@ -49,13 +49,12 @@ jobs:
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@279f8a15b0e7f77213bf9096dbc2335a19ef89c5"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@fc0f612798710b0dd69bb7528bc9b361dc60bd43"
ecosystem-analyzer \
--verbose \
--repository ruff \
analyze \
--profile=release \
--projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
--output ecosystem-diagnostics.json
@@ -63,7 +62,7 @@ jobs:
ecosystem-analyzer \
generate-report \
--max-diagnostics-per-project=1000 \
--max-diagnostics-per-project=1200 \
ecosystem-diagnostics.json \
--output dist/index.html

View File

@@ -1,96 +1,5 @@
# Changelog
## 0.13.3
Released on 2025-10-02.
### Preview features
- Display diffs for `ruff format --check` and add support for different output formats ([#20443](https://github.com/astral-sh/ruff/pull/20443))
- \[`pyflakes`\] Handle some common submodule import situations for `unused-import` (`F401`) ([#20200](https://github.com/astral-sh/ruff/pull/20200))
- \[`ruff`\] Do not flag `%r` + `repr()` combinations (`RUF065`) ([#20600](https://github.com/astral-sh/ruff/pull/20600))
### Bug fixes
- \[`cli`\] Add conflict between `--add-noqa` and `--diff` options ([#20642](https://github.com/astral-sh/ruff/pull/20642))
- \[`pylint`\] Exempt required imports from `PLR0402` ([#20381](https://github.com/astral-sh/ruff/pull/20381))
- \[`pylint`\] Fix missing `max-nested-blocks` in settings display ([#20574](https://github.com/astral-sh/ruff/pull/20574))
- \[`pyupgrade`\] Prevent infinite loop with `I002` and `UP026` ([#20634](https://github.com/astral-sh/ruff/pull/20634))
### Rule changes
- \[`flake8-simplify`\] Improve help message clarity (`SIM105`) ([#20548](https://github.com/astral-sh/ruff/pull/20548))
### Documentation
- Add the *The Basics* title back to CONTRIBUTING.md ([#20624](https://github.com/astral-sh/ruff/pull/20624))
- Fixed documentation for try_consider_else ([#20587](https://github.com/astral-sh/ruff/pull/20587))
- \[`isort`\] Clarify dependency between `order-by-type` and `case-sensitive` settings ([#20559](https://github.com/astral-sh/ruff/pull/20559))
- \[`pylint`\] Clarify fix safety to include left-hand hashability (`PLR6201`) ([#20518](https://github.com/astral-sh/ruff/pull/20518))
### Other changes
- \[`playground`\] Fix quick fixes for empty ranges in playground ([#20599](https://github.com/astral-sh/ruff/pull/20599))
### Contributors
- [@TaKO8Ki](https://github.com/TaKO8Ki)
- [@ntBre](https://github.com/ntBre)
- [@dylwil3](https://github.com/dylwil3)
- [@MichaReiser](https://github.com/MichaReiser)
- [@danparizher](https://github.com/danparizher)
- [@LilMonk](https://github.com/LilMonk)
- [@mgiovani](https://github.com/mgiovani)
- [@IDrokin117](https://github.com/IDrokin117)
## 0.13.2
Released on 2025-09-25.
### Preview features
- \[`flake8-async`\] Implement `blocking-path-method` (`ASYNC240`) ([#20264](https://github.com/astral-sh/ruff/pull/20264))
- \[`flake8-bugbear`\] Implement `map-without-explicit-strict` (`B912`) ([#20429](https://github.com/astral-sh/ruff/pull/20429))
- \[`flake8-builtins`\] Detect class-scope builtin shadowing in decorators, default args, and attribute initializers (`A003`) ([#20178](https://github.com/astral-sh/ruff/pull/20178))
- \[`ruff`\] Implement `logging-eager-conversion` (`RUF065`) ([#19942](https://github.com/astral-sh/ruff/pull/19942))
- Include `.pyw` files by default when linting and formatting ([#20458](https://github.com/astral-sh/ruff/pull/20458))
### Bug fixes
- Deduplicate input paths ([#20105](https://github.com/astral-sh/ruff/pull/20105))
- \[`flake8-comprehensions`\] Preserve trailing commas for single-element lists (`C409`) ([#19571](https://github.com/astral-sh/ruff/pull/19571))
- \[`flake8-pyi`\] Avoid syntax error from conflict with `PIE790` (`PYI021`) ([#20010](https://github.com/astral-sh/ruff/pull/20010))
- \[`flake8-simplify`\] Correct fix for positive `maxsplit` without separator (`SIM905`) ([#20056](https://github.com/astral-sh/ruff/pull/20056))
- \[`pyupgrade`\] Fix `UP008` not to apply when `__class__` is a local variable ([#20497](https://github.com/astral-sh/ruff/pull/20497))
- \[`ruff`\] Fix `B004` to skip invalid `hasattr`/`getattr` calls ([#20486](https://github.com/astral-sh/ruff/pull/20486))
- \[`ruff`\] Replace `-nan` with `nan` when using the value to construct a `Decimal` (`FURB164` ) ([#20391](https://github.com/astral-sh/ruff/pull/20391))
### Documentation
- Add 'Finding ways to help' to CONTRIBUTING.md ([#20567](https://github.com/astral-sh/ruff/pull/20567))
- Update import path to `ruff-wasm-web` ([#20539](https://github.com/astral-sh/ruff/pull/20539))
- \[`flake8-bandit`\] Clarify the supported hashing functions (`S324`) ([#20534](https://github.com/astral-sh/ruff/pull/20534))
### Other changes
- \[`playground`\] Allow hover quick fixes to appear for overlapping diagnostics ([#20527](https://github.com/astral-sh/ruff/pull/20527))
- \[`playground`\] Fix nonBMP code point handling in quick fixes and markers ([#20526](https://github.com/astral-sh/ruff/pull/20526))
### Contributors
- [@BurntSushi](https://github.com/BurntSushi)
- [@mtshiba](https://github.com/mtshiba)
- [@second-ed](https://github.com/second-ed)
- [@danparizher](https://github.com/danparizher)
- [@ShikChen](https://github.com/ShikChen)
- [@PieterCK](https://github.com/PieterCK)
- [@GDYendell](https://github.com/GDYendell)
- [@RazerM](https://github.com/RazerM)
- [@TaKO8Ki](https://github.com/TaKO8Ki)
- [@amyreese](https://github.com/amyreese)
- [@ntBre](https://github.com/ntBre)
- [@MichaReiser](https://github.com/MichaReiser)
## 0.13.1
Released on 2025-09-18.

View File

@@ -7,38 +7,22 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
> This guide is for Ruff. If you're looking to contribute to ty, please see [the ty contributing
> guide](https://github.com/astral-sh/ruff/blob/main/crates/ty/CONTRIBUTING.md).
## Finding ways to help
We label issues that would be good for a first-time contributor as
[`good first issue`](https://github.com/astral-sh/ruff/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
These usually do not require significant experience with Rust or the Ruff code base.
We label issues that we think are a good opportunity for subsequent contributions as
[`help wanted`](https://github.com/astral-sh/ruff/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22).
These require varying levels of experience with Rust and Ruff. Often, we want to accomplish these
tasks but do not have the resources to do so ourselves.
You don't need our permission to start on an issue we have labeled as appropriate for community
contribution as described above. However, it's a good idea to indicate that you are going to work on
an issue to avoid concurrent attempts to solve the same problem.
Please check in with us before starting work on an issue that has not been labeled as appropriate
for community contribution. We're happy to receive contributions for other issues, but it's
important to make sure we have consensus on the solution to the problem first.
Outside of issues with the labels above, issues labeled as
[`bug`](https://github.com/astral-sh/ruff/issues?q=is%3Aopen+is%3Aissue+label%3A%22bug%22) are the
best candidates for contribution. In contrast, issues labeled with `needs-decision` or
`needs-design` are _not_ good candidates for contribution. Please do not open pull requests for
issues with these labels.
Please do not open pull requests for new features without prior discussion. While we appreciate
exploration of new features, we will often close these pull requests immediately. Adding a
new feature to Ruff creates a long-term maintenance burden and requires strong consensus from the Ruff
team before it is appropriate to begin work on an implementation.
## The Basics
Ruff welcomes contributions in the form of pull requests.
For small changes (e.g., bug fixes), feel free to submit a PR.
For larger changes (e.g., new lint rules, new functionality, new configuration options), consider
creating an [**issue**](https://github.com/astral-sh/ruff/issues) outlining your proposed change.
You can also join us on [Discord](https://discord.com/invite/astral-sh) to discuss your idea with the
community. We've labeled [beginner-friendly tasks](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
in the issue tracker, along with [bugs](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
and [improvements](https://github.com/astral-sh/ruff/issues?q=is%3Aissue+is%3Aopen+label%3Aaccepted)
that are ready for contributions.
If you have suggestions on how we might improve the contributing documentation, [let us know](https://github.com/astral-sh/ruff/discussions/5693)!
### Prerequisites
Ruff is written in Rust. You'll need to install the

12
Cargo.lock generated
View File

@@ -2738,7 +2738,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.13.3"
version = "0.13.1"
dependencies = [
"anyhow",
"argfile",
@@ -2994,7 +2994,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.13.3"
version = "0.13.1"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3348,7 +3348,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.13.3"
version = "0.13.1"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3463,7 +3463,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=29ab321b45d00daa4315fa2a06f7207759a8c87e#29ab321b45d00daa4315fa2a06f7207759a8c87e"
source = "git+https://github.com/salsa-rs/salsa.git?rev=3713cd7eb30821c0c086591832dd6f59f2af7fe7#3713cd7eb30821c0c086591832dd6f59f2af7fe7"
dependencies = [
"boxcar",
"compact_str",
@@ -3487,12 +3487,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=29ab321b45d00daa4315fa2a06f7207759a8c87e#29ab321b45d00daa4315fa2a06f7207759a8c87e"
source = "git+https://github.com/salsa-rs/salsa.git?rev=3713cd7eb30821c0c086591832dd6f59f2af7fe7#3713cd7eb30821c0c086591832dd6f59f2af7fe7"
[[package]]
name = "salsa-macros"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=29ab321b45d00daa4315fa2a06f7207759a8c87e#29ab321b45d00daa4315fa2a06f7207759a8c87e"
source = "git+https://github.com/salsa-rs/salsa.git?rev=3713cd7eb30821c0c086591832dd6f59f2af7fe7#3713cd7eb30821c0c086591832dd6f59f2af7fe7"
dependencies = [
"proc-macro2",
"quote",

View File

@@ -144,7 +144,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "29ab321b45d00daa4315fa2a06f7207759a8c87e", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "3713cd7eb30821c0c086591832dd6f59f2af7fe7", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",

View File

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.13.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.13.3/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.13.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.13.1/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.13.3
rev: v0.13.1
hooks:
# Run the linter.
- id: ruff-check

View File

@@ -31,7 +31,6 @@ extend-ignore-re = [
"typ",
# TODO: Remove this once the `TYP` redirects are removed from `rule_redirects.rs`
"TYP",
"ntBre"
]
[default.extend-identifiers]

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.13.3"
version = "0.13.1"
publish = true
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -416,7 +416,6 @@ pub struct CheckCommand {
conflicts_with = "stdin_filename",
conflicts_with = "watch",
conflicts_with = "fix",
conflicts_with = "diff",
)]
pub add_noqa: bool,
/// See the files Ruff will be run against with the current settings.
@@ -538,14 +537,6 @@ pub struct FormatCommand {
/// Exit with a non-zero status code if any files were modified via format, even if all files were formatted successfully.
#[arg(long, help_heading = "Miscellaneous", alias = "exit-non-zero-on-fix")]
pub exit_non_zero_on_format: bool,
/// Output serialization format for violations, when used with `--check`.
/// The default serialization format is "full".
///
/// Note that this option is currently only respected in preview mode. A warning will be emitted
/// if this flag is used on stable.
#[arg(long, value_enum, env = "RUFF_OUTPUT_FORMAT")]
pub output_format: Option<OutputFormat>,
}
#[derive(Copy, Clone, Debug, clap::Parser)]
@@ -793,7 +784,6 @@ impl FormatCommand {
target_version: self.target_version.map(ast::PythonVersion::from),
cache_dir: self.cache_dir,
extension: self.extension,
output_format: self.output_format,
..ExplicitConfigOverrides::default()
};

View File

@@ -9,10 +9,11 @@ use ignore::Error;
use log::{debug, warn};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
use ruff_linter::message::create_panic_diagnostic;
use rustc_hash::FxHashMap;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticId, Span, SubDiagnostic, SubDiagnosticSeverity,
};
use ruff_db::panic::catch_unwind;
use ruff_linter::package::PackageRoot;
use ruff_linter::registry::Rule;
@@ -194,7 +195,23 @@ fn lint_path(
match result {
Ok(inner) => inner,
Err(error) => {
let diagnostic = create_panic_diagnostic(&error, Some(path));
let message = match error.payload.as_str() {
Some(summary) => format!("Fatal error while linting: {summary}"),
_ => "Fatal error while linting".to_owned(),
};
let mut diagnostic = Diagnostic::new(
DiagnosticId::Panic,
ruff_db::diagnostic::Severity::Fatal,
message,
);
let span = Span::from(SourceFileBuilder::new(path.to_string_lossy(), "").finish());
let mut annotation = Annotation::primary(span);
annotation.set_file_level(true);
diagnostic.annotate(annotation);
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!("{error}"),
));
Ok(Diagnostics::new(vec![diagnostic], FxHashMap::default()))
}
}
@@ -210,8 +227,7 @@ mod test {
use rustc_hash::FxHashMap;
use tempfile::TempDir;
use ruff_db::diagnostic::{DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics};
use ruff_linter::message::EmitterContext;
use ruff_linter::message::{Emitter, EmitterContext, TextEmitter};
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
@@ -264,16 +280,19 @@ mod test {
UnsafeFixes::Enabled,
)
.unwrap();
let mut output = Vec::new();
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Concise)
.hide_severity(true);
let messages = DisplayDiagnostics::new(
&EmitterContext::new(&FxHashMap::default()),
&config,
&diagnostics.inner,
)
.to_string();
TextEmitter::default()
.with_show_fix_status(true)
.with_color(false)
.emit(
&mut output,
&diagnostics.inner,
&EmitterContext::new(&FxHashMap::default()),
)
.unwrap();
let messages = String::from_utf8(output).unwrap();
insta::with_settings!({
omit_expression => true,

View File

@@ -11,19 +11,13 @@ use itertools::Itertools;
use log::{error, warn};
use rayon::iter::Either::{Left, Right};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticId, DisplayDiagnosticConfig, Severity, Span,
};
use ruff_linter::message::{EmitterContext, create_panic_diagnostic, render_diagnostics};
use ruff_linter::settings::types::OutputFormat;
use ruff_notebook::NotebookIndex;
use ruff_python_parser::ParseError;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_hash::FxHashSet;
use thiserror::Error;
use tracing::debug;
use ruff_db::panic::{PanicError, catch_unwind};
use ruff_diagnostics::{Edit, Fix, SourceMap};
use ruff_diagnostics::SourceMap;
use ruff_linter::fs;
use ruff_linter::logging::{DisplayParseError, LogLevel};
use ruff_linter::package::PackageRoot;
@@ -33,15 +27,14 @@ use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::warn_user_once;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_python_formatter::{FormatModuleError, QuoteStyle, format_module_source, format_range};
use ruff_source_file::{LineIndex, LineRanges, OneIndexed, SourceFileBuilder};
use ruff_source_file::LineIndex;
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_workspace::FormatterSettings;
use ruff_workspace::resolver::{
PyprojectConfig, ResolvedFile, Resolver, match_exclusion, python_files_in_path,
};
use ruff_workspace::resolver::{ResolvedFile, Resolver, match_exclusion, python_files_in_path};
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
use crate::cache::{Cache, FileCacheKey, PackageCacheMap, PackageCaches};
use crate::resolve::resolve;
use crate::{ExitStatus, resolve_default_files};
#[derive(Debug, Copy, Clone, is_macro::Is)]
@@ -70,14 +63,11 @@ impl FormatMode {
pub(crate) fn format(
cli: FormatArguments,
config_arguments: &ConfigArguments,
pyproject_config: &PyprojectConfig,
) -> Result<ExitStatus> {
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let mode = FormatMode::from_cli(&cli);
let files = resolve_default_files(cli.files, false);
let (paths, resolver) = python_files_in_path(&files, pyproject_config, config_arguments)?;
let output_format = pyproject_config.settings.output_format;
let preview = pyproject_config.settings.formatter.preview;
let (paths, resolver) = python_files_in_path(&files, &pyproject_config, config_arguments)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");
@@ -194,26 +184,17 @@ pub(crate) fn format(
caches.persist()?;
// Report on any errors.
//
// We only convert errors to `Diagnostic`s in `Check` mode with preview enabled, otherwise we
// fall back on printing simple messages.
if !(preview.is_enabled() && mode.is_check()) {
errors.sort_unstable_by(|a, b| a.path().cmp(&b.path()));
errors.sort_unstable_by(|a, b| a.path().cmp(&b.path()));
for error in &errors {
error!("{error}");
}
for error in &errors {
error!("{error}");
}
let results = FormatResults::new(results.as_slice(), mode);
match mode {
FormatMode::Write => {}
FormatMode::Check => {
if preview.is_enabled() {
results.write_changed_preview(&mut stdout().lock(), output_format, &errors)?;
} else {
results.write_changed(&mut stdout().lock())?;
}
results.write_changed(&mut stdout().lock())?;
}
FormatMode::Diff => {
results.write_diff(&mut stdout().lock())?;
@@ -225,7 +206,7 @@ pub(crate) fn format(
if mode.is_diff() {
// Allow piping the diff to e.g. a file by writing the summary to stderr
results.write_summary(&mut stderr().lock())?;
} else if !preview.is_enabled() || output_format.is_human_readable() {
} else {
results.write_summary(&mut stdout().lock())?;
}
}
@@ -314,7 +295,8 @@ pub(crate) fn format_path(
FormatResult::Formatted
}
FormatMode::Check | FormatMode::Diff => FormatResult::Diff {
FormatMode::Check => FormatResult::Formatted,
FormatMode::Diff => FormatResult::Diff {
unformatted,
formatted,
},
@@ -347,7 +329,7 @@ pub(crate) enum FormattedSource {
impl From<FormattedSource> for FormatResult {
fn from(value: FormattedSource) -> Self {
match value {
FormattedSource::Formatted { .. } => FormatResult::Formatted,
FormattedSource::Formatted(_) => FormatResult::Formatted,
FormattedSource::Unchanged => FormatResult::Unchanged,
}
}
@@ -495,10 +477,10 @@ pub(crate) fn format_source(
/// The result of an individual formatting operation.
#[derive(Debug, Clone, is_macro::Is)]
pub(crate) enum FormatResult {
/// The file was formatted and written back to disk.
/// The file was formatted.
Formatted,
/// The file needs to be formatted, as the `formatted` and `unformatted` contents differ.
/// The file was formatted, [`SourceKind`] contains the formatted code
Diff {
unformatted: SourceKind,
formatted: SourceKind,
@@ -570,7 +552,7 @@ impl<'a> FormatResults<'a> {
.results
.iter()
.filter_map(|result| {
if result.result.is_diff() {
if result.result.is_formatted() {
Some(result.path.as_path())
} else {
None
@@ -584,30 +566,6 @@ impl<'a> FormatResults<'a> {
Ok(())
}
/// Write a list of the files that would be changed and any errors to the given writer.
fn write_changed_preview(
&self,
f: &mut impl Write,
output_format: OutputFormat,
errors: &[FormatCommandError],
) -> io::Result<()> {
let mut notebook_index = FxHashMap::default();
let diagnostics: Vec<_> = errors
.iter()
.map(Diagnostic::from)
.chain(self.to_diagnostics(&mut notebook_index))
.sorted_unstable_by(Diagnostic::ruff_start_ordering)
.collect();
let context = EmitterContext::new(&notebook_index);
let config = DisplayDiagnosticConfig::default()
.hide_severity(true)
.show_fix_diff(true)
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize());
render_diagnostics(f, output_format, config, &context, &diagnostics)
}
/// Write a summary of the formatting results to the given writer.
fn write_summary(&self, f: &mut impl Write) -> io::Result<()> {
// Compute the number of changed and unchanged files.
@@ -670,155 +628,6 @@ impl<'a> FormatResults<'a> {
Ok(())
}
}
/// Convert formatted files into [`Diagnostic`]s.
fn to_diagnostics(
&self,
notebook_index: &mut FxHashMap<String, NotebookIndex>,
) -> impl Iterator<Item = Diagnostic> {
/// The number of unmodified context lines rendered in diffs.
///
/// Note that this should be kept in sync with the argument to `TextDiff::grouped_ops` in
/// the diff rendering in `ruff_db` (currently 3). The `similar` crate uses two times that
/// argument as a cutoff for rendering unmodified lines.
const CONTEXT_LINES: u32 = 6;
self.results.iter().filter_map(|result| {
let (unformatted, formatted) = match &result.result {
FormatResult::Skipped | FormatResult::Unchanged => return None,
FormatResult::Diff {
unformatted,
formatted,
} => (unformatted, formatted),
FormatResult::Formatted => {
debug_assert!(
false,
"Expected `FormatResult::Diff` for changed files in check mode"
);
return None;
}
};
let mut diagnostic = Diagnostic::new(
DiagnosticId::Unformatted,
Severity::Error,
"File would be reformatted",
);
// Locate the first and last characters that differ to use as the diagnostic
// range and to narrow the `Edit` range.
let modified_range = ModifiedRange::new(unformatted, formatted);
let path = result.path.to_string_lossy();
// For scripts, this is a single `Edit` using the `ModifiedRange` above, but notebook
// edits must be split by cell in order to render them as diffs.
//
// We also attempt to estimate the line number width for aligning the
// annotate-snippets header. This is only an estimate because we don't actually know
// if the maximum line number present in the document will be rendered as part of
// the diff, either as a changed line or as an unchanged context line. For
// notebooks, we refine our estimate by checking the number of lines in each cell
// individually, otherwise we could use `formatted.source_code().count_lines(...)`
// in both cases.
let (fix, line_count) = if let SourceKind::IpyNotebook(formatted) = formatted
&& let SourceKind::IpyNotebook(unformatted) = unformatted
{
notebook_index.insert(path.to_string(), unformatted.index().clone());
let mut edits = formatted
.cell_offsets()
.ranges()
.zip(unformatted.cell_offsets().ranges())
.filter_map(|(formatted_range, unformatted_range)| {
// Filter out cells that weren't modified. We use `intersect` instead of
// `contains_range` because the full modified range might start or end in
// the middle of a cell:
//
// ```
// | cell 1 | cell 2 | cell 3 |
// |----------------| modified range
// ```
//
// The intersection will be `Some` for all three cells in this case.
if modified_range
.unformatted
.intersect(unformatted_range)
.is_some()
{
let formatted = &formatted.source_code()[formatted_range];
let edit = if formatted.is_empty() {
Edit::range_deletion(unformatted_range)
} else {
Edit::range_replacement(formatted.to_string(), unformatted_range)
};
Some(edit)
} else {
None
}
});
let fix = Fix::safe_edits(
edits
.next()
.expect("Formatted files must have at least one edit"),
edits,
);
let source = formatted.source_code();
let line_count = formatted
.cell_offsets()
.ranges()
.filter_map(|range| {
if modified_range.formatted.contains_range(range) {
Some(source.count_lines(range))
} else {
None
}
})
.max()
.unwrap_or_default();
(fix, line_count)
} else {
let formatted_code = &formatted.source_code()[modified_range.formatted];
let edit = if formatted_code.is_empty() {
Edit::range_deletion(modified_range.unformatted)
} else {
Edit::range_replacement(formatted_code.to_string(), modified_range.unformatted)
};
let fix = Fix::safe_edit(edit);
let line_count = formatted
.source_code()
.count_lines(TextRange::up_to(modified_range.formatted.end()));
(fix, line_count)
};
let source_file = SourceFileBuilder::new(path, unformatted.source_code()).finish();
let span = Span::from(source_file).with_range(modified_range.unformatted);
let mut annotation = Annotation::primary(span);
annotation.hide_snippet(true);
diagnostic.annotate(annotation);
diagnostic.set_fix(fix);
// TODO(brent) this offset is a hack to get the header of the diagnostic message, which
// is rendered by our fork of `annotate-snippets`, to align with our manually-rendered
// diff. `annotate-snippets` computes the alignment of the arrow in the header based on
// the maximum line number width in its rendered snippet. However, we don't have a
// reasonable range to underline in an annotation, so we don't send `annotate-snippets`
// a snippet to measure. If we commit to staying on our fork, a more robust way of
// handling this would be to move the diff rendering in
// `ruff_db::diagnostic::render::full` into `annotate-snippets`, likely as another
// `DisplayLine` variant and update the `lineno_width` calculation in
// `DisplayList::fmt`. That would handle this offset "automatically."
let line_count = (line_count + CONTEXT_LINES).min(
formatted
.source_code()
.count_lines(TextRange::up_to(formatted.source_code().text_len())),
);
let lines = OneIndexed::new(line_count as usize).unwrap_or_default();
diagnostic.set_header_offset(lines.digits().get());
Some(diagnostic)
})
}
}
/// An error that can occur while formatting a set of files.
@@ -830,6 +639,7 @@ pub(crate) enum FormatCommandError {
Read(Option<PathBuf>, SourceError),
Format(Option<PathBuf>, FormatModuleError),
Write(Option<PathBuf>, SourceError),
Diff(Option<PathBuf>, io::Error),
RangeFormatNotebook(Option<PathBuf>),
}
@@ -848,65 +658,12 @@ impl FormatCommandError {
| Self::Read(path, _)
| Self::Format(path, _)
| Self::Write(path, _)
| Self::Diff(path, _)
| Self::RangeFormatNotebook(path) => path.as_deref(),
}
}
}
impl From<&FormatCommandError> for Diagnostic {
fn from(error: &FormatCommandError) -> Self {
let annotation = error.path().map(|path| {
let file = SourceFileBuilder::new(path.to_string_lossy(), "").finish();
let span = Span::from(file);
let mut annotation = Annotation::primary(span);
annotation.hide_snippet(true);
annotation
});
let mut diagnostic = match error {
FormatCommandError::Ignore(error) => {
Diagnostic::new(DiagnosticId::Io, Severity::Error, error)
}
FormatCommandError::Parse(display_parse_error) => Diagnostic::new(
DiagnosticId::InvalidSyntax,
Severity::Error,
&display_parse_error.error().error,
),
FormatCommandError::Panic(path, panic_error) => {
return create_panic_diagnostic(panic_error, path.as_deref());
}
FormatCommandError::Read(_, source_error)
| FormatCommandError::Write(_, source_error) => {
Diagnostic::new(DiagnosticId::Io, Severity::Error, source_error)
}
FormatCommandError::Format(_, format_module_error) => match format_module_error {
FormatModuleError::ParseError(parse_error) => Diagnostic::new(
DiagnosticId::InternalError,
Severity::Error,
&parse_error.error,
),
FormatModuleError::FormatError(format_error) => {
Diagnostic::new(DiagnosticId::InternalError, Severity::Error, format_error)
}
FormatModuleError::PrintError(print_error) => {
Diagnostic::new(DiagnosticId::InternalError, Severity::Error, print_error)
}
},
FormatCommandError::RangeFormatNotebook(_) => Diagnostic::new(
DiagnosticId::InvalidCliOption,
Severity::Error,
"Range formatting isn't supported for notebooks.",
),
};
if let Some(annotation) = annotation {
diagnostic.annotate(annotation);
}
diagnostic
}
}
impl Display for FormatCommandError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
@@ -974,6 +731,23 @@ impl Display for FormatCommandError {
write!(f, "{header} {err}", header = "Failed to format:".bold())
}
}
Self::Diff(path, err) => {
if let Some(path) = path {
write!(
f,
"{}{}{} {err}",
"Failed to generate diff for ".bold(),
fs::relativize_path(path).bold(),
":".bold()
)
} else {
write!(
f,
"{header} {err}",
header = "Failed to generate diff:".bold(),
)
}
}
Self::RangeFormatNotebook(path) => {
if let Some(path) = path {
write!(
@@ -1018,54 +792,6 @@ impl Display for FormatCommandError {
}
}
#[derive(Debug)]
struct ModifiedRange {
unformatted: TextRange,
formatted: TextRange,
}
impl ModifiedRange {
/// Determine the range that differs between `unformatted` and `formatted`.
///
/// If the two inputs are equal, the returned ranges will be empty.
fn new(unformatted: &SourceKind, formatted: &SourceKind) -> Self {
let unformatted = unformatted.source_code();
let formatted = formatted.source_code();
let mut prefix_length = TextSize::ZERO;
for (unformatted, formatted) in unformatted.chars().zip(formatted.chars()) {
if unformatted != formatted {
break;
}
prefix_length += unformatted.text_len();
}
// For the ends of the ranges, track the length of the common suffix and then subtract that
// from each total text length. Unlike for `start`, the character offsets are very unlikely
// to be equal, so they need to be treated separately.
let mut suffix_length = TextSize::ZERO;
for (old, new) in unformatted[prefix_length.to_usize()..]
.chars()
.rev()
.zip(formatted[prefix_length.to_usize()..].chars().rev())
{
if old != new {
break;
}
suffix_length += old.text_len();
}
let unformatted_range =
TextRange::new(prefix_length, unformatted.text_len() - suffix_length);
let formatted_range = TextRange::new(prefix_length, formatted.text_len() - suffix_length);
Self {
unformatted: unformatted_range,
formatted: formatted_range,
}
}
}
pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
// First, collect all rules that are incompatible regardless of the linter-specific settings.
let mut incompatible_rules = FxHashSet::default();
@@ -1237,144 +963,3 @@ pub(super) fn warn_incompatible_formatter_settings(resolver: &Resolver) {
}
}
}
#[cfg(test)]
mod tests {
use std::io;
use std::ops::Range;
use std::path::PathBuf;
use ignore::Error;
use insta::assert_snapshot;
use ruff_db::panic::catch_unwind;
use ruff_linter::logging::DisplayParseError;
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_python_formatter::FormatModuleError;
use ruff_python_parser::{ParseError, ParseErrorType};
use ruff_text_size::{TextRange, TextSize};
use test_case::test_case;
use crate::commands::format::{FormatCommandError, FormatMode, FormatResults, ModifiedRange};
#[test]
fn error_diagnostics() -> anyhow::Result<()> {
let path = PathBuf::from("test.py");
let source_kind = SourceKind::Python("1".to_string());
let panic_error = catch_unwind(|| {
panic!("Test panic for FormatCommandError");
})
.unwrap_err();
let errors = [
FormatCommandError::Ignore(Error::WithPath {
path: path.clone(),
err: Box::new(Error::Io(io::Error::new(
io::ErrorKind::PermissionDenied,
"Permission denied",
))),
}),
FormatCommandError::Parse(DisplayParseError::from_source_kind(
ParseError {
error: ParseErrorType::UnexpectedIndentation,
location: TextRange::default(),
},
Some(path.clone()),
&source_kind,
)),
FormatCommandError::Panic(Some(path.clone()), Box::new(panic_error)),
FormatCommandError::Read(
Some(path.clone()),
SourceError::Io(io::Error::new(io::ErrorKind::NotFound, "File not found")),
),
FormatCommandError::Format(
Some(path.clone()),
FormatModuleError::ParseError(ParseError {
error: ParseErrorType::EmptySlice,
location: TextRange::default(),
}),
),
FormatCommandError::Write(
Some(path.clone()),
SourceError::Io(io::Error::new(
io::ErrorKind::PermissionDenied,
"Cannot write to file",
)),
),
FormatCommandError::RangeFormatNotebook(Some(path)),
];
let results = FormatResults::new(&[], FormatMode::Check);
let mut buf = Vec::new();
results.write_changed_preview(
&mut buf,
ruff_linter::settings::types::OutputFormat::Full,
&errors,
)?;
let mut settings = insta::Settings::clone_current();
settings.add_filter(r"(Panicked at) [^:]+:\d+:\d+", "$1 <location>");
let _s = settings.bind_to_scope();
assert_snapshot!(str::from_utf8(&buf)?, @r"
io: test.py: Permission denied
--> test.py:1:1
invalid-syntax: Unexpected indentation
--> test.py:1:1
io: File not found
--> test.py:1:1
internal-error: Expected index or slice expression
--> test.py:1:1
io: Cannot write to file
--> test.py:1:1
invalid-cli-option: Range formatting isn't supported for notebooks.
--> test.py:1:1
panic: Panicked at <location> when checking `test.py`: `Test panic for FormatCommandError`
--> test.py:1:1
info: This indicates a bug in Ruff.
info: If you could open an issue at https://github.com/astral-sh/ruff/issues/new?title=%5Bpanic%5D, we'd be very appreciative!
info: run with `RUST_BACKTRACE=1` environment variable to show the full backtrace information
");
Ok(())
}
#[test_case("abcdef", "abcXYdef", 3..3, 3..5; "insertion")]
#[test_case("abcXYdef", "abcdef", 3..5, 3..3; "deletion")]
#[test_case("abcXdef", "abcYdef", 3..4, 3..4; "modification")]
#[test_case("abc", "abcX", 3..3, 3..4; "strict_prefix")]
#[test_case("", "", 0..0, 0..0; "empty")]
#[test_case("abc", "abc", 3..3, 3..3; "equal")]
fn modified_range(
unformatted: &str,
formatted: &str,
expect_unformatted: Range<u32>,
expect_formatted: Range<u32>,
) {
let mr = ModifiedRange::new(
&SourceKind::Python(unformatted.to_string()),
&SourceKind::Python(formatted.to_string()),
);
assert_eq!(
mr.unformatted,
TextRange::new(
TextSize::new(expect_unformatted.start),
TextSize::new(expect_unformatted.end)
)
);
assert_eq!(
mr.formatted,
TextRange::new(
TextSize::new(expect_formatted.start),
TextSize::new(expect_formatted.end)
)
);
}
}

View File

@@ -4,10 +4,10 @@ use std::path::Path;
use anyhow::Result;
use log::error;
use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::{PySourceType, SourceType};
use ruff_workspace::FormatterSettings;
use ruff_workspace::resolver::{PyprojectConfig, Resolver, match_exclusion, python_file_at_path};
use ruff_workspace::resolver::{Resolver, match_exclusion, python_file_at_path};
use crate::ExitStatus;
use crate::args::{ConfigArguments, FormatArguments, FormatRange};
@@ -15,15 +15,17 @@ use crate::commands::format::{
FormatCommandError, FormatMode, FormatResult, FormattedSource, format_source,
warn_incompatible_formatter_settings,
};
use crate::resolve::resolve;
use crate::stdin::{parrot_stdin, read_from_stdin};
/// Run the formatter over a single file, read from `stdin`.
pub(crate) fn format_stdin(
cli: &FormatArguments,
config_arguments: &ConfigArguments,
pyproject_config: &PyprojectConfig,
) -> Result<ExitStatus> {
let mut resolver = Resolver::new(pyproject_config);
let pyproject_config = resolve(config_arguments, cli.stdin_filename.as_deref())?;
let mut resolver = Resolver::new(&pyproject_config);
warn_incompatible_formatter_settings(&resolver);
let mode = FormatMode::from_cli(cli);
@@ -122,9 +124,7 @@ fn format_source_code(
"{}",
source_kind.diff(formatted, path).unwrap()
)
.map_err(|err| {
FormatCommandError::Write(path.map(Path::to_path_buf), SourceError::Io(err))
})?;
.map_err(|err| FormatCommandError::Diff(path.map(Path::to_path_buf), err))?;
}
},
FormattedSource::Unchanged => {

View File

@@ -205,18 +205,12 @@ pub fn run(
}
fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
let cli_output_format_set = args.output_format.is_some();
let (cli, config_arguments) = args.partition(global_options)?;
let pyproject_config = resolve::resolve(&config_arguments, cli.stdin_filename.as_deref())?;
if cli_output_format_set && !pyproject_config.settings.formatter.preview.is_enabled() {
warn_user_once!(
"The --output-format flag for the formatter is unstable and requires preview mode to use."
);
}
if is_stdin(&cli.files, cli.stdin_filename.as_deref()) {
commands::format_stdin::format_stdin(&cli, &config_arguments, &pyproject_config)
commands::format_stdin::format_stdin(&cli, &config_arguments)
} else {
commands::format::format(cli, &config_arguments, &pyproject_config)
commands::format::format(cli, &config_arguments)
}
}

View File

@@ -10,11 +10,12 @@ use ruff_linter::linter::FixTable;
use serde::Serialize;
use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics,
DisplayGithubDiagnostics, GithubRenderer, SecondaryCode,
};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{EmitterContext, render_diagnostics};
use ruff_linter::message::{Emitter, EmitterContext, GroupedEmitter, SarifEmitter, TextEmitter};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
use ruff_linter::settings::types::{OutputFormat, UnsafeFixes};
@@ -224,28 +225,86 @@ impl Printer {
let context = EmitterContext::new(&diagnostics.notebook_indexes);
let fixables = FixableStatistics::try_from(diagnostics, self.unsafe_fixes);
let config = DisplayDiagnosticConfig::default()
.preview(preview)
.hide_severity(true)
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize())
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_fix_applicability(self.unsafe_fixes.required_applicability())
.show_fix_diff(preview);
let config = DisplayDiagnosticConfig::default().preview(preview);
render_diagnostics(writer, self.format, config, &context, &diagnostics.inner)?;
if matches!(
self.format,
OutputFormat::Full | OutputFormat::Concise | OutputFormat::Grouped
) {
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
if !diagnostics.fixed.is_empty() {
writeln!(writer)?;
print_fix_summary(writer, &diagnostics.fixed)?;
writeln!(writer)?;
}
match self.format {
OutputFormat::Json => {
let config = config.format(DiagnosticFormat::Json);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Rdjson => {
let config = config.format(DiagnosticFormat::Rdjson);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::JsonLines => {
let config = config.format(DiagnosticFormat::JsonLines);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Junit => {
let config = config.format(DiagnosticFormat::Junit);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Concise | OutputFormat::Full => {
TextEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_show_fix_diff(self.format == OutputFormat::Full && preview)
.with_show_source(self.format == OutputFormat::Full)
.with_fix_applicability(self.unsafe_fixes.required_applicability())
.with_preview(preview)
.emit(writer, &diagnostics.inner, &context)?;
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
if !diagnostics.fixed.is_empty() {
writeln!(writer)?;
print_fix_summary(writer, &diagnostics.fixed)?;
writeln!(writer)?;
}
}
self.write_summary_text(writer, diagnostics)?;
}
OutputFormat::Grouped => {
GroupedEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_unsafe_fixes(self.unsafe_fixes)
.emit(writer, &diagnostics.inner, &context)?;
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
if !diagnostics.fixed.is_empty() {
writeln!(writer)?;
print_fix_summary(writer, &diagnostics.fixed)?;
writeln!(writer)?;
}
}
self.write_summary_text(writer, diagnostics)?;
}
OutputFormat::Github => {
let renderer = GithubRenderer::new(&context, "Ruff");
let value = DisplayGithubDiagnostics::new(&renderer, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Gitlab => {
let config = config.format(DiagnosticFormat::Gitlab);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Pylint => {
let config = config.format(DiagnosticFormat::Pylint);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Azure => {
let config = config.format(DiagnosticFormat::Azure);
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
write!(writer, "{value}")?;
}
OutputFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.inner, &context)?;
}
self.write_summary_text(writer, diagnostics)?;
}
writer.flush()?;
@@ -389,22 +448,11 @@ impl Printer {
}
let context = EmitterContext::new(&diagnostics.notebook_indexes);
let format = if preview {
DiagnosticFormat::Full
} else {
DiagnosticFormat::Concise
};
let config = DisplayDiagnosticConfig::default()
.hide_severity(true)
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize())
TextEmitter::default()
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.format(format)
.with_fix_applicability(self.unsafe_fixes.required_applicability());
write!(
writer,
"{}",
DisplayDiagnostics::new(&context, &config, &diagnostics.inner)
)?;
.with_show_source(preview)
.with_fix_applicability(self.unsafe_fixes.required_applicability())
.emit(writer, &diagnostics.inner, &context)?;
}
writer.flush()?;

View File

@@ -12,8 +12,8 @@ use tempfile::TempDir;
const BIN_NAME: &str = "ruff";
fn tempdir_filter(path: impl AsRef<Path>) -> String {
format!(r"{}\\?/?", escape(path.as_ref().to_str().unwrap()))
fn tempdir_filter(tempdir: &TempDir) -> String {
format!(r"{}\\?/?", escape(tempdir.path().to_str().unwrap()))
}
#[test]
@@ -609,112 +609,6 @@ if __name__ == "__main__":
Ok(())
}
#[test_case::test_case("concise")]
#[test_case::test_case("full")]
#[test_case::test_case("json")]
#[test_case::test_case("json-lines")]
#[test_case::test_case("junit")]
#[test_case::test_case("grouped")]
#[test_case::test_case("github")]
#[test_case::test_case("gitlab")]
#[test_case::test_case("pylint")]
#[test_case::test_case("rdjson")]
#[test_case::test_case("azure")]
#[test_case::test_case("sarif")]
fn output_format(output_format: &str) -> Result<()> {
const CONTENT: &str = r#"
from test import say_hy
if __name__ == "__main__":
say_hy("dear Ruff contributor")
"#;
let tempdir = TempDir::new()?;
let input = tempdir.path().join("input.py");
fs::write(&input, CONTENT)?;
let snapshot = format!("output_format_{output_format}");
let project_dir = dunce::canonicalize(tempdir.path())?;
insta::with_settings!({
filters => vec![
(tempdir_filter(&project_dir).as_str(), "[TMP]/"),
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r#""[^"]+\\?/?input.py"#, r#""[TMP]/input.py"#),
(ruff_linter::VERSION, "[VERSION]"),
]
}, {
assert_cmd_snapshot!(
snapshot,
Command::new(get_cargo_bin(BIN_NAME))
.args([
"format",
"--no-cache",
"--output-format",
output_format,
"--preview",
"--check",
"input.py",
])
.current_dir(&tempdir),
);
});
Ok(())
}
#[test]
fn output_format_notebook() {
let args = ["format", "--no-cache", "--isolated", "--preview", "--check"];
let fixtures = Path::new("resources").join("test").join("fixtures");
let path = fixtures.join("unformatted.ipynb");
insta::with_settings!({filters => vec![
// Replace windows paths
(r"\\", "/"),
]}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME)).args(args).arg(path),
@r"
success: false
exit_code: 1
----- stdout -----
unformatted: File would be reformatted
--> resources/test/fixtures/unformatted.ipynb:cell 1:1:1
::: cell 1
1 | import numpy
- maths = (numpy.arange(100)**2).sum()
- stats= numpy.asarray([1,2,3,4]).median()
2 +
3 + maths = (numpy.arange(100) ** 2).sum()
4 + stats = numpy.asarray([1, 2, 3, 4]).median()
::: cell 3
1 | # A cell with IPython escape command
2 | def some_function(foo, bar):
3 | pass
4 +
5 +
6 | %matplotlib inline
::: cell 4
1 | foo = %pwd
- def some_function(foo,bar,):
2 +
3 +
4 + def some_function(
5 + foo,
6 + bar,
7 + ):
8 | # Another cell with IPython escape command
9 | foo = %pwd
10 | print(foo)
1 file would be reformatted
----- stderr -----
");
});
}
#[test]
fn exit_non_zero_on_format() -> Result<()> {
let tempdir = TempDir::new()?;
@@ -2461,21 +2355,3 @@ fn cookiecutter_globbing() -> Result<()> {
Ok(())
}
#[test]
fn stable_output_format_warning() {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(["format", "--output-format=full", "-"])
.pass_stdin("1"),
@r"
success: true
exit_code: 0
----- stdout -----
1
----- stderr -----
warning: The --output-format flag for the formatter is unstable and requires preview mode to use.
",
);
}

View File

@@ -2445,7 +2445,6 @@ requires-python = ">= 3.11"
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
@@ -2477,320 +2476,6 @@ requires-python = ">= 3.11"
Ok(())
}
/// ```
/// tmp
/// ├── pyproject.toml #<--- no `[tool.ruff]`
/// └── test.py
/// ```
#[test]
fn requires_python_no_tool_preview_enabled() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
r#"[project]
requires-python = ">= 3.11"
"#,
)?;
let testpy = tempdir.path().join("test.py");
fs::write(
&testpy,
r#"from typing import Union;foo: Union[int, str] = 1"#,
)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&project_dir).as_str(), "[TMP]/")]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.args(STDIN_BASE_OPTIONS)
.arg("--preview")
.arg("--show-settings")
.args(["--select","UP007"])
.arg("test.py")
.arg("-")
.current_dir(project_dir)
, @r#"
success: true
exit_code: 0
----- stdout -----
Resolved settings for: "[TMP]/test.py"
# General Settings
cache_dir = "[TMP]/.ruff_cache"
fix = false
fix_only = false
output_format = concise
show_fixes = false
unsafe_fixes = hint
# File Resolver Settings
file_resolver.exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"dist",
"node_modules",
"site-packages",
"venv",
]
file_resolver.extend_exclude = []
file_resolver.force_exclude = false
file_resolver.include = [
"*.py",
"*.pyi",
"*.pyw",
"*.ipynb",
"**/pyproject.toml",
]
file_resolver.extend_include = []
file_resolver.respect_gitignore = true
file_resolver.project_root = "[TMP]/"
# Linter Settings
linter.exclude = []
linter.project_root = "[TMP]/"
linter.rules.enabled = [
non-pep604-annotation-union (UP007),
]
linter.rules.should_fix = [
non-pep604-annotation-union (UP007),
]
linter.per_file_ignores = {}
linter.safety_table.forced_safe = []
linter.safety_table.forced_unsafe = []
linter.unresolved_target_version = 3.11
linter.per_file_target_version = {}
linter.preview = enabled
linter.explicit_preview_rules = false
linter.extension = ExtensionMapping({})
linter.allowed_confusables = []
linter.builtins = []
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
linter.external = []
linter.ignore_init_module_imports = true
linter.logger_objects = []
linter.namespace_packages = []
linter.src = [
"[TMP]/",
"[TMP]/src",
]
linter.tab_size = 4
linter.line_length = 88
linter.task_tags = [
TODO,
FIXME,
XXX,
]
linter.typing_modules = []
linter.typing_extensions = true
# Linter Plugins
linter.flake8_annotations.mypy_init_return = false
linter.flake8_annotations.suppress_dummy_args = false
linter.flake8_annotations.suppress_none_returning = false
linter.flake8_annotations.allow_star_arg_any = false
linter.flake8_annotations.ignore_fully_untyped = false
linter.flake8_bandit.hardcoded_tmp_directory = [
/tmp,
/var/tmp,
/dev/shm,
]
linter.flake8_bandit.check_typed_exception = false
linter.flake8_bandit.extend_markup_names = []
linter.flake8_bandit.allowed_markup_calls = []
linter.flake8_bugbear.extend_immutable_calls = []
linter.flake8_builtins.allowed_modules = []
linter.flake8_builtins.ignorelist = []
linter.flake8_builtins.strict_checking = false
linter.flake8_comprehensions.allow_dict_calls_with_keyword_arguments = false
linter.flake8_copyright.notice_rgx = (?i)Copyright\s+((?:\(C\)|©)\s+)?\d{4}((-|,\s)\d{4})*
linter.flake8_copyright.author = none
linter.flake8_copyright.min_file_size = 0
linter.flake8_errmsg.max_string_length = 0
linter.flake8_gettext.functions_names = [
_,
gettext,
ngettext,
]
linter.flake8_implicit_str_concat.allow_multiline = true
linter.flake8_import_conventions.aliases = {
altair = alt,
holoviews = hv,
matplotlib = mpl,
matplotlib.pyplot = plt,
networkx = nx,
numpy = np,
numpy.typing = npt,
pandas = pd,
panel = pn,
plotly.express = px,
polars = pl,
pyarrow = pa,
seaborn = sns,
tensorflow = tf,
tkinter = tk,
xml.etree.ElementTree = ET,
}
linter.flake8_import_conventions.banned_aliases = {}
linter.flake8_import_conventions.banned_from = []
linter.flake8_pytest_style.fixture_parentheses = false
linter.flake8_pytest_style.parametrize_names_type = tuple
linter.flake8_pytest_style.parametrize_values_type = list
linter.flake8_pytest_style.parametrize_values_row_type = tuple
linter.flake8_pytest_style.raises_require_match_for = [
BaseException,
Exception,
ValueError,
OSError,
IOError,
EnvironmentError,
socket.error,
]
linter.flake8_pytest_style.raises_extend_require_match_for = []
linter.flake8_pytest_style.mark_parentheses = false
linter.flake8_quotes.inline_quotes = double
linter.flake8_quotes.multiline_quotes = double
linter.flake8_quotes.docstring_quotes = double
linter.flake8_quotes.avoid_escape = true
linter.flake8_self.ignore_names = [
_make,
_asdict,
_replace,
_fields,
_field_defaults,
_name_,
_value_,
]
linter.flake8_tidy_imports.ban_relative_imports = "parents"
linter.flake8_tidy_imports.banned_api = {}
linter.flake8_tidy_imports.banned_module_level_imports = []
linter.flake8_type_checking.strict = false
linter.flake8_type_checking.exempt_modules = [
typing,
typing_extensions,
]
linter.flake8_type_checking.runtime_required_base_classes = []
linter.flake8_type_checking.runtime_required_decorators = []
linter.flake8_type_checking.quote_annotations = false
linter.flake8_unused_arguments.ignore_variadic_names = false
linter.isort.required_imports = []
linter.isort.combine_as_imports = false
linter.isort.force_single_line = false
linter.isort.force_sort_within_sections = false
linter.isort.detect_same_package = true
linter.isort.case_sensitive = false
linter.isort.force_wrap_aliases = false
linter.isort.force_to_top = []
linter.isort.known_modules = {}
linter.isort.order_by_type = true
linter.isort.relative_imports_order = furthest_to_closest
linter.isort.single_line_exclusions = []
linter.isort.split_on_trailing_comma = true
linter.isort.classes = []
linter.isort.constants = []
linter.isort.variables = []
linter.isort.no_lines_before = []
linter.isort.lines_after_imports = -1
linter.isort.lines_between_types = 0
linter.isort.forced_separate = []
linter.isort.section_order = [
known { type = future },
known { type = standard_library },
known { type = third_party },
known { type = first_party },
known { type = local_folder },
]
linter.isort.default_section = known { type = third_party }
linter.isort.no_sections = false
linter.isort.from_first = false
linter.isort.length_sort = false
linter.isort.length_sort_straight = false
linter.mccabe.max_complexity = 10
linter.pep8_naming.ignore_names = [
setUp,
tearDown,
setUpClass,
tearDownClass,
setUpModule,
tearDownModule,
asyncSetUp,
asyncTearDown,
setUpTestData,
failureException,
longMessage,
maxDiff,
]
linter.pep8_naming.classmethod_decorators = []
linter.pep8_naming.staticmethod_decorators = []
linter.pycodestyle.max_line_length = 88
linter.pycodestyle.max_doc_length = none
linter.pycodestyle.ignore_overlong_task_comments = false
linter.pyflakes.extend_generics = []
linter.pyflakes.allowed_unused_imports = []
linter.pylint.allow_magic_value_types = [
str,
bytes,
]
linter.pylint.allow_dunder_method_names = []
linter.pylint.max_args = 5
linter.pylint.max_positional_args = 5
linter.pylint.max_returns = 6
linter.pylint.max_bool_expr = 5
linter.pylint.max_branches = 12
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
# Formatter Settings
formatter.exclude = []
formatter.unresolved_target_version = 3.11
formatter.per_file_target_version = {}
formatter.preview = enabled
formatter.line_width = 88
formatter.line_ending = auto
formatter.indent_style = space
formatter.indent_width = 4
formatter.quote_style = double
formatter.magic_trailing_comma = respect
formatter.docstring_code_format = disabled
formatter.docstring_code_line_width = dynamic
# Analyze Settings
analyze.exclude = []
analyze.preview = enabled
analyze.target_version = 3.11
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
----- stderr -----
"#);
});
Ok(())
}
/// ```
/// tmp
/// ├── pyproject.toml #<--- no `[tool.ruff]`
@@ -3072,7 +2757,6 @@ requires-python = ">= 3.11"
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
@@ -3437,7 +3121,6 @@ from typing import Union;foo: Union[int, str] = 1
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
@@ -3818,7 +3501,6 @@ from typing import Union;foo: Union[int, str] = 1
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
@@ -4147,7 +3829,6 @@ from typing import Union;foo: Union[int, str] = 1
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
@@ -4476,7 +4157,6 @@ from typing import Union;foo: Union[int, str] = 1
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
@@ -4762,7 +4442,6 @@ from typing import Union;foo: Union[int, str] = 1
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
@@ -5101,7 +4780,6 @@ from typing import Union;foo: Union[int, str] = 1
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false
@@ -6199,36 +5877,6 @@ match 42: # invalid-syntax
Ok(())
}
#[test_case::test_case("concise"; "concise_show_fixes")]
#[test_case::test_case("full"; "full_show_fixes")]
#[test_case::test_case("grouped"; "grouped_show_fixes")]
fn output_format_show_fixes(output_format: &str) -> Result<()> {
let tempdir = TempDir::new()?;
let input = tempdir.path().join("input.py");
fs::write(&input, "import os # F401")?;
let snapshot = format!("output_format_show_fixes_{output_format}");
assert_cmd_snapshot!(
snapshot,
Command::new(get_cargo_bin(BIN_NAME))
.args([
"check",
"--no-cache",
"--output-format",
output_format,
"--select",
"F401",
"--fix",
"--show-fixes",
"input.py",
])
.current_dir(&tempdir),
);
Ok(())
}
#[test]
fn up045_nested_optional_flatten_all() {
let contents = "\
@@ -6302,7 +5950,6 @@ fn rule_panic_mixed_results_concise() -> Result<()> {
filters => vec![
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r"\\", r"/"),
(r"(Panicked at) [^:]+:\d+:\d+", "$1 <location>")
]
}, {
assert_cmd_snapshot!(
@@ -6319,7 +5966,7 @@ fn rule_panic_mixed_results_concise() -> Result<()> {
[TMP]/normal.py:1:1: RUF903 Hey this is a stable test rule with a display only fix.
[TMP]/normal.py:1:1: RUF911 Hey this is a preview test rule.
[TMP]/normal.py:1:1: RUF950 Hey this is a test rule that was redirected from another.
[TMP]/panic.py: panic: Panicked at <location> when checking `[TMP]/panic.py`: `This is a fake panic for testing.`
[TMP]/panic.py: panic: Fatal error while linting: This is a fake panic for testing.
Found 7 errors.
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
@@ -6348,7 +5995,6 @@ fn rule_panic_mixed_results_full() -> Result<()> {
filters => vec![
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r"\\", r"/"),
(r"(Panicked at) [^:]+:\d+:\d+", "$1 <location>"),
]
}, {
assert_cmd_snapshot!(
@@ -6379,11 +6025,12 @@ fn rule_panic_mixed_results_full() -> Result<()> {
RUF950 Hey this is a test rule that was redirected from another.
--> [TMP]/normal.py:1:1
panic: Panicked at <location> when checking `[TMP]/panic.py`: `This is a fake panic for testing.`
panic: Fatal error while linting: This is a fake panic for testing.
--> [TMP]/panic.py:1:1
info: This indicates a bug in Ruff.
info: If you could open an issue at https://github.com/astral-sh/ruff/issues/new?title=%5Bpanic%5D, we'd be very appreciative!
info: run with `RUST_BACKTRACE=1` environment variable to show the full backtrace information
info: panicked at crates/ruff_linter/src/rules/ruff/rules/test_rules.rs:511:9:
This is a fake panic for testing.
run with `RUST_BACKTRACE=1` environment variable to display a backtrace
Found 7 errors.
[*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).
@@ -6398,200 +6045,3 @@ fn rule_panic_mixed_results_full() -> Result<()> {
});
Ok(())
}
/// Test that the same rule fires across all supported extensions, but not on unsupported files
#[test]
fn supported_file_extensions() -> Result<()> {
let tempdir = TempDir::new()?;
let inner_dir = tempdir.path().join("src");
fs::create_dir(&inner_dir)?;
// Create files of various types
// text file
fs::write(inner_dir.join("thing.txt"), b"hello world\n")?;
// regular python
fs::write(
inner_dir.join("thing.py"),
b"import os\nprint('hello world')\n",
)?;
// python typestub
fs::write(
inner_dir.join("thing.pyi"),
b"import os\nclass foo:\n ...\n",
)?;
// windows gui
fs::write(
inner_dir.join("thing.pyw"),
b"import os\nprint('hello world')\n",
)?;
// cython
fs::write(
inner_dir.join("thing.pyx"),
b"import os\ncdef int add(int a, int b):\n return a + b\n",
)?;
// notebook
fs::write(
inner_dir.join("thing.ipynb"),
r#"
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "ad6f36d9-4b7d-4562-8d00-f15a0f1fbb6d",
"metadata": {},
"outputs": [],
"source": [
"import os"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
"#,
)?;
insta::with_settings!({
filters => vec![
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r"\\", r"/"),
]
}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--select", "F401", "--output-format=concise", "--no-cache"])
.args([inner_dir]),
@r"
success: false
exit_code: 1
----- stdout -----
[TMP]/src/thing.ipynb:cell 1:1:8: F401 [*] `os` imported but unused
[TMP]/src/thing.py:1:8: F401 [*] `os` imported but unused
[TMP]/src/thing.pyi:1:8: F401 [*] `os` imported but unused
Found 3 errors.
[*] 3 fixable with the `--fix` option.
----- stderr -----
");
});
Ok(())
}
/// Test that the same rule fires across all supported extensions, but not on unsupported files
#[test]
fn supported_file_extensions_preview_enabled() -> Result<()> {
let tempdir = TempDir::new()?;
let inner_dir = tempdir.path().join("src");
fs::create_dir(&inner_dir)?;
// Create files of various types
// text file
fs::write(inner_dir.join("thing.txt"), b"hello world\n")?;
// regular python
fs::write(
inner_dir.join("thing.py"),
b"import os\nprint('hello world')\n",
)?;
// python typestub
fs::write(
inner_dir.join("thing.pyi"),
b"import os\nclass foo:\n ...\n",
)?;
// windows gui
fs::write(
inner_dir.join("thing.pyw"),
b"import os\nprint('hello world')\n",
)?;
// cython
fs::write(
inner_dir.join("thing.pyx"),
b"import os\ncdef int add(int a, int b):\n return a + b\n",
)?;
// notebook
fs::write(
inner_dir.join("thing.ipynb"),
r#"
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "ad6f36d9-4b7d-4562-8d00-f15a0f1fbb6d",
"metadata": {},
"outputs": [],
"source": [
"import os"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
"#,
)?;
insta::with_settings!({
filters => vec![
(tempdir_filter(&tempdir).as_str(), "[TMP]/"),
(r"\\", r"/"),
]
}, {
assert_cmd_snapshot!(
Command::new(get_cargo_bin(BIN_NAME))
.args(["check", "--select", "F401", "--preview", "--output-format=concise", "--no-cache"])
.args([inner_dir]),
@r"
success: false
exit_code: 1
----- stdout -----
[TMP]/src/thing.ipynb:cell 1:1:8: F401 [*] `os` imported but unused
[TMP]/src/thing.py:1:8: F401 [*] `os` imported but unused
[TMP]/src/thing.pyi:1:8: F401 [*] `os` imported but unused
[TMP]/src/thing.pyw:1:8: F401 [*] `os` imported but unused
Found 4 errors.
[*] 4 fixable with the `--fix` option.
----- stderr -----
");
});
Ok(())
}

View File

@@ -1,19 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- azure
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
##vso[task.logissue type=error;sourcepath=[TMP]/input.py;linenumber=1;columnnumber=1;code=unformatted;]File would be reformatted
----- stderr -----

View File

@@ -1,20 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- concise
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1:1: unformatted: File would be reformatted
1 file would be reformatted
----- stderr -----

View File

@@ -1,26 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- full
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
unformatted: File would be reformatted
--> input.py:1:1
-
1 | from test import say_hy
2 |
3 | if __name__ == "__main__":
1 file would be reformatted
----- stderr -----

View File

@@ -1,19 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- github
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
::error title=Ruff (unformatted),file=[TMP]/input.py,line=1,col=1,endLine=2,endColumn=1::input.py:1:1: unformatted: File would be reformatted
----- stderr -----

View File

@@ -1,38 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- gitlab
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"check_name": "unformatted",
"description": "unformatted: File would be reformatted",
"severity": "major",
"fingerprint": "d868d7da11a65fcf",
"location": {
"path": "input.py",
"positions": {
"begin": {
"line": 1,
"column": 1
},
"end": {
"line": 2,
"column": 1
}
}
}
}
]
----- stderr -----

View File

@@ -1,21 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- grouped
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:
1:1 unformatted: File would be reformatted
1 file would be reformatted
----- stderr -----

View File

@@ -1,19 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- json-lines
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
{"cell":null,"code":"unformatted","end_location":{"column":1,"row":2},"filename":"[TMP]/input.py","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":null},"location":{"column":1,"row":1},"message":"File would be reformatted","noqa_row":null,"url":null}
----- stderr -----

View File

@@ -1,52 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- json
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
[
{
"cell": null,
"code": "unformatted",
"end_location": {
"column": 1,
"row": 2
},
"filename": "[TMP]/input.py",
"fix": {
"applicability": "safe",
"edits": [
{
"content": "",
"end_location": {
"column": 1,
"row": 2
},
"location": {
"column": 1,
"row": 1
}
}
],
"message": null
},
"location": {
"column": 1,
"row": 1
},
"message": "File would be reformatted",
"noqa_row": null,
"url": null
}
]
----- stderr -----

View File

@@ -1,26 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- junit
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="1" failures="1" errors="0">
<testsuite name="[TMP]/input.py" tests="1" disabled="0" errors="0" failures="1" package="org.ruff">
<testcase name="org.ruff.unformatted" classname="[TMP]/input" line="1" column="1">
<failure message="File would be reformatted">line 1, col 1, File would be reformatted</failure>
</testcase>
</testsuite>
</testsuites>
----- stderr -----

View File

@@ -1,18 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- pylint
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
input.py:1: [unformatted] File would be reformatted
----- stderr -----

View File

@@ -1,60 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- rdjson
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
{
"diagnostics": [
{
"code": {
"value": "unformatted"
},
"location": {
"path": "[TMP]/input.py",
"range": {
"end": {
"column": 1,
"line": 2
},
"start": {
"column": 1,
"line": 1
}
}
},
"message": "File would be reformatted",
"suggestions": [
{
"range": {
"end": {
"column": 1,
"line": 2
},
"start": {
"column": 1,
"line": 1
}
},
"text": ""
}
]
}
],
"severity": "WARNING",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}
----- stderr -----

View File

@@ -1,81 +0,0 @@
---
source: crates/ruff/tests/format.rs
info:
program: ruff
args:
- format
- "--no-cache"
- "--output-format"
- sarif
- "--preview"
- "--check"
- input.py
---
success: false
exit_code: 1
----- stdout -----
{
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
"runs": [
{
"results": [
{
"fixes": [
{
"artifactChanges": [
{
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"replacements": [
{
"deletedRegion": {
"endColumn": 1,
"endLine": 2,
"startColumn": 1,
"startLine": 1
}
}
]
}
],
"description": {
"text": null
}
}
],
"level": "error",
"locations": [
{
"physicalLocation": {
"artifactLocation": {
"uri": "[TMP]/input.py"
},
"region": {
"endColumn": 1,
"endLine": 2,
"startColumn": 1,
"startLine": 1
}
}
}
],
"message": {
"text": "File would be reformatted"
},
"ruleId": "unformatted"
}
],
"tool": {
"driver": {
"informationUri": "https://github.com/astral-sh/ruff",
"name": "ruff",
"rules": [],
"version": "[VERSION]"
}
}
}
],
"version": "2.1.0"
}
----- stderr -----

View File

@@ -44,43 +44,6 @@ import some_module
__all__ = ["some_module"]
```
## Preview
When [preview] is enabled (and certain simplifying assumptions
are met), we analyze all import statements for a given module
when determining whether an import is used, rather than simply
the last of these statements. This can result in both different and
more import statements being marked as unused.
For example, if a module consists of
```python
import a
import a.b
```
then both statements are marked as unused under [preview], whereas
only the second is marked as unused under stable behavior.
As another example, if a module consists of
```python
import a.b
import a
a.b.foo()
```
then a diagnostic will only be emitted for the first line under [preview],
whereas a diagnostic would only be emitted for the second line under
stable behavior.
Note that this behavior is somewhat subjective and is designed
to conform to the developer's intuition rather than Python's actual
execution. To wit, the statement `import a.b` automatically executes
`import a`, so in some sense `import a` is _always_ redundant
in the presence of `import a.b`.
## Fix safety
Fixes to remove unused imports are safe, except in `__init__.py` files.
@@ -133,6 +96,4 @@ else:
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)
[preview]: https://docs.astral.sh/ruff/preview/
----- stderr -----

View File

@@ -119,7 +119,7 @@ exit_code: 1
"rules": [
{
"fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Preview\nWhen [preview] is enabled (and certain simplifying assumptions\nare met), we analyze all import statements for a given module\nwhen determining whether an import is used, rather than simply\nthe last of these statements. This can result in both different and\nmore import statements being marked as unused.\n\nFor example, if a module consists of\n\n```python\nimport a\nimport a.b\n```\n\nthen both statements are marked as unused under [preview], whereas\nonly the second is marked as unused under stable behavior.\n\nAs another example, if a module consists of\n\n```python\nimport a.b\nimport a\n\na.b.foo()\n```\n\nthen a diagnostic will only be emitted for the first line under [preview],\nwhereas a diagnostic would only be emitted for the second line under\nstable behavior.\n\nNote that this behavior is somewhat subjective and is designed\nto conform to the developer's intuition rather than Python's actual\nexecution. 
To wit, the statement `import a.b` automatically executes\n`import a`, so in some sense `import a` is _always_ redundant\nin the presence of `import a.b`.\n\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n\n[preview]: https://docs.astral.sh/ruff/preview/\n"
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. 
Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"

View File

@@ -1,26 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- concise
- "--select"
- F401
- "--fix"
- "--show-fixes"
- input.py
---
success: true
exit_code: 0
----- stdout -----
Fixed 1 error:
- input.py:
1 × F401 (unused-import)
Found 1 error (1 fixed, 0 remaining).
----- stderr -----

View File

@@ -1,26 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- full
- "--select"
- F401
- "--fix"
- "--show-fixes"
- input.py
---
success: true
exit_code: 0
----- stdout -----
Fixed 1 error:
- input.py:
1 × F401 (unused-import)
Found 1 error (1 fixed, 0 remaining).
----- stderr -----

View File

@@ -1,26 +0,0 @@
---
source: crates/ruff/tests/lint.rs
info:
program: ruff
args:
- check
- "--no-cache"
- "--output-format"
- grouped
- "--select"
- F401
- "--fix"
- "--show-fixes"
- input.py
---
success: true
exit_code: 0
----- stdout -----
Fixed 1 error:
- input.py:
1 × F401 (unused-import)
Found 1 error (1 fixed, 0 remaining).
----- stderr -----

View File

@@ -371,7 +371,6 @@ linter.pylint.max_branches = 12
linter.pylint.max_statements = 50
linter.pylint.max_public_methods = 20
linter.pylint.max_locals = 15
linter.pylint.max_nested_blocks = 5
linter.pyupgrade.keep_runtime_typing = false
linter.ruff.parenthesize_tuple_in_subscript = false

View File

@@ -56,7 +56,6 @@ pub(crate) struct DisplayList<'a> {
pub(crate) stylesheet: &'a Stylesheet,
pub(crate) anonymized_line_numbers: bool,
pub(crate) cut_indicator: &'static str,
pub(crate) lineno_offset: usize,
}
impl PartialEq for DisplayList<'_> {
@@ -82,14 +81,13 @@ impl Display for DisplayList<'_> {
_ => max,
})
});
let lineno_width = self.lineno_offset
+ if lineno_width == 0 {
lineno_width
} else if self.anonymized_line_numbers {
ANONYMIZED_LINE_NUM.len()
} else {
((lineno_width as f64).log10().floor() as usize) + 1
};
let lineno_width = if lineno_width == 0 {
lineno_width
} else if self.anonymized_line_numbers {
ANONYMIZED_LINE_NUM.len()
} else {
((lineno_width as f64).log10().floor() as usize) + 1
};
let multiline_depth = self.body.iter().fold(0, |max, set| {
set.display_lines.iter().fold(max, |max2, line| match line {
@@ -126,7 +124,6 @@ impl<'a> DisplayList<'a> {
term_width: usize,
cut_indicator: &'static str,
) -> DisplayList<'a> {
let lineno_offset = message.lineno_offset;
let body = format_message(
message,
term_width,
@@ -140,7 +137,6 @@ impl<'a> DisplayList<'a> {
stylesheet,
anonymized_line_numbers,
cut_indicator,
lineno_offset,
}
}
@@ -1092,7 +1088,6 @@ fn format_message<'m>(
footer,
snippets,
is_fixable,
lineno_offset: _,
} = message;
let mut sets = vec![];

View File

@@ -23,7 +23,6 @@ pub struct Message<'a> {
pub(crate) snippets: Vec<Snippet<'a>>,
pub(crate) footer: Vec<Message<'a>>,
pub(crate) is_fixable: bool,
pub(crate) lineno_offset: usize,
}
impl<'a> Message<'a> {
@@ -60,16 +59,6 @@ impl<'a> Message<'a> {
self.is_fixable = yes;
self
}
/// Add an offset used for aligning the header sigil (`-->`) with the line number separators.
///
/// For normal diagnostics this is computed automatically based on the lines to be rendered.
/// This is intended only for use in the formatter, where we don't render a snippet directly but
/// still want the header to align with the diff.
pub fn lineno_offset(mut self, offset: usize) -> Self {
self.lineno_offset = offset;
self
}
}
/// Structure containing the slice of text to be annotated and
@@ -155,7 +144,7 @@ impl<'a> Annotation<'a> {
self
}
pub fn hide_snippet(mut self, yes: bool) -> Self {
pub fn is_file_level(mut self, yes: bool) -> Self {
self.is_file_level = yes;
self
}
@@ -184,7 +173,6 @@ impl Level {
snippets: vec![],
footer: vec![],
is_fixable: false,
lineno_offset: 0,
}
}

View File

@@ -444,7 +444,7 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
criterion.bench_function("ty_micro[complex_constrained_attributes_2]", |b| {
b.iter_batched_ref(
|| {
// This is similar to the case above, but now the attributes are actually defined.
// This is is similar to the case above, but now the attributes are actually defined.
// https://github.com/astral-sh/ty/issues/711
setup_micro_case(
r#"

View File

@@ -117,7 +117,7 @@ static COLOUR_SCIENCE: std::sync::LazyLock<Benchmark<'static>> = std::sync::Lazy
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY310,
},
500,
477,
)
});

View File

@@ -69,7 +69,6 @@ impl Diagnostic {
parent: None,
noqa_offset: None,
secondary_code: None,
header_offset: 0,
});
Diagnostic { inner }
}
@@ -433,23 +432,14 @@ impl Diagnostic {
/// Returns the URL for the rule documentation, if it exists.
pub fn to_ruff_url(&self) -> Option<String> {
match self.id() {
DiagnosticId::Panic
| DiagnosticId::Io
| DiagnosticId::InvalidSyntax
| DiagnosticId::RevealedType
| DiagnosticId::UnknownRule
| DiagnosticId::InvalidGlob
| DiagnosticId::EmptyInclude
| DiagnosticId::UnnecessaryOverridesSection
| DiagnosticId::UselessOverridesSection
| DiagnosticId::DeprecatedSetting
| DiagnosticId::Unformatted
| DiagnosticId::InvalidCliOption
| DiagnosticId::InternalError => None,
DiagnosticId::Lint(lint_name) => {
Some(format!("{}/rules/{lint_name}", env!("CARGO_PKG_HOMEPAGE")))
}
if self.is_invalid_syntax() {
None
} else {
Some(format!(
"{}/rules/{}",
env!("CARGO_PKG_HOMEPAGE"),
self.name()
))
}
}
@@ -522,11 +512,6 @@ impl Diagnostic {
a.cmp(&b)
}
/// Add an offset for aligning the header sigil with the line number separators in a diff.
pub fn set_header_offset(&mut self, offset: usize) {
Arc::make_mut(&mut self.inner).header_offset = offset;
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
@@ -540,7 +525,6 @@ struct DiagnosticInner {
parent: Option<TextSize>,
noqa_offset: Option<TextSize>,
secondary_code: Option<SecondaryCode>,
header_offset: usize,
}
struct RenderingSortKey<'a> {
@@ -758,11 +742,11 @@ pub struct Annotation {
is_primary: bool,
/// The diagnostic tags associated with this annotation.
tags: Vec<DiagnosticTag>,
/// Whether the snippet for this annotation should be hidden.
/// Whether this annotation is a file-level or full-file annotation.
///
/// When set, rendering will only include the file's name and (optional) range. Everything else
/// is omitted, including any file snippet or message.
hide_snippet: bool,
is_file_level: bool,
}
impl Annotation {
@@ -781,7 +765,7 @@ impl Annotation {
message: None,
is_primary: true,
tags: Vec::new(),
hide_snippet: false,
is_file_level: false,
}
}
@@ -798,7 +782,7 @@ impl Annotation {
message: None,
is_primary: false,
tags: Vec::new(),
hide_snippet: false,
is_file_level: false,
}
}
@@ -865,20 +849,19 @@ impl Annotation {
self.tags.push(tag);
}
/// Set whether or not the snippet on this annotation should be suppressed when rendering.
/// Set whether or not this annotation is file-level.
///
/// Such annotations are only rendered with their file name and range, if available. This is
/// intended for backwards compatibility with Ruff diagnostics, which historically used
/// File-level annotations are only rendered with their file name and range, if available. This
/// is intended for backwards compatibility with Ruff diagnostics, which historically used
/// `TextRange::default` to indicate a file-level diagnostic. In the new diagnostic model, a
/// [`Span`] with a range of `None` should be used instead, as mentioned in the `Span`
/// documentation.
///
/// TODO(brent) update this usage in Ruff and remove `is_file_level` entirely. See
/// <https://github.com/astral-sh/ruff/issues/19688>, especially my first comment, for more
/// details. As of 2025-09-26 we also use this to suppress snippet rendering for formatter
/// diagnostics, which also need to have a range, so we probably can't eliminate this entirely.
pub fn hide_snippet(&mut self, yes: bool) {
self.hide_snippet = yes;
/// details.
pub fn set_file_level(&mut self, yes: bool) {
self.is_file_level = yes;
}
}
@@ -1033,17 +1016,6 @@ pub enum DiagnosticId {
/// Use of a deprecated setting.
DeprecatedSetting,
/// The code needs to be formatted.
Unformatted,
/// Use of an invalid command-line option.
InvalidCliOption,
/// An internal assumption was violated.
///
/// This indicates a bug in the program rather than a user error.
InternalError,
}
impl DiagnosticId {
@@ -1083,9 +1055,6 @@ impl DiagnosticId {
DiagnosticId::UnnecessaryOverridesSection => "unnecessary-overrides-section",
DiagnosticId::UselessOverridesSection => "useless-overrides-section",
DiagnosticId::DeprecatedSetting => "deprecated-setting",
DiagnosticId::Unformatted => "unformatted",
DiagnosticId::InvalidCliOption => "invalid-cli-option",
DiagnosticId::InternalError => "internal-error",
}
}
@@ -1384,7 +1353,7 @@ impl DisplayDiagnosticConfig {
}
/// Whether to show a fix's availability or not.
pub fn with_show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
show_fix_status: yes,
..self
@@ -1405,20 +1374,12 @@ impl DisplayDiagnosticConfig {
/// availability for unsafe or display-only fixes.
///
/// Note that this option is currently ignored when `hide_severity` is false.
pub fn with_fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
fix_applicability: applicability,
..self
}
}
pub fn show_fix_status(&self) -> bool {
self.show_fix_status
}
pub fn fix_applicability(&self) -> Applicability {
self.fix_applicability
}
}
impl Default for DisplayDiagnosticConfig {

View File

@@ -15,6 +15,7 @@ use crate::{
Db,
files::File,
source::{SourceText, line_index, source_text},
system::SystemPath,
};
use super::{
@@ -208,7 +209,6 @@ struct ResolvedDiagnostic<'a> {
message: String,
annotations: Vec<ResolvedAnnotation<'a>>,
is_fixable: bool,
header_offset: usize,
}
impl<'a> ResolvedDiagnostic<'a> {
@@ -259,8 +259,7 @@ impl<'a> ResolvedDiagnostic<'a> {
id,
message: diag.inner.message.as_str().to_string(),
annotations,
is_fixable: config.show_fix_status && diag.has_applicable_fix(config),
header_offset: diag.inner.header_offset,
is_fixable: diag.has_applicable_fix(config),
}
}
@@ -290,7 +289,6 @@ impl<'a> ResolvedDiagnostic<'a> {
message: diag.inner.message.as_str().to_string(),
annotations,
is_fixable: false,
header_offset: 0,
}
}
@@ -388,7 +386,6 @@ impl<'a> ResolvedDiagnostic<'a> {
message: &self.message,
snippets_by_input,
is_fixable: self.is_fixable,
header_offset: self.header_offset,
}
}
}
@@ -408,7 +405,7 @@ struct ResolvedAnnotation<'a> {
line_end: OneIndexed,
message: Option<&'a str>,
is_primary: bool,
hide_snippet: bool,
is_file_level: bool,
notebook_index: Option<NotebookIndex>,
}
@@ -456,7 +453,7 @@ impl<'a> ResolvedAnnotation<'a> {
line_end,
message: ann.get_message(),
is_primary: ann.is_primary,
hide_snippet: ann.hide_snippet,
is_file_level: ann.is_file_level,
notebook_index: resolver.notebook_index(&ann.span.file),
})
}
@@ -496,11 +493,6 @@ struct RenderableDiagnostic<'r> {
///
/// This is rendered as a `[*]` indicator after the diagnostic ID.
is_fixable: bool,
/// Offset to align the header sigil (`-->`) with the subsequent line number separators.
///
/// This is only needed for formatter diagnostics where we don't render a snippet via
/// `annotate-snippets` and thus the alignment isn't computed automatically.
header_offset: usize,
}
impl RenderableDiagnostic<'_> {
@@ -513,11 +505,7 @@ impl RenderableDiagnostic<'_> {
.iter()
.map(|snippet| snippet.to_annotate(path))
});
let mut message = self
.level
.title(self.message)
.is_fixable(self.is_fixable)
.lineno_offset(self.header_offset);
let mut message = self.level.title(self.message).is_fixable(self.is_fixable);
if let Some(id) = self.id {
message = message.id(id);
}
@@ -722,8 +710,8 @@ struct RenderableAnnotation<'r> {
message: Option<&'r str>,
/// Whether this annotation is considered "primary" or not.
is_primary: bool,
/// Whether the snippet for this annotation should be hidden instead of rendered.
hide_snippet: bool,
/// Whether this annotation applies to an entire file, rather than a snippet within it.
is_file_level: bool,
}
impl<'r> RenderableAnnotation<'r> {
@@ -745,7 +733,7 @@ impl<'r> RenderableAnnotation<'r> {
range,
message: ann.message,
is_primary: ann.is_primary,
hide_snippet: ann.hide_snippet,
is_file_level: ann.is_file_level,
}
}
@@ -771,7 +759,7 @@ impl<'r> RenderableAnnotation<'r> {
if let Some(message) = self.message {
ann = ann.label(message);
}
ann.hide_snippet(self.hide_snippet)
ann.is_file_level(self.is_file_level)
}
}
@@ -812,7 +800,7 @@ where
T: Db,
{
fn path(&self, file: File) -> &str {
file.path(self).as_str()
relativize_path(self.system().current_directory(), file.path(self).as_str())
}
fn input(&self, file: File) -> Input {
@@ -848,7 +836,7 @@ where
impl FileResolver for &dyn Db {
fn path(&self, file: File) -> &str {
file.path(*self).as_str()
relativize_path(self.system().current_directory(), file.path(*self).as_str())
}
fn input(&self, file: File) -> Input {
@@ -967,6 +955,14 @@ fn context_after(
line
}
/// Convert an absolute path to be relative to the current working directory.
fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
if let Ok(path) = SystemPath::new(path).strip_prefix(cwd) {
return path.as_str();
}
path
}
/// Given some source code and annotation ranges, this routine replaces
/// unprintable characters with printable representations of them.
///
@@ -2631,7 +2627,7 @@ watermelon
/// Show fix availability when rendering.
pub(super) fn show_fix_status(&mut self, yes: bool) {
let mut config = std::mem::take(&mut self.config);
config = config.with_show_fix_status(yes);
config = config.show_fix_status(yes);
self.config = config;
}
@@ -2645,7 +2641,7 @@ watermelon
/// The lowest fix applicability to show when rendering.
pub(super) fn fix_applicability(&mut self, applicability: Applicability) {
let mut config = std::mem::take(&mut self.config);
config = config.with_fix_applicability(applicability);
config = config.fix_applicability(applicability);
self.config = config;
}

View File

@@ -366,7 +366,6 @@ mod tests {
fn hide_severity_output() {
let (mut env, diagnostics) = create_diagnostics(DiagnosticFormat::Full);
env.hide_severity(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r#"
@@ -573,7 +572,7 @@ print()
let mut diagnostic = env.err().build();
let span = env.path("example.py").with_range(TextRange::default());
let mut annotation = Annotation::primary(span);
annotation.hide_snippet(true);
annotation.set_file_level(true);
diagnostic.annotate(annotation);
insta::assert_snapshot!(env.render(&diagnostic), @r"
@@ -585,8 +584,7 @@ print()
/// Check that ranges in notebooks are remapped relative to the cells.
#[test]
fn notebook_output() {
let (mut env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
env.show_fix_status(true);
let (env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
error[unused-import][*]: `os` imported but unused
--> notebook.ipynb:cell 1:2:8
@@ -700,7 +698,6 @@ print()
fn notebook_output_with_diff() {
let (mut env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
env.show_fix_diff(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
@@ -755,7 +752,6 @@ print()
fn notebook_output_with_diff_spanning_cells() {
let (mut env, mut diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
env.show_fix_diff(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
// Move all of the edits from the later diagnostics to the first diagnostic to simulate a
@@ -932,7 +928,6 @@ line 10
env.add("example.py", contents);
env.format(DiagnosticFormat::Full);
env.show_fix_diff(true);
env.show_fix_status(true);
env.fix_applicability(Applicability::DisplayOnly);
let mut diagnostic = env.err().primary("example.py", "3", "3", "label").build();

View File

@@ -2,6 +2,6 @@
source: crates/ruff_db/src/diagnostic/render/azure.rs
expression: env.render_diagnostics(&diagnostics)
---
##vso[task.logissue type=error;sourcepath=/fib.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=/fib.py;linenumber=6;columnnumber=5;code=F841;]Local variable `x` is assigned to but never used
##vso[task.logissue type=error;sourcepath=/undef.py;linenumber=1;columnnumber=4;code=F821;]Undefined name `a`
##vso[task.logissue type=error;sourcepath=fib.py;linenumber=1;columnnumber=8;code=F401;]`os` imported but unused
##vso[task.logissue type=error;sourcepath=fib.py;linenumber=6;columnnumber=5;code=F841;]Local variable `x` is assigned to but never used
##vso[task.logissue type=error;sourcepath=undef.py;linenumber=1;columnnumber=4;code=F821;]Undefined name `a`

View File

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/azure.rs
expression: env.render_diagnostics(&diagnostics)
---
##vso[task.logissue type=error;sourcepath=/syntax_errors.py;linenumber=1;columnnumber=15;code=invalid-syntax;]Expected one or more symbol names after import
##vso[task.logissue type=error;sourcepath=/syntax_errors.py;linenumber=3;columnnumber=12;code=invalid-syntax;]Expected ')', found newline
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;code=invalid-syntax;]Expected one or more symbol names after import
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;code=invalid-syntax;]Expected ')', found newline

View File

@@ -2,6 +2,6 @@
source: crates/ruff_db/src/diagnostic/render/github.rs
expression: env.render_diagnostics(&diagnostics)
---
::error title=ty (F401),file=/fib.py,line=1,col=8,endLine=1,endColumn=10::fib.py:1:8: F401 `os` imported but unused
::error title=ty (F841),file=/fib.py,line=6,col=5,endLine=6,endColumn=6::fib.py:6:5: F841 Local variable `x` is assigned to but never used
::error title=ty (F821),file=/undef.py,line=1,col=4,endLine=1,endColumn=5::undef.py:1:4: F821 Undefined name `a`
::error title=ty (F401),file=fib.py,line=1,col=8,endLine=1,endColumn=10::fib.py:1:8: F401 `os` imported but unused
::error title=ty (F841),file=fib.py,line=6,col=5,endLine=6,endColumn=6::fib.py:6:5: F841 Local variable `x` is assigned to but never used
::error title=ty (F821),file=undef.py,line=1,col=4,endLine=1,endColumn=5::undef.py:1:4: F821 Undefined name `a`

View File

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/github.rs
expression: env.render_diagnostics(&diagnostics)
---
::error title=ty (invalid-syntax),file=/syntax_errors.py,line=1,col=15,endLine=2,endColumn=1::syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import
::error title=ty (invalid-syntax),file=/syntax_errors.py,line=3,col=12,endLine=4,endColumn=1::syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline
::error title=ty (invalid-syntax),file=syntax_errors.py,line=1,col=15,endLine=2,endColumn=1::syntax_errors.py:1:15: invalid-syntax: Expected one or more symbol names after import
::error title=ty (invalid-syntax),file=syntax_errors.py,line=3,col=12,endLine=4,endColumn=1::syntax_errors.py:3:12: invalid-syntax: Expected ')', found newline

View File

@@ -10,7 +10,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 10,
"row": 2
},
"filename": "/notebook.ipynb",
"filename": "notebook.ipynb",
"fix": {
"applicability": "safe",
"edits": [
@@ -43,7 +43,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 12,
"row": 2
},
"filename": "/notebook.ipynb",
"filename": "notebook.ipynb",
"fix": {
"applicability": "safe",
"edits": [
@@ -76,7 +76,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 6,
"row": 4
},
"filename": "/notebook.ipynb",
"filename": "notebook.ipynb",
"fix": {
"applicability": "unsafe",
"edits": [

View File

@@ -10,7 +10,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 10,
"row": 1
},
"filename": "/fib.py",
"filename": "fib.py",
"fix": {
"applicability": "unsafe",
"edits": [
@@ -43,7 +43,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 6,
"row": 6
},
"filename": "/fib.py",
"filename": "fib.py",
"fix": {
"applicability": "unsafe",
"edits": [
@@ -76,7 +76,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 5,
"row": 1
},
"filename": "/undef.py",
"filename": "undef.py",
"fix": null,
"location": {
"column": 4,

View File

@@ -10,7 +10,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 1,
"row": 2
},
"filename": "/syntax_errors.py",
"filename": "syntax_errors.py",
"fix": null,
"location": {
"column": 15,
@@ -27,7 +27,7 @@ expression: env.render_diagnostics(&diagnostics)
"column": 1,
"row": 4
},
"filename": "/syntax_errors.py",
"filename": "syntax_errors.py",
"fix": null,
"location": {
"column": 12,

View File

@@ -2,6 +2,6 @@
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics)
---
{"cell":1,"code":"F401","end_location":{"column":10,"row":2},"filename":"/notebook.ipynb","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":10,"row":2},"location":{"column":1,"row":2}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":2},"message":"`os` imported but unused","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":2,"code":"F401","end_location":{"column":12,"row":2},"filename":"/notebook.ipynb","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":3},"location":{"column":1,"row":2}}],"message":"Remove unused import: `math`"},"location":{"column":8,"row":2},"message":"`math` imported but unused","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":3,"code":"F841","end_location":{"column":6,"row":4},"filename":"/notebook.ipynb","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":1,"row":5},"location":{"column":1,"row":4}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":4},"message":"Local variable `x` is assigned to but never used","noqa_row":4,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}
{"cell":1,"code":"F401","end_location":{"column":10,"row":2},"filename":"notebook.ipynb","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":10,"row":2},"location":{"column":1,"row":2}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":2},"message":"`os` imported but unused","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":2,"code":"F401","end_location":{"column":12,"row":2},"filename":"notebook.ipynb","fix":{"applicability":"safe","edits":[{"content":"","end_location":{"column":1,"row":3},"location":{"column":1,"row":2}}],"message":"Remove unused import: `math`"},"location":{"column":8,"row":2},"message":"`math` imported but unused","noqa_row":2,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":3,"code":"F841","end_location":{"column":6,"row":4},"filename":"notebook.ipynb","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":1,"row":5},"location":{"column":1,"row":4}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":4},"message":"Local variable `x` is assigned to but never used","noqa_row":4,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}

View File

@@ -2,6 +2,6 @@
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics)
---
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"/fib.py","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F841","end_location":{"column":6,"row":6},"filename":"/fib.py","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":10,"row":6},"location":{"column":5,"row":6}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":6},"message":"Local variable `x` is assigned to but never used","noqa_row":6,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}
{"cell":null,"code":"F821","end_location":{"column":5,"row":1},"filename":"/undef.py","fix":null,"location":{"column":4,"row":1},"message":"Undefined name `a`","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}
{"cell":null,"code":"F401","end_location":{"column":10,"row":1},"filename":"fib.py","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":1,"row":2},"location":{"column":1,"row":1}}],"message":"Remove unused import: `os`"},"location":{"column":8,"row":1},"message":"`os` imported but unused","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/unused-import"}
{"cell":null,"code":"F841","end_location":{"column":6,"row":6},"filename":"fib.py","fix":{"applicability":"unsafe","edits":[{"content":"","end_location":{"column":10,"row":6},"location":{"column":5,"row":6}}],"message":"Remove assignment to unused variable `x`"},"location":{"column":5,"row":6},"message":"Local variable `x` is assigned to but never used","noqa_row":6,"url":"https://docs.astral.sh/ruff/rules/unused-variable"}
{"cell":null,"code":"F821","end_location":{"column":5,"row":1},"filename":"undef.py","fix":null,"location":{"column":4,"row":1},"message":"Undefined name `a`","noqa_row":1,"url":"https://docs.astral.sh/ruff/rules/undefined-name"}

View File

@@ -2,5 +2,5 @@
source: crates/ruff_db/src/diagnostic/render/json_lines.rs
expression: env.render_diagnostics(&diagnostics)
---
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":2},"filename":"/syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"Expected one or more symbol names after import","noqa_row":null,"url":null}
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":4},"filename":"/syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"Expected ')', found newline","noqa_row":null,"url":null}
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"Expected one or more symbol names after import","noqa_row":null,"url":null}
{"cell":null,"code":"invalid-syntax","end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"Expected ')', found newline","noqa_row":null,"url":null}

View File

@@ -4,16 +4,16 @@ expression: env.render_diagnostics(&diagnostics)
---
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="3" failures="3" errors="0">
<testsuite name="/fib.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff.F401" classname="/fib" line="1" column="8">
<testsuite name="fib.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff.F401" classname="fib" line="1" column="8">
<failure message="`os` imported but unused">line 1, col 8, `os` imported but unused</failure>
</testcase>
<testcase name="org.ruff.F841" classname="/fib" line="6" column="5">
<testcase name="org.ruff.F841" classname="fib" line="6" column="5">
<failure message="Local variable `x` is assigned to but never used">line 6, col 5, Local variable `x` is assigned to but never used</failure>
</testcase>
</testsuite>
<testsuite name="/undef.py" tests="1" disabled="0" errors="0" failures="1" package="org.ruff">
<testcase name="org.ruff.F821" classname="/undef" line="1" column="4">
<testsuite name="undef.py" tests="1" disabled="0" errors="0" failures="1" package="org.ruff">
<testcase name="org.ruff.F821" classname="undef" line="1" column="4">
<failure message="Undefined name `a`">line 1, col 4, Undefined name `a`</failure>
</testcase>
</testsuite>

View File

@@ -4,11 +4,11 @@ expression: env.render_diagnostics(&diagnostics)
---
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="2" failures="2" errors="0">
<testsuite name="/syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff.invalid-syntax" classname="/syntax_errors" line="1" column="15">
<testsuite name="syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="1" column="15">
<failure message="Expected one or more symbol names after import">line 1, col 15, Expected one or more symbol names after import</failure>
</testcase>
<testcase name="org.ruff.invalid-syntax" classname="/syntax_errors" line="3" column="12">
<testcase name="org.ruff.invalid-syntax" classname="syntax_errors" line="3" column="12">
<failure message="Expected &apos;)&apos;, found newline">line 3, col 12, Expected &apos;)&apos;, found newline</failure>
</testcase>
</testsuite>

View File

@@ -10,7 +10,7 @@ expression: env.render_diagnostics(&diagnostics)
"value": "F401"
},
"location": {
"path": "/fib.py",
"path": "fib.py",
"range": {
"end": {
"column": 10,
@@ -45,7 +45,7 @@ expression: env.render_diagnostics(&diagnostics)
"value": "F841"
},
"location": {
"path": "/fib.py",
"path": "fib.py",
"range": {
"end": {
"column": 6,
@@ -80,7 +80,7 @@ expression: env.render_diagnostics(&diagnostics)
"value": "F821"
},
"location": {
"path": "/undef.py",
"path": "undef.py",
"range": {
"end": {
"column": 5,

View File

@@ -9,7 +9,7 @@ expression: env.render_diagnostics(&diagnostics)
"value": "invalid-syntax"
},
"location": {
"path": "/syntax_errors.py",
"path": "syntax_errors.py",
"range": {
"end": {
"column": 1,
@@ -28,7 +28,7 @@ expression: env.render_diagnostics(&diagnostics)
"value": "invalid-syntax"
},
"location": {
"path": "/syntax_errors.py",
"path": "syntax_errors.py",
"range": {
"end": {
"column": 1,

View File

@@ -31,29 +31,6 @@ impl Payload {
}
}
impl PanicError {
pub fn to_diagnostic_message(&self, path: Option<impl std::fmt::Display>) -> String {
use std::fmt::Write;
let mut message = String::new();
message.push_str("Panicked");
if let Some(location) = &self.location {
let _ = write!(&mut message, " at {location}");
}
if let Some(path) = path {
let _ = write!(&mut message, " when checking `{path}`");
}
if let Some(payload) = self.payload.as_str() {
let _ = write!(&mut message, ": `{payload}`");
}
message
}
}
impl std::fmt::Display for PanicError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "panicked at")?;

View File

@@ -38,12 +38,12 @@ impl std::fmt::Display for FormatError {
),
FormatError::InvalidDocument(error) => std::write!(
fmt,
"Invalid document: {error}\n\n This is an internal Ruff error. Please report if necessary."
"Invalid document: {error}\n\n This is an internal Rome error. Please report if necessary."
),
FormatError::PoorLayout => {
std::write!(
fmt,
"Poor layout: The formatter wasn't able to pick a good layout for your document. This is an internal Ruff error. Please report if necessary."
"Poor layout: The formatter wasn't able to pick a good layout for your document. This is an internal Rome error. Please report if necessary."
)
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.13.3"
version = "0.13.1"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -1,18 +0,0 @@
from contextlib import nullcontext
def check_isolation_level(mode: int) -> None:
"""Will report both, but only fix the first.""" # ERROR PYI021
... # ERROR PIE790
with nullcontext():
"""Should not report."""
# add something thats not a pass here
# to not get a remove unnecessary pass err
x = 0
if True:
"""Should not report."""
# same as above
y = 1

View File

@@ -1,3 +0,0 @@
import concurrent.futures as futures
1

View File

@@ -1,3 +0,0 @@
import concurrent.futures as futures
1

View File

@@ -1 +0,0 @@
from builtins import str, int

View File

@@ -37,9 +37,3 @@ log(logging.INFO, "Hello %r", repr("World!"))
def str(s): return f"str = {s}"
# Don't flag this
logging.info("Hello %s", str("World!"))
logging.info("Debug info: %r", repr("test\nstring"))
logging.warning("Value: %r", repr(42))
logging.error("Error: %r", repr([1, 2, 3]))
logging.info("Debug info: %s", repr("test\nstring"))
logging.warning("Value: %s", repr(42))

View File

@@ -145,6 +145,9 @@ pub(crate) fn definitions(checker: &mut Checker) {
}
// flake8-pyi
if enforce_stubs {
flake8_pyi::rules::docstring_in_stubs(checker, definition, docstring);
}
if enforce_stubs_and_runtime {
flake8_pyi::rules::iter_method_return_iterable(checker, definition);
}

View File

@@ -212,10 +212,13 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
if ctx.is_store() {
let check_too_many_expressions =
checker.is_rule_enabled(Rule::ExpressionsInStarAssignment);
let check_two_starred_expressions =
checker.is_rule_enabled(Rule::MultipleStarredExpressions);
pyflakes::rules::starred_expressions(
checker,
elts,
check_too_many_expressions,
check_two_starred_expressions,
expr.range(),
);
}

View File

@@ -803,7 +803,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
for alias in names {
if module != Some("__future__") && &alias.name == "*" {
if let Some("__future__") = module {
if checker.is_rule_enabled(Rule::FutureFeatureNotDefined) {
pyflakes::rules::future_feature_not_defined(checker, alias);
}
} else if &alias.name == "*" {
// F403
checker.report_diagnostic_if_enabled(
pyflakes::rules::UndefinedLocalWithImportStar {

View File

@@ -2,17 +2,14 @@ use ruff_python_ast::Stmt;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::flake8_pie;
use crate::rules::refurb;
use crate::rules::{flake8_pie, flake8_pyi};
/// Run lint rules over a suite of [`Stmt`] syntax nodes.
pub(crate) fn suite(suite: &[Stmt], checker: &Checker) {
if checker.is_rule_enabled(Rule::UnnecessaryPlaceholder) {
flake8_pie::rules::unnecessary_placeholder(checker, suite);
}
if checker.source_type.is_stub() && checker.is_rule_enabled(Rule::DocstringInStub) {
flake8_pyi::rules::docstring_in_stubs(checker, suite);
}
if checker.is_rule_enabled(Rule::RepeatedGlobal) {
refurb::rules::repeated_global(checker, suite);
}

View File

@@ -28,7 +28,7 @@ use itertools::Itertools;
use log::debug;
use rustc_hash::{FxHashMap, FxHashSet};
use ruff_db::diagnostic::{Annotation, Diagnostic, DiagnosticTag, IntoDiagnosticMessage, Span};
use ruff_db::diagnostic::{Annotation, Diagnostic, IntoDiagnosticMessage, Span};
use ruff_diagnostics::{Applicability, Fix, IsolationLevel};
use ruff_notebook::{CellOffsets, NotebookIndex};
use ruff_python_ast::helpers::{collect_import_from_member, is_docstring_stmt, to_module_path};
@@ -69,8 +69,8 @@ use crate::package::PackageRoot;
use crate::preview::is_undefined_export_in_dunder_init_enabled;
use crate::registry::Rule;
use crate::rules::pyflakes::rules::{
LateFutureImport, MultipleStarredExpressions, ReturnOutsideFunction,
UndefinedLocalWithNestedImportStarUsage, YieldOutsideFunction,
LateFutureImport, ReturnOutsideFunction, UndefinedLocalWithNestedImportStarUsage,
YieldOutsideFunction,
};
use crate::rules::pylint::rules::{
AwaitOutsideAsync, LoadBeforeGlobalDeclaration, YieldFromInAsyncFunction,
@@ -87,7 +87,7 @@ mod deferred;
/// State representing whether a docstring is expected or not for the next statement.
#[derive(Debug, Copy, Clone, PartialEq)]
pub(crate) enum DocstringState {
enum DocstringState {
/// The next statement is expected to be a docstring, but not necessarily so.
///
/// For example, in the following code:
@@ -128,7 +128,7 @@ impl DocstringState {
/// The kind of an expected docstring.
#[derive(Debug, Copy, Clone, PartialEq)]
pub(crate) enum ExpectedDocstringKind {
enum ExpectedDocstringKind {
/// A module-level docstring.
///
/// For example,
@@ -603,11 +603,6 @@ impl<'a> Checker<'a> {
pub(crate) const fn context(&self) -> &'a LintContext<'a> {
self.context
}
/// Return the current [`DocstringState`].
pub(crate) fn docstring_state(&self) -> DocstringState {
self.docstring_state
}
}
pub(crate) struct TypingImporter<'a, 'b> {
@@ -690,20 +685,6 @@ impl SemanticSyntaxContext for Checker<'_> {
self.report_diagnostic(YieldFromInAsyncFunction, error.range);
}
}
SemanticSyntaxErrorKind::MultipleStarredExpressions => {
// F622
if self.is_rule_enabled(Rule::MultipleStarredExpressions) {
self.report_diagnostic(MultipleStarredExpressions, error.range);
}
}
SemanticSyntaxErrorKind::FutureFeatureNotDefined(name) => {
if self.is_rule_enabled(Rule::FutureFeatureNotDefined) {
self.report_diagnostic(
pyflakes::rules::FutureFeatureNotDefined { name },
error.range,
);
}
}
SemanticSyntaxErrorKind::ReboundComprehensionVariable
| SemanticSyntaxErrorKind::DuplicateTypeParameter
| SemanticSyntaxErrorKind::MultipleCaseAssignment(_)
@@ -2360,7 +2341,7 @@ impl<'a> Checker<'a> {
}
}
/// Visit a body of [`Stmt`] nodes within a type-checking block.
/// Visit an body of [`Stmt`] nodes within a type-checking block.
fn visit_type_checking_block(&mut self, body: &'a [Stmt]) {
let snapshot = self.semantic.flags;
self.semantic.flags |= SemanticModelFlags::TYPE_CHECKING_BLOCK;
@@ -3326,56 +3307,6 @@ impl DiagnosticGuard<'_, '_> {
pub(crate) fn defuse(mut self) {
self.diagnostic = None;
}
/// Set the message on the primary annotation for this diagnostic.
///
/// If a message already exists on the primary annotation, then this
/// overwrites the existing message.
///
/// This message is associated with the primary annotation created
/// for every `Diagnostic` that uses the `DiagnosticGuard` API.
/// Specifically, the annotation is derived from the `TextRange` given to
/// the `LintContext::report_diagnostic` API.
///
/// Callers can add additional primary or secondary annotations via the
/// `DerefMut` trait implementation to a `Diagnostic`.
pub(crate) fn set_primary_message(&mut self, message: impl IntoDiagnosticMessage) {
// N.B. It is normally bad juju to define `self` methods
// on types that implement `Deref`. Instead, it's idiomatic
// to do `fn foo(this: &mut LintDiagnosticGuard)`, which in
// turn forces callers to use
// `LintDiagnosticGuard(&mut guard, message)`. But this is
// supremely annoying for what is expected to be a common
// case.
//
// Moreover, most of the downside that comes from these sorts
// of methods is a semver hazard. Because the deref target type
// could also define a method by the same name, and that leads
// to confusion. But we own all the code involved here and
// there is no semver boundary. So... ¯\_(ツ)_/¯ ---AG
// OK because we know the diagnostic was constructed with a single
// primary annotation that will always come before any other annotation
// in the diagnostic. (This relies on the `Diagnostic` API not exposing
// any methods for removing annotations or re-ordering them, which is
// true as of 2025-04-11.)
let ann = self.primary_annotation_mut().unwrap();
ann.set_message(message);
}
/// Adds a tag on the primary annotation for this diagnostic.
///
/// This tag is associated with the primary annotation created
/// for every `Diagnostic` that uses the `DiagnosticGuard` API.
/// Specifically, the annotation is derived from the `TextRange` given to
/// the `LintContext::report_diagnostic` API.
///
/// Callers can add additional primary or secondary annotations via the
/// `DerefMut` trait implementation to a `Diagnostic`.
pub(crate) fn add_primary_tag(&mut self, tag: DiagnosticTag) {
let ann = self.primary_annotation_mut().unwrap();
ann.push_tag(tag);
}
}
impl DiagnosticGuard<'_, '_> {

View File

@@ -223,11 +223,6 @@ impl DisplayParseError {
pub fn path(&self) -> Option<&Path> {
self.path.as_deref()
}
/// Return the underlying [`ParseError`].
pub fn error(&self) -> &ParseError {
&self.error
}
}
impl std::error::Error for DisplayParseError {}

View File

@@ -6,25 +6,17 @@ use std::num::NonZeroUsize;
use colored::Colorize;
use ruff_db::diagnostic::Diagnostic;
use ruff_diagnostics::Applicability;
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed};
use crate::fs::relativize_path;
use crate::message::{Emitter, EmitterContext};
use crate::settings::types::UnsafeFixes;
#[derive(Default)]
pub struct GroupedEmitter {
show_fix_status: bool,
applicability: Applicability,
}
impl Default for GroupedEmitter {
fn default() -> Self {
Self {
show_fix_status: false,
applicability: Applicability::Safe,
}
}
unsafe_fixes: UnsafeFixes,
}
impl GroupedEmitter {
@@ -35,8 +27,8 @@ impl GroupedEmitter {
}
#[must_use]
pub fn with_applicability(mut self, applicability: Applicability) -> Self {
self.applicability = applicability;
pub fn with_unsafe_fixes(mut self, unsafe_fixes: UnsafeFixes) -> Self {
self.unsafe_fixes = unsafe_fixes;
self
}
}
@@ -75,7 +67,7 @@ impl Emitter for GroupedEmitter {
notebook_index: context.notebook_index(&message.expect_ruff_filename()),
message,
show_fix_status: self.show_fix_status,
applicability: self.applicability,
unsafe_fixes: self.unsafe_fixes,
row_length,
column_length,
}
@@ -122,7 +114,7 @@ fn group_diagnostics_by_filename(
struct DisplayGroupedMessage<'a> {
message: MessageWithLocation<'a>,
show_fix_status: bool,
applicability: Applicability,
unsafe_fixes: UnsafeFixes,
row_length: NonZeroUsize,
column_length: NonZeroUsize,
notebook_index: Option<&'a NotebookIndex>,
@@ -170,7 +162,7 @@ impl Display for DisplayGroupedMessage<'_> {
code_and_body = RuleCodeAndBody {
message,
show_fix_status: self.show_fix_status,
applicability: self.applicability
unsafe_fixes: self.unsafe_fixes
},
)?;
@@ -181,7 +173,7 @@ impl Display for DisplayGroupedMessage<'_> {
pub(super) struct RuleCodeAndBody<'a> {
pub(crate) message: &'a Diagnostic,
pub(crate) show_fix_status: bool,
pub(crate) applicability: Applicability,
pub(crate) unsafe_fixes: UnsafeFixes,
}
impl Display for RuleCodeAndBody<'_> {
@@ -189,7 +181,7 @@ impl Display for RuleCodeAndBody<'_> {
if self.show_fix_status {
if let Some(fix) = self.message.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.applicability) {
if fix.applies(self.unsafe_fixes.required_applicability()) {
if let Some(code) = self.message.secondary_code() {
write!(f, "{} ", code.red().bold())?;
}
@@ -225,12 +217,11 @@ impl Display for RuleCodeAndBody<'_> {
mod tests {
use insta::assert_snapshot;
use ruff_diagnostics::Applicability;
use crate::message::GroupedEmitter;
use crate::message::tests::{
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
};
use crate::settings::types::UnsafeFixes;
#[test]
fn default() {
@@ -260,7 +251,7 @@ mod tests {
fn fix_status_unsafe() {
let mut emitter = GroupedEmitter::default()
.with_show_fix_status(true)
.with_applicability(Applicability::Unsafe);
.with_unsafe_fixes(UnsafeFixes::Enabled);
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
assert_snapshot!(content);

View File

@@ -1,30 +1,27 @@
use std::backtrace::BacktraceStatus;
use std::fmt::Display;
use std::io::Write;
use std::path::Path;
use ruff_db::panic::PanicError;
use rustc_hash::FxHashMap;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig,
DisplayDiagnostics, DisplayGithubDiagnostics, FileResolver, GithubRenderer, Input, LintName,
SecondaryCode, Severity, Span, SubDiagnostic, SubDiagnosticSeverity, UnifiedFile,
Annotation, Diagnostic, DiagnosticId, FileResolver, Input, LintName, SecondaryCode, Severity,
Span, UnifiedFile,
};
use ruff_db::files::File;
pub use grouped::GroupedEmitter;
use ruff_notebook::NotebookIndex;
use ruff_source_file::{SourceFile, SourceFileBuilder};
use ruff_source_file::SourceFile;
use ruff_text_size::{Ranged, TextRange, TextSize};
pub use sarif::SarifEmitter;
pub use text::TextEmitter;
use crate::Fix;
use crate::registry::Rule;
use crate::settings::types::{OutputFormat, RuffOutputFormat};
mod grouped;
mod sarif;
mod text;
/// Creates a `Diagnostic` from a syntax error, with the format expected by Ruff.
///
@@ -44,55 +41,6 @@ pub fn create_syntax_error_diagnostic(
diag
}
/// Create a `Diagnostic` from a panic.
pub fn create_panic_diagnostic(error: &PanicError, path: Option<&Path>) -> Diagnostic {
let mut diagnostic = Diagnostic::new(
DiagnosticId::Panic,
Severity::Fatal,
error.to_diagnostic_message(path.as_ref().map(|path| path.display())),
);
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
"This indicates a bug in Ruff.",
));
let report_message = "If you could open an issue at \
https://github.com/astral-sh/ruff/issues/new?title=%5Bpanic%5D, \
we'd be very appreciative!";
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
report_message,
));
if let Some(backtrace) = &error.backtrace {
match backtrace.status() {
BacktraceStatus::Disabled => {
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
"run with `RUST_BACKTRACE=1` environment variable to show the full backtrace information",
));
}
BacktraceStatus::Captured => {
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!("Backtrace:\n{backtrace}"),
));
}
_ => {}
}
}
if let Some(path) = path {
let file = SourceFileBuilder::new(path.to_string_lossy(), "").finish();
let span = Span::from(file);
let mut annotation = Annotation::primary(span);
annotation.hide_snippet(true);
diagnostic.annotate(annotation);
}
diagnostic
}
#[expect(clippy::too_many_arguments)]
pub fn create_lint_diagnostic<B, S>(
body: B,
@@ -122,7 +70,7 @@ where
// actually need it, but we need to be able to cache the new diagnostic model first. See
// https://github.com/astral-sh/ruff/issues/19688.
if range == TextRange::default() {
annotation.hide_snippet(true);
annotation.set_file_level(true);
}
diagnostic.annotate(annotation);
@@ -212,48 +160,14 @@ impl<'a> EmitterContext<'a> {
}
}
pub fn render_diagnostics(
writer: &mut dyn Write,
format: OutputFormat,
config: DisplayDiagnosticConfig,
context: &EmitterContext<'_>,
diagnostics: &[Diagnostic],
) -> std::io::Result<()> {
match DiagnosticFormat::try_from(format) {
Ok(format) => {
let config = config.format(format);
let value = DisplayDiagnostics::new(context, &config, diagnostics);
write!(writer, "{value}")?;
}
Err(RuffOutputFormat::Github) => {
let renderer = GithubRenderer::new(context, "Ruff");
let value = DisplayGithubDiagnostics::new(&renderer, diagnostics);
write!(writer, "{value}")?;
}
Err(RuffOutputFormat::Grouped) => {
GroupedEmitter::default()
.with_show_fix_status(config.show_fix_status())
.with_applicability(config.fix_applicability())
.emit(writer, diagnostics, context)
.map_err(std::io::Error::other)?;
}
Err(RuffOutputFormat::Sarif) => {
SarifEmitter
.emit(writer, diagnostics, context)
.map_err(std::io::Error::other)?;
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use rustc_hash::FxHashMap;
use ruff_db::diagnostic::Diagnostic;
use ruff_notebook::NotebookIndex;
use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
use ruff_source_file::SourceFileBuilder;
use ruff_source_file::{OneIndexed, SourceFileBuilder};
use ruff_text_size::{TextRange, TextSize};
use crate::codes::Rule;
@@ -343,6 +257,104 @@ def fibonacci(n):
vec![unused_import, unused_variable, undefined_name]
}
/// Builds three fix-carrying diagnostics against a small synthetic notebook,
/// together with the [`NotebookIndex`] that maps rows of the concatenated
/// source back to their cells.
///
/// All `TextSize` values below are byte offsets into the combined `notebook`
/// string (cell boundaries are not materialized in the text itself).
pub(super) fn create_notebook_diagnostics()
-> (Vec<Diagnostic>, FxHashMap<String, NotebookIndex>) {
    // Three cells concatenated: cell 1 = rows 1-2, cell 2 = rows 3-6
    // (including the blank row), cell 3 = rows 7-10.
    let notebook = r"# cell 1
import os
# cell 2
import math

print('hello world')
# cell 3
def foo():
    print()
    x = 1
";

    let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish();

    // Offsets 16..18 cover the name `os`; the fix deletes 9..19, i.e. the
    // whole `import os` line.
    let unused_import_os_start = TextSize::from(16);
    let unused_import_os = create_lint_diagnostic(
        "`os` imported but unused",
        Some("Remove unused import: `os`"),
        TextRange::new(unused_import_os_start, TextSize::from(18)),
        Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
            TextSize::from(9),
            TextSize::from(19),
        )))),
        None,
        notebook_source.clone(),
        Some(unused_import_os_start),
        Rule::UnusedImport,
    );

    // Offsets 35..39 cover `math`; the fix deletes the `import math` line.
    let unused_import_math_start = TextSize::from(35);
    let unused_import_math = create_lint_diagnostic(
        "`math` imported but unused",
        Some("Remove unused import: `math`"),
        TextRange::new(unused_import_math_start, TextSize::from(39)),
        Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
            TextSize::from(28),
            TextSize::from(40),
        )))),
        None,
        notebook_source.clone(),
        Some(unused_import_math_start),
        Rule::UnusedImport,
    );

    // Offsets 98..99 cover the `x` binding in cell 3; the (unsafe) fix
    // removes the whole `    x = 1` statement.
    let unused_variable_start = TextSize::from(98);
    let unused_variable = create_lint_diagnostic(
        "Local variable `x` is assigned to but never used",
        Some("Remove assignment to unused variable `x`"),
        TextRange::new(unused_variable_start, TextSize::from(99)),
        Some(Fix::unsafe_edit(Edit::deletion(
            TextSize::from(94),
            TextSize::from(104),
        ))),
        None,
        notebook_source,
        Some(unused_variable_start),
        Rule::UnusedVariable,
    );

    let mut notebook_indexes = FxHashMap::default();
    notebook_indexes.insert(
        "notebook.ipynb".to_string(),
        NotebookIndex::new(
            // Per source row: which cell it belongs to
            // (rows 1-2 -> cell 1, rows 3-6 -> cell 2, rows 7-10 -> cell 3).
            vec![
                OneIndexed::from_zero_indexed(0),
                OneIndexed::from_zero_indexed(0),
                OneIndexed::from_zero_indexed(1),
                OneIndexed::from_zero_indexed(1),
                OneIndexed::from_zero_indexed(1),
                OneIndexed::from_zero_indexed(1),
                OneIndexed::from_zero_indexed(2),
                OneIndexed::from_zero_indexed(2),
                OneIndexed::from_zero_indexed(2),
                OneIndexed::from_zero_indexed(2),
            ],
            // Per source row: its row number within that cell.
            vec![
                OneIndexed::from_zero_indexed(0),
                OneIndexed::from_zero_indexed(1),
                OneIndexed::from_zero_indexed(0),
                OneIndexed::from_zero_indexed(1),
                OneIndexed::from_zero_indexed(2),
                OneIndexed::from_zero_indexed(3),
                OneIndexed::from_zero_indexed(0),
                OneIndexed::from_zero_indexed(1),
                OneIndexed::from_zero_indexed(2),
                OneIndexed::from_zero_indexed(3),
            ],
        ),
    );

    (
        vec![unused_import_os, unused_import_math, unused_variable],
        notebook_indexes,
    )
}
pub(super) fn capture_emitter_output(
emitter: &mut dyn Emitter,
diagnostics: &[Diagnostic],
@@ -354,4 +366,16 @@ def fibonacci(n):
String::from_utf8(output).expect("Output to be valid UTF-8")
}
/// Runs `emitter` over `diagnostics` with the given notebook index
/// information and returns everything it wrote as a UTF-8 string.
///
/// # Panics
///
/// Panics if the emitter fails or produces invalid UTF-8.
pub(super) fn capture_emitter_notebook_output(
    emitter: &mut dyn Emitter,
    diagnostics: &[Diagnostic],
    notebook_indexes: &FxHashMap<String, NotebookIndex>,
) -> String {
    let mut buffer: Vec<u8> = Vec::new();
    let context = EmitterContext::new(notebook_indexes);
    emitter.emit(&mut buffer, diagnostics, &context).unwrap();

    String::from_utf8(buffer).expect("Output to be valid UTF-8")
}
}

View File

@@ -129,7 +129,7 @@ expression: value
"rules": [
{
"fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Preview\nWhen [preview] is enabled (and certain simplifying assumptions\nare met), we analyze all import statements for a given module\nwhen determining whether an import is used, rather than simply\nthe last of these statements. This can result in both different and\nmore import statements being marked as unused.\n\nFor example, if a module consists of\n\n```python\nimport a\nimport a.b\n```\n\nthen both statements are marked as unused under [preview], whereas\nonly the second is marked as unused under stable behavior.\n\nAs another example, if a module consists of\n\n```python\nimport a.b\nimport a\n\na.b.foo()\n```\n\nthen a diagnostic will only be emitted for the first line under [preview],\nwhereas a diagnostic would only be emitted for the second line under\nstable behavior.\n\nNote that this behavior is somewhat subjective and is designed\nto conform to the developer's intuition rather than Python's actual\nexecution. 
To wit, the statement `import a.b` automatically executes\n`import a`, so in some sense `import a` is _always_ redundant\nin the presence of `import a.b`.\n\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n\n[preview]: https://docs.astral.sh/ruff/preview/\n"
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. 
Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"

View File

@@ -0,0 +1,30 @@
---
source: crates/ruff_linter/src/message/text.rs
expression: content
---
F401 `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`
F841 Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`
F821 Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|

View File

@@ -0,0 +1,30 @@
---
source: crates/ruff_linter/src/message/text.rs
expression: content
---
F401 `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`
F841 Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`
F821 Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|

View File

@@ -0,0 +1,30 @@
---
source: crates/ruff_linter/src/message/text.rs
expression: content
---
F401 [*] `os` imported but unused
--> fib.py:1:8
|
1 | import os
| ^^
|
help: Remove unused import: `os`
F841 [*] Local variable `x` is assigned to but never used
--> fib.py:6:5
|
4 | def fibonacci(n):
5 | """Compute the nth number in the Fibonacci sequence."""
6 | x = 1
| ^
7 | if n == 0:
8 | return 0
|
help: Remove assignment to unused variable `x`
F821 Undefined name `a`
--> undef.py:1:4
|
1 | if a == 1: pass
| ^
|

View File

@@ -0,0 +1,33 @@
---
source: crates/ruff_linter/src/message/text.rs
expression: content
---
F401 [*] `os` imported but unused
--> notebook.ipynb:cell 1:2:8
|
1 | # cell 1
2 | import os
| ^^
|
help: Remove unused import: `os`
F401 [*] `math` imported but unused
--> notebook.ipynb:cell 2:2:8
|
1 | # cell 2
2 | import math
| ^^^^
3 |
4 | print('hello world')
|
help: Remove unused import: `math`
F841 [*] Local variable `x` is assigned to but never used
--> notebook.ipynb:cell 3:4:5
|
2 | def foo():
3 | print()
4 | x = 1
| ^
|
help: Remove assignment to unused variable `x`

View File

@@ -0,0 +1,23 @@
---
source: crates/ruff_linter/src/message/text.rs
expression: content
---
invalid-syntax: Expected one or more symbol names after import
--> syntax_errors.py:1:15
|
1 | from os import
| ^
2 |
3 | if call(foo
|
invalid-syntax: Expected ')', found newline
--> syntax_errors.py:3:12
|
1 | from os import
2 |
3 | if call(foo
| ^
4 | def bar():
5 | pass
|

View File

@@ -0,0 +1,143 @@
use std::io::Write;
use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics,
};
use ruff_diagnostics::Applicability;
use crate::message::{Emitter, EmitterContext};
/// Emitter that renders diagnostics as plain text (concise one-line or full
/// with source snippets) by delegating to [`DisplayDiagnostics`].
pub struct TextEmitter {
    // Rendering options accumulated via the `with_*` builder methods.
    config: DisplayDiagnosticConfig,
}
impl Default for TextEmitter {
    fn default() -> Self {
        Self {
            config: DisplayDiagnosticConfig::default()
                // Start from the concise format; `with_show_source(true)`
                // upgrades this to the full format.
                .format(DiagnosticFormat::Concise)
                // Severity labels are suppressed in this emitter's output.
                .hide_severity(true)
                // Never colorize under `cargo test` (keeps snapshots stable);
                // otherwise defer to the `colored` crate's global detection.
                .color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize()),
        }
    }
}
/// Builder-style configuration methods; each consumes the emitter and
/// returns it with the updated rendering configuration.
impl TextEmitter {
    /// Toggles the fix-status indicator shown next to each diagnostic.
    #[must_use]
    pub fn with_show_fix_status(self, show_fix_status: bool) -> Self {
        Self {
            config: self.config.show_fix_status(show_fix_status),
        }
    }

    /// Toggles inclusion of fix diffs in the rendered output.
    #[must_use]
    pub fn with_show_fix_diff(self, show_fix_diff: bool) -> Self {
        Self {
            config: self.config.show_fix_diff(show_fix_diff),
        }
    }

    /// Selects the full format (with source snippets) when `show_source` is
    /// `true`, and the concise format otherwise.
    #[must_use]
    pub fn with_show_source(self, show_source: bool) -> Self {
        let format = if show_source {
            DiagnosticFormat::Full
        } else {
            DiagnosticFormat::Concise
        };
        Self {
            config: self.config.format(format),
        }
    }

    /// Sets the fix applicability threshold used when rendering.
    #[must_use]
    pub fn with_fix_applicability(self, applicability: Applicability) -> Self {
        Self {
            config: self.config.fix_applicability(applicability),
        }
    }

    /// Enables or disables preview-mode rendering behavior.
    #[must_use]
    pub fn with_preview(self, preview: bool) -> Self {
        Self {
            config: self.config.preview(preview),
        }
    }

    /// Forces colored output on or off, overriding the default detection.
    #[must_use]
    pub fn with_color(self, color: bool) -> Self {
        Self {
            config: self.config.color(color),
        }
    }
}
impl Emitter for TextEmitter {
    /// Writes all `diagnostics` to `writer` using the accumulated `config`.
    fn emit(
        &mut self,
        writer: &mut dyn Write,
        diagnostics: &[Diagnostic],
        context: &EmitterContext,
    ) -> anyhow::Result<()> {
        // All formatting is delegated to `DisplayDiagnostics`; this emitter
        // only carries the configuration.
        write!(
            writer,
            "{}",
            DisplayDiagnostics::new(context, &self.config, diagnostics)
        )?;

        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;

    use ruff_diagnostics::Applicability;

    use crate::message::TextEmitter;
    use crate::message::tests::{
        capture_emitter_notebook_output, capture_emitter_output, create_diagnostics,
        create_notebook_diagnostics, create_syntax_error_diagnostics,
    };

    // Baseline: full output with source snippets, no fix-status markers.
    #[test]
    fn default() {
        let mut emitter = TextEmitter::default().with_show_source(true);
        let content = capture_emitter_output(&mut emitter, &create_diagnostics());

        assert_snapshot!(content);
    }

    // Fix-status markers are shown only for fixes at the default
    // (safe) applicability.
    #[test]
    fn fix_status() {
        let mut emitter = TextEmitter::default()
            .with_show_fix_status(true)
            .with_show_source(true);
        let content = capture_emitter_output(&mut emitter, &create_diagnostics());

        assert_snapshot!(content);
    }

    // Raising the applicability threshold to `Unsafe` marks unsafe fixes too.
    #[test]
    fn fix_status_unsafe() {
        let mut emitter = TextEmitter::default()
            .with_show_fix_status(true)
            .with_show_source(true)
            .with_fix_applicability(Applicability::Unsafe);
        let content = capture_emitter_output(&mut emitter, &create_diagnostics());

        assert_snapshot!(content);
    }

    // Notebook diagnostics render with `cell N:row:col` locations derived
    // from the provided `NotebookIndex`.
    #[test]
    fn notebook_output() {
        let mut emitter = TextEmitter::default()
            .with_show_fix_status(true)
            .with_show_source(true)
            .with_fix_applicability(Applicability::Unsafe);
        let (messages, notebook_indexes) = create_notebook_diagnostics();
        let content = capture_emitter_notebook_output(&mut emitter, &messages, &notebook_indexes);

        assert_snapshot!(content);
    }

    // Syntax-error diagnostics (no rule code, no fixes) also render.
    #[test]
    fn syntax_errors() {
        let mut emitter = TextEmitter::default().with_show_source(true);
        let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());

        assert_snapshot!(content);
    }
}

View File

@@ -235,8 +235,3 @@ pub(crate) const fn is_a003_class_scope_shadowing_expansion_enabled(
) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/20200
/// Returns `true` when refined submodule-import matching is enabled;
/// currently gated behind preview mode.
pub(crate) const fn is_refined_submodule_import_match_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

View File

@@ -17,13 +17,13 @@ use ruff_text_size::TextRange;
pub(crate) enum Replacement {
// There's no replacement or suggestion other than removal
None,
// Additional information. Used when there's no direct mapping replacement.
Message(&'static str),
// The attribute name of a class has been changed.
AttrName(&'static str),
// Additional information. Used when there's replacement but they're not direct mapping.
Message(&'static str),
// Symbols updated in Airflow 3 with replacement
// e.g., `airflow.datasets.Dataset` to `airflow.sdk.Asset`
Rename {
AutoImport {
module: &'static str,
name: &'static str,
},
@@ -37,7 +37,7 @@ pub(crate) enum Replacement {
#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) enum ProviderReplacement {
Rename {
AutoImport {
module: &'static str,
name: &'static str,
provider: &'static str,

View File

@@ -50,7 +50,7 @@ impl Violation for Airflow3MovedToProvider<'_> {
replacement,
} = self;
match replacement {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
name: _,
module: _,
provider,
@@ -70,7 +70,7 @@ impl Violation for Airflow3MovedToProvider<'_> {
fn fix_title(&self) -> Option<String> {
let Airflow3MovedToProvider { replacement, .. } = self;
if let Some((module, name, provider, version)) = match &replacement {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module,
name,
provider,
@@ -125,18 +125,20 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "amazon",
version: "1.0.0",
},
["airflow", "operators", "gcs_to_s3", "GCSToS3Operator"] => ProviderReplacement::Rename {
module: "airflow.providers.amazon.aws.transfers.gcs_to_s3",
name: "GCSToS3Operator",
provider: "amazon",
version: "1.0.0",
},
["airflow", "operators", "gcs_to_s3", "GCSToS3Operator"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.amazon.aws.transfers.gcs_to_s3",
name: "GCSToS3Operator",
provider: "amazon",
version: "1.0.0",
}
}
[
"airflow",
"operators",
"google_api_to_s3_transfer",
"GoogleApiToS3Operator" | "GoogleApiToS3Transfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.amazon.aws.transfers.google_api_to_s3",
name: "GoogleApiToS3Operator",
provider: "amazon",
@@ -147,7 +149,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"redshift_to_s3_operator",
"RedshiftToS3Operator" | "RedshiftToS3Transfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.amazon.aws.transfers.redshift_to_s3",
name: "RedshiftToS3Operator",
provider: "amazon",
@@ -158,7 +160,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"s3_file_transform_operator",
"S3FileTransformOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.amazon.aws.operators.s3",
name: "S3FileTransformOperator",
provider: "amazon",
@@ -169,13 +171,13 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"s3_to_redshift_operator",
"S3ToRedshiftOperator" | "S3ToRedshiftTransfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.amazon.aws.transfers.s3_to_redshift",
name: "S3ToRedshiftOperator",
provider: "amazon",
version: "1.0.0",
},
["airflow", "sensors", "s3_key_sensor", "S3KeySensor"] => ProviderReplacement::Rename {
["airflow", "sensors", "s3_key_sensor", "S3KeySensor"] => ProviderReplacement::AutoImport {
module: "airflow.providers.amazon.aws.sensors.s3",
name: "S3KeySensor",
provider: "amazon",
@@ -188,20 +190,20 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"config_templates",
"default_celery",
"DEFAULT_CELERY_CONFIG",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.celery.executors.default_celery",
name: "DEFAULT_CELERY_CONFIG",
provider: "celery",
version: "3.3.0",
},
["airflow", "executors", "celery_executor", rest] => match *rest {
"app" => ProviderReplacement::Rename {
"app" => ProviderReplacement::AutoImport {
module: "airflow.providers.celery.executors.celery_executor_utils",
name: "app",
provider: "celery",
version: "3.3.0",
},
"CeleryExecutor" => ProviderReplacement::Rename {
"CeleryExecutor" => ProviderReplacement::AutoImport {
module: "airflow.providers.celery.executors.celery_executor",
name: "CeleryExecutor",
provider: "celery",
@@ -214,7 +216,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"executors",
"celery_kubernetes_executor",
"CeleryKubernetesExecutor",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.celery.executors.celery_kubernetes_executor",
name: "CeleryKubernetesExecutor",
provider: "celery",
@@ -233,7 +235,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "common-sql",
version: "1.0.0",
},
["airflow", "hooks", "dbapi_hook", "DbApiHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "dbapi_hook", "DbApiHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.hooks.sql",
name: "DbApiHook",
provider: "common-sql",
@@ -250,7 +252,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"check_operator" | "druid_check_operator" | "presto_check_operator",
"CheckOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLCheckOperator",
provider: "common-sql",
@@ -267,7 +269,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"presto_check_operator",
"PrestoCheckOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLCheckOperator",
provider: "common-sql",
@@ -286,7 +288,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"IntervalCheckOperator",
]
| ["airflow", "operators", "sql", "SQLIntervalCheckOperator"] => {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLIntervalCheckOperator",
provider: "common-sql",
@@ -298,7 +300,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"presto_check_operator",
"PrestoIntervalCheckOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLIntervalCheckOperator",
provider: "common-sql",
@@ -311,7 +313,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"SQLThresholdCheckOperator" | "ThresholdCheckOperator",
]
| ["airflow", "operators", "sql", "SQLThresholdCheckOperator"] => {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLThresholdCheckOperator",
provider: "common-sql",
@@ -330,18 +332,20 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"presto_check_operator",
"ValueCheckOperator",
]
| ["airflow", "operators", "sql", "SQLValueCheckOperator"] => ProviderReplacement::Rename {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLValueCheckOperator",
provider: "common-sql",
version: "1.1.0",
},
| ["airflow", "operators", "sql", "SQLValueCheckOperator"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLValueCheckOperator",
provider: "common-sql",
version: "1.1.0",
}
}
[
"airflow",
"operators",
"presto_check_operator",
"PrestoValueCheckOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLValueCheckOperator",
provider: "common-sql",
@@ -366,12 +370,14 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
}
_ => return,
},
["airflow", "sensors", "sql" | "sql_sensor", "SqlSensor"] => ProviderReplacement::Rename {
module: "airflow.providers.common.sql.sensors.sql",
name: "SqlSensor",
provider: "common-sql",
version: "1.0.0",
},
["airflow", "sensors", "sql" | "sql_sensor", "SqlSensor"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.sensors.sql",
name: "SqlSensor",
provider: "common-sql",
version: "1.0.0",
}
}
["airflow", "operators", "jdbc_operator", "JdbcOperator"]
| ["airflow", "operators", "mssql_operator", "MsSqlOperator"]
| ["airflow", "operators", "mysql_operator", "MySqlOperator"]
@@ -383,7 +389,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"PostgresOperator",
]
| ["airflow", "operators", "sqlite_operator", "SqliteOperator"] => {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module: "airflow.providers.common.sql.operators.sql",
name: "SQLExecuteQueryOperator",
provider: "common-sql",
@@ -392,22 +398,24 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
}
// apache-airflow-providers-daskexecutor
["airflow", "executors", "dask_executor", "DaskExecutor"] => ProviderReplacement::Rename {
module: "airflow.providers.daskexecutor.executors.dask_executor",
name: "DaskExecutor",
provider: "daskexecutor",
version: "1.0.0",
},
["airflow", "executors", "dask_executor", "DaskExecutor"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.daskexecutor.executors.dask_executor",
name: "DaskExecutor",
provider: "daskexecutor",
version: "1.0.0",
}
}
// apache-airflow-providers-docker
["airflow", "hooks", "docker_hook", "DockerHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "docker_hook", "DockerHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.docker.hooks.docker",
name: "DockerHook",
provider: "docker",
version: "1.0.0",
},
["airflow", "operators", "docker_operator", "DockerOperator"] => {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module: "airflow.providers.docker.operators.docker",
name: "DockerOperator",
provider: "docker",
@@ -432,7 +440,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"hive_to_druid",
"HiveToDruidOperator" | "HiveToDruidTransfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.druid.transfers.hive_to_druid",
name: "HiveToDruidOperator",
provider: "apache-druid",
@@ -489,7 +497,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"fab",
"fab_auth_manager",
"FabAuthManager",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.fab.auth_manager.fab_auth_manager",
name: "FabAuthManager",
provider: "fab",
@@ -503,7 +511,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"security_manager",
"override",
"MAX_NUM_DATABASE_USER_SESSIONS",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.fab.auth_manager.security_manager.override",
name: "MAX_NUM_DATABASE_USER_SESSIONS",
provider: "fab",
@@ -523,7 +531,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"www",
"security",
"FabAirflowSecurityManagerOverride",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.fab.auth_manager.security_manager.override",
name: "FabAirflowSecurityManagerOverride",
provider: "fab",
@@ -531,18 +539,20 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-apache-hdfs
["airflow", "hooks", "webhdfs_hook", "WebHDFSHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "webhdfs_hook", "WebHDFSHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hdfs.hooks.webhdfs",
name: "WebHDFSHook",
provider: "apache-hdfs",
version: "1.0.0",
},
["airflow", "sensors", "web_hdfs_sensor", "WebHdfsSensor"] => ProviderReplacement::Rename {
module: "airflow.providers.apache.hdfs.sensors.web_hdfs",
name: "WebHdfsSensor",
provider: "apache-hdfs",
version: "1.0.0",
},
["airflow", "sensors", "web_hdfs_sensor", "WebHdfsSensor"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hdfs.sensors.web_hdfs",
name: "WebHdfsSensor",
provider: "apache-hdfs",
version: "1.0.0",
}
}
// apache-airflow-providers-apache-hive
[
@@ -570,18 +580,20 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "apache-hive",
version: "5.1.0",
},
["airflow", "operators", "hive_operator", "HiveOperator"] => ProviderReplacement::Rename {
module: "airflow.providers.apache.hive.operators.hive",
name: "HiveOperator",
provider: "apache-hive",
version: "1.0.0",
},
["airflow", "operators", "hive_operator", "HiveOperator"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.operators.hive",
name: "HiveOperator",
provider: "apache-hive",
version: "1.0.0",
}
}
[
"airflow",
"operators",
"hive_stats_operator",
"HiveStatsCollectionOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.operators.hive_stats",
name: "HiveStatsCollectionOperator",
provider: "apache-hive",
@@ -592,7 +604,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"hive_to_mysql",
"HiveToMySqlOperator" | "HiveToMySqlTransfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.transfers.hive_to_mysql",
name: "HiveToMySqlOperator",
provider: "apache-hive",
@@ -603,7 +615,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"hive_to_samba_operator",
"HiveToSambaOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.transfers.hive_to_samba",
name: "HiveToSambaOperator",
provider: "apache-hive",
@@ -614,7 +626,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"mssql_to_hive",
"MsSqlToHiveOperator" | "MsSqlToHiveTransfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.transfers.mssql_to_hive",
name: "MsSqlToHiveOperator",
provider: "apache-hive",
@@ -625,7 +637,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"mysql_to_hive",
"MySqlToHiveOperator" | "MySqlToHiveTransfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.transfers.mysql_to_hive",
name: "MySqlToHiveOperator",
provider: "apache-hive",
@@ -636,7 +648,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"s3_to_hive_operator",
"S3ToHiveOperator" | "S3ToHiveTransfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.transfers.s3_to_hive",
name: "S3ToHiveOperator",
provider: "apache-hive",
@@ -647,7 +659,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"sensors",
"hive_partition_sensor",
"HivePartitionSensor",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.sensors.hive_partition",
name: "HivePartitionSensor",
provider: "apache-hive",
@@ -658,7 +670,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"sensors",
"metastore_partition_sensor",
"MetastorePartitionSensor",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.sensors.metastore_partition",
name: "MetastorePartitionSensor",
provider: "apache-hive",
@@ -669,7 +681,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"sensors",
"named_hive_partition_sensor",
"NamedHivePartitionSensor",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.hive.sensors.named_hive_partition",
name: "NamedHivePartitionSensor",
provider: "apache-hive",
@@ -677,7 +689,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-http
["airflow", "hooks", "http_hook", "HttpHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "http_hook", "HttpHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.http.hooks.http",
name: "HttpHook",
provider: "http",
@@ -688,13 +700,13 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"http_operator",
"SimpleHttpOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.http.operators.http",
name: "HttpOperator",
provider: "http",
version: "5.0.0",
},
["airflow", "sensors", "http_sensor", "HttpSensor"] => ProviderReplacement::Rename {
["airflow", "sensors", "http_sensor", "HttpSensor"] => ProviderReplacement::AutoImport {
module: "airflow.providers.http.sensors.http",
name: "HttpSensor",
provider: "http",
@@ -753,7 +765,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"kubernetes",
"kubernetes_helper_functions",
"add_pod_suffix",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.kubernetes_helper_functions",
name: "add_unique_suffix",
provider: "cncf-kubernetes",
@@ -764,7 +776,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"kubernetes",
"kubernetes_helper_functions",
"create_pod_id",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.kubernetes_helper_functions",
name: "create_unique_id",
provider: "cncf-kubernetes",
@@ -785,13 +797,13 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
version: "7.4.0",
},
["airflow", "kubernetes", "pod", rest] => match *rest {
"Port" => ProviderReplacement::Rename {
"Port" => ProviderReplacement::AutoImport {
module: "kubernetes.client.models",
name: "V1ContainerPort",
provider: "cncf-kubernetes",
version: "7.4.0",
},
"Resources" => ProviderReplacement::Rename {
"Resources" => ProviderReplacement::AutoImport {
module: "kubernetes.client.models",
name: "V1ResourceRequirements",
provider: "cncf-kubernetes",
@@ -811,19 +823,19 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "cncf-kubernetes",
version: "7.4.0",
},
"PodDefaults" => ProviderReplacement::Rename {
"PodDefaults" => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.utils.xcom_sidecar",
name: "PodDefaults",
provider: "cncf-kubernetes",
version: "7.4.0",
},
"PodGeneratorDeprecated" => ProviderReplacement::Rename {
"PodGeneratorDeprecated" => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.pod_generator",
name: "PodGenerator",
provider: "cncf-kubernetes",
version: "7.4.0",
},
"add_pod_suffix" => ProviderReplacement::Rename {
"add_pod_suffix" => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.kubernetes_helper_functions",
name: "add_unique_suffix",
provider: "cncf-kubernetes",
@@ -853,7 +865,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"kubernetes",
"pod_generator_deprecated" | "pod_launcher_deprecated",
"PodDefaults",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.utils.xcom_sidecar",
name: "PodDefaults",
provider: "cncf-kubernetes",
@@ -864,7 +876,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"kubernetes",
"pod_launcher_deprecated",
"get_kube_client",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.kube_client",
name: "get_kube_client",
provider: "cncf-kubernetes",
@@ -875,7 +887,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"kubernetes",
"pod_launcher" | "pod_launcher_deprecated",
"PodLauncher",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.utils.pod_manager",
name: "PodManager",
provider: "cncf-kubernetes",
@@ -886,7 +898,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"kubernetes",
"pod_launcher" | "pod_launcher_deprecated",
"PodStatus",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: " airflow.providers.cncf.kubernetes.utils.pod_manager",
name: "PodPhase",
provider: "cncf-kubernetes",
@@ -897,20 +909,20 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"kubernetes",
"pod_runtime_info_env",
"PodRuntimeInfoEnv",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "kubernetes.client.models",
name: "V1EnvVar",
provider: "cncf-kubernetes",
version: "7.4.0",
},
["airflow", "kubernetes", "secret", rest] => match *rest {
"K8SModel" => ProviderReplacement::Rename {
"K8SModel" => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.k8s_model",
name: "K8SModel",
provider: "cncf-kubernetes",
version: "7.4.0",
},
"Secret" => ProviderReplacement::Rename {
"Secret" => ProviderReplacement::AutoImport {
module: "airflow.providers.cncf.kubernetes.secret",
name: "Secret",
provider: "cncf-kubernetes",
@@ -918,21 +930,23 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
_ => return,
},
["airflow", "kubernetes", "volume", "Volume"] => ProviderReplacement::Rename {
["airflow", "kubernetes", "volume", "Volume"] => ProviderReplacement::AutoImport {
module: "kubernetes.client.models",
name: "V1Volume",
provider: "cncf-kubernetes",
version: "7.4.0",
},
["airflow", "kubernetes", "volume_mount", "VolumeMount"] => ProviderReplacement::Rename {
module: "kubernetes.client.models",
name: "V1VolumeMount",
provider: "cncf-kubernetes",
version: "7.4.0",
},
["airflow", "kubernetes", "volume_mount", "VolumeMount"] => {
ProviderReplacement::AutoImport {
module: "kubernetes.client.models",
name: "V1VolumeMount",
provider: "cncf-kubernetes",
version: "7.4.0",
}
}
// apache-airflow-providers-microsoft-mssql
["airflow", "hooks", "mssql_hook", "MsSqlHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "mssql_hook", "MsSqlHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.microsoft.mssql.hooks.mssql",
name: "MsSqlHook",
provider: "microsoft-mssql",
@@ -940,7 +954,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-mysql
["airflow", "hooks", "mysql_hook", "MySqlHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "mysql_hook", "MySqlHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.mysql.hooks.mysql",
name: "MySqlHook",
provider: "mysql",
@@ -951,7 +965,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"presto_to_mysql",
"PrestoToMySqlOperator" | "PrestoToMySqlTransfer",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.mysql.transfers.presto_to_mysql",
name: "PrestoToMySqlOperator",
provider: "mysql",
@@ -959,7 +973,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-oracle
["airflow", "hooks", "oracle_hook", "OracleHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "oracle_hook", "OracleHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.oracle.hooks.oracle",
name: "OracleHook",
provider: "oracle",
@@ -972,7 +986,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"papermill_operator",
"PapermillOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.papermill.operators.papermill",
name: "PapermillOperator",
provider: "papermill",
@@ -980,21 +994,23 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-apache-pig
["airflow", "hooks", "pig_hook", "PigCliHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "pig_hook", "PigCliHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.apache.pig.hooks.pig",
name: "PigCliHook",
provider: "apache-pig",
version: "1.0.0",
},
["airflow", "operators", "pig_operator", "PigOperator"] => ProviderReplacement::Rename {
module: "airflow.providers.apache.pig.operators.pig",
name: "PigOperator",
provider: "apache-pig",
version: "1.0.0",
},
["airflow", "operators", "pig_operator", "PigOperator"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.apache.pig.operators.pig",
name: "PigOperator",
provider: "apache-pig",
version: "1.0.0",
}
}
// apache-airflow-providers-postgres
["airflow", "hooks", "postgres_hook", "PostgresHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "postgres_hook", "PostgresHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.postgres.hooks.postgres",
name: "PostgresHook",
provider: "postgres",
@@ -1002,7 +1018,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-presto
["airflow", "hooks", "presto_hook", "PrestoHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "presto_hook", "PrestoHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.presto.hooks.presto",
name: "PrestoHook",
provider: "presto",
@@ -1010,7 +1026,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-samba
["airflow", "hooks", "samba_hook", "SambaHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "samba_hook", "SambaHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.samba.hooks.samba",
name: "SambaHook",
provider: "samba",
@@ -1018,7 +1034,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-slack
["airflow", "hooks", "slack_hook", "SlackHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "slack_hook", "SlackHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.slack.hooks.slack",
name: "SlackHook",
provider: "slack",
@@ -1042,7 +1058,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"email_operator" | "email",
"EmailOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.smtp.operators.smtp",
name: "EmailOperator",
provider: "smtp",
@@ -1050,7 +1066,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-sqlite
["airflow", "hooks", "sqlite_hook", "SqliteHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "sqlite_hook", "SqliteHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.sqlite.hooks.sqlite",
name: "SqliteHook",
provider: "sqlite",
@@ -1058,7 +1074,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
},
// apache-airflow-providers-zendesk
["airflow", "hooks", "zendesk_hook", "ZendeskHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "zendesk_hook", "ZendeskHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.zendesk.hooks.zendesk",
name: "ZendeskHook",
provider: "zendesk",
@@ -1077,12 +1093,14 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "standard",
version: "0.0.3",
},
["airflow", "operators", "bash_operator", "BashOperator"] => ProviderReplacement::Rename {
module: "airflow.providers.standard.operators.bash",
name: "BashOperator",
provider: "standard",
version: "0.0.1",
},
["airflow", "operators", "bash_operator", "BashOperator"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.bash",
name: "BashOperator",
provider: "standard",
version: "0.0.1",
}
}
[
"airflow",
"operators",
@@ -1099,14 +1117,14 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"trigger_dagrun",
"TriggerDagRunLink",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.trigger_dagrun",
name: "TriggerDagRunLink",
provider: "standard",
version: "0.0.2",
},
["airflow", "operators", "datetime", "target_times_as_dates"] => {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.datetime",
name: "target_times_as_dates",
provider: "standard",
@@ -1118,7 +1136,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"dummy" | "dummy_operator",
"EmptyOperator" | "DummyOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.empty",
name: "EmptyOperator",
provider: "standard",
@@ -1129,7 +1147,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"latest_only_operator",
"LatestOnlyOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.latest_only",
name: "LatestOnlyOperator",
provider: "standard",
@@ -1154,7 +1172,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"sensors",
"external_task" | "external_task_sensor",
"ExternalTaskSensorLink",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.sensors.external_task",
name: "ExternalDagLink",
provider: "standard",
@@ -1171,7 +1189,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "standard",
version: "0.0.3",
},
["airflow", "sensors", "time_delta", "WaitSensor"] => ProviderReplacement::Rename {
["airflow", "sensors", "time_delta", "WaitSensor"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.sensors.time_delta",
name: "WaitSensor",
provider: "standard",
@@ -1182,7 +1200,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
};
let (module, name) = match &replacement {
ProviderReplacement::Rename { module, name, .. } => (module, *name),
ProviderReplacement::AutoImport { module, name, .. } => (module, *name),
ProviderReplacement::SourceModuleMovedToProvider { module, name, .. } => {
(module, name.as_str())
}

View File

@@ -59,7 +59,7 @@ impl Violation for Airflow3Removal {
Replacement::None
| Replacement::AttrName(_)
| Replacement::Message(_)
| Replacement::Rename { module: _, name: _ }
| Replacement::AutoImport { module: _, name: _ }
| Replacement::SourceModuleMoved { module: _, name: _ } => {
format!("`{deprecated}` is removed in Airflow 3.0")
}
@@ -72,7 +72,7 @@ impl Violation for Airflow3Removal {
Replacement::None => None,
Replacement::AttrName(name) => Some(format!("Use `{name}` instead")),
Replacement::Message(message) => Some((*message).to_string()),
Replacement::Rename { module, name } => {
Replacement::AutoImport { module, name } => {
Some(format!("Use `{name}` from `{module}` instead."))
}
Replacement::SourceModuleMoved { module, name } => {
@@ -593,7 +593,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"api_connexion",
"security",
"requires_access_dataset",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.api_fastapi.core_api.security",
name: "requires_access_asset",
},
@@ -605,7 +605,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"managers",
"base_auth_manager",
"BaseAuthManager",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.api_fastapi.auth.managers.base_auth_manager",
name: "BaseAuthManager",
},
@@ -616,7 +616,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"models",
"resource_details",
"DatasetDetails",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.api_fastapi.auth.managers.models.resource_details",
name: "AssetDetails",
},
@@ -639,15 +639,15 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
// airflow.datasets.manager
["airflow", "datasets", "manager", rest] => match *rest {
"DatasetManager" => Replacement::Rename {
"DatasetManager" => Replacement::AutoImport {
module: "airflow.assets.manager",
name: "AssetManager",
},
"dataset_manager" => Replacement::Rename {
"dataset_manager" => Replacement::AutoImport {
module: "airflow.assets.manager",
name: "asset_manager",
},
"resolve_dataset_manager" => Replacement::Rename {
"resolve_dataset_manager" => Replacement::AutoImport {
module: "airflow.assets.manager",
name: "resolve_asset_manager",
},
@@ -657,24 +657,24 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
["airflow", "datasets", "DatasetAliasEvent"] => Replacement::None,
// airflow.hooks
["airflow", "hooks", "base_hook", "BaseHook"] => Replacement::Rename {
["airflow", "hooks", "base_hook", "BaseHook"] => Replacement::AutoImport {
module: "airflow.hooks.base",
name: "BaseHook",
},
// airflow.lineage.hook
["airflow", "lineage", "hook", "DatasetLineageInfo"] => Replacement::Rename {
["airflow", "lineage", "hook", "DatasetLineageInfo"] => Replacement::AutoImport {
module: "airflow.lineage.hook",
name: "AssetLineageInfo",
},
// airflow.listeners.spec
["airflow", "listeners", "spec", "dataset", rest] => match *rest {
"on_dataset_created" => Replacement::Rename {
"on_dataset_created" => Replacement::AutoImport {
module: "airflow.listeners.spec.asset",
name: "on_asset_created",
},
"on_dataset_changed" => Replacement::Rename {
"on_dataset_changed" => Replacement::AutoImport {
module: "airflow.listeners.spec.asset",
name: "on_asset_changed",
},
@@ -683,11 +683,11 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
// airflow.metrics.validators
["airflow", "metrics", "validators", rest] => match *rest {
"AllowListValidator" => Replacement::Rename {
"AllowListValidator" => Replacement::AutoImport {
module: "airflow.metrics.validators",
name: "PatternAllowListValidator",
},
"BlockListValidator" => Replacement::Rename {
"BlockListValidator" => Replacement::AutoImport {
module: "airflow.metrics.validators",
name: "PatternBlockListValidator",
},
@@ -695,7 +695,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
},
// airflow.notifications
["airflow", "notifications", "basenotifier", "BaseNotifier"] => Replacement::Rename {
["airflow", "notifications", "basenotifier", "BaseNotifier"] => Replacement::AutoImport {
module: "airflow.sdk.bases.notifier",
name: "BaseNotifier",
},
@@ -705,23 +705,23 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
Replacement::Message("The whole `airflow.subdag` module has been removed.")
}
["airflow", "operators", "postgres_operator", "Mapping"] => Replacement::None,
["airflow", "operators", "python", "get_current_context"] => Replacement::Rename {
["airflow", "operators", "python", "get_current_context"] => Replacement::AutoImport {
module: "airflow.sdk",
name: "get_current_context",
},
// airflow.secrets
["airflow", "secrets", "cache", "SecretCache"] => Replacement::Rename {
["airflow", "secrets", "cache", "SecretCache"] => Replacement::AutoImport {
module: "airflow.sdk",
name: "SecretCache",
},
["airflow", "secrets", "local_filesystem", "load_connections"] => Replacement::Rename {
["airflow", "secrets", "local_filesystem", "load_connections"] => Replacement::AutoImport {
module: "airflow.secrets.local_filesystem",
name: "load_connections_dict",
},
// airflow.security
["airflow", "security", "permissions", "RESOURCE_DATASET"] => Replacement::Rename {
["airflow", "security", "permissions", "RESOURCE_DATASET"] => Replacement::AutoImport {
module: "airflow.security.permissions",
name: "RESOURCE_ASSET",
},
@@ -732,7 +732,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"sensors",
"base_sensor_operator",
"BaseSensorOperator",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.sdk.bases.sensor",
name: "BaseSensorOperator",
},
@@ -743,7 +743,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"timetables",
"simple",
"DatasetTriggeredTimetable",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.timetables.simple",
name: "AssetTriggeredTimetable",
},
@@ -775,25 +775,25 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
] => Replacement::None,
// airflow.utils.file
["file", "TemporaryDirectory"] => Replacement::Rename {
["file", "TemporaryDirectory"] => Replacement::AutoImport {
module: "tempfile",
name: "TemporaryDirectory",
},
["file", "mkdirs"] => Replacement::Message("Use `pathlib.Path({path}).mkdir` instead"),
// airflow.utils.helpers
["helpers", "chain"] => Replacement::Rename {
["helpers", "chain"] => Replacement::AutoImport {
module: "airflow.sdk",
name: "chain",
},
["helpers", "cross_downstream"] => Replacement::Rename {
["helpers", "cross_downstream"] => Replacement::AutoImport {
module: "airflow.sdk",
name: "cross_downstream",
},
// TODO: update it as SourceModuleMoved
// airflow.utils.log.secrets_masker
["log", "secrets_masker"] => Replacement::Rename {
["log", "secrets_masker"] => Replacement::AutoImport {
module: "airflow.sdk.execution_time",
name: "secrets_masker",
},
@@ -834,15 +834,15 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"s3",
rest,
] => match *rest {
"create_dataset" => Replacement::Rename {
"create_dataset" => Replacement::AutoImport {
module: "airflow.providers.amazon.aws.assets.s3",
name: "create_asset",
},
"convert_dataset_to_openlineage" => Replacement::Rename {
"convert_dataset_to_openlineage" => Replacement::AutoImport {
module: "airflow.providers.amazon.aws.assets.s3",
name: "convert_asset_to_openlineage",
},
"sanitize_uri" => Replacement::Rename {
"sanitize_uri" => Replacement::AutoImport {
module: "airflow.providers.amazon.aws.assets.s3",
name: "sanitize_uri",
},
@@ -858,7 +858,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"entities",
"AvpEntities",
"DATASET",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.providers.amazon.aws.auth_manager.avp.entities",
name: "AvpEntities.ASSET",
},
@@ -874,15 +874,15 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"file",
rest,
] => match *rest {
"create_dataset" => Replacement::Rename {
"create_dataset" => Replacement::AutoImport {
module: "airflow.providers.common.io.assets.file",
name: "create_asset",
},
"convert_dataset_to_openlineage" => Replacement::Rename {
"convert_dataset_to_openlineage" => Replacement::AutoImport {
module: "airflow.providers.common.io.assets.file",
name: "convert_asset_to_openlineage",
},
"sanitize_uri" => Replacement::Rename {
"sanitize_uri" => Replacement::AutoImport {
module: "airflow.providers.common.io.assets.file",
name: "sanitize_uri",
},
@@ -892,19 +892,19 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
// airflow.providers.google
// airflow.providers.google.datasets
["airflow", "providers", "google", "datasets", rest @ ..] => match &rest {
["bigquery", "create_dataset"] => Replacement::Rename {
["bigquery", "create_dataset"] => Replacement::AutoImport {
module: "airflow.providers.google.assets.bigquery",
name: "create_asset",
},
["gcs", "create_dataset"] => Replacement::Rename {
["gcs", "create_dataset"] => Replacement::AutoImport {
module: "airflow.providers.google.assets.gcs",
name: "create_asset",
},
["gcs", "convert_dataset_to_openlineage"] => Replacement::Rename {
["gcs", "convert_dataset_to_openlineage"] => Replacement::AutoImport {
module: "airflow.providers.google.assets.gcs",
name: "convert_asset_to_openlineage",
},
["gcs", "sanitize_uri"] => Replacement::Rename {
["gcs", "sanitize_uri"] => Replacement::AutoImport {
module: "airflow.providers.google.assets.gcs",
name: "sanitize_uri",
},
@@ -920,7 +920,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"datasets",
"mysql",
"sanitize_uri",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.providers.mysql.assets.mysql",
name: "sanitize_uri",
},
@@ -933,7 +933,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"datasets",
"postgres",
"sanitize_uri",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.providers.postgres.assets.postgres",
name: "sanitize_uri",
},
@@ -948,12 +948,12 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"utils",
rest,
] => match *rest {
"DatasetInfo" => Replacement::Rename {
"DatasetInfo" => Replacement::AutoImport {
module: "airflow.providers.openlineage.utils.utils",
name: "AssetInfo",
},
"translate_airflow_dataset" => Replacement::Rename {
"translate_airflow_dataset" => Replacement::AutoImport {
module: "airflow.providers.openlineage.utils.utils",
name: "translate_airflow_asset",
},
@@ -968,7 +968,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"datasets",
"trino",
"sanitize_uri",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.providers.trino.assets.trino",
name: "sanitize_uri",
},
@@ -977,7 +977,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
};
let (module, name) = match &replacement {
Replacement::Rename { module, name } => (module, *name),
Replacement::AutoImport { module, name } => (module, *name),
Replacement::SourceModuleMoved { module, name } => (module, name.as_str()),
_ => {
checker.report_diagnostic(

View File

@@ -65,7 +65,7 @@ impl Violation for Airflow3SuggestedToMoveToProvider<'_> {
replacement,
} = self;
match replacement {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
name: _,
module: _,
provider,
@@ -88,7 +88,7 @@ impl Violation for Airflow3SuggestedToMoveToProvider<'_> {
fn fix_title(&self) -> Option<String> {
let Airflow3SuggestedToMoveToProvider { replacement, .. } = self;
match replacement {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module,
name,
provider,
@@ -130,32 +130,34 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
let replacement = match qualified_name.segments() {
// apache-airflow-providers-standard
["airflow", "hooks", "filesystem", "FSHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "filesystem", "FSHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.hooks.filesystem",
name: "FSHook",
provider: "standard",
version: "0.0.1",
},
["airflow", "hooks", "package_index", "PackageIndexHook"] => ProviderReplacement::Rename {
module: "airflow.providers.standard.hooks.package_index",
name: "PackageIndexHook",
provider: "standard",
version: "0.0.1",
},
["airflow", "hooks", "subprocess", "SubprocessHook"] => ProviderReplacement::Rename {
["airflow", "hooks", "package_index", "PackageIndexHook"] => {
ProviderReplacement::AutoImport {
module: "airflow.providers.standard.hooks.package_index",
name: "PackageIndexHook",
provider: "standard",
version: "0.0.1",
}
}
["airflow", "hooks", "subprocess", "SubprocessHook"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.hooks.subprocess",
name: "SubprocessHook",
provider: "standard",
version: "0.0.3",
},
["airflow", "operators", "bash", "BashOperator"] => ProviderReplacement::Rename {
["airflow", "operators", "bash", "BashOperator"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.bash",
name: "BashOperator",
provider: "standard",
version: "0.0.1",
},
["airflow", "operators", "datetime", "BranchDateTimeOperator"] => {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.datetime",
name: "BranchDateTimeOperator",
provider: "standard",
@@ -167,20 +169,20 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
"operators",
"trigger_dagrun",
"TriggerDagRunOperator",
] => ProviderReplacement::Rename {
] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.trigger_dagrun",
name: "TriggerDagRunOperator",
provider: "standard",
version: "0.0.2",
},
["airflow", "operators", "empty", "EmptyOperator"] => ProviderReplacement::Rename {
["airflow", "operators", "empty", "EmptyOperator"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.empty",
name: "EmptyOperator",
provider: "standard",
version: "0.0.2",
},
["airflow", "operators", "latest_only", "LatestOnlyOperator"] => {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.latest_only",
name: "LatestOnlyOperator",
provider: "standard",
@@ -202,14 +204,14 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
version: "0.0.1",
},
["airflow", "operators", "weekday", "BranchDayOfWeekOperator"] => {
ProviderReplacement::Rename {
ProviderReplacement::AutoImport {
module: "airflow.providers.standard.operators.weekday",
name: "BranchDayOfWeekOperator",
provider: "standard",
version: "0.0.1",
}
}
["airflow", "sensors", "bash", "BashSensor"] => ProviderReplacement::Rename {
["airflow", "sensors", "bash", "BashSensor"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.sensor.bash",
name: "BashSensor",
provider: "standard",
@@ -237,13 +239,13 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "standard",
version: "0.0.3",
},
["airflow", "sensors", "filesystem", "FileSensor"] => ProviderReplacement::Rename {
["airflow", "sensors", "filesystem", "FileSensor"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.sensors.filesystem",
name: "FileSensor",
provider: "standard",
version: "0.0.2",
},
["airflow", "sensors", "python", "PythonSensor"] => ProviderReplacement::Rename {
["airflow", "sensors", "python", "PythonSensor"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.sensors.python",
name: "PythonSensor",
provider: "standard",
@@ -271,7 +273,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "standard",
version: "0.0.1",
},
["airflow", "sensors", "weekday", "DayOfWeekSensor"] => ProviderReplacement::Rename {
["airflow", "sensors", "weekday", "DayOfWeekSensor"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.sensors.weekday",
name: "DayOfWeekSensor",
provider: "standard",
@@ -288,7 +290,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "standard",
version: "0.0.3",
},
["airflow", "triggers", "file", "FileTrigger"] => ProviderReplacement::Rename {
["airflow", "triggers", "file", "FileTrigger"] => ProviderReplacement::AutoImport {
module: "airflow.providers.standard.triggers.file",
name: "FileTrigger",
provider: "standard",
@@ -309,7 +311,7 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
};
let (module, name) = match &replacement {
ProviderReplacement::Rename { module, name, .. } => (module, *name),
ProviderReplacement::AutoImport { module, name, .. } => (module, *name),
ProviderReplacement::SourceModuleMovedToProvider { module, name, .. } => {
(module, name.as_str())
}

View File

@@ -55,7 +55,7 @@ impl Violation for Airflow3SuggestedUpdate {
Replacement::None
| Replacement::AttrName(_)
| Replacement::Message(_)
| Replacement::Rename { module: _, name: _ }
| Replacement::AutoImport { module: _, name: _ }
| Replacement::SourceModuleMoved { module: _, name: _ } => {
format!(
"`{deprecated}` is removed in Airflow 3.0; \
@@ -71,7 +71,7 @@ impl Violation for Airflow3SuggestedUpdate {
Replacement::None => None,
Replacement::AttrName(name) => Some(format!("Use `{name}` instead")),
Replacement::Message(message) => Some((*message).to_string()),
Replacement::Rename { module, name } => {
Replacement::AutoImport { module, name } => {
Some(format!("Use `{name}` from `{module}` instead."))
}
Replacement::SourceModuleMoved { module, name } => {
@@ -191,30 +191,30 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
let replacement = match qualified_name.segments() {
// airflow.datasets.metadata
["airflow", "datasets", "metadata", "Metadata"] => Replacement::Rename {
["airflow", "datasets", "metadata", "Metadata"] => Replacement::AutoImport {
module: "airflow.sdk",
name: "Metadata",
},
// airflow.datasets
["airflow", "Dataset"] | ["airflow", "datasets", "Dataset"] => Replacement::Rename {
["airflow", "Dataset"] | ["airflow", "datasets", "Dataset"] => Replacement::AutoImport {
module: "airflow.sdk",
name: "Asset",
},
["airflow", "datasets", rest] => match *rest {
"DatasetAliasEvent" => Replacement::None,
"DatasetAlias" => Replacement::Rename {
"DatasetAlias" => Replacement::AutoImport {
module: "airflow.sdk",
name: "AssetAlias",
},
"DatasetAll" => Replacement::Rename {
"DatasetAll" => Replacement::AutoImport {
module: "airflow.sdk",
name: "AssetAll",
},
"DatasetAny" => Replacement::Rename {
"DatasetAny" => Replacement::AutoImport {
module: "airflow.sdk",
name: "AssetAny",
},
"expand_alias_to_datasets" => Replacement::Rename {
"expand_alias_to_datasets" => Replacement::AutoImport {
module: "airflow.models.asset",
name: "expand_alias_to_assets",
},
@@ -261,7 +261,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
name: (*rest).to_string(),
}
}
["airflow", "models", "Param"] => Replacement::Rename {
["airflow", "models", "Param"] => Replacement::AutoImport {
module: "airflow.sdk.definitions.param",
name: "Param",
},
@@ -276,7 +276,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
module: "airflow.sdk",
name: (*rest).to_string(),
},
["airflow", "models", "baseoperatorlink", "BaseOperatorLink"] => Replacement::Rename {
["airflow", "models", "baseoperatorlink", "BaseOperatorLink"] => Replacement::AutoImport {
module: "airflow.sdk",
name: "BaseOperatorLink",
},
@@ -299,7 +299,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
},
// airflow.timetables
["airflow", "timetables", "datasets", "DatasetOrTimeSchedule"] => Replacement::Rename {
["airflow", "timetables", "datasets", "DatasetOrTimeSchedule"] => Replacement::AutoImport {
module: "airflow.timetables.assets",
name: "AssetOrTimeSchedule",
},
@@ -310,7 +310,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
"utils",
"dag_parsing_context",
"get_parsing_context",
] => Replacement::Rename {
] => Replacement::AutoImport {
module: "airflow.sdk",
name: "get_parsing_context",
},
@@ -319,7 +319,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
};
let (module, name) = match &replacement {
Replacement::Rename { module, name } => (module, *name),
Replacement::AutoImport { module, name } => (module, *name),
Replacement::SourceModuleMoved { module, name } => (module, name.as_str()),
_ => {
checker.report_diagnostic(

View File

@@ -23,17 +23,6 @@ use crate::rules::flake8_bandit::helpers::string_literal;
/// Avoid using weak or broken cryptographic hash functions in security
/// contexts. Instead, use a known secure hash function such as SHA256.
///
/// Note: This rule targets the following weak algorithm names in `hashlib`:
/// `md4`, `md5`, `sha`, and `sha1`. It also flags uses of `crypt.crypt` and
/// `crypt.mksalt` when configured with `METHOD_CRYPT`, `METHOD_MD5`, or
/// `METHOD_BLOWFISH`.
///
/// It does not attempt to lint OpenSSL- or platform-specific aliases and OIDs
/// (for example: `"sha-1"`, `"ssl3-sha1"`, `"ssl3-md5"`, or
/// `"1.3.14.3.2.26"`), nor variations with trailing spaces, as the set of
/// accepted aliases depends on the underlying OpenSSL version and varies across
/// platforms and Python builds.
///
/// ## Example
/// ```python
/// import hashlib

View File

@@ -71,8 +71,6 @@ pub(crate) fn implicit_namespace_package(
if package.is_none()
// Ignore non-`.py` files, which don't require an `__init__.py`.
&& PySourceType::try_from_path(path).is_some_and(PySourceType::is_py_file)
// Ignore `.pyw` files that are also PySourceType::Python but aren't importable namespaces
&& path.extension().is_some_and(|ext| ext == "py")
// Ignore any files that are direct children of the project root.
&& path
.parent()

View File

@@ -192,17 +192,4 @@ mod tests {
assert_diagnostics!(snapshot, diagnostics);
Ok(())
}
#[test_case(Path::new("PYI021_1.pyi"))]
fn pyi021_pie790_isolation_check(path: &Path) -> Result<()> {
let diagnostics = test_path(
Path::new("flake8_pyi").join(path).as_path(),
&settings::LinterSettings::for_rules([
Rule::DocstringInStub,
Rule::UnnecessaryPlaceholder,
]),
)?;
assert_diagnostics!(diagnostics);
Ok(())
}
}

View File

@@ -1,9 +1,9 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::{ExprStringLiteral, Stmt};
use ruff_python_ast::ExprStringLiteral;
use ruff_python_semantic::Definition;
use ruff_text_size::Ranged;
use crate::checkers::ast::{Checker, DocstringState, ExpectedDocstringKind};
use crate::docstrings::extraction::docstring_from;
use crate::checkers::ast::Checker;
use crate::{AlwaysFixableViolation, Edit, Fix};
/// ## What it does
@@ -41,34 +41,26 @@ impl AlwaysFixableViolation for DocstringInStub {
}
/// PYI021
pub(crate) fn docstring_in_stubs(checker: &Checker, body: &[Stmt]) {
if !matches!(
checker.docstring_state(),
DocstringState::Expected(
ExpectedDocstringKind::Module
| ExpectedDocstringKind::Class
| ExpectedDocstringKind::Function
)
) {
return;
}
let docstring = docstring_from(body);
pub(crate) fn docstring_in_stubs(
checker: &Checker,
definition: &Definition,
docstring: Option<&ExprStringLiteral>,
) {
let Some(docstring_range) = docstring.map(ExprStringLiteral::range) else {
return;
};
let edit = if body.len() == 1 {
let statements = match definition {
Definition::Module(module) => module.python_ast,
Definition::Member(member) => member.body(),
};
let edit = if statements.len() == 1 {
Edit::range_replacement("...".to_string(), docstring_range)
} else {
Edit::range_deletion(docstring_range)
};
let isolation_level = Checker::isolation(checker.semantic().current_statement_id());
let fix = Fix::unsafe_edit(edit).isolate(isolation_level);
checker
.report_diagnostic(DocstringInStub, docstring_range)
.set_fix(fix);
let mut diagnostic = checker.report_diagnostic(DocstringInStub, docstring_range);
diagnostic.set_fix(Fix::unsafe_edit(edit));
}

View File

@@ -1,39 +0,0 @@
---
source: crates/ruff_linter/src/rules/flake8_pyi/mod.rs
---
PYI021 [*] Docstrings should not be included in stubs
--> PYI021_1.pyi:5:5
|
4 | def check_isolation_level(mode: int) -> None:
5 | """Will report both, but only fix the first.""" # ERROR PYI021
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
6 | ... # ERROR PIE790
|
help: Remove docstring
2 |
3 |
4 | def check_isolation_level(mode: int) -> None:
- """Will report both, but only fix the first.""" # ERROR PYI021
5 + # ERROR PYI021
6 | ... # ERROR PIE790
7 |
8 |
note: This is an unsafe fix and may change runtime behavior
PIE790 [*] Unnecessary `...` literal
--> PYI021_1.pyi:6:5
|
4 | def check_isolation_level(mode: int) -> None:
5 | """Will report both, but only fix the first.""" # ERROR PYI021
6 | ... # ERROR PIE790
| ^^^
|
help: Remove unnecessary `...`
3 |
4 | def check_isolation_level(mode: int) -> None:
5 | """Will report both, but only fix the first.""" # ERROR PYI021
- ... # ERROR PIE790
6 + # ERROR PIE790
7 |
8 |
9 | with nullcontext():

View File

@@ -58,9 +58,7 @@ impl Violation for SuppressibleException {
fn fix_title(&self) -> Option<String> {
let SuppressibleException { exception } = self;
Some(format!(
"Replace `try`-`except`-`pass` with `with contextlib.suppress({exception}): ...`"
))
Some(format!("Replace with `contextlib.suppress({exception})`"))
}
}

View File

@@ -11,7 +11,7 @@ SIM105 [*] Use `contextlib.suppress(ValueError)` instead of `try`-`except`-`pass
9 | | pass
| |________^
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ValueError): ...`
help: Replace with `contextlib.suppress(ValueError)`
1 + import contextlib
2 | def foo():
3 | pass
@@ -40,7 +40,7 @@ SIM105 [*] Use `contextlib.suppress(ValueError, OSError)` instead of `try`-`exce
17 |
18 | # SIM105
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ValueError, OSError): ...`
help: Replace with `contextlib.suppress(ValueError, OSError)`
1 + import contextlib
2 | def foo():
3 | pass
@@ -71,7 +71,7 @@ SIM105 [*] Use `contextlib.suppress(ValueError, OSError)` instead of `try`-`exce
23 |
24 | # SIM105
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ValueError, OSError): ...`
help: Replace with `contextlib.suppress(ValueError, OSError)`
1 + import contextlib
2 | def foo():
3 | pass
@@ -102,7 +102,7 @@ SIM105 [*] Use `contextlib.suppress(BaseException)` instead of `try`-`except`-`p
29 |
30 | # SIM105
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(BaseException): ...`
help: Replace with `contextlib.suppress(BaseException)`
1 + import contextlib
2 + import builtins
3 | def foo():
@@ -134,7 +134,7 @@ SIM105 [*] Use `contextlib.suppress(a.Error, b.Error)` instead of `try`-`except`
35 |
36 | # OK
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(a.Error, b.Error): ...`
help: Replace with `contextlib.suppress(a.Error, b.Error)`
1 + import contextlib
2 | def foo():
3 | pass
@@ -164,7 +164,7 @@ SIM105 [*] Use `contextlib.suppress(ValueError)` instead of `try`-`except`-`pass
88 | | ...
| |___________^
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ValueError): ...`
help: Replace with `contextlib.suppress(ValueError)`
1 + import contextlib
2 | def foo():
3 | pass
@@ -195,7 +195,7 @@ SIM105 Use `contextlib.suppress(ValueError, OSError)` instead of `try`-`except`-
104 |
105 | try:
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ValueError, OSError): ...`
help: Replace with `contextlib.suppress(ValueError, OSError)`
SIM105 [*] Use `contextlib.suppress(OSError)` instead of `try`-`except`-`pass`
--> SIM105_0.py:117:5
@@ -210,7 +210,7 @@ SIM105 [*] Use `contextlib.suppress(OSError)` instead of `try`-`except`-`pass`
121 |
122 | try: os.makedirs(model_dir);
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(OSError): ...`
help: Replace with `contextlib.suppress(OSError)`
1 + import contextlib
2 | def foo():
3 | pass
@@ -241,7 +241,7 @@ SIM105 [*] Use `contextlib.suppress(OSError)` instead of `try`-`except`-`pass`
125 |
126 | try: os.makedirs(model_dir);
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(OSError): ...`
help: Replace with `contextlib.suppress(OSError)`
1 + import contextlib
2 | def foo():
3 | pass
@@ -271,7 +271,7 @@ SIM105 [*] Use `contextlib.suppress(OSError)` instead of `try`-`except`-`pass`
129 | \
130 | #
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(OSError): ...`
help: Replace with `contextlib.suppress(OSError)`
1 + import contextlib
2 | def foo():
3 | pass
@@ -299,7 +299,7 @@ SIM105 [*] Use `contextlib.suppress()` instead of `try`-`except`-`pass`
136 | | pass
| |________^
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(): ...`
help: Replace with `contextlib.suppress()`
1 + import contextlib
2 | def foo():
3 | pass
@@ -328,7 +328,7 @@ SIM105 [*] Use `contextlib.suppress(BaseException)` instead of `try`-`except`-`p
143 | | pass
| |________^
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(BaseException): ...`
help: Replace with `contextlib.suppress(BaseException)`
1 + import contextlib
2 | def foo():
3 | pass

View File

@@ -11,7 +11,7 @@ SIM105 [*] Use `contextlib.suppress(ValueError)` instead of `try`-`except`-`pass
8 | | pass
| |________^
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ValueError): ...`
help: Replace with `contextlib.suppress(ValueError)`
1 | """Case: There's a random import, so it should add `contextlib` after it."""
2 | import math
3 + import contextlib

View File

@@ -11,7 +11,7 @@ SIM105 [*] Use `contextlib.suppress(ValueError)` instead of `try`-`except`-`pass
13 | | pass
| |________^
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ValueError): ...`
help: Replace with `contextlib.suppress(ValueError)`
7 |
8 |
9 | # SIM105

View File

@@ -12,4 +12,4 @@ SIM105 Use `contextlib.suppress(ValueError)` instead of `try`-`except`-`pass`
13 | | pass
| |____________^
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ValueError): ...`
help: Replace with `contextlib.suppress(ValueError)`

View File

@@ -10,7 +10,7 @@ SIM105 [*] Use `contextlib.suppress(ImportError)` instead of `try`-`except`-`pas
4 | | except ImportError: pass
| |___________________________^
|
help: Replace `try`-`except`-`pass` with `with contextlib.suppress(ImportError): ...`
help: Replace with `contextlib.suppress(ImportError)`
1 | #!/usr/bin/env python
- try:
2 + import contextlib

View File

@@ -991,29 +991,6 @@ mod tests {
Ok(())
}
#[test_case(Path::new("plr0402_skip.py"))]
fn plr0402_skips_required_imports(path: &Path) -> Result<()> {
let snapshot = format!("plr0402_skips_required_imports_{}", path.to_string_lossy());
let diagnostics = test_path(
Path::new("isort/required_imports").join(path).as_path(),
&LinterSettings {
src: vec![test_resource_path("fixtures/isort")],
isort: super::settings::Settings {
required_imports: BTreeSet::from_iter([NameImport::Import(
ModuleNameImport::alias(
"concurrent.futures".to_string(),
"futures".to_string(),
),
)]),
..super::settings::Settings::default()
},
..LinterSettings::for_rules([Rule::MissingRequiredImport, Rule::ManualFromImport])
},
)?;
assert_diagnostics!(snapshot, diagnostics);
Ok(())
}
#[test_case(Path::new("from_first.py"))]
fn from_first(path: &Path) -> Result<()> {
let snapshot = format!("from_first_{}", path.to_string_lossy());

View File

@@ -1,4 +0,0 @@
---
source: crates/ruff_linter/src/rules/isort/mod.rs
---

Some files were not shown because too many files have changed in this diff Show More