Compare commits

1 commit: nextest ... charlie/pa (89d9f1e124)

.github/workflows/benchmark.yaml (vendored, new file, +136)
@@ -0,0 +1,136 @@
name: Benchmark

on:
  pull_request:
    paths:
      - "Cargo.toml"
      - "Cargo.lock"
      - "rust-toolchain"
      - "crates/**"
      - "!crates/ruff_dev"
      - "!crates/ruff_shrinking"

  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

jobs:
  run-benchmark:
    if: github.event_name == 'pull_request'
    name: "Run | ${{ matrix.os }}"
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.os }}

    steps:
      - name: "PR - Checkout Branch"
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - name: "PR - Install Rust toolchain"
        run: rustup show

      - uses: Swatinem/rust-cache@v2

      - name: "PR - Build benchmarks"
        run: cargo bench -p ruff_benchmark --no-run

      - name: "PR - Run benchmarks"
        run: cargo benchmark --save-baseline=pr

      - name: "Main - Checkout Branch"
        uses: actions/checkout@v3
        with:
          clean: false
          ref: main

      - name: "Main - Install Rust toolchain"
        run: rustup show

      - name: "Main - Build benchmarks"
        run: cargo bench -p ruff_benchmark --no-run

      - name: "Main - Run benchmarks"
        run: cargo benchmark --save-baseline=main

      - name: "Upload benchmark results"
        uses: actions/upload-artifact@v3
        with:
          name: benchmark-results-${{ matrix.os }}
          path: ./target/criterion

      # Cleanup
      - name: Remove Criterion Artifact
        uses: JesseTG/rm@v1.0.3
        with:
          path: ./target/criterion

  benchmark-compare:
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-latest
    name: Compare
    needs:
      - run-benchmark

    steps:
      - name: "Install Rust toolchain"
        run: rustup show

      - name: "Install critcmp"
        uses: taiki-e/install-action@v2
        with:
          tool: critcmp

      - name: "Linux | Download PR benchmark results"
        uses: actions/download-artifact@v3
        with:
          name: benchmark-results-ubuntu-latest
          path: ./target/criterion

      - name: "Linux | Compare benchmark results"
        shell: bash
        run: |
          echo "### Benchmark" >> summary.md
          echo "#### Linux" >> summary.md
          echo "\`\`\`" >> summary.md
          critcmp main pr >> summary.md
          echo "\`\`\`" >> summary.md
          echo "" >> summary.md

      - name: "Linux | Cleanup benchmark results"
        run: rm -rf ./target/criterion

      - name: "Windows | Download PR benchmark results"
        uses: actions/download-artifact@v3
        with:
          name: benchmark-results-windows-latest
          path: ./target/criterion

      - name: "Windows | Compare benchmark results"
        shell: bash
        run: |
          echo "#### Windows" >> summary.md
          echo "\`\`\`" >> summary.md
          critcmp main pr >> summary.md
          echo "\`\`\`" >> summary.md
          echo "" >> summary.md

          echo ${{ github.event.pull_request.number }} > pr-number

          cat summary.md > $GITHUB_STEP_SUMMARY

      - uses: actions/upload-artifact@v3
        name: Upload PR Number
        with:
          name: pr-number
          path: pr-number

      - uses: actions/upload-artifact@v3
        name: Upload Summary
        with:
          name: summary
          path: summary.md
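The comparison the workflow performs can also be reproduced locally with the same commands. A minimal sketch, assuming the `cargo benchmark` alias the workflow relies on is defined in the repository's Cargo configuration:

```bash
# Record a Criterion baseline for main, then for the PR branch,
# and compare the two with critcmp (the same tool the workflow installs).
git checkout main
cargo benchmark --save-baseline=main

git checkout my-feature-branch   # hypothetical PR branch name
cargo benchmark --save-baseline=pr

critcmp main pr                  # prints a side-by-side comparison of the two baselines
```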
.github/workflows/ci.yaml (vendored, 33 lines changed)

@@ -96,17 +96,11 @@ jobs:
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-insta
      - name: "Install cargo nextest"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-nextest
      - run: pip install black[d]==23.1.0
      - uses: Swatinem/rust-cache@v2
      - name: "Run tests (Ubuntu)"
        if: ${{ matrix.os == 'ubuntu-latest' }}
        run: >
          cargo insta test --all --all-features --unreferenced reject --test-runner nextest
          -- --status-level skip --failure-output immediate-final --no-fail-fast --all-targets
        run: cargo insta test --all --all-features --unreferenced reject
      - name: "Run tests (Windows)"
        if: ${{ matrix.os == 'windows-latest' }}
        shell: bash

@@ -347,28 +341,3 @@ jobs:
        run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
      - name: "Remove checkouts from cache"
        run: rm -r target/progress_projects

  benchmarks:
    runs-on: ubuntu-latest
    steps:
      - name: "Checkout Branch"
        uses: actions/checkout@v3

      - name: "Install Rust toolchain"
        run: rustup show

      - name: "Install codspeed"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-codspeed

      - uses: Swatinem/rust-cache@v2

      - name: "Build benchmarks"
        run: cargo codspeed build --features codspeed -p ruff_benchmark

      - name: "Run benchmarks"
        uses: CodSpeedHQ/action@v1
        with:
          run: cargo codspeed run
          token: ${{ secrets.CODSPEED_TOKEN }}
.github/workflows/pr-comment.yaml (vendored, 56 lines changed)

@@ -2,7 +2,7 @@ name: PR Check Comment

on:
  workflow_run:
    workflows: [CI]
    workflows: [CI, Benchmark]
    types: [completed]
  workflow_dispatch:
    inputs:

@@ -43,34 +43,42 @@ jobs:
          path: pr/ecosystem
          if_no_artifact_found: ignore

      - uses: dawidd6/action-download-artifact@v2
        name: "Download Benchmark Result"
        id: download-benchmark-result
        if: steps.pr-number.outputs.pr-number
        with:
          name: summary
          workflow: benchmark.yaml
          pr: ${{ steps.pr-number.outputs.pr-number }}
          path: pr/benchmark
          if_no_artifact_found: ignore

      - name: Generate Comment
        id: generate-comment
        if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
        if: steps.download-ecosystem-result.outputs.found_artifact == 'true' || steps.download-benchmark-result.outputs.found_artifact == 'true'
        run: |
          echo '## PR Check Results' >> comment.txt

          echo "### Ecosystem" >> comment.txt
          cat pr/ecosystem/ecosystem-result >> comment.txt
          echo "" >> comment.txt

          echo 'comment<<EOF' >> $GITHUB_OUTPUT
          cat comment.txt >> $GITHUB_OUTPUT
          echo '## PR Check Results' >> $GITHUB_OUTPUT

          if [[ -f pr/ecosystem/ecosystem-result ]]
          then
            echo "### Ecosystem" >> $GITHUB_OUTPUT
            cat pr/ecosystem/ecosystem-result >> $GITHUB_OUTPUT
            echo "" >> $GITHUB_OUTPUT
          fi

          if [[ -f pr/benchmark/summary.md ]]
          then
            cat pr/benchmark/summary.md >> $GITHUB_OUTPUT
          fi

          echo 'EOF' >> $GITHUB_OUTPUT

      - name: Find Comment
        uses: peter-evans/find-comment@v2
        if: steps.generate-comment.outcome == 'success'
        id: find-comment
        with:
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          comment-author: "github-actions[bot]"
          body-includes: PR Check Results

      - name: Create or update comment
        if: steps.find-comment.outcome == 'success'
        uses: peter-evans/create-or-update-comment@v3
        if: steps.generate-comment.outputs.comment
        uses: thollander/actions-comment-pull-request@v2
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          issue-number: ${{ steps.pr-number.outputs.pr-number }}
          body-path: comment.txt
          edit-mode: replace
          pr_number: ${{ steps.pr-number.outputs.pr-number }}
          message: ${{ steps.generate-comment.outputs.comment }}
          comment_tag: PR Check Results
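The `Generate Comment` step above relies on the GitHub Actions delimiter syntax for multi-line step outputs. A minimal sketch of that pattern in isolation (the step id and output name are only illustrative):

```bash
# Everything between "comment<<EOF" and the closing "EOF" written to
# $GITHUB_OUTPUT becomes the multi-line step output named "comment".
{
  echo 'comment<<EOF'
  cat comment.txt      # arbitrary multi-line content assembled earlier
  echo 'EOF'
} >> "$GITHUB_OUTPUT"
# A later step can then read it as ${{ steps.<step-id>.outputs.comment }}.
```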
Cargo.lock (generated, 101 lines changed)

@@ -414,28 +414,6 @@ dependencies = [
 "winapi",
]

[[package]]
name = "codspeed"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aeec2fbed4969dc38b5ca201115dd5c2614b8ef78e0a7221dd5f0977fb1552b"
dependencies = [
 "colored",
 "libc",
 "serde_json",
]

[[package]]
name = "codspeed-criterion-compat"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b13f0a08d40ce7c95bdf288f725b975e62fcadfa8ba152340943bab6de43af7"
dependencies = [
 "codspeed",
 "colored",
 "criterion",
]

[[package]]
name = "colorchoice"
version = "1.0.0"

@@ -834,20 +812,15 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"

[[package]]
name = "flake8-to-ruff"
version = "0.0.286"
version = "0.0.285"
dependencies = [
 "anyhow",
 "clap",
 "colored",
 "configparser",
 "itertools",
 "log",
 "once_cell",
 "pep440_rs",
 "pretty_assertions",
 "regex",
 "ruff",
 "ruff_workspace",
 "rustc-hash",
 "serde",
 "serde_json",

@@ -903,10 +876,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
dependencies = [
 "cfg-if",
 "js-sys",
 "libc",
 "wasi 0.11.0+wasi-snapshot-preview1",
 "wasm-bindgen",
]

[[package]]

@@ -2093,7 +2064,7 @@ dependencies = [

[[package]]
name = "ruff"
version = "0.0.286"
version = "0.0.285"
dependencies = [
 "annotate-snippets 0.9.1",
 "anyhow",

@@ -2101,9 +2072,11 @@ dependencies = [
 "chrono",
 "clap",
 "colored",
 "dirs 5.0.1",
 "fern",
 "glob",
 "globset",
 "ignore",
 "imperative",
 "insta",
 "is-macro",

@@ -2143,6 +2116,7 @@ dependencies = [
 "serde",
 "serde_json",
 "serde_with",
 "shellexpand",
 "similar",
 "smallvec",
 "strum",

@@ -2154,7 +2128,6 @@ dependencies = [
 "typed-arena",
 "unicode-width",
 "unicode_names2",
 "uuid",
 "wsl",
]

@@ -2162,7 +2135,6 @@ dependencies = [
name = "ruff_benchmark"
version = "0.0.0"
dependencies = [
 "codspeed-criterion-compat",
 "criterion",
 "mimalloc",
 "once_cell",

@@ -2192,7 +2164,7 @@ dependencies = [

[[package]]
name = "ruff_cli"
version = "0.0.286"
version = "0.0.285"
dependencies = [
 "annotate-snippets 0.9.1",
 "anyhow",

@@ -2221,7 +2193,6 @@ dependencies = [
 "ruff",
 "ruff_cache",
 "ruff_diagnostics",
 "ruff_formatter",
 "ruff_macros",
 "ruff_python_ast",
 "ruff_python_formatter",

@@ -2229,7 +2200,6 @@ dependencies = [
 "ruff_python_trivia",
 "ruff_source_file",
 "ruff_text_size",
 "ruff_workspace",
 "rustc-hash",
 "serde",
 "serde_json",

@@ -2237,9 +2207,7 @@ dependencies = [
 "similar",
 "strum",
 "tempfile",
 "thiserror",
 "tikv-jemallocator",
 "tracing",
 "ureq",
 "walkdir",
 "wild",

@@ -2272,7 +2240,6 @@ dependencies = [
 "ruff_python_parser",
 "ruff_python_stdlib",
 "ruff_python_trivia",
 "ruff_workspace",
 "schemars",
 "serde",
 "serde_json",

@@ -2373,7 +2340,6 @@ dependencies = [
 "insta",
 "is-macro",
 "itertools",
 "memchr",
 "once_cell",
 "ruff_formatter",
 "ruff_python_ast",

@@ -2532,48 +2498,18 @@ dependencies = [
 "log",
 "ruff",
 "ruff_diagnostics",
 "ruff_formatter",
 "ruff_python_ast",
 "ruff_python_codegen",
 "ruff_python_formatter",
 "ruff_python_index",
 "ruff_python_parser",
 "ruff_source_file",
 "ruff_text_size",
 "ruff_workspace",
 "serde",
 "serde-wasm-bindgen",
 "wasm-bindgen",
 "wasm-bindgen-test",
]

[[package]]
name = "ruff_workspace"
version = "0.0.0"
dependencies = [
 "anyhow",
 "colored",
 "dirs 5.0.1",
 "glob",
 "globset",
 "ignore",
 "itertools",
 "log",
 "path-absolutize",
 "pep440_rs",
 "regex",
 "ruff",
 "ruff_cache",
 "ruff_macros",
 "rustc-hash",
 "schemars",
 "serde",
 "shellexpand",
 "strum",
 "tempfile",
 "toml",
]

[[package]]
name = "rust-stemmers"
version = "1.2.0"

@@ -2756,9 +2692,9 @@ dependencies = [

[[package]]
name = "serde_json"
version = "1.0.105"
version = "1.0.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360"
checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c"
dependencies = [
 "itoa",
 "ryu",

@@ -3409,26 +3345,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"

[[package]]
name = "uuid"
version = "1.4.1"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
dependencies = [
 "getrandom",
 "rand",
 "uuid-macro-internal",
 "wasm-bindgen",
]

[[package]]
name = "uuid-macro-internal"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7e1ba1f333bd65ce3c9f27de592fcbc256dafe3af2717f56d7c87761fbaccf4"
dependencies = [
 "proc-macro2",
 "quote",
 "syn 2.0.23",
]
checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be"

[[package]]
name = "valuable"
@@ -50,7 +50,6 @@ tracing = "0.1.37"
tracing-indicatif = "0.3.4"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
unicode-width = "0.1.10"
uuid = { version = "1.4.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
wsl = { version = "0.1.0" }

# v1.0.1
@@ -140,7 +140,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:

```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.0.286
  rev: v0.0.285
  hooks:
    - id: ruff
```
@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.286"
version = "0.0.285"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""

@@ -14,16 +14,12 @@ license = { workspace = true }

[dependencies]
ruff = { path = "../ruff", default-features = false }
ruff_workspace = { path = "../ruff_workspace" }

anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.2" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
pep440_rs = { version = "0.3.1", features = ["serde"] }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }

@@ -31,6 +27,3 @@ serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }

[dev-dependencies]
pretty_assertions = "1.3.0"
@@ -1,10 +0,0 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;

#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
    pub(crate) black: Option<&'a Black>,
    pub(crate) isort: Option<&'a Isort>,
    pub(crate) project: Option<&'a Project>,
}
@@ -1,24 +1,12 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.

mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
mod pep621;
mod plugin;
mod pyproject;

use std::path::PathBuf;

use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;

use crate::converter::convert;
use crate::external_config::ExternalConfig;
use crate::plugin::Plugin;
use crate::pyproject::parse;
use ruff::flake8_to_ruff::{self, ExternalConfig};
use ruff::logging::{set_up_logging, LogLevel};

#[derive(Parser)]

@@ -37,7 +25,7 @@ struct Args {
    pyproject: Option<PathBuf>,
    /// List of plugins to enable.
    #[arg(long, value_delimiter = ',')]
    plugin: Option<Vec<Plugin>>,
    plugin: Option<Vec<flake8_to_ruff::Plugin>>,
}

fn main() -> Result<()> {

@@ -51,7 +39,7 @@ fn main() -> Result<()> {
    let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;

    // Read the pyproject.toml file.
    let pyproject = args.pyproject.map(parse).transpose()?;
    let pyproject = args.pyproject.map(flake8_to_ruff::parse).transpose()?;
    let external_config = pyproject
        .as_ref()
        .and_then(|pyproject| pyproject.tool.as_ref())

@@ -69,7 +57,7 @@ fn main() -> Result<()> {
    };

    // Create Ruff's pyproject.toml section.
    let pyproject = convert(&config, &external_config, args.plugin);
    let pyproject = flake8_to_ruff::convert(&config, &external_config, args.plugin)?;

    #[allow(clippy::print_stdout)]
    {
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.0.286"
version = "0.0.285"
publish = false
authors = { workspace = true }
edition = { workspace = true }

@@ -36,9 +36,11 @@ bitflags = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "string"], optional = true }
colored = { workspace = true }
dirs = { version = "5.0.0" }
fern = { version = "0.6.1" }
glob = { workspace = true }
globset = { workspace = true }
ignore = { workspace = true }
imperative = { version = "1.0.4" }
is-macro = { workspace = true }
itertools = { workspace = true }

@@ -66,6 +68,7 @@ serde = { workspace = true }
serde_json = { workspace = true }
serde_with = { version = "3.0.0" }
similar = { workspace = true }
shellexpand = { workspace = true }
smallvec = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }

@@ -74,7 +77,6 @@ toml = { workspace = true }
typed-arena = { version = "2.0.2" }
unicode-width = { workspace = true }
unicode_names2 = { version = "0.6.0", git = "https://github.com/youknowone/unicode_names2.git", rev = "4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde" }
uuid = { workspace = true, features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
wsl = { version = "0.1.0" }

[dev-dependencies]
@@ -19,12 +19,3 @@ def foo(x, y, z):
class A():
    pass
    # b = c


dictionary = {
    # "key1": 123, # noqa: ERA001
    # "key2": 456,
    # "key3": 789, # test
}

#import os # noqa

@@ -230,10 +230,6 @@ def timedelta_okay(value=dt.timedelta(hours=1)):
def path_okay(value=Path(".")):
    pass


# B008 allow arbitrary call with immutable annotation
def immutable_annotation_call(value: Sequence[int] = foo()):
    pass


# B006 and B008
# We should handle arbitrary nesting of these B008.
def nested_combo(a=[float(3), dt.datetime.now()]):

@@ -1,7 +1,6 @@
from typing import List

import fastapi
import custom
from fastapi import Query


@@ -17,9 +16,5 @@ def okay(data: List[str] = Query(None)):
    ...


def okay(data: custom.ImmutableTypeA = foo()):
    ...


def error_due_to_missing_import(data: List[str] = Depends(None)):
    ...

@@ -1,9 +0,0 @@
"""Test cases for difficult renames."""


def rename_global():
    try:
        global pandas
        import pandas
    except ImportError:
        return False

@@ -68,14 +68,3 @@ import ctypes

# OK
raise ctypes.WinError(1)


# RSE102
raise IndexError()from ZeroDivisionError

raise IndexError()\
from ZeroDivisionError

raise IndexError() from ZeroDivisionError

raise IndexError();

@@ -1,3 +0,0 @@
import os
import pandas
import foo.baz

@@ -1,2 +0,0 @@
[tool.ruff]
line-length = 88
@@ -1,37 +0,0 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import math\n",
    "import os\n",
    "\n",
    "math.pi"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python (ruff)",
   "language": "python",
   "name": "ruff"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}

@@ -1,37 +0,0 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import math\n",
    "import os\n",
    "\n",
    "math.pi"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python (ruff)",
   "language": "python",
   "name": "ruff"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
@@ -15,11 +15,9 @@
"{:s} {:y}".format("hello", "world") # [bad-format-character]

"{:*^30s}".format("centered") # OK
"{:{s}}".format("hello", s="s") # OK (nested placeholder value not checked)
"{:{s:y}}".format("hello", s="s") # [bad-format-character] (nested placeholder format spec checked)
"{0:.{prec}g}".format(1.23, prec=15) # OK (cannot validate after nested placeholder)
"{0:.{foo}{bar}{foobar}y}".format(...) # OK (cannot validate after nested placeholders)
"{0:.{foo}x{bar}y{foobar}g}".format(...) # OK (all nested placeholders are consumed without considering in between chars)
"{:{s}}".format("hello", s="s") # OK (nested replacement value not checked)

"{:{s:y}}".format("hello", s="s") # [bad-format-character] (nested replacement format spec checked)

## f-strings


@@ -59,35 +59,3 @@ def f() -> None:
x = Union["str", "int"]
x: Union[str, int]
x: Union["str", "int"]


def f(x: Union[int : float]) -> None:
    ...


def f(x: Union[str, int : float]) -> None:
    ...


def f(x: Union[x := int]) -> None:
    ...


def f(x: Union[str, x := int]) -> None:
    ...


def f(x: Union[lambda: int]) -> None:
    ...


def f(x: Union[str, lambda: int]) -> None:
    ...


def f(x: Optional[int : float]) -> None:
    ...


def f(x: Optional[str, int : float]) -> None:
    ...

@@ -1,11 +0,0 @@
#import os # noqa
#import os # noqa: ERA001

dictionary = {
    # "key1": 123, # noqa: ERA001
    # "key2": 456, # noqa
    # "key3": 789,
}


#import os # noqa: E501
@@ -4,11 +4,10 @@ use anyhow::{bail, Result};
use libcst_native::{
    Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
};
use ruff_python_ast::{Ranged, Stmt};

use ruff_python_ast::Stmt;
use ruff_python_codegen::Stylist;
use ruff_source_file::Locator;
use ruff_text_size::Ranged;

use crate::cst::helpers::compose_module_path;
use crate::cst::matchers::match_statement;

@@ -3,14 +3,15 @@
use anyhow::{Context, Result};

use ruff_diagnostics::Edit;
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, Keyword, Stmt};
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, Keyword, Ranged, Stmt};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;

use ruff_python_trivia::{
    has_leading_content, is_python_whitespace, PythonWhitespace, SimpleTokenKind, SimpleTokenizer,
};
use ruff_source_file::{Locator, NewlineWithTrailingNewline};
use ruff_text_size::{Ranged, TextLen, TextSize};
use ruff_text_size::{TextLen, TextSize};

use crate::autofix::codemods;

@@ -253,9 +254,10 @@ fn next_stmt_break(semicolon: TextSize, locator: &Locator) -> TextSize {
mod tests {
    use anyhow::Result;

    use ruff_python_ast::Ranged;
    use ruff_python_parser::parse_suite;
    use ruff_source_file::Locator;
    use ruff_text_size::{Ranged, TextSize};
    use ruff_text_size::TextSize;

    use crate::autofix::edits::{next_stmt_break, trailing_semicolon};

@@ -1,7 +1,7 @@
use itertools::Itertools;
use std::collections::BTreeSet;

use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use ruff_text_size::{TextLen, TextRange, TextSize};
use rustc_hash::{FxHashMap, FxHashSet};

use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel};

@@ -138,9 +138,11 @@ fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Orderi

#[cfg(test)]
mod tests {
    use ruff_text_size::{Ranged, TextSize};
    use ruff_text_size::TextSize;

    use ruff_diagnostics::{Diagnostic, Edit, Fix};
    use ruff_diagnostics::Diagnostic;
    use ruff_diagnostics::Edit;
    use ruff_diagnostics::Fix;
    use ruff_source_file::Locator;

    use crate::autofix::source_map::SourceMarker;

@@ -1,4 +1,4 @@
use ruff_text_size::{Ranged, TextSize};
use ruff_text_size::TextSize;

use ruff_diagnostics::Edit;

@@ -1,5 +1,5 @@
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_text_size::Ranged;
use ruff_python_ast::Ranged;

use crate::checkers::ast::Checker;
use crate::codes::Rule;

@@ -1,7 +1,7 @@
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::Ranged;
use ruff_python_semantic::analyze::visibility;
use ruff_python_semantic::{Binding, BindingKind, ScopeKind};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
use crate::codes::Rule;

@@ -1,5 +1,6 @@
use ruff_python_ast::str::raw_contents_range;
use ruff_text_size::{Ranged, TextRange};
use ruff_python_ast::Ranged;
use ruff_text_size::TextRange;

use ruff_python_semantic::{BindingKind, ContextualizedDefinition, Export};

@@ -1,5 +1,4 @@
use ruff_python_ast::{self as ast, ExceptHandler};
use ruff_text_size::Ranged;
use ruff_python_ast::{self as ast, ExceptHandler, Ranged};

use crate::checkers::ast::Checker;
use crate::registry::Rule;

@@ -1,4 +1,4 @@
use ruff_python_ast::{self as ast, Arguments, Constant, Expr, ExprContext, Operator};
use ruff_python_ast::{self as ast, Arguments, Constant, Expr, ExprContext, Operator, Ranged};
use ruff_python_literal::cformat::{CFormatError, CFormatErrorType};

use ruff_diagnostics::Diagnostic;

@@ -6,7 +6,6 @@ use ruff_diagnostics::Diagnostic;
use ruff_python_ast::types::Node;
use ruff_python_semantic::analyze::typing;
use ruff_python_semantic::ScopeKind;
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
use crate::registry::Rule;

@@ -1,5 +1,4 @@
use ruff_python_ast::Parameter;
use ruff_text_size::Ranged;
use ruff_python_ast::{Parameter, Ranged};

use crate::checkers::ast::Checker;
use crate::codes::Rule;

@@ -1,10 +1,11 @@
use ruff_python_ast::{self as ast, Expr, Ranged, Stmt};

use ruff_diagnostics::Diagnostic;
use ruff_python_ast::helpers;

use ruff_python_ast::types::Node;
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_python_semantic::analyze::typing;
use ruff_python_semantic::ScopeKind;
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
use crate::registry::Rule;

@@ -32,10 +32,10 @@ use itertools::Itertools;
use log::error;
use ruff_python_ast::{
    self as ast, Arguments, Comprehension, Constant, ElifElseClause, ExceptHandler, Expr,
    ExprContext, Keyword, MatchCase, Parameter, ParameterWithDefault, Parameters, Pattern, Stmt,
    Suite, UnaryOp,
    ExprContext, Keyword, MatchCase, Parameter, ParameterWithDefault, Parameters, Pattern, Ranged,
    Stmt, Suite, UnaryOp,
};
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_text_size::{TextRange, TextSize};

use ruff_diagnostics::{Diagnostic, IsolationLevel};
use ruff_python_ast::all::{extract_all_names, DunderAllFlags};
@@ -1335,23 +1335,46 @@ where

    fn visit_pattern(&mut self, pattern: &'b Pattern) {
        // Step 1: Binding
        if let Pattern::MatchAs(ast::PatternMatchAs {
            name: Some(name), ..
        })
        | Pattern::MatchStar(ast::PatternMatchStar {
            name: Some(name),
            range: _,
        })
        | Pattern::MatchMapping(ast::PatternMatchMapping {
            rest: Some(name), ..
        }) = pattern
        {
            self.add_binding(
                name,
                name.range(),
                BindingKind::Assignment,
                BindingFlags::empty(),
            );
        match &pattern {
            Pattern::MatchAs(ast::PatternMatchAs {
                name: Some(name),
                pattern: _,
                range: _,
            }) => {
                self.add_binding(
                    name,
                    name.range(),
                    BindingKind::Assignment,
                    BindingFlags::empty(),
                );
            }
            Pattern::MatchStar(ast::PatternMatchStar {
                name: Some(name),
                range: _,
            }) => {
                self.add_binding(
                    name,
                    name.range(),
                    BindingKind::Assignment,
                    BindingFlags::empty(),
                );
            }
            Pattern::MatchMapping(ast::PatternMatchMapping {
                rest: Some(rest), ..
            }) => {
                if let Pattern::MatchAs(ast::PatternMatchAs {
                    name: Some(name), ..
                }) = rest.as_ref()
                {
                    self.add_binding(
                        name,
                        name.range(),
                        BindingKind::Assignment,
                        BindingFlags::empty(),
                    );
                }
            }
            _ => {}
        }

        // Step 2: Traversal

@@ -1895,8 +1918,6 @@ impl<'a> Checker<'a> {
        for (name, range) in exports {
            if let Some(binding_id) = self.semantic.global_scope().get(name) {
                // Mark anything referenced in `__all__` as used.
                // TODO(charlie): `range` here should be the range of the name in `__all__`, not
                // the range of `__all__` itself.
                self.semantic.add_global_reference(binding_id, range);
            } else {
                if self.semantic.global_scope().uses_star_imports() {
@@ -2,15 +2,16 @@
use std::borrow::Cow;
use std::path::Path;

use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt, Suite};

use ruff_diagnostics::Diagnostic;
use ruff_python_ast::helpers::to_module_path;
use ruff_python_ast::imports::{ImportMap, ModuleImport};
use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_ast::{self as ast, PySourceType, Stmt, Suite};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;

use ruff_source_file::Locator;
use ruff_text_size::Ranged;

use crate::directives::IsortDirectives;
use crate::registry::Rule;

@@ -1,9 +1,10 @@
use ruff_diagnostics::{Diagnostic, DiagnosticKind};
use ruff_python_ast::Ranged;
use ruff_python_codegen::Stylist;
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::TokenKind;
use ruff_source_file::Locator;
use ruff_text_size::{Ranged, TextRange};
use ruff_text_size::TextRange;

use crate::registry::{AsRule, Rule};
use crate::rules::pycodestyle::rules::logical_lines::{

@@ -3,7 +3,8 @@
use std::path::Path;

use itertools::Itertools;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use ruff_python_ast::Ranged;
use ruff_text_size::{TextLen, TextRange, TextSize};

use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_source_file::Locator;

@@ -1,9 +1,10 @@
//! Lint rules based on checking physical lines.
use ruff_text_size::TextSize;

use ruff_diagnostics::Diagnostic;
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_source_file::{Locator, UniversalNewlines};
use ruff_text_size::TextSize;

use crate::registry::Rule;
use crate::rules::flake8_copyright::rules::missing_copyright_notice;

@@ -98,10 +99,11 @@ pub(crate) fn check_physical_lines(

#[cfg(test)]
mod tests {
    use ruff_python_codegen::Stylist;
    use ruff_python_index::Indexer;
    use ruff_python_parser::lexer::lex;
    use ruff_python_parser::Mode;

    use ruff_python_codegen::Stylist;
    use ruff_python_index::Indexer;
    use ruff_source_file::Locator;

    use crate::line_width::LineLength;

@@ -130,7 +132,7 @@ mod tests {
            },
        )
    };
    let line_length = LineLength::try_from(8).unwrap();
    let line_length = LineLength::from(8);
    assert_eq!(check_with_max_line_length(line_length), vec![]);
    assert_eq!(check_with_max_line_length(line_length), vec![]);
}

@@ -5,7 +5,7 @@ use std::str::FromStr;
use bitflags::bitflags;
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::Tok;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use ruff_text_size::{TextLen, TextRange, TextSize};

use ruff_python_index::Indexer;
use ruff_source_file::Locator;

@@ -3,10 +3,10 @@

use std::iter::FusedIterator;

use ruff_python_ast::{self as ast, Constant, Expr, Stmt, Suite};
use ruff_python_ast::{self as ast, Constant, Expr, Ranged, Stmt, Suite};
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::Tok;
use ruff_text_size::{Ranged, TextSize};
use ruff_text_size::TextSize;

use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
use ruff_source_file::{Locator, UniversalNewlineIterator};

@@ -1,9 +1,9 @@
use std::fmt::{Debug, Formatter};
use std::ops::Deref;

use ruff_python_ast::Expr;
use ruff_python_ast::{Expr, Ranged};
use ruff_python_semantic::Definition;
use ruff_text_size::{Ranged, TextRange};
use ruff_text_size::TextRange;

pub(crate) mod extraction;
pub(crate) mod google;

@@ -2,7 +2,8 @@ use std::fmt::{Debug, Formatter};
use std::iter::FusedIterator;

use ruff_python_ast::docstrings::{leading_space, leading_words};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use ruff_python_ast::Ranged;
use ruff_text_size::{TextLen, TextRange, TextSize};
use strum_macros::EnumIter;

use ruff_source_file::{Line, UniversalNewlineIterator, UniversalNewlines};

@@ -164,7 +165,7 @@ impl<'a> SectionContexts<'a> {
            lines.peek(),
        ) {
            if let Some(mut last) = last.take() {
                last.range = TextRange::new(last.start(), line.start());
                last.range = TextRange::new(last.range.start(), line.start());
                contexts.push(last);
            }

@@ -181,7 +182,7 @@ impl<'a> SectionContexts<'a> {
        }

        if let Some(mut last) = last.take() {
            last.range = TextRange::new(last.start(), contents.text_len());
            last.range = TextRange::new(last.range.start(), contents.text_len());
            contexts.push(last);
        }

@@ -267,12 +268,6 @@ struct SectionContextData {
    summary_full_end: TextSize,
}

impl Ranged for SectionContextData {
    fn range(&self) -> TextRange {
        self.range
    }
}

pub(crate) struct SectionContext<'a> {
    data: &'a SectionContextData,
    docstring_body: DocstringBody<'a>,
@@ -1,13 +1,13 @@
//! Extract Black configuration settings from a pyproject.toml.

use ruff::line_width::LineLength;
use ruff::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};

use crate::settings::types::PythonVersion;

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Black {
pub struct Black {
    #[serde(alias = "line-length", alias = "line_length")]
    pub(crate) line_length: Option<LineLength>,
    pub line_length: Option<usize>,
    #[serde(alias = "target-version", alias = "target_version")]
    pub(crate) target_version: Option<Vec<PythonVersion>>,
    pub target_version: Option<Vec<PythonVersion>>,
}
@@ -1,25 +1,25 @@
use std::collections::{HashMap, HashSet};
use std::str::FromStr;

use anyhow::Result;
use itertools::Itertools;

use ruff::line_width::LineLength;
use ruff::registry::Linter;
use ruff::rule_selector::RuleSelector;
use ruff::rules::flake8_pytest_style::types::{
use crate::line_width::LineLength;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use crate::rules::flake8_pytest_style::types::{
    ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use ruff::rules::flake8_quotes::settings::Quote;
use ruff::rules::flake8_tidy_imports::settings::Strictness;
use ruff::rules::pydocstyle::settings::Convention;
use ruff::settings::types::PythonVersion;
use ruff::warn_user;
use ruff_workspace::options::{
    Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
    Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, McCabeOptions,
    Options, Pep8NamingOptions, PydocstyleOptions,
use crate::rules::flake8_quotes::settings::Quote;
use crate::rules::flake8_tidy_imports::settings::Strictness;
use crate::rules::pydocstyle::settings::Convention;
use crate::rules::{
    flake8_annotations, flake8_bugbear, flake8_builtins, flake8_errmsg, flake8_pytest_style,
    flake8_quotes, flake8_tidy_imports, mccabe, pep8_naming, pydocstyle,
};
use ruff_workspace::pyproject::Pyproject;
use crate::settings::options::Options;
use crate::settings::pyproject::Pyproject;
use crate::settings::types::PythonVersion;
use crate::warn_user;

use super::external_config::ExternalConfig;
use super::plugin::Plugin;

@@ -30,11 +30,11 @@ const DEFAULT_SELECTORS: &[RuleSelector] = &[
    RuleSelector::Linter(Linter::Pycodestyle),
];

pub(crate) fn convert(
pub fn convert(
    config: &HashMap<String, HashMap<String, Option<String>>>,
    external_config: &ExternalConfig,
    plugins: Option<Vec<Plugin>>,
) -> Pyproject {
) -> Result<Pyproject> {
    // Extract the Flake8 section.
    let flake8 = config
        .get("flake8")

@@ -103,16 +103,16 @@ pub(crate) fn convert(

    // Parse each supported option.
    let mut options = Options::default();
    let mut flake8_annotations = Flake8AnnotationsOptions::default();
    let mut flake8_bugbear = Flake8BugbearOptions::default();
    let mut flake8_builtins = Flake8BuiltinsOptions::default();
    let mut flake8_errmsg = Flake8ErrMsgOptions::default();
    let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
    let mut flake8_quotes = Flake8QuotesOptions::default();
    let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
    let mut mccabe = McCabeOptions::default();
    let mut pep8_naming = Pep8NamingOptions::default();
    let mut pydocstyle = PydocstyleOptions::default();
    let mut flake8_annotations = flake8_annotations::settings::Options::default();
    let mut flake8_bugbear = flake8_bugbear::settings::Options::default();
    let mut flake8_builtins = flake8_builtins::settings::Options::default();
    let mut flake8_errmsg = flake8_errmsg::settings::Options::default();
    let mut flake8_pytest_style = flake8_pytest_style::settings::Options::default();
    let mut flake8_quotes = flake8_quotes::settings::Options::default();
    let mut flake8_tidy_imports = flake8_tidy_imports::options::Options::default();
    let mut mccabe = mccabe::settings::Options::default();
    let mut pep8_naming = pep8_naming::settings::Options::default();
    let mut pydocstyle = pydocstyle::settings::Options::default();
    for (key, value) in flake8 {
        if let Some(value) = value {
            match key.as_str() {

@@ -120,8 +120,10 @@ pub(crate) fn convert(
                "builtins" => {
                    options.builtins = Some(parser::parse_strings(value.as_ref()));
                }
                "max-line-length" | "max_line_length" => match LineLength::from_str(value) {
                    Ok(line_length) => options.line_length = Some(line_length),
                "max-line-length" | "max_line_length" => match value.parse::<usize>() {
                    Ok(line_length) => {
                        options.line_length = Some(LineLength::from(line_length));
                    }
                    Err(e) => {
                        warn_user!("Unable to parse '{key}' property: {e}");
                    }

@@ -370,41 +372,41 @@ pub(crate) fn convert(
            .sorted_by_key(RuleSelector::prefix_and_code)
            .collect(),
    );
    if flake8_annotations != Flake8AnnotationsOptions::default() {
    if flake8_annotations != flake8_annotations::settings::Options::default() {
        options.flake8_annotations = Some(flake8_annotations);
    }
    if flake8_bugbear != Flake8BugbearOptions::default() {
    if flake8_bugbear != flake8_bugbear::settings::Options::default() {
        options.flake8_bugbear = Some(flake8_bugbear);
    }
    if flake8_builtins != Flake8BuiltinsOptions::default() {
    if flake8_builtins != flake8_builtins::settings::Options::default() {
        options.flake8_builtins = Some(flake8_builtins);
    }
    if flake8_errmsg != Flake8ErrMsgOptions::default() {
    if flake8_errmsg != flake8_errmsg::settings::Options::default() {
        options.flake8_errmsg = Some(flake8_errmsg);
    }
    if flake8_pytest_style != Flake8PytestStyleOptions::default() {
    if flake8_pytest_style != flake8_pytest_style::settings::Options::default() {
        options.flake8_pytest_style = Some(flake8_pytest_style);
    }
    if flake8_quotes != Flake8QuotesOptions::default() {
    if flake8_quotes != flake8_quotes::settings::Options::default() {
        options.flake8_quotes = Some(flake8_quotes);
    }
    if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
    if flake8_tidy_imports != flake8_tidy_imports::options::Options::default() {
        options.flake8_tidy_imports = Some(flake8_tidy_imports);
    }
    if mccabe != McCabeOptions::default() {
    if mccabe != mccabe::settings::Options::default() {
        options.mccabe = Some(mccabe);
    }
    if pep8_naming != Pep8NamingOptions::default() {
    if pep8_naming != pep8_naming::settings::Options::default() {
        options.pep8_naming = Some(pep8_naming);
    }
    if pydocstyle != PydocstyleOptions::default() {
    if pydocstyle != pydocstyle::settings::Options::default() {
        options.pydocstyle = Some(pydocstyle);
    }

    // Extract any settings from the existing `pyproject.toml`.
    if let Some(black) = &external_config.black {
        if let Some(line_length) = &black.line_length {
            options.line_length = Some(*line_length);
            options.line_length = Some(LineLength::from(*line_length));
        }

        if let Some(target_version) = &black.target_version {

@@ -437,7 +439,7 @@ pub(crate) fn convert(
    }

    // Create the pyproject.toml.
    Pyproject::new(options)
    Ok(Pyproject::new(options))
}

/// Resolve the set of enabled `RuleSelector` values for the given
@@ -456,20 +458,19 @@ mod tests {
    use anyhow::Result;
    use itertools::Itertools;
    use pep440_rs::VersionSpecifiers;

    use pretty_assertions::assert_eq;
    use ruff::line_width::LineLength;
    use ruff::registry::Linter;
    use ruff::rule_selector::RuleSelector;
    use ruff::rules::flake8_quotes;
    use ruff::rules::pydocstyle::settings::Convention;
    use ruff::settings::types::PythonVersion;
    use ruff_workspace::options::{Flake8QuotesOptions, Options, PydocstyleOptions};
    use ruff_workspace::pyproject::Pyproject;

    use crate::converter::DEFAULT_SELECTORS;
    use crate::pep621::Project;
    use crate::ExternalConfig;
    use crate::flake8_to_ruff::converter::DEFAULT_SELECTORS;
    use crate::flake8_to_ruff::pep621::Project;
    use crate::flake8_to_ruff::ExternalConfig;
    use crate::line_width::LineLength;
    use crate::registry::Linter;
    use crate::rule_selector::RuleSelector;
    use crate::rules::pydocstyle::settings::Convention;
    use crate::rules::{flake8_quotes, pydocstyle};
    use crate::settings::options::Options;
    use crate::settings::pyproject::Pyproject;
    use crate::settings::types::PythonVersion;

    use super::super::plugin::Plugin;
    use super::convert;

@@ -490,18 +491,20 @@ mod tests {
    }

    #[test]
    fn it_converts_empty() {
    fn it_converts_empty() -> Result<()> {
        let actual = convert(
            &HashMap::from([("flake8".to_string(), HashMap::default())]),
            &ExternalConfig::default(),
            None,
        );
        )?;
        let expected = Pyproject::new(default_options([]));
        assert_eq!(actual, expected);

        Ok(())
    }

    #[test]
    fn it_converts_dashes() {
    fn it_converts_dashes() -> Result<()> {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),

@@ -509,16 +512,18 @@ mod tests {
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        )?;
        let expected = Pyproject::new(Options {
            line_length: Some(LineLength::try_from(100).unwrap()),
            line_length: Some(LineLength::from(100)),
            ..default_options([])
        });
        assert_eq!(actual, expected);

        Ok(())
    }

    #[test]
    fn it_converts_underscores() {
    fn it_converts_underscores() -> Result<()> {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),

@@ -526,16 +531,18 @@ mod tests {
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        )?;
        let expected = Pyproject::new(Options {
            line_length: Some(LineLength::try_from(100).unwrap()),
            line_length: Some(LineLength::from(100)),
            ..default_options([])
        });
        assert_eq!(actual, expected);

        Ok(())
    }

    #[test]
    fn it_ignores_parse_errors() {
    fn it_ignores_parse_errors() -> Result<()> {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),

@@ -543,13 +550,15 @@ mod tests {
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        )?;
        let expected = Pyproject::new(default_options([]));
        assert_eq!(actual, expected);

        Ok(())
    }

    #[test]
    fn it_converts_plugin_options() {
    fn it_converts_plugin_options() -> Result<()> {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),

@@ -557,9 +566,9 @@ mod tests {
            )]),
            &ExternalConfig::default(),
            Some(vec![]),
        );
        )?;
        let expected = Pyproject::new(Options {
            flake8_quotes: Some(Flake8QuotesOptions {
            flake8_quotes: Some(flake8_quotes::settings::Options {
                inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                multiline_quotes: None,
                docstring_quotes: None,

@@ -568,10 +577,12 @@ mod tests {
            ..default_options([])
        });
        assert_eq!(actual, expected);

        Ok(())
    }

    #[test]
    fn it_converts_docstring_conventions() {
    fn it_converts_docstring_conventions() -> Result<()> {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),

@@ -582,9 +593,9 @@ mod tests {
            )]),
            &ExternalConfig::default(),
            Some(vec![Plugin::Flake8Docstrings]),
        );
        )?;
        let expected = Pyproject::new(Options {
            pydocstyle: Some(PydocstyleOptions {
            pydocstyle: Some(pydocstyle::settings::Options {
                convention: Some(Convention::Numpy),
                ignore_decorators: None,
                property_decorators: None,

@@ -592,10 +603,12 @@ mod tests {
            ..default_options([Linter::Pydocstyle.into()])
        });
        assert_eq!(actual, expected);

        Ok(())
    }

    #[test]
    fn it_infers_plugins_if_omitted() {
    fn it_infers_plugins_if_omitted() -> Result<()> {
        let actual = convert(
            &HashMap::from([(
                "flake8".to_string(),

@@ -603,9 +616,9 @@ mod tests {
            )]),
            &ExternalConfig::default(),
            None,
        );
        )?;
        let expected = Pyproject::new(Options {
            flake8_quotes: Some(Flake8QuotesOptions {
            flake8_quotes: Some(flake8_quotes::settings::Options {
                inline_quotes: Some(flake8_quotes::settings::Quote::Single),
                multiline_quotes: None,
                docstring_quotes: None,

@@ -614,6 +627,8 @@ mod tests {
            ..default_options([Linter::Flake8Quotes.into()])
        });
        assert_eq!(actual, expected);

        Ok(())
    }

    #[test]

@@ -627,7 +642,7 @@ mod tests {
                ..ExternalConfig::default()
            },
            Some(vec![]),
        );
        )?;
        let expected = Pyproject::new(Options {
            target_version: Some(PythonVersion::Py38),
            ..default_options([])
crates/ruff/src/flake8_to_ruff/external_config.rs (new file, +10)

@@ -0,0 +1,10 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;

#[derive(Default)]
pub struct ExternalConfig<'a> {
    pub black: Option<&'a Black>,
    pub isort: Option<&'a Isort>,
    pub project: Option<&'a Project>,
}
@@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};

/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
pub struct Isort {
    #[serde(alias = "src-paths", alias = "src_paths")]
    pub(crate) src_paths: Option<Vec<String>>,
    pub src_paths: Option<Vec<String>>,
}
crates/ruff/src/flake8_to_ruff/mod.rs (new file, +13)

@@ -0,0 +1,13 @@
pub use converter::convert;
pub use external_config::ExternalConfig;
pub use plugin::Plugin;
pub use pyproject::parse;

mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
pub mod pep621;
mod plugin;
mod pyproject;
@@ -3,10 +3,12 @@ use std::str::FromStr;
|
||||
use anyhow::{bail, Result};
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use ruff::settings::types::PatternPrefixPair;
|
||||
use ruff::{warn_user, RuleSelector};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::rule_selector::RuleSelector;
|
||||
use crate::settings::types::PatternPrefixPair;
|
||||
use crate::warn_user;
|
||||
|
||||
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
|
||||
|
||||
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
|
||||
@@ -192,11 +194,11 @@ pub(crate) fn collect_per_file_ignores(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use ruff::RuleSelector;
|
||||
|
||||
use ruff::codes;
|
||||
use ruff::registry::Linter;
|
||||
use ruff::settings::types::PatternPrefixPair;
|
||||
use crate::codes;
|
||||
use crate::registry::Linter;
|
||||
use crate::rule_selector::RuleSelector;
|
||||
use crate::settings::types::PatternPrefixPair;
|
||||
|
||||
use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
|
||||
|
||||
@@ -4,7 +4,7 @@ use pep440_rs::VersionSpecifiers;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||
pub(crate) struct Project {
|
||||
pub struct Project {
|
||||
#[serde(alias = "requires-python", alias = "requires_python")]
|
||||
pub(crate) requires_python: Option<VersionSpecifiers>,
|
||||
pub requires_python: Option<VersionSpecifiers>,
|
||||
}
|
||||
@@ -3,8 +3,9 @@ use std::fmt;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use ruff::registry::Linter;
|
||||
use ruff::RuleSelector;
|
||||
|
||||
use crate::registry::Linter;
|
||||
use crate::rule_selector::RuleSelector;
|
||||
|
||||
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
|
||||
pub enum Plugin {
|
||||
@@ -8,18 +8,18 @@ use super::isort::Isort;
|
||||
use super::pep621::Project;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub(crate) struct Tools {
|
||||
pub(crate) black: Option<Black>,
|
||||
pub(crate) isort: Option<Isort>,
|
||||
pub struct Tools {
|
||||
pub black: Option<Black>,
|
||||
pub isort: Option<Isort>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub(crate) struct Pyproject {
|
||||
pub(crate) tool: Option<Tools>,
|
||||
pub(crate) project: Option<Project>,
|
||||
pub struct Pyproject {
|
||||
pub tool: Option<Tools>,
|
||||
pub project: Option<Project>,
|
||||
}
|
||||
|
||||
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
|
||||
pub fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
|
||||
let contents = std::fs::read_to_string(path)?;
|
||||
let pyproject = toml::from_str::<Pyproject>(&contents)?;
|
||||
Ok(pyproject)
|
||||
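
For orientation, a minimal sketch (not part of the diff) of how the re-exported flake8_to_ruff API could be driven once `flake8_to_ruff` is public in the `ruff` crate; the path is hypothetical and error handling is left to `anyhow`:

    use anyhow::Result;
    use ruff::flake8_to_ruff::{parse, ExternalConfig};

    fn load_external_config() -> Result<()> {
        // Hypothetical path; `parse` deserializes the whole pyproject.toml.
        let pyproject = parse("path/to/pyproject.toml")?;
        // Borrow the [tool.black], [tool.isort], and [project] tables, if present,
        // into the ExternalConfig wrapper defined above.
        let tools = pyproject.tool.as_ref();
        let external = ExternalConfig {
            black: tools.and_then(|tools| tools.black.as_ref()),
            isort: tools.and_then(|tools| tools.isort.as_ref()),
            project: pyproject.project.as_ref(),
        };
        let _ = external;
        Ok(())
    }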
@@ -1,9 +1,9 @@
//! Insert statements into Python code.
use std::ops::Add;

use ruff_python_ast::{PySourceType, Stmt};
use ruff_python_ast::{PySourceType, Ranged, Stmt};
use ruff_python_parser::{lexer, AsMode, Tok};
use ruff_text_size::{Ranged, TextSize};
use ruff_text_size::TextSize;

use ruff_diagnostics::Edit;
use ruff_python_ast::helpers::is_docstring_stmt;

@@ -7,8 +7,8 @@ use std::error::Error;

use anyhow::Result;
use libcst_native::{ImportAlias, Name, NameOrAttribute};
use ruff_python_ast::{self as ast, PySourceType, Stmt, Suite};
use ruff_text_size::{Ranged, TextSize};
use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt, Suite};
use ruff_text_size::TextSize;

use ruff_diagnostics::Edit;
use ruff_python_ast::imports::{AnyImport, Import, ImportFrom};

@@ -3,14 +3,14 @@
/// When we lint a jupyter notebook, we have to translate the row/column based on
/// [`ruff_text_size::TextSize`] to jupyter notebook cell/row/column.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct NotebookIndex {
pub struct JupyterIndex {
    /// Enter a row (1-based), get back the cell (1-based)
    pub(super) row_to_cell: Vec<u32>,
    /// Enter a row (1-based), get back the row in cell (1-based)
    pub(super) row_to_row_in_cell: Vec<u32>,
}

impl NotebookIndex {
impl JupyterIndex {
    /// Returns the cell number (1-based) for the given row (1-based).
    pub fn cell(&self, row: usize) -> Option<u32> {
        self.row_to_cell.get(row).copied()
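
To make the row-to-cell mapping concrete, here is a small illustrative value (hypothetical data, constructed the way the module's own tests build the index further down): a notebook whose first code cell has three lines and whose second has two, with index 0 kept as a padding slot so lookups stay 1-based.

    // Inside crate::jupyter, where the pub(super) fields are visible.
    let index = JupyterIndex {
        row_to_cell: vec![0, 1, 1, 1, 2, 2],
        row_to_row_in_cell: vec![0, 1, 2, 3, 1, 2],
    };
    assert_eq!(index.cell(4), Some(2)); // row 4 of the concatenated source lives in cell 2
    assert_eq!(index.row_to_row_in_cell[4], 1); // and it is the first row of that cell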
@@ -9,7 +9,6 @@ use itertools::Itertools;
|
||||
use once_cell::sync::OnceCell;
|
||||
use serde::Serialize;
|
||||
use serde_json::error::Category;
|
||||
use uuid::Uuid;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_parser::lexer::lex;
|
||||
@@ -18,7 +17,7 @@ use ruff_source_file::{NewlineWithTrailingNewline, UniversalNewlineIterator};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use crate::autofix::source_map::{SourceMap, SourceMarker};
|
||||
use crate::jupyter::index::NotebookIndex;
|
||||
use crate::jupyter::index::JupyterIndex;
|
||||
use crate::jupyter::schema::{Cell, RawNotebook, SortAlphabetically, SourceValue};
|
||||
use crate::rules::pycodestyle::rules::SyntaxError;
|
||||
use crate::IOError;
|
||||
@@ -83,8 +82,8 @@ impl Cell {
|
||||
Cell::Code(cell) => &cell.source,
|
||||
_ => return false,
|
||||
};
|
||||
// Ignore cells containing cell magic as they act on the entire cell
|
||||
// as compared to line magic which acts on a single line.
|
||||
// Ignore cells containing cell magic. This is different from line magic
|
||||
// which is allowed and ignored by the parser.
|
||||
!match source {
|
||||
SourceValue::String(string) => string
|
||||
.lines()
|
||||
@@ -107,7 +106,7 @@ pub struct Notebook {
|
||||
source_code: String,
|
||||
/// The index of the notebook. This is used to map between the concatenated
|
||||
/// source code and the original notebook.
|
||||
index: OnceCell<NotebookIndex>,
|
||||
index: OnceCell<JupyterIndex>,
|
||||
/// The raw notebook i.e., the deserialized version of JSON string.
|
||||
raw: RawNotebook,
|
||||
/// The offsets of each cell in the concatenated source code. This includes
|
||||
@@ -157,7 +156,7 @@ impl Notebook {
|
||||
TextRange::default(),
|
||||
)
|
||||
})?;
|
||||
let mut raw_notebook: RawNotebook = match serde_json::from_reader(reader.by_ref()) {
|
||||
let raw_notebook: RawNotebook = match serde_json::from_reader(reader.by_ref()) {
|
||||
Ok(notebook) => notebook,
|
||||
Err(err) => {
|
||||
// Translate the error into a diagnostic
|
||||
@@ -263,23 +262,6 @@ impl Notebook {
|
||||
cell_offsets.push(current_offset);
|
||||
}
|
||||
|
||||
// Add cell ids to 4.5+ notebooks if they are missing
|
||||
// https://github.com/astral-sh/ruff/issues/6834
|
||||
// https://github.com/jupyter/enhancement-proposals/blob/master/62-cell-id/cell-id.md#required-field
|
||||
if raw_notebook.nbformat == 4 && raw_notebook.nbformat_minor >= 5 {
|
||||
for cell in &mut raw_notebook.cells {
|
||||
let id = match cell {
|
||||
Cell::Code(cell) => &mut cell.id,
|
||||
Cell::Markdown(cell) => &mut cell.id,
|
||||
Cell::Raw(cell) => &mut cell.id,
|
||||
};
|
||||
if id.is_none() {
|
||||
// https://github.com/jupyter/enhancement-proposals/blob/master/62-cell-id/cell-id.md#questions
|
||||
*id = Some(Uuid::new_v4().to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
raw: raw_notebook,
|
||||
index: OnceCell::new(),
|
||||
@@ -386,7 +368,7 @@ impl Notebook {
|
||||
///
|
||||
/// The index building is expensive as it needs to go through the content of
|
||||
/// every valid code cell.
|
||||
fn build_index(&self) -> NotebookIndex {
|
||||
fn build_index(&self) -> JupyterIndex {
|
||||
let mut row_to_cell = vec![0];
|
||||
let mut row_to_row_in_cell = vec![0];
|
||||
|
||||
@@ -413,7 +395,7 @@ impl Notebook {
|
||||
row_to_row_in_cell.extend(1..=line_count);
|
||||
}
|
||||
|
||||
NotebookIndex {
|
||||
JupyterIndex {
|
||||
row_to_cell,
|
||||
row_to_row_in_cell,
|
||||
}
|
||||
@@ -431,7 +413,7 @@ impl Notebook {
|
||||
/// The index is built only once when required. This is only used to
|
||||
/// report diagnostics, so by that time all of the autofixes must have
|
||||
/// been applied if `--fix` was passed.
|
||||
pub(crate) fn index(&self) -> &NotebookIndex {
|
||||
pub(crate) fn index(&self) -> &JupyterIndex {
|
||||
self.index.get_or_init(|| self.build_index())
|
||||
}
|
||||
|
||||
@@ -491,14 +473,12 @@ mod tests {
|
||||
use anyhow::Result;
|
||||
use test_case::test_case;
|
||||
|
||||
use crate::jupyter::index::NotebookIndex;
|
||||
use crate::jupyter::index::JupyterIndex;
|
||||
use crate::jupyter::schema::Cell;
|
||||
use crate::jupyter::Notebook;
|
||||
use crate::registry::Rule;
|
||||
use crate::source_kind::SourceKind;
|
||||
use crate::test::{
|
||||
read_jupyter_notebook, test_contents, test_notebook_path, test_resource_path,
|
||||
TestedNotebook,
|
||||
read_jupyter_notebook, test_notebook_path, test_resource_path, TestedNotebook,
|
||||
};
|
||||
use crate::{assert_messages, settings};
|
||||
|
||||
@@ -579,7 +559,7 @@ print("after empty cells")
|
||||
);
|
||||
assert_eq!(
|
||||
notebook.index(),
|
||||
&NotebookIndex {
|
||||
&JupyterIndex {
|
||||
row_to_cell: vec![0, 1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 5, 7, 7, 8],
|
||||
row_to_row_in_cell: vec![0, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 1, 1, 2, 1],
|
||||
}
|
||||
@@ -679,28 +659,4 @@ print("after empty cells")
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Version <4.5, don't emit cell ids
|
||||
#[test_case(Path::new("no_cell_id.ipynb"), false; "no_cell_id")]
|
||||
// Version 4.5, cell ids are missing and need to be added
|
||||
#[test_case(Path::new("add_missing_cell_id.ipynb"), true; "add_missing_cell_id")]
|
||||
fn test_cell_id(path: &Path, has_id: bool) -> Result<()> {
|
||||
let source_notebook = read_jupyter_notebook(path)?;
|
||||
let source_kind = SourceKind::IpyNotebook(source_notebook);
|
||||
let (_, transformed) = test_contents(
|
||||
&source_kind,
|
||||
path,
|
||||
&settings::Settings::for_rule(Rule::UnusedImport),
|
||||
);
|
||||
let linted_notebook = transformed.into_owned().expect_ipy_notebook();
|
||||
let mut writer = Vec::new();
|
||||
linted_notebook.write_inner(&mut writer)?;
|
||||
let actual = String::from_utf8(writer)?;
|
||||
if has_id {
|
||||
assert!(actual.contains(r#""id": ""#));
|
||||
} else {
|
||||
assert!(!actual.contains(r#""id":"#));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -150,7 +150,6 @@ pub struct CodeCell {
|
||||
/// Technically, id isn't required (it's not even present) in schema v4.0 through v4.4, but
|
||||
/// it's required in v4.5. Main issue is that pycharm creates notebooks without an id
|
||||
/// <https://youtrack.jetbrains.com/issue/PY-59438/Jupyter-notebooks-created-with-PyCharm-are-missing-the-id-field-in-cells-in-the-.ipynb-json>
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub id: Option<String>,
|
||||
/// Cell-level metadata.
|
||||
pub metadata: Value,
|
||||
|
||||
@@ -12,12 +12,13 @@ pub const VERSION: &str = env!("CARGO_PKG_VERSION");

mod autofix;
mod checkers;
pub mod codes;
mod codes;
mod comments;
mod cst;
pub mod directives;
mod doc_lines;
mod docstrings;
pub mod flake8_to_ruff;
pub mod fs;
mod importer;
pub mod jupyter;
@@ -31,8 +32,9 @@ pub mod packaging;
pub mod pyproject_toml;
pub mod registry;
mod renamer;
pub mod resolver;
mod rule_redirects;
pub mod rule_selector;
mod rule_selector;
pub mod rules;
pub mod settings;
pub mod source_kind;
@@ -40,5 +42,3 @@ pub mod upstream_categories;

#[cfg(any(test, fuzzing))]
pub mod test;

pub const RUFF_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
@@ -1,111 +1,30 @@
|
||||
use ruff_cache::{CacheKey, CacheKeyHasher};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::error::Error;
|
||||
use std::hash::Hasher;
|
||||
use std::num::{NonZeroU16, NonZeroU8, ParseIntError};
|
||||
use std::str::FromStr;
|
||||
use std::num::NonZeroU8;
|
||||
use unicode_width::UnicodeWidthChar;
|
||||
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
/// The length of a line of text that is considered too long.
|
||||
///
|
||||
/// The allowed range of values is 1..=320
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, CacheKey)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
pub struct LineLength(NonZeroU16);
|
||||
|
||||
impl LineLength {
|
||||
/// Maximum allowed value for a valid [`LineLength`]
|
||||
pub const MAX: u16 = 320;
|
||||
|
||||
/// Return the numeric value for this [`LineLength`]
|
||||
pub fn value(&self) -> u16 {
|
||||
self.0.get()
|
||||
}
|
||||
}
|
||||
pub struct LineLength(usize);
|
||||
|
||||
impl Default for LineLength {
|
||||
/// The default line length.
|
||||
fn default() -> Self {
|
||||
Self(NonZeroU16::new(88).unwrap())
|
||||
Self(88)
|
||||
}
|
||||
}
|
||||
|
||||
impl CacheKey for LineLength {
|
||||
fn cache_key(&self, state: &mut CacheKeyHasher) {
|
||||
state.write_u16(self.0.get());
|
||||
impl LineLength {
|
||||
pub const fn get(&self) -> usize {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// Error type returned when parsing a [`LineLength`] from a string fails
|
||||
pub enum ParseLineWidthError {
|
||||
/// The string could not be parsed as a valid [u16]
|
||||
ParseError(ParseIntError),
|
||||
/// The [u16] value of the string is not a valid [LineLength]
|
||||
TryFromIntError(LineLengthFromIntError),
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for ParseLineWidthError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
std::fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ParseLineWidthError {
|
||||
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ParseLineWidthError::ParseError(err) => std::fmt::Display::fmt(err, fmt),
|
||||
ParseLineWidthError::TryFromIntError(err) => std::fmt::Display::fmt(err, fmt),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for ParseLineWidthError {}
|
||||
|
||||
impl FromStr for LineLength {
|
||||
type Err = ParseLineWidthError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let value = u16::from_str(s).map_err(ParseLineWidthError::ParseError)?;
|
||||
let value = Self::try_from(value).map_err(ParseLineWidthError::TryFromIntError)?;
|
||||
Ok(value)
|
||||
}
|
||||
}
|
||||
|
||||
/// Error type returned when converting a u16 to a [`LineLength`] fails
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct LineLengthFromIntError(pub u16);
|
||||
|
||||
impl TryFrom<u16> for LineLength {
|
||||
type Error = LineLengthFromIntError;
|
||||
|
||||
fn try_from(value: u16) -> Result<Self, Self::Error> {
|
||||
match NonZeroU16::try_from(value) {
|
||||
Ok(value) if value.get() <= Self::MAX => Ok(LineLength(value)),
|
||||
Ok(_) | Err(_) => Err(LineLengthFromIntError(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for LineLengthFromIntError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
writeln!(
|
||||
f,
|
||||
"The line width must be a value between 1 and {}.",
|
||||
LineLength::MAX
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LineLength> for u16 {
    fn from(value: LineLength) -> Self {
        value.0.get()
    }
}

impl From<LineLength> for NonZeroU16 {
    fn from(value: LineLength) -> Self {
        value.0
impl From<usize> for LineLength {
    fn from(value: usize) -> Self {
        Self(value)
    }
}
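
As a quick illustration of the NonZeroU16-backed `LineLength` shown at the start of this hunk (a sketch, not part of the diff): values are validated against the 1..=320 range both when parsed from a string and when converted from a `u16`.

    use std::str::FromStr;

    let length = LineLength::from_str("100").expect("100 is within 1..=320");
    assert_eq!(length.value(), 100);
    assert_eq!(u16::from(LineLength::default()), 88);
    assert!(LineLength::try_from(0u16).is_err()); // zero is rejected
    assert!(LineLength::try_from(321u16).is_err()); // above LineLength::MAX is rejected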
@@ -114,7 +33,7 @@ impl From<LineLength> for NonZeroU16 {
|
||||
/// This is used to determine if a line is too long.
|
||||
/// It should be compared to a [`LineLength`].
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct LineWidthBuilder {
|
||||
pub struct LineWidth {
|
||||
/// The width of the line.
|
||||
width: usize,
|
||||
/// The column of the line.
|
||||
@@ -124,40 +43,40 @@ pub struct LineWidthBuilder {
|
||||
tab_size: TabSize,
|
||||
}
|
||||
|
||||
impl Default for LineWidthBuilder {
|
||||
impl Default for LineWidth {
|
||||
fn default() -> Self {
|
||||
Self::new(TabSize::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for LineWidthBuilder {
|
||||
impl PartialEq for LineWidth {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.width == other.width
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for LineWidthBuilder {}
|
||||
impl Eq for LineWidth {}
|
||||
|
||||
impl PartialOrd for LineWidthBuilder {
|
||||
impl PartialOrd for LineWidth {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for LineWidthBuilder {
|
||||
impl Ord for LineWidth {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.width.cmp(&other.width)
|
||||
}
|
||||
}
|
||||
|
||||
impl LineWidthBuilder {
|
||||
impl LineWidth {
|
||||
pub fn get(&self) -> usize {
|
||||
self.width
|
||||
}
|
||||
|
||||
/// Creates a new `LineWidth` with the given tab size.
|
||||
pub fn new(tab_size: TabSize) -> Self {
|
||||
LineWidthBuilder {
|
||||
LineWidth {
|
||||
width: 0,
|
||||
column: 0,
|
||||
tab_size,
|
||||
@@ -200,7 +119,7 @@ impl LineWidthBuilder {
|
||||
|
||||
/// Adds the given width to the line width.
|
||||
/// Also adds the given width to the column.
|
||||
/// It is generally better to use [`LineWidthBuilder::add_str`] or [`LineWidthBuilder::add_char`].
|
||||
/// It is generally better to use [`LineWidth::add_str`] or [`LineWidth::add_char`].
|
||||
/// The width and column should be the same for the corresponding text.
|
||||
/// Currently, this is only used to add spaces.
|
||||
#[must_use]
|
||||
@@ -211,15 +130,15 @@ impl LineWidthBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<LineLength> for LineWidthBuilder {
|
||||
impl PartialEq<LineLength> for LineWidth {
|
||||
fn eq(&self, other: &LineLength) -> bool {
|
||||
self.width == (other.value() as usize)
|
||||
self.width == other.0
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd<LineLength> for LineWidthBuilder {
|
||||
impl PartialOrd<LineLength> for LineWidth {
|
||||
fn partial_cmp(&self, other: &LineLength) -> Option<std::cmp::Ordering> {
|
||||
self.width.partial_cmp(&(other.value() as usize))
|
||||
self.width.partial_cmp(&other.0)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,7 +17,6 @@ use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
|
||||
use ruff_source_file::{Locator, SourceFileBuilder};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::autofix::{fix_file, FixResult};
|
||||
use crate::checkers::ast::check_ast;
|
||||
@@ -148,7 +147,7 @@ pub fn check_path(
|
||||
match ruff_python_parser::parse_program_tokens(
|
||||
tokens,
|
||||
&path.to_string_lossy(),
|
||||
source_type.is_ipynb(),
|
||||
source_type.is_jupyter(),
|
||||
) {
|
||||
Ok(python_ast) => {
|
||||
if use_ast {
|
||||
|
||||
@@ -15,7 +15,7 @@ use crate::fs;
|
||||
use crate::jupyter::Notebook;
|
||||
use crate::source_kind::SourceKind;
|
||||
|
||||
pub static WARNINGS: Lazy<Mutex<Vec<&'static str>>> = Lazy::new(Mutex::default);
|
||||
pub(crate) static WARNINGS: Lazy<Mutex<Vec<&'static str>>> = Lazy::new(Mutex::default);
|
||||
|
||||
/// Warn a user once, with uniqueness determined by the given ID.
|
||||
#[macro_export]
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::num::NonZeroUsize;
|
||||
|
||||
use colored::{Color, ColoredString, Colorize, Styles};
|
||||
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use similar::{ChangeTag, TextDiff};
|
||||
|
||||
use ruff_diagnostics::{Applicability, Fix};
|
||||
|
||||
@@ -7,7 +7,7 @@ use colored::Colorize;
|
||||
use ruff_source_file::OneIndexed;
|
||||
|
||||
use crate::fs::relativize_path;
|
||||
use crate::jupyter::{Notebook, NotebookIndex};
|
||||
use crate::jupyter::{JupyterIndex, Notebook};
|
||||
use crate::message::diff::calculate_print_width;
|
||||
use crate::message::text::{MessageCodeFrame, RuleCodeAndBody};
|
||||
use crate::message::{
|
||||
@@ -92,7 +92,7 @@ struct DisplayGroupedMessage<'a> {
|
||||
show_source: bool,
|
||||
row_length: NonZeroUsize,
|
||||
column_length: NonZeroUsize,
|
||||
jupyter_index: Option<&'a NotebookIndex>,
|
||||
jupyter_index: Option<&'a JupyterIndex>,
|
||||
}
|
||||
|
||||
impl Display for DisplayGroupedMessage<'_> {
|
||||
|
||||
@@ -6,7 +6,6 @@ use serde_json::{json, Value};
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_source_file::SourceCode;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::message::{Emitter, EmitterContext, Message};
|
||||
use crate::registry::AsRule;
|
||||
|
||||
@@ -15,7 +15,7 @@ pub use junit::JunitEmitter;
|
||||
pub use pylint::PylintEmitter;
|
||||
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
|
||||
use ruff_source_file::{SourceFile, SourceLocation};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
pub use text::TextEmitter;
|
||||
|
||||
use crate::jupyter::Notebook;
|
||||
@@ -66,6 +66,14 @@ impl Message {
|
||||
pub fn compute_end_location(&self) -> SourceLocation {
|
||||
self.file.to_source_code().source_location(self.end())
|
||||
}
|
||||
|
||||
pub const fn start(&self) -> TextSize {
|
||||
self.range.start()
|
||||
}
|
||||
|
||||
pub const fn end(&self) -> TextSize {
|
||||
self.range.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Message {
|
||||
@@ -80,12 +88,6 @@ impl PartialOrd for Message {
|
||||
}
|
||||
}
|
||||
|
||||
impl Ranged for Message {
|
||||
fn range(&self) -> TextRange {
|
||||
self.range
|
||||
}
|
||||
}
|
||||
|
||||
struct MessageWithLocation<'a> {
|
||||
message: &'a Message,
|
||||
start_location: SourceLocation,
|
||||
@@ -152,7 +154,7 @@ mod tests {
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Edit, Fix};
|
||||
use ruff_source_file::SourceFileBuilder;
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use crate::message::{Emitter, EmitterContext, Message};
|
||||
|
||||
|
||||
@@ -8,11 +8,11 @@ use bitflags::bitflags;
|
||||
use colored::Colorize;
|
||||
|
||||
use ruff_source_file::{OneIndexed, SourceLocation};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use crate::fs::relativize_path;
|
||||
use crate::jupyter::{Notebook, NotebookIndex};
|
||||
use crate::line_width::{LineWidthBuilder, TabSize};
|
||||
use crate::jupyter::{JupyterIndex, Notebook};
|
||||
use crate::line_width::{LineWidth, TabSize};
|
||||
use crate::message::diff::Diff;
|
||||
use crate::message::{Emitter, EmitterContext, Message};
|
||||
use crate::registry::AsRule;
|
||||
@@ -161,7 +161,7 @@ impl Display for RuleCodeAndBody<'_> {
|
||||
|
||||
pub(super) struct MessageCodeFrame<'a> {
|
||||
pub(crate) message: &'a Message,
|
||||
pub(crate) jupyter_index: Option<&'a NotebookIndex>,
|
||||
pub(crate) jupyter_index: Option<&'a JupyterIndex>,
|
||||
}
|
||||
|
||||
impl Display for MessageCodeFrame<'_> {
|
||||
@@ -292,7 +292,7 @@ fn replace_whitespace(source: &str, annotation_range: TextRange) -> SourceCode {
|
||||
let mut result = String::new();
|
||||
let mut last_end = 0;
|
||||
let mut range = annotation_range;
|
||||
let mut line_width = LineWidthBuilder::new(TabSize::default());
|
||||
let mut line_width = LineWidth::new(TabSize::default());
|
||||
|
||||
for (index, c) in source.char_indices() {
|
||||
let old_width = line_width.get();
|
||||
|
||||
@@ -8,7 +8,8 @@ use std::path::Path;
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
use log::warn;
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
use ruff_python_ast::Ranged;
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_trivia::indentation_at_offset;
|
||||
@@ -536,7 +537,7 @@ fn add_noqa_inner(
|
||||
let rule = diagnostic.kind.rule();
|
||||
if !includes(rule, codes) {
|
||||
matches_by_line
|
||||
.entry(directive_line.start())
|
||||
.entry(directive_line.range.start())
|
||||
.or_insert_with(|| {
|
||||
(RuleSet::default(), Some(&directive_line.directive))
|
||||
})
|
||||
@@ -644,13 +645,6 @@ pub(crate) struct NoqaDirectiveLine<'a> {
|
||||
pub(crate) matches: Vec<NoqaCode>,
|
||||
}
|
||||
|
||||
impl Ranged for NoqaDirectiveLine<'_> {
|
||||
/// The range of the `noqa` directive.
|
||||
fn range(&self) -> TextRange {
|
||||
self.range
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct NoqaDirectives<'a> {
|
||||
inner: Vec<NoqaDirectiveLine<'a>>,
|
||||
|
||||
@@ -2,6 +2,10 @@

use std::path::{Path, PathBuf};

use rustc_hash::FxHashMap;

use crate::resolver::{PyprojectConfig, Resolver};

// If we have a Python package layout like:
// - root/
//   - foo/
@@ -47,6 +51,68 @@ pub fn detect_package_root<'a>(
    current
}

/// A wrapper around `is_package` to cache filesystem lookups.
fn is_package_with_cache<'a>(
    path: &'a Path,
    namespace_packages: &'a [PathBuf],
    package_cache: &mut FxHashMap<&'a Path, bool>,
) -> bool {
    *package_cache
        .entry(path)
        .or_insert_with(|| is_package(path, namespace_packages))
}

/// A wrapper around `detect_package_root` to cache filesystem lookups.
fn detect_package_root_with_cache<'a>(
    path: &'a Path,
    namespace_packages: &'a [PathBuf],
    package_cache: &mut FxHashMap<&'a Path, bool>,
) -> Option<&'a Path> {
    let mut current = None;
    for parent in path.ancestors() {
        if !is_package_with_cache(parent, namespace_packages, package_cache) {
            return current;
        }
        current = Some(parent);
    }
    current
}

/// Return a mapping from Python package to its package root.
pub fn detect_package_roots<'a>(
    files: &[&'a Path],
    resolver: &'a Resolver,
    pyproject_config: &'a PyprojectConfig,
) -> FxHashMap<&'a Path, Option<&'a Path>> {
    // Pre-populate the module cache, since the list of files could (but isn't
    // required to) contain some `__init__.py` files.
    let mut package_cache: FxHashMap<&Path, bool> = FxHashMap::default();
    for file in files {
        if file.ends_with("__init__.py") {
            if let Some(parent) = file.parent() {
                package_cache.insert(parent, true);
            }
        }
    }

    // Search for the package root for each file.
    let mut package_roots: FxHashMap<&Path, Option<&Path>> = FxHashMap::default();
    for file in files {
        let namespace_packages = &resolver.resolve(file, pyproject_config).namespace_packages;
        if let Some(package) = file.parent() {
            if package_roots.contains_key(package) {
                continue;
            }
            package_roots.insert(
                package,
                detect_package_root_with_cache(package, namespace_packages, &mut package_cache),
            );
        }
    }

    package_roots
}

#[cfg(test)]
mod tests {
    use std::path::PathBuf;
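
A rough usage sketch for the free `detect_package_roots` function above (hypothetical paths; building the `Resolver` and `PyprojectConfig` is elided):

    use std::path::Path;

    // Hypothetical inputs.
    let files = [Path::new("src/foo/__init__.py"), Path::new("src/foo/bar.py")];
    let roots = detect_package_roots(&files, &resolver, &pyproject_config);
    // Both files share the parent `src/foo`, so the ancestor walk runs once for
    // that directory and the pre-cached `__init__.py` entry is reused.
    let _root = roots.get(Path::new("src/foo"));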
@@ -4,8 +4,8 @@ use anyhow::{anyhow, Result};
|
||||
use itertools::Itertools;
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::Ranged;
|
||||
use ruff_python_semantic::{Binding, BindingKind, Scope, ScopeId, SemanticModel};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
pub(crate) struct Renamer;
|
||||
|
||||
|
||||
@@ -5,20 +5,17 @@ use std::collections::BTreeMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::RwLock;
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{anyhow, bail};
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use ignore::{DirEntry, WalkBuilder, WalkState};
|
||||
use itertools::Itertools;
|
||||
use log::debug;
|
||||
use path_absolutize::path_dedot;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::configuration::Configuration;
|
||||
use crate::pyproject;
|
||||
use crate::pyproject::settings_toml;
|
||||
use ruff::fs;
|
||||
use ruff::packaging::is_package;
|
||||
use ruff::settings::{AllSettings, Settings};
|
||||
use crate::fs;
|
||||
use crate::settings::configuration::Configuration;
|
||||
use crate::settings::pyproject::settings_toml;
|
||||
use crate::settings::{pyproject, AllSettings, Settings};
|
||||
|
||||
/// The configuration information from a `pyproject.toml` file.
|
||||
pub struct PyprojectConfig {
|
||||
@@ -48,7 +45,7 @@ impl PyprojectConfig {
|
||||
|
||||
/// The strategy used to discover the relevant `pyproject.toml` file for each
|
||||
/// Python file.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
#[derive(Debug, is_macro::Is)]
|
||||
pub enum PyprojectDiscoveryStrategy {
|
||||
/// Use a fixed `pyproject.toml` file for all Python files (i.e., one
|
||||
/// provided on the command-line).
|
||||
@@ -58,16 +55,6 @@ pub enum PyprojectDiscoveryStrategy {
|
||||
Hierarchical,
|
||||
}
|
||||
|
||||
impl PyprojectDiscoveryStrategy {
|
||||
pub const fn is_fixed(self) -> bool {
|
||||
matches!(self, PyprojectDiscoveryStrategy::Fixed)
|
||||
}
|
||||
|
||||
pub const fn is_hierarchical(self) -> bool {
|
||||
matches!(self, PyprojectDiscoveryStrategy::Hierarchical)
|
||||
}
|
||||
}
|
||||
|
||||
/// The strategy for resolving file paths in a `pyproject.toml`.
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum Relativity {
|
||||
@@ -79,7 +66,7 @@ pub enum Relativity {
|
||||
}
|
||||
|
||||
impl Relativity {
|
||||
pub fn resolve(self, path: &Path) -> PathBuf {
|
||||
pub fn resolve(&self, path: &Path) -> PathBuf {
|
||||
match self {
|
||||
Relativity::Parent => path
|
||||
.parent()
|
||||
@@ -97,7 +84,7 @@ pub struct Resolver {
|
||||
|
||||
impl Resolver {
|
||||
/// Add a resolved [`Settings`] under a given [`PathBuf`] scope.
|
||||
fn add(&mut self, path: PathBuf, settings: AllSettings) {
|
||||
pub fn add(&mut self, path: PathBuf, settings: AllSettings) {
|
||||
self.settings.insert(path, settings);
|
||||
}
|
||||
|
||||
@@ -126,88 +113,31 @@ impl Resolver {
|
||||
&self.resolve_all(path, pyproject_config).lib
|
||||
}
|
||||
|
||||
/// Return a mapping from Python package to its package root.
|
||||
pub fn package_roots<'a>(
|
||||
&'a self,
|
||||
files: &[&'a Path],
|
||||
pyproject_config: &'a PyprojectConfig,
|
||||
) -> FxHashMap<&'a Path, Option<&'a Path>> {
|
||||
// Pre-populate the module cache, since the list of files could (but isn't
|
||||
// required to) contain some `__init__.py` files.
|
||||
let mut package_cache: FxHashMap<&Path, bool> = FxHashMap::default();
|
||||
for file in files {
|
||||
if file.ends_with("__init__.py") {
|
||||
if let Some(parent) = file.parent() {
|
||||
package_cache.insert(parent, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Search for the package root for each file.
|
||||
let mut package_roots: FxHashMap<&Path, Option<&Path>> = FxHashMap::default();
|
||||
for file in files {
|
||||
let namespace_packages = &self.resolve(file, pyproject_config).namespace_packages;
|
||||
if let Some(package) = file.parent() {
|
||||
if package_roots.contains_key(package) {
|
||||
continue;
|
||||
}
|
||||
package_roots.insert(
|
||||
package,
|
||||
detect_package_root_with_cache(package, namespace_packages, &mut package_cache),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
package_roots
|
||||
}
|
||||
|
||||
/// Return an iterator over the resolved [`Settings`] in this [`Resolver`].
|
||||
pub fn settings(&self) -> impl Iterator<Item = &AllSettings> {
|
||||
pub fn iter(&self) -> impl Iterator<Item = &AllSettings> {
|
||||
self.settings.values()
|
||||
}
|
||||
}
|
||||
|
||||
/// A wrapper around `detect_package_root` to cache filesystem lookups.
|
||||
fn detect_package_root_with_cache<'a>(
|
||||
path: &'a Path,
|
||||
namespace_packages: &'a [PathBuf],
|
||||
package_cache: &mut FxHashMap<&'a Path, bool>,
|
||||
) -> Option<&'a Path> {
|
||||
let mut current = None;
|
||||
for parent in path.ancestors() {
|
||||
if !is_package_with_cache(parent, namespace_packages, package_cache) {
|
||||
return current;
|
||||
}
|
||||
current = Some(parent);
|
||||
}
|
||||
current
|
||||
}
|
||||
|
||||
/// A wrapper around `is_package` to cache filesystem lookups.
|
||||
fn is_package_with_cache<'a>(
|
||||
path: &'a Path,
|
||||
namespace_packages: &'a [PathBuf],
|
||||
package_cache: &mut FxHashMap<&'a Path, bool>,
|
||||
) -> bool {
|
||||
*package_cache
|
||||
.entry(path)
|
||||
.or_insert_with(|| is_package(path, namespace_packages))
|
||||
}
|
||||
|
||||
pub trait ConfigProcessor: Sync {
|
||||
pub trait ConfigProcessor: Copy + Send + Sync {
|
||||
fn process_config(&self, config: &mut Configuration);
|
||||
}
|
||||
|
||||
struct NoOpProcessor;
|
||||
impl ConfigProcessor for &NoOpProcessor {
|
||||
fn process_config(&self, _config: &mut Configuration) {}
|
||||
}
|
||||
|
||||
/// Recursively resolve a [`Configuration`] from a `pyproject.toml` file at the
|
||||
/// specified [`Path`].
|
||||
// TODO(charlie): This whole system could do with some caching. Right now, if a
|
||||
// configuration file extends another in the same path, we'll re-parse the same
|
||||
// file at least twice (possibly more than twice, since we'll also parse it when
|
||||
// resolving the "default" configuration).
|
||||
fn resolve_configuration(
|
||||
pub fn resolve_configuration(
|
||||
pyproject: &Path,
|
||||
relativity: Relativity,
|
||||
processor: &dyn ConfigProcessor,
|
||||
relativity: &Relativity,
|
||||
processor: impl ConfigProcessor,
|
||||
) -> Result<Configuration> {
|
||||
let mut seen = FxHashSet::default();
|
||||
let mut stack = vec![];
|
||||
@@ -250,33 +180,49 @@ fn resolve_configuration(
|
||||
|
||||
/// Extract the project root (scope) and [`Settings`] from a given
|
||||
/// `pyproject.toml`.
|
||||
fn resolve_scoped_settings(
|
||||
pub fn resolve_scoped_settings(
|
||||
pyproject: &Path,
|
||||
relativity: Relativity,
|
||||
processor: &dyn ConfigProcessor,
|
||||
relativity: &Relativity,
|
||||
processor: impl ConfigProcessor,
|
||||
) -> Result<(PathBuf, AllSettings)> {
|
||||
let configuration = resolve_configuration(pyproject, relativity, processor)?;
|
||||
let project_root = relativity.resolve(pyproject);
|
||||
let settings = configuration.into_all_settings(&project_root)?;
|
||||
let settings = AllSettings::from_configuration(configuration, &project_root)?;
|
||||
Ok((project_root, settings))
|
||||
}
|
||||
|
||||
/// Extract the [`Settings`] from a given `pyproject.toml`.
|
||||
pub fn resolve_settings(pyproject: &Path, relativity: &Relativity) -> Result<AllSettings> {
|
||||
let (_project_root, settings) = resolve_scoped_settings(pyproject, relativity, &NoOpProcessor)?;
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
/// Extract the [`Settings`] from a given `pyproject.toml` and process the
|
||||
/// configuration with the given [`ConfigProcessor`].
|
||||
pub fn resolve_settings_with_processor(
|
||||
pyproject: &Path,
|
||||
relativity: Relativity,
|
||||
processor: &dyn ConfigProcessor,
|
||||
relativity: &Relativity,
|
||||
processor: impl ConfigProcessor,
|
||||
) -> Result<AllSettings> {
|
||||
let (_project_root, settings) = resolve_scoped_settings(pyproject, relativity, processor)?;
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
/// Return `true` if the given file should be ignored based on the exclusion
|
||||
/// criteria.
|
||||
fn match_exclusion<P: AsRef<Path>, R: AsRef<Path>>(
|
||||
file_path: P,
|
||||
file_basename: R,
|
||||
exclusion: &globset::GlobSet,
|
||||
) -> bool {
|
||||
exclusion.is_match(file_path) || exclusion.is_match(file_basename)
|
||||
}
|
||||
|
||||
/// Find all Python (`.py`, `.pyi` and `.ipynb` files) in a set of paths.
|
||||
pub fn python_files_in_path(
|
||||
paths: &[PathBuf],
|
||||
pyproject_config: &PyprojectConfig,
|
||||
processor: &dyn ConfigProcessor,
|
||||
processor: impl ConfigProcessor,
|
||||
) -> Result<(Vec<Result<DirEntry, ignore::Error>>, Resolver)> {
|
||||
// Normalize every path (e.g., convert from relative to absolute).
|
||||
let mut paths: Vec<PathBuf> = paths.iter().map(fs::normalize_path).unique().collect();
|
||||
@@ -290,7 +236,7 @@ pub fn python_files_in_path(
|
||||
if seen.insert(ancestor) {
|
||||
if let Some(pyproject) = settings_toml(ancestor)? {
|
||||
let (root, settings) =
|
||||
resolve_scoped_settings(&pyproject, Relativity::Parent, processor)?;
|
||||
resolve_scoped_settings(&pyproject, &Relativity::Parent, processor)?;
|
||||
resolver.add(root, settings);
|
||||
}
|
||||
}
|
||||
@@ -362,7 +308,7 @@ pub fn python_files_in_path(
|
||||
match settings_toml(entry.path()) {
|
||||
Ok(Some(pyproject)) => match resolve_scoped_settings(
|
||||
&pyproject,
|
||||
Relativity::Parent,
|
||||
&Relativity::Parent,
|
||||
processor,
|
||||
) {
|
||||
Ok((root, settings)) => {
|
||||
@@ -422,7 +368,7 @@ pub fn python_files_in_path(
|
||||
pub fn python_file_at_path(
|
||||
path: &Path,
|
||||
pyproject_config: &PyprojectConfig,
|
||||
processor: &dyn ConfigProcessor,
|
||||
processor: impl ConfigProcessor,
|
||||
) -> Result<bool> {
|
||||
if !pyproject_config.settings.lib.force_exclude {
|
||||
return Ok(true);
|
||||
@@ -437,7 +383,7 @@ pub fn python_file_at_path(
|
||||
for ancestor in path.ancestors() {
|
||||
if let Some(pyproject) = settings_toml(ancestor)? {
|
||||
let (root, settings) =
|
||||
resolve_scoped_settings(&pyproject, Relativity::Parent, processor)?;
|
||||
resolve_scoped_settings(&pyproject, &Relativity::Parent, processor)?;
|
||||
resolver.add(root, settings);
|
||||
}
|
||||
}
|
||||
@@ -482,16 +428,6 @@ fn is_file_excluded(
|
||||
false
|
||||
}
|
||||
|
||||
/// Return `true` if the given file should be ignored based on the exclusion
|
||||
/// criteria.
|
||||
fn match_exclusion<P: AsRef<Path>, R: AsRef<Path>>(
|
||||
file_path: P,
|
||||
file_basename: R,
|
||||
exclusion: &globset::GlobSet,
|
||||
) -> bool {
|
||||
exclusion.is_match(file_path) || exclusion.is_match(file_basename)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::fs::{create_dir, File};
|
||||
@@ -503,92 +439,14 @@ mod tests {
|
||||
use path_absolutize::Absolutize;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use crate::configuration::Configuration;
|
||||
use crate::pyproject::find_settings_toml;
|
||||
use ruff::settings::types::FilePattern;
|
||||
use ruff::settings::AllSettings;
|
||||
|
||||
use crate::resolver::{
|
||||
is_file_excluded, match_exclusion, python_files_in_path, resolve_settings_with_processor,
|
||||
ConfigProcessor, PyprojectConfig, PyprojectDiscoveryStrategy, Relativity, Resolver,
|
||||
NoOpProcessor, PyprojectConfig, PyprojectDiscoveryStrategy, Relativity, Resolver,
|
||||
};
|
||||
use crate::tests::test_resource_path;
|
||||
|
||||
struct NoOpProcessor;
|
||||
|
||||
impl ConfigProcessor for NoOpProcessor {
|
||||
fn process_config(&self, _config: &mut Configuration) {}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rooted_exclusion() -> Result<()> {
|
||||
let package_root = test_resource_path("package");
|
||||
let resolver = Resolver::default();
|
||||
let pyproject_config = PyprojectConfig::new(
|
||||
PyprojectDiscoveryStrategy::Hierarchical,
|
||||
resolve_settings_with_processor(
|
||||
&find_settings_toml(&package_root)?.unwrap(),
|
||||
Relativity::Parent,
|
||||
&NoOpProcessor,
|
||||
)?,
|
||||
None,
|
||||
);
|
||||
// src/app.py should not be excluded even if it lives in a hierarchy that should
|
||||
// be excluded by virtue of the pyproject.toml having `resources/*` in
|
||||
// it.
|
||||
assert!(!is_file_excluded(
|
||||
&package_root.join("src/app.py"),
|
||||
&resolver,
|
||||
&pyproject_config,
|
||||
));
|
||||
// However, resources/ignored.py should be ignored, since that `resources` is
|
||||
// beneath the package root.
|
||||
assert!(is_file_excluded(
|
||||
&package_root.join("resources/ignored.py"),
|
||||
&resolver,
|
||||
&pyproject_config,
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn find_python_files() -> Result<()> {
|
||||
// Initialize the filesystem:
|
||||
// root
|
||||
// ├── file1.py
|
||||
// ├── dir1.py
|
||||
// │ └── file2.py
|
||||
// └── dir2.py
|
||||
let tmp_dir = TempDir::new()?;
|
||||
let root = tmp_dir.path();
|
||||
let file1 = root.join("file1.py");
|
||||
let dir1 = root.join("dir1.py");
|
||||
let file2 = dir1.join("file2.py");
|
||||
let dir2 = root.join("dir2.py");
|
||||
File::create(&file1)?;
|
||||
create_dir(dir1)?;
|
||||
File::create(&file2)?;
|
||||
create_dir(dir2)?;
|
||||
|
||||
let (paths, _) = python_files_in_path(
|
||||
&[root.to_path_buf()],
|
||||
&PyprojectConfig::new(
|
||||
PyprojectDiscoveryStrategy::Fixed,
|
||||
AllSettings::default(),
|
||||
None,
|
||||
),
|
||||
&NoOpProcessor,
|
||||
)?;
|
||||
let paths = paths
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(ignore::DirEntry::path)
|
||||
.sorted()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(paths, &[file2, file1]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
use crate::settings::pyproject::find_settings_toml;
|
||||
use crate::settings::types::FilePattern;
|
||||
use crate::settings::AllSettings;
|
||||
use crate::test::test_resource_path;
|
||||
|
||||
fn make_exclusion(file_pattern: FilePattern) -> GlobSet {
|
||||
let mut builder = globset::GlobSetBuilder::new();
|
||||
@@ -720,4 +578,74 @@ mod tests {
|
||||
&make_exclusion(exclude),
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rooted_exclusion() -> Result<()> {
|
||||
let package_root = test_resource_path("package");
|
||||
let resolver = Resolver::default();
|
||||
let pyproject_config = PyprojectConfig::new(
|
||||
PyprojectDiscoveryStrategy::Hierarchical,
|
||||
resolve_settings_with_processor(
|
||||
&find_settings_toml(&package_root)?.unwrap(),
|
||||
&Relativity::Parent,
|
||||
&NoOpProcessor,
|
||||
)?,
|
||||
None,
|
||||
);
|
||||
// src/app.py should not be excluded even if it lives in a hierarchy that should
|
||||
// be excluded by virtue of the pyproject.toml having `resources/*` in
|
||||
// it.
|
||||
assert!(!is_file_excluded(
|
||||
&package_root.join("src/app.py"),
|
||||
&resolver,
|
||||
&pyproject_config,
|
||||
));
|
||||
// However, resources/ignored.py should be ignored, since that `resources` is
|
||||
// beneath the package root.
|
||||
assert!(is_file_excluded(
|
||||
&package_root.join("resources/ignored.py"),
|
||||
&resolver,
|
||||
&pyproject_config,
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn find_python_files() -> Result<()> {
|
||||
// Initialize the filesystem:
|
||||
// root
|
||||
// ├── file1.py
|
||||
// ├── dir1.py
|
||||
// │ └── file2.py
|
||||
// └── dir2.py
|
||||
let tmp_dir = TempDir::new()?;
|
||||
let root = tmp_dir.path();
|
||||
let file1 = root.join("file1.py");
|
||||
let dir1 = root.join("dir1.py");
|
||||
let file2 = dir1.join("file2.py");
|
||||
let dir2 = root.join("dir2.py");
|
||||
File::create(&file1)?;
|
||||
create_dir(dir1)?;
|
||||
File::create(&file2)?;
|
||||
create_dir(dir2)?;
|
||||
|
||||
let (paths, _) = python_files_in_path(
|
||||
&[root.to_path_buf()],
|
||||
&PyprojectConfig::new(
|
||||
PyprojectDiscoveryStrategy::Fixed,
|
||||
AllSettings::default(),
|
||||
None,
|
||||
),
|
||||
&NoOpProcessor,
|
||||
)?;
|
||||
let paths = paths
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(ignore::DirEntry::path)
|
||||
.sorted()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(paths, &[file2, file1]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -263,7 +263,7 @@ mod schema {
|
||||
}
|
||||
|
||||
impl RuleSelector {
|
||||
pub fn specificity(&self) -> Specificity {
|
||||
pub(crate) fn specificity(&self) -> Specificity {
|
||||
match self {
|
||||
RuleSelector::All => Specificity::All,
|
||||
RuleSelector::Nursery => Specificity::All,
|
||||
@@ -286,7 +286,7 @@ impl RuleSelector {
|
||||
}
|
||||
|
||||
#[derive(EnumIter, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]
|
||||
pub enum Specificity {
|
||||
pub(crate) enum Specificity {
|
||||
All,
|
||||
LinterGroup,
|
||||
Linter,
|
||||
|
||||
@@ -2,8 +2,7 @@ use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::Constant;
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_ast::{Expr, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ static HASH_NUMBER: Lazy<Regex> = Lazy::new(|| Regex::new(r"#\d").unwrap());
|
||||
static MULTILINE_ASSIGNMENT_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^\s*([(\[]\s*)?(\w+\s*,\s*)*\w+\s*([)\]]\s*)?=.*[(\[{]$").unwrap());
|
||||
static PARTIAL_DICTIONARY_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r#"^\s*['"]\w+['"]\s*:.+[,{]\s*(#.*)?$"#).unwrap());
|
||||
Lazy::new(|| Regex::new(r#"^\s*['"]\w+['"]\s*:.+[,{]\s*$"#).unwrap());
|
||||
static PRINT_RETURN_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^(print|return)\b\s*").unwrap());
|
||||
|
||||
/// Returns `true` if a comment contains Python code.
|
||||
|
||||
@@ -105,47 +105,5 @@ ERA001.py:21:5: ERA001 [*] Found commented-out code
|
||||
19 19 | class A():
|
||||
20 20 | pass
|
||||
21 |- # b = c
|
||||
22 21 |
|
||||
23 22 |
|
||||
24 23 | dictionary = {
|
||||
|
||||
ERA001.py:26:5: ERA001 [*] Found commented-out code
|
||||
|
|
||||
24 | dictionary = {
|
||||
25 | # "key1": 123, # noqa: ERA001
|
||||
26 | # "key2": 456,
|
||||
| ^^^^^^^^^^^^^^ ERA001
|
||||
27 | # "key3": 789, # test
|
||||
28 | }
|
||||
|
|
||||
= help: Remove commented-out code
|
||||
|
||||
ℹ Possible fix
|
||||
23 23 |
|
||||
24 24 | dictionary = {
|
||||
25 25 | # "key1": 123, # noqa: ERA001
|
||||
26 |- # "key2": 456,
|
||||
27 26 | # "key3": 789, # test
|
||||
28 27 | }
|
||||
29 28 |
|
||||
|
||||
ERA001.py:27:5: ERA001 [*] Found commented-out code
|
||||
|
|
||||
25 | # "key1": 123, # noqa: ERA001
|
||||
26 | # "key2": 456,
|
||||
27 | # "key3": 789, # test
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^ ERA001
|
||||
28 | }
|
||||
|
|
||||
= help: Remove commented-out code
|
||||
|
||||
ℹ Possible fix
|
||||
24 24 | dictionary = {
|
||||
25 25 | # "key1": 123, # noqa: ERA001
|
||||
26 26 | # "key2": 456,
|
||||
27 |- # "key3": 789, # test
|
||||
28 27 | }
|
||||
29 28 |
|
||||
30 29 | #import os # noqa
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
use num_bigint::BigInt;
|
||||
use ruff_python_ast::{self as ast, CmpOp, Constant, Expr};
|
||||
use ruff_python_ast::{self as ast, CmpOp, Constant, Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::Rule;
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_python_ast::{Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
use num_bigint::BigInt;
|
||||
use ruff_python_ast::{self as ast, Constant, Expr};
|
||||
use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::Rule;
|
||||
|
||||
@@ -3,12 +3,11 @@ use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::ReturnStatementVisitor;
|
||||
use ruff_python_ast::identifier::Identifier;
|
||||
use ruff_python_ast::statement_visitor::StatementVisitor;
|
||||
use ruff_python_ast::{self as ast, Constant, Expr, ParameterWithDefault, Stmt};
|
||||
use ruff_python_ast::{self as ast, Constant, Expr, ParameterWithDefault, Ranged, Stmt};
|
||||
use ruff_python_parser::typing::parse_type_annotation;
|
||||
use ruff_python_semantic::analyze::visibility;
|
||||
use ruff_python_semantic::Definition;
|
||||
use ruff_python_stdlib::typing::simple_magic_return_type;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::{AsRule, Rule};
|
||||
|
||||
@@ -1,6 +1,65 @@
|
||||
//! Settings for the `flake-annotations` plugin.
|
||||
|
||||
use ruff_macros::CacheKey;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
|
||||
|
||||
#[derive(
|
||||
Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
|
||||
)]
|
||||
#[serde(
|
||||
deny_unknown_fields,
|
||||
rename_all = "kebab-case",
|
||||
rename = "Flake8AnnotationsOptions"
|
||||
)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
pub struct Options {
|
||||
#[option(
|
||||
default = "false",
|
||||
value_type = "bool",
|
||||
example = "mypy-init-return = true"
|
||||
)]
|
||||
/// Whether to allow the omission of a return type hint for `__init__` if at
|
||||
/// least one argument is annotated.
|
||||
pub mypy_init_return: Option<bool>,
|
||||
#[option(
|
||||
default = "false",
|
||||
value_type = "bool",
|
||||
example = "suppress-dummy-args = true"
|
||||
)]
|
||||
/// Whether to suppress `ANN000`-level violations for arguments matching the
|
||||
/// "dummy" variable regex (like `_`).
|
||||
pub suppress_dummy_args: Option<bool>,
|
||||
#[option(
|
||||
default = "false",
|
||||
value_type = "bool",
|
||||
example = "suppress-none-returning = true"
|
||||
)]
|
||||
/// Whether to suppress `ANN200`-level violations for functions that meet
|
||||
/// either of the following criteria:
|
||||
///
|
||||
/// - Contain no `return` statement.
|
||||
/// - Explicit `return` statement(s) all return `None` (explicitly or
|
||||
/// implicitly).
|
||||
pub suppress_none_returning: Option<bool>,
|
||||
#[option(
|
||||
default = "false",
|
||||
value_type = "bool",
|
||||
example = "allow-star-arg-any = true"
|
||||
)]
|
||||
/// Whether to suppress `ANN401` for dynamically typed `*args` and
|
||||
/// `**kwargs` arguments.
|
||||
pub allow_star_arg_any: Option<bool>,
|
||||
#[option(
|
||||
default = "false",
|
||||
value_type = "bool",
|
||||
example = "ignore-fully-untyped = true"
|
||||
)]
|
||||
/// Whether to suppress `ANN*` rules for any declaration
|
||||
/// that hasn't been typed at all.
|
||||
/// This makes it easier to gradually add types to a codebase.
|
||||
pub ignore_fully_untyped: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, CacheKey)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
@@ -11,3 +70,27 @@ pub struct Settings {
|
||||
pub allow_star_arg_any: bool,
|
||||
pub ignore_fully_untyped: bool,
|
||||
}
|
||||
|
||||
impl From<Options> for Settings {
|
||||
fn from(options: Options) -> Self {
|
||||
Self {
|
||||
mypy_init_return: options.mypy_init_return.unwrap_or(false),
|
||||
suppress_dummy_args: options.suppress_dummy_args.unwrap_or(false),
|
||||
suppress_none_returning: options.suppress_none_returning.unwrap_or(false),
|
||||
allow_star_arg_any: options.allow_star_arg_any.unwrap_or(false),
|
||||
ignore_fully_untyped: options.ignore_fully_untyped.unwrap_or(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Settings> for Options {
|
||||
fn from(settings: Settings) -> Self {
|
||||
Self {
|
||||
mypy_init_return: Some(settings.mypy_init_return),
|
||||
suppress_dummy_args: Some(settings.suppress_dummy_args),
|
||||
suppress_none_returning: Some(settings.suppress_none_returning),
|
||||
allow_star_arg_any: Some(settings.allow_star_arg_any),
|
||||
ignore_fully_untyped: Some(settings.ignore_fully_untyped),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
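
As a small sanity-check sketch of the `From<Options>` conversion above (not part of the diff): unset options all lower to `false` in the resolved `Settings`, as the `unwrap_or(false)` calls make explicit.

    // All fields of Options default to None, so every flag resolves to false.
    let settings = Settings::from(Options::default());
    assert!(!settings.allow_star_arg_any);
    assert!(!settings.ignore_fully_untyped);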
@@ -1,10 +1,9 @@
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_python_ast::{Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::call_path::CallPath;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_python_ast::{Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::call_path::CallPath;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_python_ast::{Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::call_path::CallPath;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_python_ast::{Ranged, Stmt};
|
||||
use ruff_text_size::{TextLen, TextRange};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for uses of the `assert` keyword.
|
||||
|
||||
@@ -3,9 +3,8 @@ use num_traits::ToPrimitive;
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::call_path::CallPath;
|
||||
use ruff_python_ast::{self as ast, Constant, Expr, Operator};
|
||||
use ruff_python_ast::{self as ast, Constant, Expr, Operator, Ranged};
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_python_ast::{Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_python_ast::{Expr, Parameter, ParameterWithDefault, Parameters};
|
||||
use ruff_python_ast::{Expr, Parameter, ParameterWithDefault, Parameters, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_python_ast::Keyword;
|
||||
use ruff_python_ast::{Keyword, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Constant, Expr};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use ruff_python_ast::{self as ast, Expr, Operator};
|
||||
use ruff_python_ast::{self as ast, Expr, Operator, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::any_over_expr;
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_ast::{self as ast, Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::is_const_false;
|
||||
use ruff_python_ast::{self as ast, Arguments};
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_ast::{self as ast, Arguments, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Constant, Expr};
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast};
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_ast::{self as ast, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_python_ast::Expr;
|
||||
use ruff_python_ast::{Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::helpers::is_const_false;
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_ast::{self as ast, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::helpers::is_const_none;
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_ast::{self as ast, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -3,9 +3,8 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::Truthiness;
|
||||
use ruff_python_ast::{self as ast, Arguments, Constant, Expr, Keyword};
|
||||
use ruff_python_ast::{self as ast, Arguments, Constant, Expr, Keyword, Ranged};
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::{
|
||||
checkers::ast::Checker, registry::Rule, rules::flake8_bandit::helpers::string_literal,
|
||||
|
||||
@@ -2,8 +2,7 @@ use num_traits::{One, Zero};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Constant, Expr};
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast};
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_python_ast::{self as ast, Ranged};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
//! Check for calls to suspicious functions, or calls into suspicious modules.
|
||||
//!
|
||||
//! See: <https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html>
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_ast::{self as ast, Expr, Ranged};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::AsRule;
|
||||
Some files were not shown because too many files have changed in this diff.